/*
 * Quick & dirty crypto testing module.
 *
 * This will only exist until we have a better testing mechanism
 * (e.g. a char device).
 *
 * Copyright (c) 2002 James Morris <jmorris@intercode.com.au>
 * Copyright (c) 2002 Jean-Francois Dive <jef@linuxbe.org>
 * Copyright (c) 2007 Nokia Siemens Networks
 *
 * Updated RFC4106 AES-GCM testing.
 *    Authors: Aidan O'Mahony (aidan.o.mahony@intel.com)
 *             Adrian Hoban <adrian.hoban@intel.com>
 *             Gabriele Paoloni <gabriele.paoloni@intel.com>
 *             Tadeusz Struk (tadeusz.struk@intel.com)
 *             Copyright (c) 2010, Intel Corporation.
 *
 * This program is free software; you can redistribute it and/or modify it
 * under the terms of the GNU General Public License as published by the Free
 * Software Foundation; either version 2 of the License, or (at your option)
 * any later version.
 *
 */

#define pr_fmt(fmt) KBUILD_MODNAME ": " fmt

#include <crypto/aead.h>
#include <crypto/hash.h>
#include <crypto/skcipher.h>
#include <linux/err.h>
#include <linux/fips.h>
#include <linux/init.h>
#include <linux/gfp.h>
#include <linux/module.h>
#include <linux/scatterlist.h>
#include <linux/string.h>
#include <linux/moduleparam.h>
#include <linux/jiffies.h>
#include <linux/timex.h>
#include <linux/interrupt.h>
#include "tcrypt.h"

/*
 * Need slab memory for testing (size in number of pages).
 */
#define TVMEMSIZE	4

/*
 * Used by test_cipher_speed()
 */
#define ENCRYPT 1
#define DECRYPT 0

#define MAX_DIGEST_SIZE		64

/*
 * return a string with the driver name
 */
#define get_driver_name(tfm_type, tfm) crypto_tfm_alg_driver_name(tfm_type ## _tfm(tfm))

/*
 * Used by test_cipher_speed()
 */
static unsigned int sec;

static char *alg = NULL;
static u32 type;
static u32 mask;
static int mode;
static u32 num_mb = 8;
static char *tvmem[TVMEMSIZE];

static char *check[] = {
	"des", "md5", "des3_ede", "rot13", "sha1", "sha224", "sha256", "sm3",
	"blowfish", "twofish", "serpent", "sha384", "sha512", "md4", "aes",
	"cast6", "arc4", "michael_mic", "deflate", "crc32c", "tea", "xtea",
	"khazad", "wp512", "wp384", "wp256", "tnepres", "xeta", "fcrypt",
	"camellia", "seed", "salsa20", "rmd128", "rmd160", "rmd256", "rmd320",
	"lzo", "cts", "sha3-224", "sha3-256", "sha3-384", "sha3-512", NULL
};

static u32 block_sizes[] = { 16, 64, 256, 1024, 8192, 0 };
static u32 aead_sizes[] = { 16, 64, 256, 512, 1024, 2048, 4096, 8192, 0 };

#define XBUFSIZE	8
#define MAX_IVLEN	32

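/* Scratch buffers: XBUFSIZE individually allocated pages per test instance. */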
static int testmgr_alloc_buf(char *buf[XBUFSIZE])
{
	int i;

	for (i = 0; i < XBUFSIZE; i++) {
		buf[i] = (void *)__get_free_page(GFP_KERNEL);
		if (!buf[i])
			goto err_free_buf;
	}

	return 0;

err_free_buf:
	while (i-- > 0)
		free_page((unsigned long)buf[i]);

	return -ENOMEM;
}

static void testmgr_free_buf(char *buf[XBUFSIZE])
{
	int i;

	for (i = 0; i < XBUFSIZE; i++)
		free_page((unsigned long)buf[i]);
}

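/*
 * Build an AEAD scatterlist: sg[0] carries the associated data and the
 * payload of 'buflen' bytes is spread over the xbuf pages (capped at
 * XBUFSIZE pages, with a shorter final entry when buflen is not a
 * multiple of PAGE_SIZE).
 */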
static void sg_init_aead(struct scatterlist *sg, char *xbuf[XBUFSIZE],
			 unsigned int buflen, const void *assoc,
			 unsigned int aad_size)
{
	int np = (buflen + PAGE_SIZE - 1)/PAGE_SIZE;
	int k, rem;

	if (np > XBUFSIZE) {
		rem = PAGE_SIZE;
		np = XBUFSIZE;
	} else {
		rem = buflen % PAGE_SIZE;
	}

	sg_init_table(sg, np + 1);

	sg_set_buf(&sg[0], assoc, aad_size);

	if (rem)
		np--;
	for (k = 0; k < np; k++)
		sg_set_buf(&sg[k + 1], xbuf[k], PAGE_SIZE);

	if (rem)
		sg_set_buf(&sg[k + 1], xbuf[k], rem);
}

static inline int do_one_aead_op(struct aead_request *req, int ret)
{
	struct crypto_wait *wait = req->base.data;

	return crypto_wait_req(ret, wait);
}

struct test_mb_aead_data {
	struct scatterlist sg[XBUFSIZE];
	struct scatterlist sgout[XBUFSIZE];
	struct aead_request *req;
	struct crypto_wait wait;
	char *xbuf[XBUFSIZE];
	char *xoutbuf[XBUFSIZE];
	char *axbuf[XBUFSIZE];
};

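/*
 * Issue num_mb requests back to back, then wait for all of them.
 * Returns the last error seen, if any.
 */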
static int do_mult_aead_op(struct test_mb_aead_data *data, int enc,
			   u32 num_mb, int *rc)
{
	int i, err = 0;

	/* Fire up a bunch of concurrent requests */
	for (i = 0; i < num_mb; i++) {
		if (enc == ENCRYPT)
			rc[i] = crypto_aead_encrypt(data[i].req);
		else
			rc[i] = crypto_aead_decrypt(data[i].req);
	}

	/* Wait for all requests to finish */
	for (i = 0; i < num_mb; i++) {
		rc[i] = crypto_wait_req(rc[i], &data[i].wait);

		if (rc[i]) {
			pr_info("concurrent request %d error %d\n", i, rc[i]);
			err = rc[i];
		}
	}

	return err;
}

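/* Run multibuffer AEAD batches against the jiffies clock for 'secs' seconds. */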
static int test_mb_aead_jiffies(struct test_mb_aead_data *data, int enc,
				int blen, int secs, u32 num_mb)
{
	unsigned long start, end;
	int bcount;
	int ret = 0;
	int *rc;

	rc = kcalloc(num_mb, sizeof(*rc), GFP_KERNEL);
	if (!rc)
		return -ENOMEM;

	for (start = jiffies, end = start + secs * HZ, bcount = 0;
	     time_before(jiffies, end); bcount++) {
		ret = do_mult_aead_op(data, enc, num_mb, rc);
		if (ret)
			goto out;
	}

	pr_cont("%d operations in %d seconds (%ld bytes)\n",
		bcount * num_mb, secs, (long)bcount * blen * num_mb);

out:
	kfree(rc);
	return ret;
}

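/* Measure cycles per multibuffer AEAD batch: four warm-up runs, then eight timed runs. */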
static int test_mb_aead_cycles(struct test_mb_aead_data *data, int enc,
			       int blen, u32 num_mb)
{
	unsigned long cycles = 0;
	int ret = 0;
	int i;
	int *rc;

	rc = kcalloc(num_mb, sizeof(*rc), GFP_KERNEL);
	if (!rc)
		return -ENOMEM;

	/* Warm-up run. */
	for (i = 0; i < 4; i++) {
		ret = do_mult_aead_op(data, enc, num_mb, rc);
		if (ret)
			goto out;
	}

	/* The real thing. */
	for (i = 0; i < 8; i++) {
		cycles_t start, end;

		start = get_cycles();
		ret = do_mult_aead_op(data, enc, num_mb, rc);
		end = get_cycles();

		if (ret)
			goto out;

		cycles += end - start;
	}

	pr_cont("1 operation in %lu cycles (%d bytes)\n",
		(cycles + 4) / (8 * num_mb), blen);

out:
	kfree(rc);
	return ret;
}

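/*
 * Multibuffer AEAD speed test: keeps num_mb requests in flight at once for
 * every (key size, block size) combination in the template.  For decryption
 * each buffer is encrypted once first so that a valid auth tag is present.
 */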
static void test_mb_aead_speed(const char *algo, int enc, int secs,
			       struct aead_speed_template *template,
			       unsigned int tcount, u8 authsize,
			       unsigned int aad_size, u8 *keysize, u32 num_mb)
{
	struct test_mb_aead_data *data;
	struct crypto_aead *tfm;
	unsigned int i, j, iv_len;
	const char *key;
	const char *e;
	void *assoc;
	u32 *b_size;
	char *iv;
	int ret;

	if (aad_size >= PAGE_SIZE) {
		pr_err("associate data length (%u) too big\n", aad_size);
		return;
	}

	iv = kzalloc(MAX_IVLEN, GFP_KERNEL);
	if (!iv)
		return;

	if (enc == ENCRYPT)
		e = "encryption";
	else
		e = "decryption";

	data = kcalloc(num_mb, sizeof(*data), GFP_KERNEL);
	if (!data)
		goto out_free_iv;

	tfm = crypto_alloc_aead(algo, 0, 0);
	if (IS_ERR(tfm)) {
		pr_err("failed to load transform for %s: %ld\n",
		       algo, PTR_ERR(tfm));
		goto out_free_data;
	}

	ret = crypto_aead_setauthsize(tfm, authsize);

	for (i = 0; i < num_mb; ++i)
		if (testmgr_alloc_buf(data[i].xbuf)) {
			while (i--)
				testmgr_free_buf(data[i].xbuf);
			goto out_free_tfm;
		}

	for (i = 0; i < num_mb; ++i)
		if (testmgr_alloc_buf(data[i].axbuf)) {
			while (i--)
				testmgr_free_buf(data[i].axbuf);
			goto out_free_xbuf;
		}

	for (i = 0; i < num_mb; ++i)
		if (testmgr_alloc_buf(data[i].xoutbuf)) {
			while (i--)
				testmgr_free_buf(data[i].xoutbuf);
			goto out_free_axbuf;
		}

	for (i = 0; i < num_mb; ++i) {
		data[i].req = aead_request_alloc(tfm, GFP_KERNEL);
		if (!data[i].req) {
			pr_err("alg: aead: Failed to allocate request for %s\n",
			       algo);
			while (i--)
				aead_request_free(data[i].req);
			goto out_free_xoutbuf;
		}
	}

	for (i = 0; i < num_mb; ++i) {
		crypto_init_wait(&data[i].wait);
		aead_request_set_callback(data[i].req,
					  CRYPTO_TFM_REQ_MAY_BACKLOG,
					  crypto_req_done, &data[i].wait);
	}

	pr_info("\ntesting speed of multibuffer %s (%s) %s\n", algo,
		get_driver_name(crypto_aead, tfm), e);

	i = 0;
	do {
		b_size = aead_sizes;
		do {
			if (*b_size + authsize > XBUFSIZE * PAGE_SIZE) {
				pr_err("template (%u) too big for buffer (%lu)\n",
				       authsize + *b_size,
				       XBUFSIZE * PAGE_SIZE);
				goto out;
			}

			pr_info("test %u (%d bit key, %d byte blocks): ", i,
				*keysize * 8, *b_size);

			/* Set up tfm global state, i.e. the key */

			memset(tvmem[0], 0xff, PAGE_SIZE);
			key = tvmem[0];
			for (j = 0; j < tcount; j++) {
				if (template[j].klen == *keysize) {
					key = template[j].key;
					break;
				}
			}

			crypto_aead_clear_flags(tfm, ~0);

			ret = crypto_aead_setkey(tfm, key, *keysize);
			if (ret) {
				pr_err("setkey() failed flags=%x\n",
				       crypto_aead_get_flags(tfm));
				goto out;
			}

			iv_len = crypto_aead_ivsize(tfm);
			if (iv_len)
				memset(iv, 0xff, iv_len);

			/* Now setup per request stuff, i.e. buffers */

			for (j = 0; j < num_mb; ++j) {
				struct test_mb_aead_data *cur = &data[j];

				assoc = cur->axbuf[0];
				memset(assoc, 0xff, aad_size);

				sg_init_aead(cur->sg, cur->xbuf,
					     *b_size + (enc ? 0 : authsize),
					     assoc, aad_size);

				sg_init_aead(cur->sgout, cur->xoutbuf,
					     *b_size + (enc ? authsize : 0),
					     assoc, aad_size);

				aead_request_set_ad(cur->req, aad_size);

				if (!enc) {
					aead_request_set_crypt(cur->req,
							       cur->sgout,
							       cur->sg,
							       *b_size, iv);
					ret = crypto_aead_encrypt(cur->req);
					ret = do_one_aead_op(cur->req, ret);

					if (ret) {
						pr_err("calculating auth failed (%d)\n",
						       ret);
						break;
					}
				}

				aead_request_set_crypt(cur->req, cur->sg,
						       cur->sgout, *b_size +
						       (enc ? 0 : authsize),
						       iv);
			}

			if (secs) {
				ret = test_mb_aead_jiffies(data, enc, *b_size,
							   secs, num_mb);
				cond_resched();
			} else {
				ret = test_mb_aead_cycles(data, enc, *b_size,
							  num_mb);
			}

			if (ret) {
				pr_err("%s() failed return code=%d\n", e, ret);
				break;
			}
			b_size++;
			i++;
		} while (*b_size);
		keysize++;
	} while (*keysize);

out:
	for (i = 0; i < num_mb; ++i)
		aead_request_free(data[i].req);
out_free_xoutbuf:
	for (i = 0; i < num_mb; ++i)
		testmgr_free_buf(data[i].xoutbuf);
out_free_axbuf:
	for (i = 0; i < num_mb; ++i)
		testmgr_free_buf(data[i].axbuf);
out_free_xbuf:
	for (i = 0; i < num_mb; ++i)
		testmgr_free_buf(data[i].xbuf);
out_free_tfm:
	crypto_free_aead(tfm);
out_free_data:
	kfree(data);
out_free_iv:
	kfree(iv);
}

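/* Single-request AEAD timing helpers: jiffies-based and cycle-count variants. */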
static int test_aead_jiffies(struct aead_request *req, int enc,
			     int blen, int secs)
{
	unsigned long start, end;
	int bcount;
	int ret;

	for (start = jiffies, end = start + secs * HZ, bcount = 0;
	     time_before(jiffies, end); bcount++) {
		if (enc)
			ret = do_one_aead_op(req, crypto_aead_encrypt(req));
		else
			ret = do_one_aead_op(req, crypto_aead_decrypt(req));

		if (ret)
			return ret;
	}

	printk("%d operations in %d seconds (%ld bytes)\n",
	       bcount, secs, (long)bcount * blen);
	return 0;
}

static int test_aead_cycles(struct aead_request *req, int enc, int blen)
{
	unsigned long cycles = 0;
	int ret = 0;
	int i;

	/* Warm-up run. */
	for (i = 0; i < 4; i++) {
		if (enc)
			ret = do_one_aead_op(req, crypto_aead_encrypt(req));
		else
			ret = do_one_aead_op(req, crypto_aead_decrypt(req));

		if (ret)
			goto out;
	}

	/* The real thing. */
	for (i = 0; i < 8; i++) {
		cycles_t start, end;

		start = get_cycles();
		if (enc)
			ret = do_one_aead_op(req, crypto_aead_encrypt(req));
		else
			ret = do_one_aead_op(req, crypto_aead_decrypt(req));
		end = get_cycles();

		if (ret)
			goto out;

		cycles += end - start;
	}

out:
	if (ret == 0)
		printk("1 operation in %lu cycles (%d bytes)\n",
		       (cycles + 4) / 8, blen);

	return ret;
}

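/*
 * Single-request AEAD speed test.  For decryption the data is encrypted once
 * beforehand (with input and output swapped) so that a valid auth tag exists.
 */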
static void test_aead_speed(const char *algo, int enc, unsigned int secs,
			    struct aead_speed_template *template,
			    unsigned int tcount, u8 authsize,
			    unsigned int aad_size, u8 *keysize)
{
	unsigned int i, j;
	struct crypto_aead *tfm;
	int ret = -ENOMEM;
	const char *key;
	struct aead_request *req;
	struct scatterlist *sg;
	struct scatterlist *sgout;
	const char *e;
	void *assoc;
	char *iv;
	char *xbuf[XBUFSIZE];
	char *xoutbuf[XBUFSIZE];
	char *axbuf[XBUFSIZE];
	unsigned int *b_size;
	unsigned int iv_len;
	struct crypto_wait wait;

	iv = kzalloc(MAX_IVLEN, GFP_KERNEL);
	if (!iv)
		return;

	if (aad_size >= PAGE_SIZE) {
		pr_err("associate data length (%u) too big\n", aad_size);
		goto out_noxbuf;
	}

	if (enc == ENCRYPT)
		e = "encryption";
	else
		e = "decryption";

	if (testmgr_alloc_buf(xbuf))
		goto out_noxbuf;
	if (testmgr_alloc_buf(axbuf))
		goto out_noaxbuf;
	if (testmgr_alloc_buf(xoutbuf))
		goto out_nooutbuf;

	sg = kmalloc(sizeof(*sg) * 9 * 2, GFP_KERNEL);
	if (!sg)
		goto out_nosg;
	sgout = &sg[9];

	tfm = crypto_alloc_aead(algo, 0, 0);

	if (IS_ERR(tfm)) {
		pr_err("alg: aead: Failed to load transform for %s: %ld\n", algo,
		       PTR_ERR(tfm));
		goto out_notfm;
	}

	crypto_init_wait(&wait);
	printk(KERN_INFO "\ntesting speed of %s (%s) %s\n", algo,
	       get_driver_name(crypto_aead, tfm), e);

	req = aead_request_alloc(tfm, GFP_KERNEL);
	if (!req) {
		pr_err("alg: aead: Failed to allocate request for %s\n",
		       algo);
		goto out_noreq;
	}

	aead_request_set_callback(req, CRYPTO_TFM_REQ_MAY_BACKLOG,
				  crypto_req_done, &wait);

	i = 0;
	do {
		b_size = aead_sizes;
		do {
			assoc = axbuf[0];
			memset(assoc, 0xff, aad_size);

			if ((*keysize + *b_size) > TVMEMSIZE * PAGE_SIZE) {
				pr_err("template (%u) too big for tvmem (%lu)\n",
				       *keysize + *b_size,
				       TVMEMSIZE * PAGE_SIZE);
				goto out;
			}

			key = tvmem[0];
			for (j = 0; j < tcount; j++) {
				if (template[j].klen == *keysize) {
					key = template[j].key;
					break;
				}
			}
			ret = crypto_aead_setkey(tfm, key, *keysize);
			ret = crypto_aead_setauthsize(tfm, authsize);

			iv_len = crypto_aead_ivsize(tfm);
			if (iv_len)
				memset(iv, 0xff, iv_len);

			crypto_aead_clear_flags(tfm, ~0);
			printk(KERN_INFO "test %u (%d bit key, %d byte blocks): ",
			       i, *keysize * 8, *b_size);

			memset(tvmem[0], 0xff, PAGE_SIZE);

			if (ret) {
				pr_err("setkey() failed flags=%x\n",
				       crypto_aead_get_flags(tfm));
				goto out;
			}

			sg_init_aead(sg, xbuf, *b_size + (enc ? 0 : authsize),
				     assoc, aad_size);

			sg_init_aead(sgout, xoutbuf,
				     *b_size + (enc ? authsize : 0), assoc,
				     aad_size);

			aead_request_set_ad(req, aad_size);

			if (!enc) {

				/*
				 * For decryption we need a proper auth so
				 * we do the encryption path once with buffers
				 * reversed (input <-> output) to calculate it
				 */
				aead_request_set_crypt(req, sgout, sg,
						       *b_size, iv);
				ret = do_one_aead_op(req,
						     crypto_aead_encrypt(req));

				if (ret) {
					pr_err("calculating auth failed (%d)\n",
					       ret);
					break;
				}
			}

			aead_request_set_crypt(req, sg, sgout,
					       *b_size + (enc ? 0 : authsize),
					       iv);

			if (secs) {
				ret = test_aead_jiffies(req, enc, *b_size,
							secs);
				cond_resched();
			} else {
				ret = test_aead_cycles(req, enc, *b_size);
			}

			if (ret) {
				pr_err("%s() failed return code=%d\n", e, ret);
				break;
			}
			b_size++;
			i++;
		} while (*b_size);
		keysize++;
	} while (*keysize);

out:
	aead_request_free(req);
out_noreq:
	crypto_free_aead(tfm);
out_notfm:
	kfree(sg);
out_nosg:
	testmgr_free_buf(xoutbuf);
out_nooutbuf:
	testmgr_free_buf(axbuf);
out_noaxbuf:
	testmgr_free_buf(xbuf);
out_noxbuf:
	kfree(iv);
}

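/* Map the shared tvmem pages into a scatterlist and fill them with 0xff. */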
static void test_hash_sg_init(struct scatterlist *sg)
{
	int i;

	sg_init_table(sg, TVMEMSIZE);
	for (i = 0; i < TVMEMSIZE; i++) {
		sg_set_buf(sg + i, tvmem[i], PAGE_SIZE);
		memset(tvmem[i], 0xff, PAGE_SIZE);
	}
}

static inline int do_one_ahash_op(struct ahash_request *req, int ret)
{
	struct crypto_wait *wait = req->base.data;

	return crypto_wait_req(ret, wait);
}

struct test_mb_ahash_data {
	struct scatterlist sg[XBUFSIZE];
	char result[64];
	struct ahash_request *req;
	struct crypto_wait wait;
	char *xbuf[XBUFSIZE];
};

static inline int do_mult_ahash_op(struct test_mb_ahash_data *data, u32 num_mb,
				   int *rc)
{
	int i, err = 0;

	/* Fire up a bunch of concurrent requests */
	for (i = 0; i < num_mb; i++)
		rc[i] = crypto_ahash_digest(data[i].req);

	/* Wait for all requests to finish */
	for (i = 0; i < num_mb; i++) {
		rc[i] = crypto_wait_req(rc[i], &data[i].wait);

		if (rc[i]) {
			pr_info("concurrent request %d error %d\n", i, rc[i]);
			err = rc[i];
		}
	}

	return err;
}

static int test_mb_ahash_jiffies(struct test_mb_ahash_data *data, int blen,
				 int secs, u32 num_mb)
{
	unsigned long start, end;
	int bcount;
	int ret = 0;
	int *rc;

	rc = kcalloc(num_mb, sizeof(*rc), GFP_KERNEL);
	if (!rc)
		return -ENOMEM;

	for (start = jiffies, end = start + secs * HZ, bcount = 0;
	     time_before(jiffies, end); bcount++) {
		ret = do_mult_ahash_op(data, num_mb, rc);
		if (ret)
			goto out;
	}

	pr_cont("%d operations in %d seconds (%ld bytes)\n",
		bcount * num_mb, secs, (long)bcount * blen * num_mb);

out:
	kfree(rc);
	return ret;
}

static int test_mb_ahash_cycles(struct test_mb_ahash_data *data, int blen,
				u32 num_mb)
{
	unsigned long cycles = 0;
	int ret = 0;
	int i;
	int *rc;

	rc = kcalloc(num_mb, sizeof(*rc), GFP_KERNEL);
	if (!rc)
		return -ENOMEM;

	/* Warm-up run. */
	for (i = 0; i < 4; i++) {
		ret = do_mult_ahash_op(data, num_mb, rc);
		if (ret)
			goto out;
	}

	/* The real thing. */
	for (i = 0; i < 8; i++) {
		cycles_t start, end;

		start = get_cycles();
		ret = do_mult_ahash_op(data, num_mb, rc);
		end = get_cycles();

		if (ret)
			goto out;

		cycles += end - start;
	}

	pr_cont("1 operation in %lu cycles (%d bytes)\n",
		(cycles + 4) / (8 * num_mb), blen);

out:
	kfree(rc);
	return ret;
}

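/*
 * Multibuffer hash speed test; only digest-style templates (blen == plen)
 * are exercised, each as num_mb concurrent requests.
 */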
static void test_mb_ahash_speed(const char *algo, unsigned int secs,
				struct hash_speed *speed, u32 num_mb)
{
	struct test_mb_ahash_data *data;
	struct crypto_ahash *tfm;
	unsigned int i, j, k;
	int ret;

	data = kcalloc(num_mb, sizeof(*data), GFP_KERNEL);
	if (!data)
		return;

	tfm = crypto_alloc_ahash(algo, 0, 0);
	if (IS_ERR(tfm)) {
		pr_err("failed to load transform for %s: %ld\n",
		       algo, PTR_ERR(tfm));
		goto free_data;
	}

	for (i = 0; i < num_mb; ++i) {
		if (testmgr_alloc_buf(data[i].xbuf))
			goto out;

		crypto_init_wait(&data[i].wait);

		data[i].req = ahash_request_alloc(tfm, GFP_KERNEL);
		if (!data[i].req) {
			pr_err("alg: hash: Failed to allocate request for %s\n",
			       algo);
			goto out;
		}

		ahash_request_set_callback(data[i].req, 0, crypto_req_done,
					   &data[i].wait);

		sg_init_table(data[i].sg, XBUFSIZE);
		for (j = 0; j < XBUFSIZE; j++) {
			sg_set_buf(data[i].sg + j, data[i].xbuf[j], PAGE_SIZE);
			memset(data[i].xbuf[j], 0xff, PAGE_SIZE);
		}
	}

	pr_info("\ntesting speed of multibuffer %s (%s)\n", algo,
		get_driver_name(crypto_ahash, tfm));

	for (i = 0; speed[i].blen != 0; i++) {
		/* For some reason this only tests digests. */
		if (speed[i].blen != speed[i].plen)
			continue;

		if (speed[i].blen > XBUFSIZE * PAGE_SIZE) {
			pr_err("template (%u) too big for tvmem (%lu)\n",
			       speed[i].blen, XBUFSIZE * PAGE_SIZE);
			goto out;
		}

		if (speed[i].klen)
			crypto_ahash_setkey(tfm, tvmem[0], speed[i].klen);

		for (k = 0; k < num_mb; k++)
			ahash_request_set_crypt(data[k].req, data[k].sg,
						data[k].result, speed[i].blen);

		pr_info("test%3u "
			"(%5u byte blocks,%5u bytes per update,%4u updates): ",
			i, speed[i].blen, speed[i].plen,
			speed[i].blen / speed[i].plen);

		if (secs) {
			ret = test_mb_ahash_jiffies(data, speed[i].blen, secs,
						    num_mb);
			cond_resched();
		} else {
			ret = test_mb_ahash_cycles(data, speed[i].blen, num_mb);
		}

		if (ret) {
			pr_err("At least one hashing failed ret=%d\n", ret);
			break;
		}
	}

out:
	for (k = 0; k < num_mb; ++k)
		ahash_request_free(data[k].req);

	for (k = 0; k < num_mb; ++k)
		testmgr_free_buf(data[k].xbuf);

	crypto_free_ahash(tfm);

free_data:
	kfree(data);
}

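/* Single-request hash timing: digest-only and init/update/final variants. */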
static int test_ahash_jiffies_digest(struct ahash_request *req, int blen,
				     char *out, int secs)
{
	unsigned long start, end;
	int bcount;
	int ret;

	for (start = jiffies, end = start + secs * HZ, bcount = 0;
	     time_before(jiffies, end); bcount++) {
		ret = do_one_ahash_op(req, crypto_ahash_digest(req));
		if (ret)
			return ret;
	}

	printk("%6u opers/sec, %9lu bytes/sec\n",
	       bcount / secs, ((long)bcount * blen) / secs);

	return 0;
}

static int test_ahash_jiffies(struct ahash_request *req, int blen,
			      int plen, char *out, int secs)
{
	unsigned long start, end;
	int bcount, pcount;
	int ret;

	if (plen == blen)
		return test_ahash_jiffies_digest(req, blen, out, secs);

	for (start = jiffies, end = start + secs * HZ, bcount = 0;
	     time_before(jiffies, end); bcount++) {
		ret = do_one_ahash_op(req, crypto_ahash_init(req));
		if (ret)
			return ret;
		for (pcount = 0; pcount < blen; pcount += plen) {
			ret = do_one_ahash_op(req, crypto_ahash_update(req));
			if (ret)
				return ret;
		}
		/* we assume there is enough space in 'out' for the result */
		ret = do_one_ahash_op(req, crypto_ahash_final(req));
		if (ret)
			return ret;
	}

	pr_cont("%6u opers/sec, %9lu bytes/sec\n",
		bcount / secs, ((long)bcount * blen) / secs);

	return 0;
}

static int test_ahash_cycles_digest(struct ahash_request *req, int blen,
				    char *out)
{
	unsigned long cycles = 0;
	int ret, i;

	/* Warm-up run. */
	for (i = 0; i < 4; i++) {
		ret = do_one_ahash_op(req, crypto_ahash_digest(req));
		if (ret)
			goto out;
	}

	/* The real thing. */
	for (i = 0; i < 8; i++) {
		cycles_t start, end;

		start = get_cycles();

		ret = do_one_ahash_op(req, crypto_ahash_digest(req));
		if (ret)
			goto out;

		end = get_cycles();

		cycles += end - start;
	}

out:
	if (ret)
		return ret;

	pr_cont("%6lu cycles/operation, %4lu cycles/byte\n",
		cycles / 8, cycles / (8 * blen));

	return 0;
}

static int test_ahash_cycles(struct ahash_request *req, int blen,
			     int plen, char *out)
{
	unsigned long cycles = 0;
	int i, pcount, ret;

	if (plen == blen)
		return test_ahash_cycles_digest(req, blen, out);

	/* Warm-up run. */
	for (i = 0; i < 4; i++) {
		ret = do_one_ahash_op(req, crypto_ahash_init(req));
		if (ret)
			goto out;
		for (pcount = 0; pcount < blen; pcount += plen) {
			ret = do_one_ahash_op(req, crypto_ahash_update(req));
			if (ret)
				goto out;
		}
		ret = do_one_ahash_op(req, crypto_ahash_final(req));
		if (ret)
			goto out;
	}

	/* The real thing. */
	for (i = 0; i < 8; i++) {
		cycles_t start, end;

		start = get_cycles();

		ret = do_one_ahash_op(req, crypto_ahash_init(req));
		if (ret)
			goto out;
		for (pcount = 0; pcount < blen; pcount += plen) {
			ret = do_one_ahash_op(req, crypto_ahash_update(req));
			if (ret)
				goto out;
		}
		ret = do_one_ahash_op(req, crypto_ahash_final(req));
		if (ret)
			goto out;

		end = get_cycles();

		cycles += end - start;
	}

out:
	if (ret)
		return ret;

	pr_cont("%6lu cycles/operation, %4lu cycles/byte\n",
		cycles / 8, cycles / (8 * blen));

	return 0;
}

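/*
 * Common ahash speed test body; 'mask' is passed to crypto_alloc_ahash(), so
 * 0 allows asynchronous drivers while CRYPTO_ALG_ASYNC restricts the
 * selection to synchronous ones.
 */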
static void test_ahash_speed_common(const char *algo, unsigned int secs,
				    struct hash_speed *speed, unsigned mask)
{
	struct scatterlist sg[TVMEMSIZE];
	struct crypto_wait wait;
	struct ahash_request *req;
	struct crypto_ahash *tfm;
	char *output;
	int i, ret;

	tfm = crypto_alloc_ahash(algo, 0, mask);
	if (IS_ERR(tfm)) {
		pr_err("failed to load transform for %s: %ld\n",
		       algo, PTR_ERR(tfm));
		return;
	}

	printk(KERN_INFO "\ntesting speed of async %s (%s)\n", algo,
	       get_driver_name(crypto_ahash, tfm));

	if (crypto_ahash_digestsize(tfm) > MAX_DIGEST_SIZE) {
		pr_err("digestsize(%u) > %d\n", crypto_ahash_digestsize(tfm),
		       MAX_DIGEST_SIZE);
		goto out;
	}

	test_hash_sg_init(sg);
	req = ahash_request_alloc(tfm, GFP_KERNEL);
	if (!req) {
		pr_err("ahash request allocation failure\n");
		goto out;
	}

	crypto_init_wait(&wait);
	ahash_request_set_callback(req, CRYPTO_TFM_REQ_MAY_BACKLOG,
				   crypto_req_done, &wait);

	output = kmalloc(MAX_DIGEST_SIZE, GFP_KERNEL);
	if (!output)
		goto out_nomem;

	for (i = 0; speed[i].blen != 0; i++) {
		if (speed[i].blen > TVMEMSIZE * PAGE_SIZE) {
			pr_err("template (%u) too big for tvmem (%lu)\n",
			       speed[i].blen, TVMEMSIZE * PAGE_SIZE);
			break;
		}

		if (speed[i].klen)
			crypto_ahash_setkey(tfm, tvmem[0], speed[i].klen);

		pr_info("test%3u "
			"(%5u byte blocks,%5u bytes per update,%4u updates): ",
			i, speed[i].blen, speed[i].plen,
			speed[i].blen / speed[i].plen);

		ahash_request_set_crypt(req, sg, output, speed[i].plen);

		if (secs) {
			ret = test_ahash_jiffies(req, speed[i].blen,
						 speed[i].plen, output, secs);
			cond_resched();
		} else {
			ret = test_ahash_cycles(req, speed[i].blen,
						speed[i].plen, output);
		}

		if (ret) {
			pr_err("hashing failed ret=%d\n", ret);
			break;
		}
	}

	kfree(output);

out_nomem:
	ahash_request_free(req);

out:
	crypto_free_ahash(tfm);
}

static void test_ahash_speed(const char *algo, unsigned int secs,
			     struct hash_speed *speed)
{
	return test_ahash_speed_common(algo, secs, speed, 0);
}

static void test_hash_speed(const char *algo, unsigned int secs,
			    struct hash_speed *speed)
{
	return test_ahash_speed_common(algo, secs, speed, CRYPTO_ALG_ASYNC);
}

struct test_mb_skcipher_data {
	struct scatterlist sg[XBUFSIZE];
	struct skcipher_request *req;
	struct crypto_wait wait;
	char *xbuf[XBUFSIZE];
};

static int do_mult_acipher_op(struct test_mb_skcipher_data *data, int enc,
			      u32 num_mb, int *rc)
{
	int i, err = 0;

	/* Fire up a bunch of concurrent requests */
	for (i = 0; i < num_mb; i++) {
		if (enc == ENCRYPT)
			rc[i] = crypto_skcipher_encrypt(data[i].req);
		else
			rc[i] = crypto_skcipher_decrypt(data[i].req);
	}

	/* Wait for all requests to finish */
	for (i = 0; i < num_mb; i++) {
		rc[i] = crypto_wait_req(rc[i], &data[i].wait);

		if (rc[i]) {
			pr_info("concurrent request %d error %d\n", i, rc[i]);
			err = rc[i];
		}
	}

	return err;
}

static int test_mb_acipher_jiffies(struct test_mb_skcipher_data *data, int enc,
				   int blen, int secs, u32 num_mb)
{
	unsigned long start, end;
	int bcount;
	int ret = 0;
	int *rc;

	rc = kcalloc(num_mb, sizeof(*rc), GFP_KERNEL);
	if (!rc)
		return -ENOMEM;

	for (start = jiffies, end = start + secs * HZ, bcount = 0;
	     time_before(jiffies, end); bcount++) {
		ret = do_mult_acipher_op(data, enc, num_mb, rc);
		if (ret)
			goto out;
	}

	pr_cont("%d operations in %d seconds (%ld bytes)\n",
		bcount * num_mb, secs, (long)bcount * blen * num_mb);

out:
	kfree(rc);
	return ret;
}

static int test_mb_acipher_cycles(struct test_mb_skcipher_data *data, int enc,
				  int blen, u32 num_mb)
{
	unsigned long cycles = 0;
	int ret = 0;
	int i;
	int *rc;

	rc = kcalloc(num_mb, sizeof(*rc), GFP_KERNEL);
	if (!rc)
		return -ENOMEM;

	/* Warm-up run. */
	for (i = 0; i < 4; i++) {
		ret = do_mult_acipher_op(data, enc, num_mb, rc);
		if (ret)
			goto out;
	}

	/* The real thing. */
	for (i = 0; i < 8; i++) {
		cycles_t start, end;

		start = get_cycles();
		ret = do_mult_acipher_op(data, enc, num_mb, rc);
		end = get_cycles();

		if (ret)
			goto out;

		cycles += end - start;
	}

	pr_cont("1 operation in %lu cycles (%d bytes)\n",
		(cycles + 4) / (8 * num_mb), blen);

out:
	kfree(rc);
	return ret;
}

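/*
 * Multibuffer skcipher speed test: keeps num_mb requests in flight at once
 * for every (key size, block size) combination in the template.
 */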
static void test_mb_skcipher_speed(const char *algo, int enc, int secs,
				   struct cipher_speed_template *template,
				   unsigned int tcount, u8 *keysize, u32 num_mb)
{
	struct test_mb_skcipher_data *data;
	struct crypto_skcipher *tfm;
	unsigned int i, j, iv_len;
	const char *key;
	const char *e;
	u32 *b_size;
	char iv[128];
	int ret;

	if (enc == ENCRYPT)
		e = "encryption";
	else
		e = "decryption";

	data = kcalloc(num_mb, sizeof(*data), GFP_KERNEL);
	if (!data)
		return;

	tfm = crypto_alloc_skcipher(algo, 0, 0);
	if (IS_ERR(tfm)) {
		pr_err("failed to load transform for %s: %ld\n",
		       algo, PTR_ERR(tfm));
		goto out_free_data;
	}

	for (i = 0; i < num_mb; ++i)
		if (testmgr_alloc_buf(data[i].xbuf)) {
			while (i--)
				testmgr_free_buf(data[i].xbuf);
			goto out_free_tfm;
		}

	for (i = 0; i < num_mb; ++i) {
		data[i].req = skcipher_request_alloc(tfm, GFP_KERNEL);
		if (!data[i].req) {
			pr_err("alg: skcipher: Failed to allocate request for %s\n",
			       algo);
			while (i--)
				skcipher_request_free(data[i].req);
			goto out_free_xbuf;
		}
	}

	for (i = 0; i < num_mb; ++i) {
		skcipher_request_set_callback(data[i].req,
					      CRYPTO_TFM_REQ_MAY_BACKLOG,
					      crypto_req_done, &data[i].wait);
		crypto_init_wait(&data[i].wait);
	}

	pr_info("\ntesting speed of multibuffer %s (%s) %s\n", algo,
		get_driver_name(crypto_skcipher, tfm), e);

	i = 0;
	do {
		b_size = block_sizes;
		do {
			if (*b_size > XBUFSIZE * PAGE_SIZE) {
				pr_err("template (%u) too big for buffer (%lu)\n",
				       *b_size, XBUFSIZE * PAGE_SIZE);
				goto out;
			}

			pr_info("test %u (%d bit key, %d byte blocks): ", i,
				*keysize * 8, *b_size);

			/* Set up tfm global state, i.e. the key */

			memset(tvmem[0], 0xff, PAGE_SIZE);
			key = tvmem[0];
			for (j = 0; j < tcount; j++) {
				if (template[j].klen == *keysize) {
					key = template[j].key;
					break;
				}
			}

			crypto_skcipher_clear_flags(tfm, ~0);

			ret = crypto_skcipher_setkey(tfm, key, *keysize);
			if (ret) {
				pr_err("setkey() failed flags=%x\n",
				       crypto_skcipher_get_flags(tfm));
				goto out;
			}

			iv_len = crypto_skcipher_ivsize(tfm);
			if (iv_len)
				memset(&iv, 0xff, iv_len);

			/* Now setup per request stuff, i.e. buffers */

			for (j = 0; j < num_mb; ++j) {
				struct test_mb_skcipher_data *cur = &data[j];
				unsigned int k = *b_size;
				unsigned int pages = DIV_ROUND_UP(k, PAGE_SIZE);
				unsigned int p = 0;

				sg_init_table(cur->sg, pages);

				while (k > PAGE_SIZE) {
					sg_set_buf(cur->sg + p, cur->xbuf[p],
						   PAGE_SIZE);
					memset(cur->xbuf[p], 0xff, PAGE_SIZE);
					p++;
					k -= PAGE_SIZE;
				}

				sg_set_buf(cur->sg + p, cur->xbuf[p], k);
				memset(cur->xbuf[p], 0xff, k);

				skcipher_request_set_crypt(cur->req, cur->sg,
							   cur->sg, *b_size,
							   iv);
			}

			if (secs) {
				ret = test_mb_acipher_jiffies(data, enc,
							      *b_size, secs,
							      num_mb);
				cond_resched();
			} else {
				ret = test_mb_acipher_cycles(data, enc,
							     *b_size, num_mb);
			}

			if (ret) {
				pr_err("%s() failed flags=%x\n", e,
				       crypto_skcipher_get_flags(tfm));
				break;
			}
			b_size++;
			i++;
		} while (*b_size);
		keysize++;
	} while (*keysize);

out:
	for (i = 0; i < num_mb; ++i)
		skcipher_request_free(data[i].req);
out_free_xbuf:
	for (i = 0; i < num_mb; ++i)
		testmgr_free_buf(data[i].xbuf);
out_free_tfm:
	crypto_free_skcipher(tfm);
out_free_data:
	kfree(data);
}

static inline int do_one_acipher_op(struct skcipher_request *req, int ret)
{
	struct crypto_wait *wait = req->base.data;

	return crypto_wait_req(ret, wait);
}

static int test_acipher_jiffies(struct skcipher_request *req, int enc,
				int blen, int secs)
{
	unsigned long start, end;
	int bcount;
	int ret;

	for (start = jiffies, end = start + secs * HZ, bcount = 0;
	     time_before(jiffies, end); bcount++) {
		if (enc)
			ret = do_one_acipher_op(req,
						crypto_skcipher_encrypt(req));
		else
			ret = do_one_acipher_op(req,
						crypto_skcipher_decrypt(req));

		if (ret)
			return ret;
	}

	pr_cont("%d operations in %d seconds (%ld bytes)\n",
		bcount, secs, (long)bcount * blen);
	return 0;
}

static int test_acipher_cycles(struct skcipher_request *req, int enc,
			       int blen)
{
	unsigned long cycles = 0;
	int ret = 0;
	int i;

	/* Warm-up run. */
	for (i = 0; i < 4; i++) {
		if (enc)
			ret = do_one_acipher_op(req,
						crypto_skcipher_encrypt(req));
		else
			ret = do_one_acipher_op(req,
						crypto_skcipher_decrypt(req));

		if (ret)
			goto out;
	}

	/* The real thing. */
	for (i = 0; i < 8; i++) {
		cycles_t start, end;

		start = get_cycles();
		if (enc)
			ret = do_one_acipher_op(req,
						crypto_skcipher_encrypt(req));
		else
			ret = do_one_acipher_op(req,
						crypto_skcipher_decrypt(req));
		end = get_cycles();

		if (ret)
			goto out;

		cycles += end - start;
	}

out:
	if (ret == 0)
		pr_cont("1 operation in %lu cycles (%d bytes)\n",
			(cycles + 4) / 8, blen);

	return ret;
}

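/*
 * Single-request skcipher speed test; 'async' controls whether asynchronous
 * implementations may be selected (false masks them out via CRYPTO_ALG_ASYNC).
 */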
Herbert Xu7166e582016-06-29 18:03:50 +08001492static void test_skcipher_speed(const char *algo, int enc, unsigned int secs,
1493 struct cipher_speed_template *template,
1494 unsigned int tcount, u8 *keysize, bool async)
Jussi Kivilinna3f3baf32011-10-18 00:02:58 +03001495{
Nicolas Royerde1975332012-07-01 19:19:47 +02001496 unsigned int ret, i, j, k, iv_len;
Gilad Ben-Yossef64671042017-10-18 08:00:48 +01001497 struct crypto_wait wait;
Jussi Kivilinna3f3baf32011-10-18 00:02:58 +03001498 const char *key;
1499 char iv[128];
Herbert Xu7166e582016-06-29 18:03:50 +08001500 struct skcipher_request *req;
1501 struct crypto_skcipher *tfm;
Jussi Kivilinna3f3baf32011-10-18 00:02:58 +03001502 const char *e;
1503 u32 *b_size;
1504
1505 if (enc == ENCRYPT)
1506 e = "encryption";
1507 else
1508 e = "decryption";
1509
Gilad Ben-Yossef64671042017-10-18 08:00:48 +01001510 crypto_init_wait(&wait);
Jussi Kivilinna3f3baf32011-10-18 00:02:58 +03001511
Herbert Xu7166e582016-06-29 18:03:50 +08001512 tfm = crypto_alloc_skcipher(algo, 0, async ? 0 : CRYPTO_ALG_ASYNC);
Jussi Kivilinna3f3baf32011-10-18 00:02:58 +03001513
1514 if (IS_ERR(tfm)) {
1515 pr_err("failed to load transform for %s: %ld\n", algo,
1516 PTR_ERR(tfm));
1517 return;
1518 }
1519
Luca Clementi263a8df2014-06-25 22:57:42 -07001520 pr_info("\ntesting speed of async %s (%s) %s\n", algo,
Herbert Xu7166e582016-06-29 18:03:50 +08001521 get_driver_name(crypto_skcipher, tfm), e);
Luca Clementi263a8df2014-06-25 22:57:42 -07001522
Herbert Xu7166e582016-06-29 18:03:50 +08001523 req = skcipher_request_alloc(tfm, GFP_KERNEL);
Jussi Kivilinna3f3baf32011-10-18 00:02:58 +03001524 if (!req) {
1525 pr_err("tcrypt: skcipher: Failed to allocate request for %s\n",
1526 algo);
1527 goto out;
1528 }
1529
Herbert Xu7166e582016-06-29 18:03:50 +08001530 skcipher_request_set_callback(req, CRYPTO_TFM_REQ_MAY_BACKLOG,
Gilad Ben-Yossef64671042017-10-18 08:00:48 +01001531 crypto_req_done, &wait);
Jussi Kivilinna3f3baf32011-10-18 00:02:58 +03001532
1533 i = 0;
1534 do {
1535 b_size = block_sizes;
1536
1537 do {
1538 struct scatterlist sg[TVMEMSIZE];
1539
1540 if ((*keysize + *b_size) > TVMEMSIZE * PAGE_SIZE) {
1541 pr_err("template (%u) too big for "
1542 "tvmem (%lu)\n", *keysize + *b_size,
1543 TVMEMSIZE * PAGE_SIZE);
1544 goto out_free_req;
1545 }
1546
1547 pr_info("test %u (%d bit key, %d byte blocks): ", i,
1548 *keysize * 8, *b_size);
1549
1550 memset(tvmem[0], 0xff, PAGE_SIZE);
1551
1552 /* set key, plain text and IV */
1553 key = tvmem[0];
1554 for (j = 0; j < tcount; j++) {
1555 if (template[j].klen == *keysize) {
1556 key = template[j].key;
1557 break;
1558 }
1559 }
1560
Herbert Xu7166e582016-06-29 18:03:50 +08001561 crypto_skcipher_clear_flags(tfm, ~0);
Jussi Kivilinna3f3baf32011-10-18 00:02:58 +03001562
Herbert Xu7166e582016-06-29 18:03:50 +08001563 ret = crypto_skcipher_setkey(tfm, key, *keysize);
Jussi Kivilinna3f3baf32011-10-18 00:02:58 +03001564 if (ret) {
1565 pr_err("setkey() failed flags=%x\n",
Herbert Xu7166e582016-06-29 18:03:50 +08001566 crypto_skcipher_get_flags(tfm));
Jussi Kivilinna3f3baf32011-10-18 00:02:58 +03001567 goto out_free_req;
1568 }
1569
Nicolas Royerde1975332012-07-01 19:19:47 +02001570 k = *keysize + *b_size;
Horia Geant?007ee8d2015-03-09 16:14:58 +02001571 sg_init_table(sg, DIV_ROUND_UP(k, PAGE_SIZE));
1572
Nicolas Royerde1975332012-07-01 19:19:47 +02001573 if (k > PAGE_SIZE) {
1574 sg_set_buf(sg, tvmem[0] + *keysize,
Jussi Kivilinna3f3baf32011-10-18 00:02:58 +03001575 PAGE_SIZE - *keysize);
Nicolas Royerde1975332012-07-01 19:19:47 +02001576 k -= PAGE_SIZE;
1577 j = 1;
1578 while (k > PAGE_SIZE) {
1579 sg_set_buf(sg + j, tvmem[j], PAGE_SIZE);
1580 memset(tvmem[j], 0xff, PAGE_SIZE);
1581 j++;
1582 k -= PAGE_SIZE;
1583 }
1584 sg_set_buf(sg + j, tvmem[j], k);
1585 memset(tvmem[j], 0xff, k);
1586 } else {
1587 sg_set_buf(sg, tvmem[0] + *keysize, *b_size);
Jussi Kivilinna3f3baf32011-10-18 00:02:58 +03001588 }
1589
Herbert Xu7166e582016-06-29 18:03:50 +08001590 iv_len = crypto_skcipher_ivsize(tfm);
Jussi Kivilinna3f3baf32011-10-18 00:02:58 +03001591 if (iv_len)
1592 memset(&iv, 0xff, iv_len);
1593
Herbert Xu7166e582016-06-29 18:03:50 +08001594 skcipher_request_set_crypt(req, sg, sg, *b_size, iv);
Jussi Kivilinna3f3baf32011-10-18 00:02:58 +03001595
Horia Geantă2af63292018-07-23 17:18:48 +03001596 if (secs) {
Jussi Kivilinna3f3baf32011-10-18 00:02:58 +03001597 ret = test_acipher_jiffies(req, enc,
Mark Rustad3e3dc252014-07-25 02:53:38 -07001598 *b_size, secs);
Horia Geantă2af63292018-07-23 17:18:48 +03001599 cond_resched();
1600 } else {
Jussi Kivilinna3f3baf32011-10-18 00:02:58 +03001601 ret = test_acipher_cycles(req, enc,
1602 *b_size);
Horia Geantă2af63292018-07-23 17:18:48 +03001603 }
Jussi Kivilinna3f3baf32011-10-18 00:02:58 +03001604
1605 if (ret) {
1606 pr_err("%s() failed flags=%x\n", e,
Herbert Xu7166e582016-06-29 18:03:50 +08001607 crypto_skcipher_get_flags(tfm));
Jussi Kivilinna3f3baf32011-10-18 00:02:58 +03001608 break;
1609 }
1610 b_size++;
1611 i++;
1612 } while (*b_size);
1613 keysize++;
1614 } while (*keysize);
1615
1616out_free_req:
Herbert Xu7166e582016-06-29 18:03:50 +08001617 skcipher_request_free(req);
Jussi Kivilinna3f3baf32011-10-18 00:02:58 +03001618out:
Herbert Xu7166e582016-06-29 18:03:50 +08001619 crypto_free_skcipher(tfm);
1620}
1621
1622static void test_acipher_speed(const char *algo, int enc, unsigned int secs,
1623 struct cipher_speed_template *template,
1624 unsigned int tcount, u8 *keysize)
1625{
1626 return test_skcipher_speed(algo, enc, secs, template, tcount, keysize,
1627 true);
1628}
1629
1630static void test_cipher_speed(const char *algo, int enc, unsigned int secs,
1631 struct cipher_speed_template *template,
1632 unsigned int tcount, u8 *keysize)
1633{
1634 return test_skcipher_speed(algo, enc, secs, template, tcount, keysize,
1635 false);
Jussi Kivilinna3f3baf32011-10-18 00:02:58 +03001636}
1637
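/* Report whether each algorithm named in the "check" list is registered. */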
Herbert Xuef2736f2005-06-22 13:26:03 -07001638static void test_available(void)
Linus Torvalds1da177e2005-04-16 15:20:36 -07001639{
1640 char **name = check;
Herbert Xuef2736f2005-06-22 13:26:03 -07001641
Linus Torvalds1da177e2005-04-16 15:20:36 -07001642 while (*name) {
1643 printk("alg %s ", *name);
Herbert Xu6158efc2007-04-04 17:41:07 +10001644 printk(crypto_has_alg(*name, 0, 0) ?
Herbert Xue4d5b792006-08-26 18:12:40 +10001645 "found\n" : "not found\n");
Linus Torvalds1da177e2005-04-16 15:20:36 -07001646 name++;
Herbert Xuef2736f2005-06-22 13:26:03 -07001647 }
Linus Torvalds1da177e2005-04-16 15:20:36 -07001648}
1649
Herbert Xu01b32322008-07-31 15:41:55 +08001650static inline int tcrypt_test(const char *alg)
1651{
Jarod Wilson4e033a62009-05-27 15:10:21 +10001652 int ret;
1653
Rabin Vincent76512f22017-01-18 14:54:05 +01001654 pr_debug("testing %s\n", alg);
1655
Jarod Wilson4e033a62009-05-27 15:10:21 +10001656 ret = alg_test(alg, alg, 0, 0);
1657 /* non-fips algs return -EINVAL in fips mode */
1658 if (fips_enabled && ret == -EINVAL)
1659 ret = 0;
1660 return ret;
Herbert Xu01b32322008-07-31 15:41:55 +08001661}
1662
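/*
 * Dispatch on the requested test mode: 0 runs all numbered self-tests
 * (1..199), 1..199 run individual correctness tests via alg_test(), and
 * 200 and up are the speed/benchmark modes.  Failures accumulate in ret.
 */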
Kees Cook4e234ee2018-04-26 19:57:28 -07001663static int do_test(const char *alg, u32 type, u32 mask, int m, u32 num_mb)
Herbert Xu01b32322008-07-31 15:41:55 +08001664{
1665 int i;
Jarod Wilson4e033a62009-05-27 15:10:21 +10001666 int ret = 0;
Herbert Xu01b32322008-07-31 15:41:55 +08001667
1668 switch (m) {
Linus Torvalds1da177e2005-04-16 15:20:36 -07001669 case 0:
Herbert Xu86068132014-12-04 16:43:29 +08001670 if (alg) {
1671 if (!crypto_has_alg(alg, type,
1672 mask ?: CRYPTO_ALG_TYPE_MASK))
1673 ret = -ENOENT;
1674 break;
1675 }
1676
Herbert Xu01b32322008-07-31 15:41:55 +08001677 for (i = 1; i < 200; i++)
Kees Cook4e234ee2018-04-26 19:57:28 -07001678 ret += do_test(NULL, 0, 0, i, num_mb);
Linus Torvalds1da177e2005-04-16 15:20:36 -07001679 break;
1680
1681 case 1:
Jarod Wilson4e033a62009-05-27 15:10:21 +10001682 ret += tcrypt_test("md5");
Linus Torvalds1da177e2005-04-16 15:20:36 -07001683 break;
1684
1685 case 2:
Jarod Wilson4e033a62009-05-27 15:10:21 +10001686 ret += tcrypt_test("sha1");
Linus Torvalds1da177e2005-04-16 15:20:36 -07001687 break;
1688
1689 case 3:
Jarod Wilson4e033a62009-05-27 15:10:21 +10001690 ret += tcrypt_test("ecb(des)");
1691 ret += tcrypt_test("cbc(des)");
Jussi Kivilinna8163fc32012-10-20 14:53:07 +03001692 ret += tcrypt_test("ctr(des)");
Linus Torvalds1da177e2005-04-16 15:20:36 -07001693 break;
1694
1695 case 4:
Jarod Wilson4e033a62009-05-27 15:10:21 +10001696 ret += tcrypt_test("ecb(des3_ede)");
1697 ret += tcrypt_test("cbc(des3_ede)");
Jussi Kivilinnae080b172012-10-20 14:53:12 +03001698 ret += tcrypt_test("ctr(des3_ede)");
Linus Torvalds1da177e2005-04-16 15:20:36 -07001699 break;
1700
1701 case 5:
Jarod Wilson4e033a62009-05-27 15:10:21 +10001702 ret += tcrypt_test("md4");
Linus Torvalds1da177e2005-04-16 15:20:36 -07001703 break;
Herbert Xuef2736f2005-06-22 13:26:03 -07001704
Linus Torvalds1da177e2005-04-16 15:20:36 -07001705 case 6:
Jarod Wilson4e033a62009-05-27 15:10:21 +10001706 ret += tcrypt_test("sha256");
Linus Torvalds1da177e2005-04-16 15:20:36 -07001707 break;
Herbert Xuef2736f2005-06-22 13:26:03 -07001708
Linus Torvalds1da177e2005-04-16 15:20:36 -07001709 case 7:
Jarod Wilson4e033a62009-05-27 15:10:21 +10001710 ret += tcrypt_test("ecb(blowfish)");
1711 ret += tcrypt_test("cbc(blowfish)");
Jussi Kivilinna85b63e32011-10-10 23:03:03 +03001712 ret += tcrypt_test("ctr(blowfish)");
Linus Torvalds1da177e2005-04-16 15:20:36 -07001713 break;
1714
1715 case 8:
Jarod Wilson4e033a62009-05-27 15:10:21 +10001716 ret += tcrypt_test("ecb(twofish)");
1717 ret += tcrypt_test("cbc(twofish)");
Jussi Kivilinna573da622011-10-10 23:03:12 +03001718 ret += tcrypt_test("ctr(twofish)");
Jussi Kivilinnabee3a902011-10-18 13:32:56 +03001719 ret += tcrypt_test("lrw(twofish)");
Jussi Kivilinna131f7542011-10-18 13:33:38 +03001720 ret += tcrypt_test("xts(twofish)");
Linus Torvalds1da177e2005-04-16 15:20:36 -07001721 break;
Herbert Xuef2736f2005-06-22 13:26:03 -07001722
Linus Torvalds1da177e2005-04-16 15:20:36 -07001723 case 9:
Jarod Wilson4e033a62009-05-27 15:10:21 +10001724 ret += tcrypt_test("ecb(serpent)");
Jussi Kivilinna9d259172011-10-18 00:02:53 +03001725 ret += tcrypt_test("cbc(serpent)");
1726 ret += tcrypt_test("ctr(serpent)");
Jussi Kivilinna87aae4b2011-10-18 13:32:39 +03001727 ret += tcrypt_test("lrw(serpent)");
Jussi Kivilinna5209c072011-10-18 13:33:22 +03001728 ret += tcrypt_test("xts(serpent)");
Linus Torvalds1da177e2005-04-16 15:20:36 -07001729 break;
1730
1731 case 10:
Jarod Wilson4e033a62009-05-27 15:10:21 +10001732 ret += tcrypt_test("ecb(aes)");
1733 ret += tcrypt_test("cbc(aes)");
1734 ret += tcrypt_test("lrw(aes)");
1735 ret += tcrypt_test("xts(aes)");
1736 ret += tcrypt_test("ctr(aes)");
1737 ret += tcrypt_test("rfc3686(ctr(aes))");
Linus Torvalds1da177e2005-04-16 15:20:36 -07001738 break;
1739
1740 case 11:
Jarod Wilson4e033a62009-05-27 15:10:21 +10001741 ret += tcrypt_test("sha384");
Linus Torvalds1da177e2005-04-16 15:20:36 -07001742 break;
Herbert Xuef2736f2005-06-22 13:26:03 -07001743
Linus Torvalds1da177e2005-04-16 15:20:36 -07001744 case 12:
Jarod Wilson4e033a62009-05-27 15:10:21 +10001745 ret += tcrypt_test("sha512");
Linus Torvalds1da177e2005-04-16 15:20:36 -07001746 break;
1747
1748 case 13:
Jarod Wilson4e033a62009-05-27 15:10:21 +10001749 ret += tcrypt_test("deflate");
Linus Torvalds1da177e2005-04-16 15:20:36 -07001750 break;
1751
1752 case 14:
Jarod Wilson4e033a62009-05-27 15:10:21 +10001753 ret += tcrypt_test("ecb(cast5)");
Johannes Goetzfrieda2c58262012-07-11 19:37:21 +02001754 ret += tcrypt_test("cbc(cast5)");
1755 ret += tcrypt_test("ctr(cast5)");
Linus Torvalds1da177e2005-04-16 15:20:36 -07001756 break;
1757
1758 case 15:
Jarod Wilson4e033a62009-05-27 15:10:21 +10001759 ret += tcrypt_test("ecb(cast6)");
Johannes Goetzfried9b8b0402012-07-11 19:38:29 +02001760 ret += tcrypt_test("cbc(cast6)");
1761 ret += tcrypt_test("ctr(cast6)");
1762 ret += tcrypt_test("lrw(cast6)");
1763 ret += tcrypt_test("xts(cast6)");
Linus Torvalds1da177e2005-04-16 15:20:36 -07001764 break;
1765
1766 case 16:
Jarod Wilson4e033a62009-05-27 15:10:21 +10001767 ret += tcrypt_test("ecb(arc4)");
Linus Torvalds1da177e2005-04-16 15:20:36 -07001768 break;
1769
1770 case 17:
Jarod Wilson4e033a62009-05-27 15:10:21 +10001771 ret += tcrypt_test("michael_mic");
Linus Torvalds1da177e2005-04-16 15:20:36 -07001772 break;
1773
1774 case 18:
Jarod Wilson4e033a62009-05-27 15:10:21 +10001775 ret += tcrypt_test("crc32c");
Linus Torvalds1da177e2005-04-16 15:20:36 -07001776 break;
1777
1778 case 19:
Jarod Wilson4e033a62009-05-27 15:10:21 +10001779 ret += tcrypt_test("ecb(tea)");
Linus Torvalds1da177e2005-04-16 15:20:36 -07001780 break;
1781
1782 case 20:
Jarod Wilson4e033a62009-05-27 15:10:21 +10001783 ret += tcrypt_test("ecb(xtea)");
Linus Torvalds1da177e2005-04-16 15:20:36 -07001784 break;
1785
1786 case 21:
Jarod Wilson4e033a62009-05-27 15:10:21 +10001787 ret += tcrypt_test("ecb(khazad)");
Linus Torvalds1da177e2005-04-16 15:20:36 -07001788 break;
1789
1790 case 22:
Jarod Wilson4e033a62009-05-27 15:10:21 +10001791 ret += tcrypt_test("wp512");
Linus Torvalds1da177e2005-04-16 15:20:36 -07001792 break;
1793
1794 case 23:
Jarod Wilson4e033a62009-05-27 15:10:21 +10001795 ret += tcrypt_test("wp384");
Linus Torvalds1da177e2005-04-16 15:20:36 -07001796 break;
1797
1798 case 24:
Jarod Wilson4e033a62009-05-27 15:10:21 +10001799 ret += tcrypt_test("wp256");
Linus Torvalds1da177e2005-04-16 15:20:36 -07001800 break;
1801
1802 case 25:
Jarod Wilson4e033a62009-05-27 15:10:21 +10001803 ret += tcrypt_test("ecb(tnepres)");
Linus Torvalds1da177e2005-04-16 15:20:36 -07001804 break;
1805
1806 case 26:
Jarod Wilson4e033a62009-05-27 15:10:21 +10001807 ret += tcrypt_test("ecb(anubis)");
1808 ret += tcrypt_test("cbc(anubis)");
Linus Torvalds1da177e2005-04-16 15:20:36 -07001809 break;
1810
1811 case 27:
Jarod Wilson4e033a62009-05-27 15:10:21 +10001812 ret += tcrypt_test("tgr192");
Linus Torvalds1da177e2005-04-16 15:20:36 -07001813 break;
1814
1815 case 28:
Jarod Wilson4e033a62009-05-27 15:10:21 +10001816 ret += tcrypt_test("tgr160");
Linus Torvalds1da177e2005-04-16 15:20:36 -07001817 break;
1818
1819 case 29:
Jarod Wilson4e033a62009-05-27 15:10:21 +10001820 ret += tcrypt_test("tgr128");
Linus Torvalds1da177e2005-04-16 15:20:36 -07001821 break;
Adrian-Ken Rueegsegger2998db32008-05-09 21:29:35 +08001822
Aaron Grothefb4f10e2005-09-01 17:42:46 -07001823 case 30:
Jarod Wilson4e033a62009-05-27 15:10:21 +10001824 ret += tcrypt_test("ecb(xeta)");
Aaron Grothefb4f10e2005-09-01 17:42:46 -07001825 break;
Linus Torvalds1da177e2005-04-16 15:20:36 -07001826
David Howells90831632006-12-16 12:13:14 +11001827 case 31:
Jarod Wilson4e033a62009-05-27 15:10:21 +10001828 ret += tcrypt_test("pcbc(fcrypt)");
David Howells90831632006-12-16 12:13:14 +11001829 break;
1830
Noriaki TAKAMIYA02ab5a72007-01-24 21:48:19 +11001831 case 32:
Jarod Wilson4e033a62009-05-27 15:10:21 +10001832 ret += tcrypt_test("ecb(camellia)");
1833 ret += tcrypt_test("cbc(camellia)");
Jussi Kivilinna54216bb2012-09-21 10:27:10 +03001834 ret += tcrypt_test("ctr(camellia)");
1835 ret += tcrypt_test("lrw(camellia)");
1836 ret += tcrypt_test("xts(camellia)");
Noriaki TAKAMIYA02ab5a72007-01-24 21:48:19 +11001837 break;
Jussi Kivilinna93b5e862013-04-08 10:48:44 +03001838
Jonathan Lynchcd12fb92007-11-10 20:08:25 +08001839 case 33:
Jarod Wilson4e033a62009-05-27 15:10:21 +10001840 ret += tcrypt_test("sha224");
Jonathan Lynchcd12fb92007-11-10 20:08:25 +08001841 break;
Noriaki TAKAMIYA02ab5a72007-01-24 21:48:19 +11001842
Tan Swee Heng2407d602007-11-23 19:45:00 +08001843 case 34:
Jarod Wilson4e033a62009-05-27 15:10:21 +10001844 ret += tcrypt_test("salsa20");
Tan Swee Heng2407d602007-11-23 19:45:00 +08001845 break;
1846
Herbert Xu8df213d2007-12-02 14:55:47 +11001847 case 35:
Jarod Wilson4e033a62009-05-27 15:10:21 +10001848 ret += tcrypt_test("gcm(aes)");
Herbert Xu8df213d2007-12-02 14:55:47 +11001849 break;
1850
Zoltan Sogor0b77abb2007-12-07 16:53:23 +08001851 case 36:
Jarod Wilson4e033a62009-05-27 15:10:21 +10001852 ret += tcrypt_test("lzo");
Zoltan Sogor0b77abb2007-12-07 16:53:23 +08001853 break;
1854
Joy Latten93cc74e2007-12-12 20:24:22 +08001855 case 37:
Jarod Wilson4e033a62009-05-27 15:10:21 +10001856 ret += tcrypt_test("ccm(aes)");
Joy Latten93cc74e2007-12-12 20:24:22 +08001857 break;
1858
Kevin Coffman76cb9522008-03-24 21:26:16 +08001859 case 38:
Jarod Wilson4e033a62009-05-27 15:10:21 +10001860 ret += tcrypt_test("cts(cbc(aes))");
Kevin Coffman76cb9522008-03-24 21:26:16 +08001861 break;
1862
Adrian-Ken Rueegseggerfd4adf12008-05-07 22:16:36 +08001863 case 39:
Jarod Wilson4e033a62009-05-27 15:10:21 +10001864 ret += tcrypt_test("rmd128");
Adrian-Ken Rueegseggerfd4adf12008-05-07 22:16:36 +08001865 break;
1866
1867 case 40:
Jarod Wilson4e033a62009-05-27 15:10:21 +10001868 ret += tcrypt_test("rmd160");
Adrian-Ken Rueegseggerfd4adf12008-05-07 22:16:36 +08001869 break;
1870
Adrian-Ken Rueegsegger2998db32008-05-09 21:29:35 +08001871 case 41:
Jarod Wilson4e033a62009-05-27 15:10:21 +10001872 ret += tcrypt_test("rmd256");
Adrian-Ken Rueegsegger2998db32008-05-09 21:29:35 +08001873 break;
1874
1875 case 42:
Jarod Wilson4e033a62009-05-27 15:10:21 +10001876 ret += tcrypt_test("rmd320");
Herbert Xu01b32322008-07-31 15:41:55 +08001877 break;
1878
1879 case 43:
Jarod Wilson4e033a62009-05-27 15:10:21 +10001880 ret += tcrypt_test("ecb(seed)");
Adrian-Ken Rueegsegger2998db32008-05-09 21:29:35 +08001881 break;
1882
Jarod Wilson5d667322009-05-04 19:23:40 +08001883 case 45:
Jarod Wilson4e033a62009-05-27 15:10:21 +10001884 ret += tcrypt_test("rfc4309(ccm(aes))");
Jarod Wilson5d667322009-05-04 19:23:40 +08001885 break;
1886
Jussi Kivilinna54216bb2012-09-21 10:27:10 +03001887 case 46:
1888 ret += tcrypt_test("ghash");
1889 break;
1890
Herbert Xu684115212013-09-07 12:56:26 +10001891 case 47:
1892 ret += tcrypt_test("crct10dif");
1893 break;
1894
raveendra padasalagi79cc6ab2016-06-17 10:30:36 +05301895 case 48:
1896 ret += tcrypt_test("sha3-224");
1897 break;
1898
1899 case 49:
1900 ret += tcrypt_test("sha3-256");
1901 break;
1902
1903 case 50:
1904 ret += tcrypt_test("sha3-384");
1905 break;
1906
1907 case 51:
1908 ret += tcrypt_test("sha3-512");
1909 break;
1910
Gilad Ben-Yossefb7e27532017-08-21 13:51:29 +03001911 case 52:
1912 ret += tcrypt_test("sm3");
1913 break;
1914
Linus Torvalds1da177e2005-04-16 15:20:36 -07001915 case 100:
Jarod Wilson4e033a62009-05-27 15:10:21 +10001916 ret += tcrypt_test("hmac(md5)");
Linus Torvalds1da177e2005-04-16 15:20:36 -07001917 break;
Herbert Xuef2736f2005-06-22 13:26:03 -07001918
Linus Torvalds1da177e2005-04-16 15:20:36 -07001919 case 101:
Jarod Wilson4e033a62009-05-27 15:10:21 +10001920 ret += tcrypt_test("hmac(sha1)");
Linus Torvalds1da177e2005-04-16 15:20:36 -07001921 break;
Herbert Xuef2736f2005-06-22 13:26:03 -07001922
Linus Torvalds1da177e2005-04-16 15:20:36 -07001923 case 102:
Jarod Wilson4e033a62009-05-27 15:10:21 +10001924 ret += tcrypt_test("hmac(sha256)");
Linus Torvalds1da177e2005-04-16 15:20:36 -07001925 break;
1926
Andrew Donofrioa28091a2006-12-10 12:10:20 +11001927 case 103:
Jarod Wilson4e033a62009-05-27 15:10:21 +10001928 ret += tcrypt_test("hmac(sha384)");
Andrew Donofrioa28091a2006-12-10 12:10:20 +11001929 break;
1930
1931 case 104:
Jarod Wilson4e033a62009-05-27 15:10:21 +10001932 ret += tcrypt_test("hmac(sha512)");
Andrew Donofrioa28091a2006-12-10 12:10:20 +11001933 break;
Herbert Xu38ed9ab2008-01-01 15:59:28 +11001934
Jonathan Lynchcd12fb92007-11-10 20:08:25 +08001935 case 105:
Jarod Wilson4e033a62009-05-27 15:10:21 +10001936 ret += tcrypt_test("hmac(sha224)");
Jonathan Lynchcd12fb92007-11-10 20:08:25 +08001937 break;
Linus Torvalds1da177e2005-04-16 15:20:36 -07001938
Herbert Xu38ed9ab2008-01-01 15:59:28 +11001939 case 106:
Jarod Wilson4e033a62009-05-27 15:10:21 +10001940 ret += tcrypt_test("xcbc(aes)");
Herbert Xu38ed9ab2008-01-01 15:59:28 +11001941 break;
1942
Adrian-Ken Rueegseggerfd4adf12008-05-07 22:16:36 +08001943 case 107:
Jarod Wilson4e033a62009-05-27 15:10:21 +10001944 ret += tcrypt_test("hmac(rmd128)");
Adrian-Ken Rueegseggerfd4adf12008-05-07 22:16:36 +08001945 break;
1946
1947 case 108:
Jarod Wilson4e033a62009-05-27 15:10:21 +10001948 ret += tcrypt_test("hmac(rmd160)");
Adrian-Ken Rueegseggerfd4adf12008-05-07 22:16:36 +08001949 break;
1950
Shane Wangf1939f72009-09-02 20:05:22 +10001951 case 109:
Eric Biggers0917b872018-06-18 10:22:40 -07001952 ret += tcrypt_test("vmac64(aes)");
Shane Wangf1939f72009-09-02 20:05:22 +10001953 break;
Jussi Kivilinna93b5e862013-04-08 10:48:44 +03001954
raveendra padasalagi98eca722016-07-01 11:16:54 +05301955 case 111:
1956 ret += tcrypt_test("hmac(sha3-224)");
1957 break;
1958
1959 case 112:
1960 ret += tcrypt_test("hmac(sha3-256)");
1961 break;
1962
1963 case 113:
1964 ret += tcrypt_test("hmac(sha3-384)");
1965 break;
1966
1967 case 114:
1968 ret += tcrypt_test("hmac(sha3-512)");
1969 break;
1970
Jarod Wilsone08ca2d2009-05-04 19:46:29 +08001971 case 150:
Jarod Wilson4e033a62009-05-27 15:10:21 +10001972 ret += tcrypt_test("ansi_cprng");
Jarod Wilsone08ca2d2009-05-04 19:46:29 +08001973 break;
1974
Adrian Hoban69435b92010-11-04 15:02:04 -04001975 case 151:
1976 ret += tcrypt_test("rfc4106(gcm(aes))");
1977 break;
1978
Jussi Kivilinnae9b74412013-04-07 16:43:51 +03001979 case 152:
1980 ret += tcrypt_test("rfc4543(gcm(aes))");
1981 break;
1982
Jussi Kivilinna93b5e862013-04-08 10:48:44 +03001983 case 153:
1984 ret += tcrypt_test("cmac(aes)");
1985 break;
1986
1987 case 154:
1988 ret += tcrypt_test("cmac(des3_ede)");
1989 break;
1990
Horia Geantabbf9c892013-11-28 15:11:16 +02001991 case 155:
1992 ret += tcrypt_test("authenc(hmac(sha1),cbc(aes))");
1993 break;
1994
Horia Geantabca4feb2014-03-14 17:46:51 +02001995 case 156:
1996 ret += tcrypt_test("authenc(hmac(md5),ecb(cipher_null))");
1997 break;
1998
1999 case 157:
2000 ret += tcrypt_test("authenc(hmac(sha1),ecb(cipher_null))");
2001 break;
Nitesh Lal5208ed22014-05-21 17:09:08 +05302002 case 181:
2003 ret += tcrypt_test("authenc(hmac(sha1),cbc(des))");
2004 break;
2005 case 182:
2006 ret += tcrypt_test("authenc(hmac(sha1),cbc(des3_ede))");
2007 break;
2008 case 183:
2009 ret += tcrypt_test("authenc(hmac(sha224),cbc(des))");
2010 break;
2011 case 184:
2012 ret += tcrypt_test("authenc(hmac(sha224),cbc(des3_ede))");
2013 break;
2014 case 185:
2015 ret += tcrypt_test("authenc(hmac(sha256),cbc(des))");
2016 break;
2017 case 186:
2018 ret += tcrypt_test("authenc(hmac(sha256),cbc(des3_ede))");
2019 break;
2020 case 187:
2021 ret += tcrypt_test("authenc(hmac(sha384),cbc(des))");
2022 break;
2023 case 188:
2024 ret += tcrypt_test("authenc(hmac(sha384),cbc(des3_ede))");
2025 break;
2026 case 189:
2027 ret += tcrypt_test("authenc(hmac(sha512),cbc(des))");
2028 break;
2029 case 190:
2030 ret += tcrypt_test("authenc(hmac(sha512),cbc(des3_ede))");
2031 break;
Gilad Ben-Yossefcd83a8a2018-03-06 09:44:43 +00002032 case 191:
2033 ret += tcrypt_test("ecb(sm4)");
Gilad Ben-Yossef95ba5972018-09-20 14:18:38 +01002034 ret += tcrypt_test("cbc(sm4)");
2035 ret += tcrypt_test("ctr(sm4)");
Gilad Ben-Yossefcd83a8a2018-03-06 09:44:43 +00002036 break;
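	/*
	 * Modes 200 and up are throughput benchmarks rather than
	 * correctness tests.  "sec" selects measurement in seconds;
	 * with sec=0 cycle counts are used instead.  A typical invocation
	 * is along the lines of "modprobe tcrypt mode=200 sec=1".
	 */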
Harald Welteebfd9bc2005-06-22 13:27:23 -07002037 case 200:
Herbert Xucba83562006-08-13 08:26:09 +10002038 test_cipher_speed("ecb(aes)", ENCRYPT, sec, NULL, 0,
Sebastian Siewior477035c2008-03-11 21:24:26 +08002039 speed_template_16_24_32);
Herbert Xucba83562006-08-13 08:26:09 +10002040 test_cipher_speed("ecb(aes)", DECRYPT, sec, NULL, 0,
Sebastian Siewior477035c2008-03-11 21:24:26 +08002041 speed_template_16_24_32);
Herbert Xucba83562006-08-13 08:26:09 +10002042 test_cipher_speed("cbc(aes)", ENCRYPT, sec, NULL, 0,
Sebastian Siewior477035c2008-03-11 21:24:26 +08002043 speed_template_16_24_32);
Herbert Xucba83562006-08-13 08:26:09 +10002044 test_cipher_speed("cbc(aes)", DECRYPT, sec, NULL, 0,
Sebastian Siewior477035c2008-03-11 21:24:26 +08002045 speed_template_16_24_32);
Rik Snelf3d10442006-11-29 19:01:41 +11002046 test_cipher_speed("lrw(aes)", ENCRYPT, sec, NULL, 0,
Sebastian Siewior477035c2008-03-11 21:24:26 +08002047 speed_template_32_40_48);
Rik Snelf3d10442006-11-29 19:01:41 +11002048 test_cipher_speed("lrw(aes)", DECRYPT, sec, NULL, 0,
Sebastian Siewior477035c2008-03-11 21:24:26 +08002049 speed_template_32_40_48);
Rik Snelf19f5112007-09-19 20:23:13 +08002050 test_cipher_speed("xts(aes)", ENCRYPT, sec, NULL, 0,
Horia Geantăb66ad0b2017-07-19 19:40:32 +03002051 speed_template_32_64);
Rik Snelf19f5112007-09-19 20:23:13 +08002052 test_cipher_speed("xts(aes)", DECRYPT, sec, NULL, 0,
Horia Geantăb66ad0b2017-07-19 19:40:32 +03002053 speed_template_32_64);
Herbert Xu1503a242016-06-29 18:04:14 +08002054 test_cipher_speed("cts(cbc(aes))", ENCRYPT, sec, NULL, 0,
2055 speed_template_16_24_32);
2056 test_cipher_speed("cts(cbc(aes))", DECRYPT, sec, NULL, 0,
2057 speed_template_16_24_32);
Jan Glauber9996e342011-04-26 16:34:01 +10002058 test_cipher_speed("ctr(aes)", ENCRYPT, sec, NULL, 0,
2059 speed_template_16_24_32);
2060 test_cipher_speed("ctr(aes)", DECRYPT, sec, NULL, 0,
2061 speed_template_16_24_32);
Harald Welteebfd9bc2005-06-22 13:27:23 -07002062 break;
2063
2064 case 201:
Herbert Xucba83562006-08-13 08:26:09 +10002065 test_cipher_speed("ecb(des3_ede)", ENCRYPT, sec,
Herbert Xuda7f0332008-07-31 17:08:25 +08002066 des3_speed_template, DES3_SPEED_VECTORS,
Sebastian Siewior477035c2008-03-11 21:24:26 +08002067 speed_template_24);
Herbert Xucba83562006-08-13 08:26:09 +10002068 test_cipher_speed("ecb(des3_ede)", DECRYPT, sec,
Herbert Xuda7f0332008-07-31 17:08:25 +08002069 des3_speed_template, DES3_SPEED_VECTORS,
Sebastian Siewior477035c2008-03-11 21:24:26 +08002070 speed_template_24);
Herbert Xucba83562006-08-13 08:26:09 +10002071 test_cipher_speed("cbc(des3_ede)", ENCRYPT, sec,
Herbert Xuda7f0332008-07-31 17:08:25 +08002072 des3_speed_template, DES3_SPEED_VECTORS,
Sebastian Siewior477035c2008-03-11 21:24:26 +08002073 speed_template_24);
Herbert Xucba83562006-08-13 08:26:09 +10002074 test_cipher_speed("cbc(des3_ede)", DECRYPT, sec,
Herbert Xuda7f0332008-07-31 17:08:25 +08002075 des3_speed_template, DES3_SPEED_VECTORS,
Sebastian Siewior477035c2008-03-11 21:24:26 +08002076 speed_template_24);
Jussi Kivilinna87131502014-06-09 20:59:49 +03002077 test_cipher_speed("ctr(des3_ede)", ENCRYPT, sec,
2078 des3_speed_template, DES3_SPEED_VECTORS,
2079 speed_template_24);
2080 test_cipher_speed("ctr(des3_ede)", DECRYPT, sec,
2081 des3_speed_template, DES3_SPEED_VECTORS,
2082 speed_template_24);
Harald Welteebfd9bc2005-06-22 13:27:23 -07002083 break;
2084
2085 case 202:
Herbert Xucba83562006-08-13 08:26:09 +10002086 test_cipher_speed("ecb(twofish)", ENCRYPT, sec, NULL, 0,
Sebastian Siewior477035c2008-03-11 21:24:26 +08002087 speed_template_16_24_32);
Herbert Xucba83562006-08-13 08:26:09 +10002088 test_cipher_speed("ecb(twofish)", DECRYPT, sec, NULL, 0,
Sebastian Siewior477035c2008-03-11 21:24:26 +08002089 speed_template_16_24_32);
Herbert Xucba83562006-08-13 08:26:09 +10002090 test_cipher_speed("cbc(twofish)", ENCRYPT, sec, NULL, 0,
Sebastian Siewior477035c2008-03-11 21:24:26 +08002091 speed_template_16_24_32);
Herbert Xucba83562006-08-13 08:26:09 +10002092 test_cipher_speed("cbc(twofish)", DECRYPT, sec, NULL, 0,
Sebastian Siewior477035c2008-03-11 21:24:26 +08002093 speed_template_16_24_32);
Jussi Kivilinnaee5002a2011-09-26 16:47:15 +03002094 test_cipher_speed("ctr(twofish)", ENCRYPT, sec, NULL, 0,
2095 speed_template_16_24_32);
2096 test_cipher_speed("ctr(twofish)", DECRYPT, sec, NULL, 0,
2097 speed_template_16_24_32);
Jussi Kivilinnabee3a902011-10-18 13:32:56 +03002098 test_cipher_speed("lrw(twofish)", ENCRYPT, sec, NULL, 0,
2099 speed_template_32_40_48);
2100 test_cipher_speed("lrw(twofish)", DECRYPT, sec, NULL, 0,
2101 speed_template_32_40_48);
Jussi Kivilinna131f7542011-10-18 13:33:38 +03002102 test_cipher_speed("xts(twofish)", ENCRYPT, sec, NULL, 0,
2103 speed_template_32_48_64);
2104 test_cipher_speed("xts(twofish)", DECRYPT, sec, NULL, 0,
2105 speed_template_32_48_64);
Harald Welteebfd9bc2005-06-22 13:27:23 -07002106 break;
2107
2108 case 203:
Herbert Xucba83562006-08-13 08:26:09 +10002109 test_cipher_speed("ecb(blowfish)", ENCRYPT, sec, NULL, 0,
Sebastian Siewior477035c2008-03-11 21:24:26 +08002110 speed_template_8_32);
Herbert Xucba83562006-08-13 08:26:09 +10002111 test_cipher_speed("ecb(blowfish)", DECRYPT, sec, NULL, 0,
Sebastian Siewior477035c2008-03-11 21:24:26 +08002112 speed_template_8_32);
Herbert Xucba83562006-08-13 08:26:09 +10002113 test_cipher_speed("cbc(blowfish)", ENCRYPT, sec, NULL, 0,
Sebastian Siewior477035c2008-03-11 21:24:26 +08002114 speed_template_8_32);
Herbert Xucba83562006-08-13 08:26:09 +10002115 test_cipher_speed("cbc(blowfish)", DECRYPT, sec, NULL, 0,
Sebastian Siewior477035c2008-03-11 21:24:26 +08002116 speed_template_8_32);
Jussi Kivilinna7d47b862011-09-02 01:45:17 +03002117 test_cipher_speed("ctr(blowfish)", ENCRYPT, sec, NULL, 0,
2118 speed_template_8_32);
2119 test_cipher_speed("ctr(blowfish)", DECRYPT, sec, NULL, 0,
2120 speed_template_8_32);
Harald Welteebfd9bc2005-06-22 13:27:23 -07002121 break;
2122
2123 case 204:
Herbert Xucba83562006-08-13 08:26:09 +10002124 test_cipher_speed("ecb(des)", ENCRYPT, sec, NULL, 0,
Sebastian Siewior477035c2008-03-11 21:24:26 +08002125 speed_template_8);
Herbert Xucba83562006-08-13 08:26:09 +10002126 test_cipher_speed("ecb(des)", DECRYPT, sec, NULL, 0,
Sebastian Siewior477035c2008-03-11 21:24:26 +08002127 speed_template_8);
Herbert Xucba83562006-08-13 08:26:09 +10002128 test_cipher_speed("cbc(des)", ENCRYPT, sec, NULL, 0,
Sebastian Siewior477035c2008-03-11 21:24:26 +08002129 speed_template_8);
Herbert Xucba83562006-08-13 08:26:09 +10002130 test_cipher_speed("cbc(des)", DECRYPT, sec, NULL, 0,
Sebastian Siewior477035c2008-03-11 21:24:26 +08002131 speed_template_8);
Harald Welteebfd9bc2005-06-22 13:27:23 -07002132 break;
2133
Noriaki TAKAMIYA02ab5a72007-01-24 21:48:19 +11002134 case 205:
2135 test_cipher_speed("ecb(camellia)", ENCRYPT, sec, NULL, 0,
Sebastian Siewior477035c2008-03-11 21:24:26 +08002136 speed_template_16_24_32);
Noriaki TAKAMIYA02ab5a72007-01-24 21:48:19 +11002137 test_cipher_speed("ecb(camellia)", DECRYPT, sec, NULL, 0,
Sebastian Siewior477035c2008-03-11 21:24:26 +08002138 speed_template_16_24_32);
Noriaki TAKAMIYA02ab5a72007-01-24 21:48:19 +11002139 test_cipher_speed("cbc(camellia)", ENCRYPT, sec, NULL, 0,
Sebastian Siewior477035c2008-03-11 21:24:26 +08002140 speed_template_16_24_32);
Noriaki TAKAMIYA02ab5a72007-01-24 21:48:19 +11002141 test_cipher_speed("cbc(camellia)", DECRYPT, sec, NULL, 0,
Sebastian Siewior477035c2008-03-11 21:24:26 +08002142 speed_template_16_24_32);
Jussi Kivilinna4de59332012-03-05 20:26:26 +02002143 test_cipher_speed("ctr(camellia)", ENCRYPT, sec, NULL, 0,
2144 speed_template_16_24_32);
2145 test_cipher_speed("ctr(camellia)", DECRYPT, sec, NULL, 0,
2146 speed_template_16_24_32);
2147 test_cipher_speed("lrw(camellia)", ENCRYPT, sec, NULL, 0,
2148 speed_template_32_40_48);
2149 test_cipher_speed("lrw(camellia)", DECRYPT, sec, NULL, 0,
2150 speed_template_32_40_48);
2151 test_cipher_speed("xts(camellia)", ENCRYPT, sec, NULL, 0,
2152 speed_template_32_48_64);
2153 test_cipher_speed("xts(camellia)", DECRYPT, sec, NULL, 0,
2154 speed_template_32_48_64);
Noriaki TAKAMIYA02ab5a72007-01-24 21:48:19 +11002155 break;
2156
Tan Swee Heng5de8f1b2007-12-07 17:17:43 +08002157 case 206:
2158 test_cipher_speed("salsa20", ENCRYPT, sec, NULL, 0,
Sebastian Siewior477035c2008-03-11 21:24:26 +08002159 speed_template_16_32);
Tan Swee Heng5de8f1b2007-12-07 17:17:43 +08002160 break;
2161
Jussi Kivilinna7fb7fe42011-10-18 00:03:03 +03002162 case 207:
2163 test_cipher_speed("ecb(serpent)", ENCRYPT, sec, NULL, 0,
2164 speed_template_16_32);
2165 test_cipher_speed("ecb(serpent)", DECRYPT, sec, NULL, 0,
2166 speed_template_16_32);
2167 test_cipher_speed("cbc(serpent)", ENCRYPT, sec, NULL, 0,
2168 speed_template_16_32);
2169 test_cipher_speed("cbc(serpent)", DECRYPT, sec, NULL, 0,
2170 speed_template_16_32);
2171 test_cipher_speed("ctr(serpent)", ENCRYPT, sec, NULL, 0,
2172 speed_template_16_32);
2173 test_cipher_speed("ctr(serpent)", DECRYPT, sec, NULL, 0,
2174 speed_template_16_32);
Jussi Kivilinna87aae4b2011-10-18 13:32:39 +03002175 test_cipher_speed("lrw(serpent)", ENCRYPT, sec, NULL, 0,
2176 speed_template_32_48);
2177 test_cipher_speed("lrw(serpent)", DECRYPT, sec, NULL, 0,
2178 speed_template_32_48);
Jussi Kivilinna5209c072011-10-18 13:33:22 +03002179 test_cipher_speed("xts(serpent)", ENCRYPT, sec, NULL, 0,
2180 speed_template_32_64);
2181 test_cipher_speed("xts(serpent)", DECRYPT, sec, NULL, 0,
2182 speed_template_32_64);
Jussi Kivilinna7fb7fe42011-10-18 00:03:03 +03002183 break;
2184
Jussi Kivilinna31b4cd292012-06-12 16:52:04 +08002185 case 208:
2186 test_cipher_speed("ecb(arc4)", ENCRYPT, sec, NULL, 0,
2187 speed_template_8);
2188 break;
2189
Johannes Goetzfrieda2c58262012-07-11 19:37:21 +02002190 case 209:
2191 test_cipher_speed("ecb(cast5)", ENCRYPT, sec, NULL, 0,
2192 speed_template_8_16);
2193 test_cipher_speed("ecb(cast5)", DECRYPT, sec, NULL, 0,
2194 speed_template_8_16);
2195 test_cipher_speed("cbc(cast5)", ENCRYPT, sec, NULL, 0,
2196 speed_template_8_16);
2197 test_cipher_speed("cbc(cast5)", DECRYPT, sec, NULL, 0,
2198 speed_template_8_16);
2199 test_cipher_speed("ctr(cast5)", ENCRYPT, sec, NULL, 0,
2200 speed_template_8_16);
2201 test_cipher_speed("ctr(cast5)", DECRYPT, sec, NULL, 0,
2202 speed_template_8_16);
2203 break;
2204
Johannes Goetzfried9b8b0402012-07-11 19:38:29 +02002205 case 210:
2206 test_cipher_speed("ecb(cast6)", ENCRYPT, sec, NULL, 0,
2207 speed_template_16_32);
2208 test_cipher_speed("ecb(cast6)", DECRYPT, sec, NULL, 0,
2209 speed_template_16_32);
2210 test_cipher_speed("cbc(cast6)", ENCRYPT, sec, NULL, 0,
2211 speed_template_16_32);
2212 test_cipher_speed("cbc(cast6)", DECRYPT, sec, NULL, 0,
2213 speed_template_16_32);
2214 test_cipher_speed("ctr(cast6)", ENCRYPT, sec, NULL, 0,
2215 speed_template_16_32);
2216 test_cipher_speed("ctr(cast6)", DECRYPT, sec, NULL, 0,
2217 speed_template_16_32);
2218 test_cipher_speed("lrw(cast6)", ENCRYPT, sec, NULL, 0,
2219 speed_template_32_48);
2220 test_cipher_speed("lrw(cast6)", DECRYPT, sec, NULL, 0,
2221 speed_template_32_48);
2222 test_cipher_speed("xts(cast6)", ENCRYPT, sec, NULL, 0,
2223 speed_template_32_64);
2224 test_cipher_speed("xts(cast6)", DECRYPT, sec, NULL, 0,
2225 speed_template_32_64);
2226 break;
2227
Tim Chen53f52d72013-12-11 14:28:47 -08002228 case 211:
2229 test_aead_speed("rfc4106(gcm(aes))", ENCRYPT, sec,
Herbert Xu34a1c742015-07-09 07:17:26 +08002230 NULL, 0, 16, 16, aead_speed_template_20);
Vutla, Lokesh1425d2d2015-07-07 21:01:49 +05302231 test_aead_speed("gcm(aes)", ENCRYPT, sec,
Cyrille Pitchenf18611d2015-11-17 13:37:10 +01002232 NULL, 0, 16, 8, speed_template_16_24_32);
Gilad Ben-Yossef4431bd42017-12-17 08:29:01 +00002233 test_aead_speed("rfc4106(gcm(aes))", DECRYPT, sec,
2234 NULL, 0, 16, 16, aead_speed_template_20);
2235 test_aead_speed("gcm(aes)", DECRYPT, sec,
2236 NULL, 0, 16, 8, speed_template_16_24_32);
Tim Chen53f52d72013-12-11 14:28:47 -08002237 break;
2238
Herbert Xu4e4aab62015-06-17 14:04:21 +08002239 case 212:
2240 test_aead_speed("rfc4309(ccm(aes))", ENCRYPT, sec,
Herbert Xu34a1c742015-07-09 07:17:26 +08002241 NULL, 0, 16, 16, aead_speed_template_19);
Gilad Ben-Yossef4431bd42017-12-17 08:29:01 +00002242 test_aead_speed("rfc4309(ccm(aes))", DECRYPT, sec,
2243 NULL, 0, 16, 16, aead_speed_template_19);
Herbert Xu4e4aab62015-06-17 14:04:21 +08002244 break;
2245
Martin Willi2dce0632015-07-16 19:13:59 +02002246 case 213:
2247 test_aead_speed("rfc7539esp(chacha20,poly1305)", ENCRYPT, sec,
2248 NULL, 0, 16, 8, aead_speed_template_36);
Gilad Ben-Yossef4431bd42017-12-17 08:29:01 +00002249 test_aead_speed("rfc7539esp(chacha20,poly1305)", DECRYPT, sec,
2250 NULL, 0, 16, 8, aead_speed_template_36);
Martin Willi2dce0632015-07-16 19:13:59 +02002251 break;
2252
2253 case 214:
2254 test_cipher_speed("chacha20", ENCRYPT, sec, NULL, 0,
2255 speed_template_32);
2256 break;
2257
Gilad Ben-Yossef427988d2017-12-17 08:29:05 +00002258 case 215:
2259 test_mb_aead_speed("rfc4106(gcm(aes))", ENCRYPT, sec, NULL,
2260 0, 16, 16, aead_speed_template_20, num_mb);
2261 test_mb_aead_speed("gcm(aes)", ENCRYPT, sec, NULL, 0, 16, 8,
2262 speed_template_16_24_32, num_mb);
2263 test_mb_aead_speed("rfc4106(gcm(aes))", DECRYPT, sec, NULL,
2264 0, 16, 16, aead_speed_template_20, num_mb);
2265 test_mb_aead_speed("gcm(aes)", DECRYPT, sec, NULL, 0, 16, 8,
2266 speed_template_16_24_32, num_mb);
2267 break;
2268
2269 case 216:
2270 test_mb_aead_speed("rfc4309(ccm(aes))", ENCRYPT, sec, NULL, 0,
2271 16, 16, aead_speed_template_19, num_mb);
2272 test_mb_aead_speed("rfc4309(ccm(aes))", DECRYPT, sec, NULL, 0,
2273 16, 16, aead_speed_template_19, num_mb);
2274 break;
2275
2276 case 217:
2277 test_mb_aead_speed("rfc7539esp(chacha20,poly1305)", ENCRYPT,
2278 sec, NULL, 0, 16, 8, aead_speed_template_36,
2279 num_mb);
2280 test_mb_aead_speed("rfc7539esp(chacha20,poly1305)", DECRYPT,
2281 sec, NULL, 0, 16, 8, aead_speed_template_36,
2282 num_mb);
2283 break;
2284
Gilad Ben-Yossef95ba5972018-09-20 14:18:38 +01002285 case 218:
2286 test_cipher_speed("ecb(sm4)", ENCRYPT, sec, NULL, 0,
2287 speed_template_16);
2288 test_cipher_speed("ecb(sm4)", DECRYPT, sec, NULL, 0,
2289 speed_template_16);
2290 test_cipher_speed("cbc(sm4)", ENCRYPT, sec, NULL, 0,
2291 speed_template_16);
2292 test_cipher_speed("cbc(sm4)", DECRYPT, sec, NULL, 0,
2293 speed_template_16);
2294 test_cipher_speed("ctr(sm4)", ENCRYPT, sec, NULL, 0,
2295 speed_template_16);
2296 test_cipher_speed("ctr(sm4)", DECRYPT, sec, NULL, 0,
2297 speed_template_16);
2298 break;
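	/*
	 * Modes 300..399: synchronous hash speed tests.  Mode 300 with the
	 * "alg" parameter benchmarks a caller-named hash; without it the
	 * cases fall through, so mode 300 runs every hash below while a
	 * single mode in the range runs just one.
	 */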
Michal Ludvige8057922006-05-30 22:04:19 +10002299 case 300:
Herbert Xu86068132014-12-04 16:43:29 +08002300 if (alg) {
2301 test_hash_speed(alg, sec, generic_hash_speed_template);
2302 break;
2303 }
Michal Ludvige8057922006-05-30 22:04:19 +10002304 /* fall through */
Michal Ludvige8057922006-05-30 22:04:19 +10002305 case 301:
Herbert Xue9d41162006-08-19 21:38:49 +10002306 test_hash_speed("md4", sec, generic_hash_speed_template);
Michal Ludvige8057922006-05-30 22:04:19 +10002307 if (mode > 300 && mode < 400) break;
Gustavo A. R. Silva59517222017-10-09 14:43:21 -05002308 /* fall through */
Michal Ludvige8057922006-05-30 22:04:19 +10002309 case 302:
Herbert Xue9d41162006-08-19 21:38:49 +10002310 test_hash_speed("md5", sec, generic_hash_speed_template);
Michal Ludvige8057922006-05-30 22:04:19 +10002311 if (mode > 300 && mode < 400) break;
Gustavo A. R. Silva59517222017-10-09 14:43:21 -05002312 /* fall through */
Michal Ludvige8057922006-05-30 22:04:19 +10002313 case 303:
Herbert Xue9d41162006-08-19 21:38:49 +10002314 test_hash_speed("sha1", sec, generic_hash_speed_template);
Michal Ludvige8057922006-05-30 22:04:19 +10002315 if (mode > 300 && mode < 400) break;
Gustavo A. R. Silva59517222017-10-09 14:43:21 -05002316 /* fall through */
Michal Ludvige8057922006-05-30 22:04:19 +10002317 case 304:
Herbert Xue9d41162006-08-19 21:38:49 +10002318 test_hash_speed("sha256", sec, generic_hash_speed_template);
Michal Ludvige8057922006-05-30 22:04:19 +10002319 if (mode > 300 && mode < 400) break;
Gustavo A. R. Silva59517222017-10-09 14:43:21 -05002320 /* fall through */
Michal Ludvige8057922006-05-30 22:04:19 +10002321 case 305:
Herbert Xue9d41162006-08-19 21:38:49 +10002322 test_hash_speed("sha384", sec, generic_hash_speed_template);
Michal Ludvige8057922006-05-30 22:04:19 +10002323 if (mode > 300 && mode < 400) break;
Gustavo A. R. Silva59517222017-10-09 14:43:21 -05002324 /* fall through */
Michal Ludvige8057922006-05-30 22:04:19 +10002325 case 306:
Herbert Xue9d41162006-08-19 21:38:49 +10002326 test_hash_speed("sha512", sec, generic_hash_speed_template);
Michal Ludvige8057922006-05-30 22:04:19 +10002327 if (mode > 300 && mode < 400) break;
Gustavo A. R. Silva59517222017-10-09 14:43:21 -05002328 /* fall through */
Michal Ludvige8057922006-05-30 22:04:19 +10002329 case 307:
Herbert Xue9d41162006-08-19 21:38:49 +10002330 test_hash_speed("wp256", sec, generic_hash_speed_template);
Michal Ludvige8057922006-05-30 22:04:19 +10002331 if (mode > 300 && mode < 400) break;
Gustavo A. R. Silva59517222017-10-09 14:43:21 -05002332 /* fall through */
Michal Ludvige8057922006-05-30 22:04:19 +10002333 case 308:
Herbert Xue9d41162006-08-19 21:38:49 +10002334 test_hash_speed("wp384", sec, generic_hash_speed_template);
Michal Ludvige8057922006-05-30 22:04:19 +10002335 if (mode > 300 && mode < 400) break;
Gustavo A. R. Silva59517222017-10-09 14:43:21 -05002336 /* fall through */
Michal Ludvige8057922006-05-30 22:04:19 +10002337 case 309:
Herbert Xue9d41162006-08-19 21:38:49 +10002338 test_hash_speed("wp512", sec, generic_hash_speed_template);
Michal Ludvige8057922006-05-30 22:04:19 +10002339 if (mode > 300 && mode < 400) break;
Gustavo A. R. Silva59517222017-10-09 14:43:21 -05002340 /* fall through */
Michal Ludvige8057922006-05-30 22:04:19 +10002341 case 310:
Herbert Xue9d41162006-08-19 21:38:49 +10002342 test_hash_speed("tgr128", sec, generic_hash_speed_template);
Michal Ludvige8057922006-05-30 22:04:19 +10002343 if (mode > 300 && mode < 400) break;
Gustavo A. R. Silva59517222017-10-09 14:43:21 -05002344 /* fall through */
Michal Ludvige8057922006-05-30 22:04:19 +10002345 case 311:
Herbert Xue9d41162006-08-19 21:38:49 +10002346 test_hash_speed("tgr160", sec, generic_hash_speed_template);
Michal Ludvige8057922006-05-30 22:04:19 +10002347 if (mode > 300 && mode < 400) break;
Gustavo A. R. Silva59517222017-10-09 14:43:21 -05002348 /* fall through */
Michal Ludvige8057922006-05-30 22:04:19 +10002349 case 312:
Herbert Xue9d41162006-08-19 21:38:49 +10002350 test_hash_speed("tgr192", sec, generic_hash_speed_template);
Michal Ludvige8057922006-05-30 22:04:19 +10002351 if (mode > 300 && mode < 400) break;
Gustavo A. R. Silva59517222017-10-09 14:43:21 -05002352 /* fall through */
Jonathan Lynchcd12fb92007-11-10 20:08:25 +08002353 case 313:
2354 test_hash_speed("sha224", sec, generic_hash_speed_template);
2355 if (mode > 300 && mode < 400) break;
Gustavo A. R. Silva59517222017-10-09 14:43:21 -05002356 /* fall through */
Adrian-Ken Rueegseggerfd4adf12008-05-07 22:16:36 +08002357 case 314:
2358 test_hash_speed("rmd128", sec, generic_hash_speed_template);
2359 if (mode > 300 && mode < 400) break;
Gustavo A. R. Silva59517222017-10-09 14:43:21 -05002360 /* fall through */
Adrian-Ken Rueegseggerfd4adf12008-05-07 22:16:36 +08002361 case 315:
2362 test_hash_speed("rmd160", sec, generic_hash_speed_template);
2363 if (mode > 300 && mode < 400) break;
Gustavo A. R. Silva59517222017-10-09 14:43:21 -05002364 /* fall through */
Adrian-Ken Rueegsegger2998db32008-05-09 21:29:35 +08002365 case 316:
2366 test_hash_speed("rmd256", sec, generic_hash_speed_template);
2367 if (mode > 300 && mode < 400) break;
Gustavo A. R. Silva59517222017-10-09 14:43:21 -05002368 /* fall through */
Adrian-Ken Rueegsegger2998db32008-05-09 21:29:35 +08002369 case 317:
2370 test_hash_speed("rmd320", sec, generic_hash_speed_template);
2371 if (mode > 300 && mode < 400) break;
Gustavo A. R. Silva59517222017-10-09 14:43:21 -05002372 /* fall through */
Huang Ying18bcc912010-03-10 18:30:32 +08002373 case 318:
2374 test_hash_speed("ghash-generic", sec, hash_speed_template_16);
2375 if (mode > 300 && mode < 400) break;
Gustavo A. R. Silva59517222017-10-09 14:43:21 -05002376 /* fall through */
Tim Chene3899e42012-09-27 15:44:24 -07002377 case 319:
2378 test_hash_speed("crc32c", sec, generic_hash_speed_template);
2379 if (mode > 300 && mode < 400) break;
Gustavo A. R. Silva59517222017-10-09 14:43:21 -05002380 /* fall through */
Herbert Xu684115212013-09-07 12:56:26 +10002381 case 320:
2382 test_hash_speed("crct10dif", sec, generic_hash_speed_template);
2383 if (mode > 300 && mode < 400) break;
Gustavo A. R. Silva59517222017-10-09 14:43:21 -05002384 /* fall through */
Martin Willi2dce0632015-07-16 19:13:59 +02002385 case 321:
2386 test_hash_speed("poly1305", sec, poly1305_speed_template);
2387 if (mode > 300 && mode < 400) break;
Gustavo A. R. Silva59517222017-10-09 14:43:21 -05002388 /* fall through */
raveendra padasalagi79cc6ab2016-06-17 10:30:36 +05302389 case 322:
2390 test_hash_speed("sha3-224", sec, generic_hash_speed_template);
2391 if (mode > 300 && mode < 400) break;
Gustavo A. R. Silva59517222017-10-09 14:43:21 -05002392 /* fall through */
raveendra padasalagi79cc6ab2016-06-17 10:30:36 +05302393 case 323:
2394 test_hash_speed("sha3-256", sec, generic_hash_speed_template);
2395 if (mode > 300 && mode < 400) break;
Gustavo A. R. Silva59517222017-10-09 14:43:21 -05002396 /* fall through */
raveendra padasalagi79cc6ab2016-06-17 10:30:36 +05302397 case 324:
2398 test_hash_speed("sha3-384", sec, generic_hash_speed_template);
2399 if (mode > 300 && mode < 400) break;
Gustavo A. R. Silva59517222017-10-09 14:43:21 -05002400 /* fall through */
raveendra padasalagi79cc6ab2016-06-17 10:30:36 +05302401 case 325:
2402 test_hash_speed("sha3-512", sec, generic_hash_speed_template);
2403 if (mode > 300 && mode < 400) break;
Gustavo A. R. Silva59517222017-10-09 14:43:21 -05002404 /* fall through */
Gilad Ben-Yossefb7e27532017-08-21 13:51:29 +03002405 case 326:
2406 test_hash_speed("sm3", sec, generic_hash_speed_template);
2407 if (mode > 300 && mode < 400) break;
Gustavo A. R. Silva59517222017-10-09 14:43:21 -05002408 /* fall through */
Michal Ludvige8057922006-05-30 22:04:19 +10002409 case 399:
2410 break;
2411
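	/*
	 * Modes 400..499: the same hashes measured through the ahash
	 * (asynchronous hash) API; modes 422..425 use the multi-buffer
	 * helpers with num_mb requests in flight.
	 */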
David S. Millerbeb63da2010-05-19 14:11:21 +10002412 case 400:
Herbert Xu86068132014-12-04 16:43:29 +08002413 if (alg) {
2414 test_ahash_speed(alg, sec, generic_hash_speed_template);
2415 break;
2416 }
David S. Millerbeb63da2010-05-19 14:11:21 +10002417 /* fall through */
David S. Millerbeb63da2010-05-19 14:11:21 +10002418 case 401:
2419 test_ahash_speed("md4", sec, generic_hash_speed_template);
2420 if (mode > 400 && mode < 500) break;
Gustavo A. R. Silva59517222017-10-09 14:43:21 -05002421 /* fall through */
David S. Millerbeb63da2010-05-19 14:11:21 +10002422 case 402:
2423 test_ahash_speed("md5", sec, generic_hash_speed_template);
2424 if (mode > 400 && mode < 500) break;
Gustavo A. R. Silva59517222017-10-09 14:43:21 -05002425 /* fall through */
David S. Millerbeb63da2010-05-19 14:11:21 +10002426 case 403:
2427 test_ahash_speed("sha1", sec, generic_hash_speed_template);
2428 if (mode > 400 && mode < 500) break;
Gustavo A. R. Silva59517222017-10-09 14:43:21 -05002429 /* fall through */
David S. Millerbeb63da2010-05-19 14:11:21 +10002430 case 404:
2431 test_ahash_speed("sha256", sec, generic_hash_speed_template);
2432 if (mode > 400 && mode < 500) break;
Gustavo A. R. Silva59517222017-10-09 14:43:21 -05002433 /* fall through */
David S. Millerbeb63da2010-05-19 14:11:21 +10002434 case 405:
2435 test_ahash_speed("sha384", sec, generic_hash_speed_template);
2436 if (mode > 400 && mode < 500) break;
Gustavo A. R. Silva59517222017-10-09 14:43:21 -05002437 /* fall through */
David S. Millerbeb63da2010-05-19 14:11:21 +10002438 case 406:
2439 test_ahash_speed("sha512", sec, generic_hash_speed_template);
2440 if (mode > 400 && mode < 500) break;
Gustavo A. R. Silva59517222017-10-09 14:43:21 -05002441 /* fall through */
David S. Millerbeb63da2010-05-19 14:11:21 +10002442 case 407:
2443 test_ahash_speed("wp256", sec, generic_hash_speed_template);
2444 if (mode > 400 && mode < 500) break;
Gustavo A. R. Silva59517222017-10-09 14:43:21 -05002445 /* fall through */
David S. Millerbeb63da2010-05-19 14:11:21 +10002446 case 408:
2447 test_ahash_speed("wp384", sec, generic_hash_speed_template);
2448 if (mode > 400 && mode < 500) break;
Gustavo A. R. Silva59517222017-10-09 14:43:21 -05002449 /* fall through */
David S. Millerbeb63da2010-05-19 14:11:21 +10002450 case 409:
2451 test_ahash_speed("wp512", sec, generic_hash_speed_template);
2452 if (mode > 400 && mode < 500) break;
Gustavo A. R. Silva59517222017-10-09 14:43:21 -05002453 /* fall through */
David S. Millerbeb63da2010-05-19 14:11:21 +10002454 case 410:
2455 test_ahash_speed("tgr128", sec, generic_hash_speed_template);
2456 if (mode > 400 && mode < 500) break;
Gustavo A. R. Silva59517222017-10-09 14:43:21 -05002457 /* fall through */
David S. Millerbeb63da2010-05-19 14:11:21 +10002458 case 411:
2459 test_ahash_speed("tgr160", sec, generic_hash_speed_template);
2460 if (mode > 400 && mode < 500) break;
Gustavo A. R. Silva59517222017-10-09 14:43:21 -05002461 /* fall through */
David S. Millerbeb63da2010-05-19 14:11:21 +10002462 case 412:
2463 test_ahash_speed("tgr192", sec, generic_hash_speed_template);
2464 if (mode > 400 && mode < 500) break;
Gustavo A. R. Silva59517222017-10-09 14:43:21 -05002465 /* fall through */
David S. Millerbeb63da2010-05-19 14:11:21 +10002466 case 413:
2467 test_ahash_speed("sha224", sec, generic_hash_speed_template);
2468 if (mode > 400 && mode < 500) break;
Gustavo A. R. Silva59517222017-10-09 14:43:21 -05002469 /* fall through */
David S. Millerbeb63da2010-05-19 14:11:21 +10002470 case 414:
2471 test_ahash_speed("rmd128", sec, generic_hash_speed_template);
2472 if (mode > 400 && mode < 500) break;
Gustavo A. R. Silva59517222017-10-09 14:43:21 -05002473 /* fall through */
David S. Millerbeb63da2010-05-19 14:11:21 +10002474 case 415:
2475 test_ahash_speed("rmd160", sec, generic_hash_speed_template);
2476 if (mode > 400 && mode < 500) break;
Gustavo A. R. Silva59517222017-10-09 14:43:21 -05002477 /* fall through */
David S. Millerbeb63da2010-05-19 14:11:21 +10002478 case 416:
2479 test_ahash_speed("rmd256", sec, generic_hash_speed_template);
2480 if (mode > 400 && mode < 500) break;
Gustavo A. R. Silva59517222017-10-09 14:43:21 -05002481 /* fall through */
David S. Millerbeb63da2010-05-19 14:11:21 +10002482 case 417:
2483 test_ahash_speed("rmd320", sec, generic_hash_speed_template);
2484 if (mode > 400 && mode < 500) break;
Gustavo A. R. Silva59517222017-10-09 14:43:21 -05002485 /* fall through */
raveendra padasalagi79cc6ab2016-06-17 10:30:36 +05302486 case 418:
2487 test_ahash_speed("sha3-224", sec, generic_hash_speed_template);
2488 if (mode > 400 && mode < 500) break;
Gustavo A. R. Silva59517222017-10-09 14:43:21 -05002489 /* fall through */
raveendra padasalagi79cc6ab2016-06-17 10:30:36 +05302490 case 419:
2491 test_ahash_speed("sha3-256", sec, generic_hash_speed_template);
2492 if (mode > 400 && mode < 500) break;
Gustavo A. R. Silva59517222017-10-09 14:43:21 -05002493 /* fall through */
raveendra padasalagi79cc6ab2016-06-17 10:30:36 +05302494 case 420:
2495 test_ahash_speed("sha3-384", sec, generic_hash_speed_template);
2496 if (mode > 400 && mode < 500) break;
Gustavo A. R. Silva59517222017-10-09 14:43:21 -05002497 /* fall through */
raveendra padasalagi79cc6ab2016-06-17 10:30:36 +05302498 case 421:
2499 test_ahash_speed("sha3-512", sec, generic_hash_speed_template);
2500 if (mode > 400 && mode < 500) break;
Gustavo A. R. Silva59517222017-10-09 14:43:21 -05002501 /* fall through */
Megha Dey087bcd22016-06-23 18:40:47 -07002502 case 422:
Gilad Ben-Yossef8fcdc862017-12-17 08:29:02 +00002503 test_mb_ahash_speed("sha1", sec, generic_hash_speed_template,
2504 num_mb);
Megha Dey087bcd22016-06-23 18:40:47 -07002505 if (mode > 400 && mode < 500) break;
Gustavo A. R. Silva59517222017-10-09 14:43:21 -05002506 /* fall through */
Megha Dey087bcd22016-06-23 18:40:47 -07002507 case 423:
Gilad Ben-Yossef8fcdc862017-12-17 08:29:02 +00002508 test_mb_ahash_speed("sha256", sec, generic_hash_speed_template,
2509 num_mb);
Megha Dey087bcd22016-06-23 18:40:47 -07002510 if (mode > 400 && mode < 500) break;
Gustavo A. R. Silva59517222017-10-09 14:43:21 -05002511 /* fall through */
Megha Dey14009c42016-06-27 10:20:09 -07002512 case 424:
Gilad Ben-Yossef8fcdc862017-12-17 08:29:02 +00002513 test_mb_ahash_speed("sha512", sec, generic_hash_speed_template,
2514 num_mb);
Megha Dey14009c42016-06-27 10:20:09 -07002515 if (mode > 400 && mode < 500) break;
Gustavo A. R. Silva59517222017-10-09 14:43:21 -05002516 /* fall through */
Gilad Ben-Yossefb7e27532017-08-21 13:51:29 +03002517 case 425:
Gilad Ben-Yossef8fcdc862017-12-17 08:29:02 +00002518 test_mb_ahash_speed("sm3", sec, generic_hash_speed_template,
2519 num_mb);
Gilad Ben-Yossefb7e27532017-08-21 13:51:29 +03002520 if (mode > 400 && mode < 500) break;
Gustavo A. R. Silva59517222017-10-09 14:43:21 -05002521 /* fall through */
David S. Millerbeb63da2010-05-19 14:11:21 +10002522 case 499:
2523 break;
2524
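	/*
	 * Modes 500..599: skcipher speed tests issued through the
	 * asynchronous interface (test_acipher_speed).
	 */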
Jussi Kivilinna3f3baf32011-10-18 00:02:58 +03002525 case 500:
2526 test_acipher_speed("ecb(aes)", ENCRYPT, sec, NULL, 0,
2527 speed_template_16_24_32);
2528 test_acipher_speed("ecb(aes)", DECRYPT, sec, NULL, 0,
2529 speed_template_16_24_32);
2530 test_acipher_speed("cbc(aes)", ENCRYPT, sec, NULL, 0,
2531 speed_template_16_24_32);
2532 test_acipher_speed("cbc(aes)", DECRYPT, sec, NULL, 0,
2533 speed_template_16_24_32);
2534 test_acipher_speed("lrw(aes)", ENCRYPT, sec, NULL, 0,
2535 speed_template_32_40_48);
2536 test_acipher_speed("lrw(aes)", DECRYPT, sec, NULL, 0,
2537 speed_template_32_40_48);
2538 test_acipher_speed("xts(aes)", ENCRYPT, sec, NULL, 0,
Horia Geantăb66ad0b2017-07-19 19:40:32 +03002539 speed_template_32_64);
Jussi Kivilinna3f3baf32011-10-18 00:02:58 +03002540 test_acipher_speed("xts(aes)", DECRYPT, sec, NULL, 0,
Horia Geantăb66ad0b2017-07-19 19:40:32 +03002541 speed_template_32_64);
Herbert Xu1503a242016-06-29 18:04:14 +08002542 test_acipher_speed("cts(cbc(aes))", ENCRYPT, sec, NULL, 0,
2543 speed_template_16_24_32);
2544 test_acipher_speed("cts(cbc(aes))", DECRYPT, sec, NULL, 0,
2545 speed_template_16_24_32);
Jussi Kivilinna3f3baf32011-10-18 00:02:58 +03002546 test_acipher_speed("ctr(aes)", ENCRYPT, sec, NULL, 0,
2547 speed_template_16_24_32);
2548 test_acipher_speed("ctr(aes)", DECRYPT, sec, NULL, 0,
2549 speed_template_16_24_32);
Nicolas Royerde1975332012-07-01 19:19:47 +02002550 test_acipher_speed("cfb(aes)", ENCRYPT, sec, NULL, 0,
2551 speed_template_16_24_32);
2552 test_acipher_speed("cfb(aes)", DECRYPT, sec, NULL, 0,
2553 speed_template_16_24_32);
2554 test_acipher_speed("ofb(aes)", ENCRYPT, sec, NULL, 0,
2555 speed_template_16_24_32);
2556 test_acipher_speed("ofb(aes)", DECRYPT, sec, NULL, 0,
2557 speed_template_16_24_32);
Jussi Kivilinna69d31502012-12-28 12:04:58 +02002558 test_acipher_speed("rfc3686(ctr(aes))", ENCRYPT, sec, NULL, 0,
2559 speed_template_20_28_36);
2560 test_acipher_speed("rfc3686(ctr(aes))", DECRYPT, sec, NULL, 0,
2561 speed_template_20_28_36);
Jussi Kivilinna3f3baf32011-10-18 00:02:58 +03002562 break;
2563
2564 case 501:
2565 test_acipher_speed("ecb(des3_ede)", ENCRYPT, sec,
2566 des3_speed_template, DES3_SPEED_VECTORS,
2567 speed_template_24);
2568 test_acipher_speed("ecb(des3_ede)", DECRYPT, sec,
2569 des3_speed_template, DES3_SPEED_VECTORS,
2570 speed_template_24);
2571 test_acipher_speed("cbc(des3_ede)", ENCRYPT, sec,
2572 des3_speed_template, DES3_SPEED_VECTORS,
2573 speed_template_24);
2574 test_acipher_speed("cbc(des3_ede)", DECRYPT, sec,
2575 des3_speed_template, DES3_SPEED_VECTORS,
2576 speed_template_24);
Nicolas Royerde1975332012-07-01 19:19:47 +02002577 test_acipher_speed("cfb(des3_ede)", ENCRYPT, sec,
2578 des3_speed_template, DES3_SPEED_VECTORS,
2579 speed_template_24);
2580 test_acipher_speed("cfb(des3_ede)", DECRYPT, sec,
2581 des3_speed_template, DES3_SPEED_VECTORS,
2582 speed_template_24);
2583 test_acipher_speed("ofb(des3_ede)", ENCRYPT, sec,
2584 des3_speed_template, DES3_SPEED_VECTORS,
2585 speed_template_24);
2586 test_acipher_speed("ofb(des3_ede)", DECRYPT, sec,
2587 des3_speed_template, DES3_SPEED_VECTORS,
2588 speed_template_24);
Jussi Kivilinna3f3baf32011-10-18 00:02:58 +03002589 break;
2590
2591 case 502:
2592 test_acipher_speed("ecb(des)", ENCRYPT, sec, NULL, 0,
2593 speed_template_8);
2594 test_acipher_speed("ecb(des)", DECRYPT, sec, NULL, 0,
2595 speed_template_8);
2596 test_acipher_speed("cbc(des)", ENCRYPT, sec, NULL, 0,
2597 speed_template_8);
2598 test_acipher_speed("cbc(des)", DECRYPT, sec, NULL, 0,
2599 speed_template_8);
Nicolas Royerde1975332012-07-01 19:19:47 +02002600 test_acipher_speed("cfb(des)", ENCRYPT, sec, NULL, 0,
2601 speed_template_8);
2602 test_acipher_speed("cfb(des)", DECRYPT, sec, NULL, 0,
2603 speed_template_8);
2604 test_acipher_speed("ofb(des)", ENCRYPT, sec, NULL, 0,
2605 speed_template_8);
2606 test_acipher_speed("ofb(des)", DECRYPT, sec, NULL, 0,
2607 speed_template_8);
Jussi Kivilinna3f3baf32011-10-18 00:02:58 +03002608 break;
2609
Jussi Kivilinna7fb7fe42011-10-18 00:03:03 +03002610 case 503:
2611 test_acipher_speed("ecb(serpent)", ENCRYPT, sec, NULL, 0,
2612 speed_template_16_32);
2613 test_acipher_speed("ecb(serpent)", DECRYPT, sec, NULL, 0,
2614 speed_template_16_32);
2615 test_acipher_speed("cbc(serpent)", ENCRYPT, sec, NULL, 0,
2616 speed_template_16_32);
2617 test_acipher_speed("cbc(serpent)", DECRYPT, sec, NULL, 0,
2618 speed_template_16_32);
2619 test_acipher_speed("ctr(serpent)", ENCRYPT, sec, NULL, 0,
2620 speed_template_16_32);
2621 test_acipher_speed("ctr(serpent)", DECRYPT, sec, NULL, 0,
2622 speed_template_16_32);
Jussi Kivilinna87aae4b2011-10-18 13:32:39 +03002623 test_acipher_speed("lrw(serpent)", ENCRYPT, sec, NULL, 0,
2624 speed_template_32_48);
2625 test_acipher_speed("lrw(serpent)", DECRYPT, sec, NULL, 0,
2626 speed_template_32_48);
Jussi Kivilinna5209c072011-10-18 13:33:22 +03002627 test_acipher_speed("xts(serpent)", ENCRYPT, sec, NULL, 0,
2628 speed_template_32_64);
2629 test_acipher_speed("xts(serpent)", DECRYPT, sec, NULL, 0,
2630 speed_template_32_64);
Jussi Kivilinna7fb7fe42011-10-18 00:03:03 +03002631 break;
2632
Johannes Goetzfried107778b52012-05-28 15:54:24 +02002633 case 504:
2634 test_acipher_speed("ecb(twofish)", ENCRYPT, sec, NULL, 0,
2635 speed_template_16_24_32);
2636 test_acipher_speed("ecb(twofish)", DECRYPT, sec, NULL, 0,
2637 speed_template_16_24_32);
2638 test_acipher_speed("cbc(twofish)", ENCRYPT, sec, NULL, 0,
2639 speed_template_16_24_32);
2640 test_acipher_speed("cbc(twofish)", DECRYPT, sec, NULL, 0,
2641 speed_template_16_24_32);
2642 test_acipher_speed("ctr(twofish)", ENCRYPT, sec, NULL, 0,
2643 speed_template_16_24_32);
2644 test_acipher_speed("ctr(twofish)", DECRYPT, sec, NULL, 0,
2645 speed_template_16_24_32);
2646 test_acipher_speed("lrw(twofish)", ENCRYPT, sec, NULL, 0,
2647 speed_template_32_40_48);
2648 test_acipher_speed("lrw(twofish)", DECRYPT, sec, NULL, 0,
2649 speed_template_32_40_48);
2650 test_acipher_speed("xts(twofish)", ENCRYPT, sec, NULL, 0,
2651 speed_template_32_48_64);
2652 test_acipher_speed("xts(twofish)", DECRYPT, sec, NULL, 0,
2653 speed_template_32_48_64);
2654 break;
2655
Jussi Kivilinna31b4cd292012-06-12 16:52:04 +08002656 case 505:
2657 test_acipher_speed("ecb(arc4)", ENCRYPT, sec, NULL, 0,
2658 speed_template_8);
2659 break;
2660
Johannes Goetzfrieda2c58262012-07-11 19:37:21 +02002661 case 506:
2662 test_acipher_speed("ecb(cast5)", ENCRYPT, sec, NULL, 0,
2663 speed_template_8_16);
2664 test_acipher_speed("ecb(cast5)", DECRYPT, sec, NULL, 0,
2665 speed_template_8_16);
2666 test_acipher_speed("cbc(cast5)", ENCRYPT, sec, NULL, 0,
2667 speed_template_8_16);
2668 test_acipher_speed("cbc(cast5)", DECRYPT, sec, NULL, 0,
2669 speed_template_8_16);
2670 test_acipher_speed("ctr(cast5)", ENCRYPT, sec, NULL, 0,
2671 speed_template_8_16);
2672 test_acipher_speed("ctr(cast5)", DECRYPT, sec, NULL, 0,
2673 speed_template_8_16);
2674 break;
2675
Johannes Goetzfried9b8b0402012-07-11 19:38:29 +02002676 case 507:
2677 test_acipher_speed("ecb(cast6)", ENCRYPT, sec, NULL, 0,
2678 speed_template_16_32);
2679 test_acipher_speed("ecb(cast6)", DECRYPT, sec, NULL, 0,
2680 speed_template_16_32);
2681 test_acipher_speed("cbc(cast6)", ENCRYPT, sec, NULL, 0,
2682 speed_template_16_32);
2683 test_acipher_speed("cbc(cast6)", DECRYPT, sec, NULL, 0,
2684 speed_template_16_32);
2685 test_acipher_speed("ctr(cast6)", ENCRYPT, sec, NULL, 0,
2686 speed_template_16_32);
2687 test_acipher_speed("ctr(cast6)", DECRYPT, sec, NULL, 0,
2688 speed_template_16_32);
2689 test_acipher_speed("lrw(cast6)", ENCRYPT, sec, NULL, 0,
2690 speed_template_32_48);
2691 test_acipher_speed("lrw(cast6)", DECRYPT, sec, NULL, 0,
2692 speed_template_32_48);
2693 test_acipher_speed("xts(cast6)", ENCRYPT, sec, NULL, 0,
2694 speed_template_32_64);
2695 test_acipher_speed("xts(cast6)", DECRYPT, sec, NULL, 0,
2696 speed_template_32_64);
2697 break;
2698
Jussi Kivilinnabf9c5182012-10-26 14:48:51 +03002699 case 508:
2700 test_acipher_speed("ecb(camellia)", ENCRYPT, sec, NULL, 0,
2701 speed_template_16_32);
2702 test_acipher_speed("ecb(camellia)", DECRYPT, sec, NULL, 0,
2703 speed_template_16_32);
2704 test_acipher_speed("cbc(camellia)", ENCRYPT, sec, NULL, 0,
2705 speed_template_16_32);
2706 test_acipher_speed("cbc(camellia)", DECRYPT, sec, NULL, 0,
2707 speed_template_16_32);
2708 test_acipher_speed("ctr(camellia)", ENCRYPT, sec, NULL, 0,
2709 speed_template_16_32);
2710 test_acipher_speed("ctr(camellia)", DECRYPT, sec, NULL, 0,
2711 speed_template_16_32);
2712 test_acipher_speed("lrw(camellia)", ENCRYPT, sec, NULL, 0,
2713 speed_template_32_48);
2714 test_acipher_speed("lrw(camellia)", DECRYPT, sec, NULL, 0,
2715 speed_template_32_48);
2716 test_acipher_speed("xts(camellia)", ENCRYPT, sec, NULL, 0,
2717 speed_template_32_64);
2718 test_acipher_speed("xts(camellia)", DECRYPT, sec, NULL, 0,
2719 speed_template_32_64);
2720 break;
2721
Jussi Kivilinnaad8b7c32013-04-13 13:46:40 +03002722 case 509:
2723 test_acipher_speed("ecb(blowfish)", ENCRYPT, sec, NULL, 0,
2724 speed_template_8_32);
2725 test_acipher_speed("ecb(blowfish)", DECRYPT, sec, NULL, 0,
2726 speed_template_8_32);
2727 test_acipher_speed("cbc(blowfish)", ENCRYPT, sec, NULL, 0,
2728 speed_template_8_32);
2729 test_acipher_speed("cbc(blowfish)", DECRYPT, sec, NULL, 0,
2730 speed_template_8_32);
2731 test_acipher_speed("ctr(blowfish)", ENCRYPT, sec, NULL, 0,
2732 speed_template_8_32);
2733 test_acipher_speed("ctr(blowfish)", DECRYPT, sec, NULL, 0,
2734 speed_template_8_32);
2735 break;
2736
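	/*
	 * Modes 600 and up: multi-buffer skcipher speed tests, issuing
	 * num_mb requests per batch.
	 */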
Gilad Ben-Yossefe161c592017-12-17 08:29:04 +00002737 case 600:
2738 test_mb_skcipher_speed("ecb(aes)", ENCRYPT, sec, NULL, 0,
2739 speed_template_16_24_32, num_mb);
2740 test_mb_skcipher_speed("ecb(aes)", DECRYPT, sec, NULL, 0,
2741 speed_template_16_24_32, num_mb);
2742 test_mb_skcipher_speed("cbc(aes)", ENCRYPT, sec, NULL, 0,
2743 speed_template_16_24_32, num_mb);
2744 test_mb_skcipher_speed("cbc(aes)", DECRYPT, sec, NULL, 0,
2745 speed_template_16_24_32, num_mb);
2746 test_mb_skcipher_speed("lrw(aes)", ENCRYPT, sec, NULL, 0,
2747 speed_template_32_40_48, num_mb);
2748 test_mb_skcipher_speed("lrw(aes)", DECRYPT, sec, NULL, 0,
2749 speed_template_32_40_48, num_mb);
2750 test_mb_skcipher_speed("xts(aes)", ENCRYPT, sec, NULL, 0,
2751 speed_template_32_64, num_mb);
2752 test_mb_skcipher_speed("xts(aes)", DECRYPT, sec, NULL, 0,
2753 speed_template_32_64, num_mb);
2754 test_mb_skcipher_speed("cts(cbc(aes))", ENCRYPT, sec, NULL, 0,
2755 speed_template_16_24_32, num_mb);
2756 test_mb_skcipher_speed("cts(cbc(aes))", DECRYPT, sec, NULL, 0,
2757 speed_template_16_24_32, num_mb);
2758 test_mb_skcipher_speed("ctr(aes)", ENCRYPT, sec, NULL, 0,
2759 speed_template_16_24_32, num_mb);
2760 test_mb_skcipher_speed("ctr(aes)", DECRYPT, sec, NULL, 0,
2761 speed_template_16_24_32, num_mb);
2762 test_mb_skcipher_speed("cfb(aes)", ENCRYPT, sec, NULL, 0,
2763 speed_template_16_24_32, num_mb);
2764 test_mb_skcipher_speed("cfb(aes)", DECRYPT, sec, NULL, 0,
2765 speed_template_16_24_32, num_mb);
2766 test_mb_skcipher_speed("ofb(aes)", ENCRYPT, sec, NULL, 0,
2767 speed_template_16_24_32, num_mb);
2768 test_mb_skcipher_speed("ofb(aes)", DECRYPT, sec, NULL, 0,
2769 speed_template_16_24_32, num_mb);
2770 test_mb_skcipher_speed("rfc3686(ctr(aes))", ENCRYPT, sec, NULL,
2771 0, speed_template_20_28_36, num_mb);
2772 test_mb_skcipher_speed("rfc3686(ctr(aes))", DECRYPT, sec, NULL,
2773 0, speed_template_20_28_36, num_mb);
2774 break;
2775
2776 case 601:
2777 test_mb_skcipher_speed("ecb(des3_ede)", ENCRYPT, sec,
2778 des3_speed_template, DES3_SPEED_VECTORS,
2779 speed_template_24, num_mb);
2780 test_mb_skcipher_speed("ecb(des3_ede)", DECRYPT, sec,
2781 des3_speed_template, DES3_SPEED_VECTORS,
2782 speed_template_24, num_mb);
2783 test_mb_skcipher_speed("cbc(des3_ede)", ENCRYPT, sec,
2784 des3_speed_template, DES3_SPEED_VECTORS,
2785 speed_template_24, num_mb);
2786 test_mb_skcipher_speed("cbc(des3_ede)", DECRYPT, sec,
2787 des3_speed_template, DES3_SPEED_VECTORS,
2788 speed_template_24, num_mb);
2789 test_mb_skcipher_speed("cfb(des3_ede)", ENCRYPT, sec,
2790 des3_speed_template, DES3_SPEED_VECTORS,
2791 speed_template_24, num_mb);
2792 test_mb_skcipher_speed("cfb(des3_ede)", DECRYPT, sec,
2793 des3_speed_template, DES3_SPEED_VECTORS,
2794 speed_template_24, num_mb);
2795 test_mb_skcipher_speed("ofb(des3_ede)", ENCRYPT, sec,
2796 des3_speed_template, DES3_SPEED_VECTORS,
2797 speed_template_24, num_mb);
2798 test_mb_skcipher_speed("ofb(des3_ede)", DECRYPT, sec,
2799 des3_speed_template, DES3_SPEED_VECTORS,
2800 speed_template_24, num_mb);
2801 break;
2802
2803 case 602:
2804 test_mb_skcipher_speed("ecb(des)", ENCRYPT, sec, NULL, 0,
2805 speed_template_8, num_mb);
2806 test_mb_skcipher_speed("ecb(des)", DECRYPT, sec, NULL, 0,
2807 speed_template_8, num_mb);
2808 test_mb_skcipher_speed("cbc(des)", ENCRYPT, sec, NULL, 0,
2809 speed_template_8, num_mb);
2810 test_mb_skcipher_speed("cbc(des)", DECRYPT, sec, NULL, 0,
2811 speed_template_8, num_mb);
2812 test_mb_skcipher_speed("cfb(des)", ENCRYPT, sec, NULL, 0,
2813 speed_template_8, num_mb);
2814 test_mb_skcipher_speed("cfb(des)", DECRYPT, sec, NULL, 0,
2815 speed_template_8, num_mb);
2816 test_mb_skcipher_speed("ofb(des)", ENCRYPT, sec, NULL, 0,
2817 speed_template_8, num_mb);
2818 test_mb_skcipher_speed("ofb(des)", DECRYPT, sec, NULL, 0,
2819 speed_template_8, num_mb);
2820 break;
2821
2822 case 603:
2823 test_mb_skcipher_speed("ecb(serpent)", ENCRYPT, sec, NULL, 0,
2824 speed_template_16_32, num_mb);
2825 test_mb_skcipher_speed("ecb(serpent)", DECRYPT, sec, NULL, 0,
2826 speed_template_16_32, num_mb);
2827 test_mb_skcipher_speed("cbc(serpent)", ENCRYPT, sec, NULL, 0,
2828 speed_template_16_32, num_mb);
2829 test_mb_skcipher_speed("cbc(serpent)", DECRYPT, sec, NULL, 0,
2830 speed_template_16_32, num_mb);
2831 test_mb_skcipher_speed("ctr(serpent)", ENCRYPT, sec, NULL, 0,
2832 speed_template_16_32, num_mb);
2833 test_mb_skcipher_speed("ctr(serpent)", DECRYPT, sec, NULL, 0,
2834 speed_template_16_32, num_mb);
2835 test_mb_skcipher_speed("lrw(serpent)", ENCRYPT, sec, NULL, 0,
2836 speed_template_32_48, num_mb);
2837 test_mb_skcipher_speed("lrw(serpent)", DECRYPT, sec, NULL, 0,
2838 speed_template_32_48, num_mb);
2839 test_mb_skcipher_speed("xts(serpent)", ENCRYPT, sec, NULL, 0,
2840 speed_template_32_64, num_mb);
2841 test_mb_skcipher_speed("xts(serpent)", DECRYPT, sec, NULL, 0,
2842 speed_template_32_64, num_mb);
2843 break;
2844
2845 case 604:
2846 test_mb_skcipher_speed("ecb(twofish)", ENCRYPT, sec, NULL, 0,
2847 speed_template_16_24_32, num_mb);
2848 test_mb_skcipher_speed("ecb(twofish)", DECRYPT, sec, NULL, 0,
2849 speed_template_16_24_32, num_mb);
2850 test_mb_skcipher_speed("cbc(twofish)", ENCRYPT, sec, NULL, 0,
2851 speed_template_16_24_32, num_mb);
2852 test_mb_skcipher_speed("cbc(twofish)", DECRYPT, sec, NULL, 0,
2853 speed_template_16_24_32, num_mb);
2854 test_mb_skcipher_speed("ctr(twofish)", ENCRYPT, sec, NULL, 0,
2855 speed_template_16_24_32, num_mb);
2856 test_mb_skcipher_speed("ctr(twofish)", DECRYPT, sec, NULL, 0,
2857 speed_template_16_24_32, num_mb);
2858 test_mb_skcipher_speed("lrw(twofish)", ENCRYPT, sec, NULL, 0,
2859 speed_template_32_40_48, num_mb);
2860 test_mb_skcipher_speed("lrw(twofish)", DECRYPT, sec, NULL, 0,
2861 speed_template_32_40_48, num_mb);
2862 test_mb_skcipher_speed("xts(twofish)", ENCRYPT, sec, NULL, 0,
2863 speed_template_32_48_64, num_mb);
2864 test_mb_skcipher_speed("xts(twofish)", DECRYPT, sec, NULL, 0,
2865 speed_template_32_48_64, num_mb);
2866 break;
2867
2868 case 605:
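		/*
		 * arc4 is a stream cipher (reached here through the ecb()
		 * wrapper); encryption and decryption are the same keystream
		 * XOR, so only one direction needs to be timed.
		 */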
2869 test_mb_skcipher_speed("ecb(arc4)", ENCRYPT, sec, NULL, 0,
2870 speed_template_8, num_mb);
2871 break;
2872
2873 case 606:
2874 test_mb_skcipher_speed("ecb(cast5)", ENCRYPT, sec, NULL, 0,
2875 speed_template_8_16, num_mb);
2876 test_mb_skcipher_speed("ecb(cast5)", DECRYPT, sec, NULL, 0,
2877 speed_template_8_16, num_mb);
2878 test_mb_skcipher_speed("cbc(cast5)", ENCRYPT, sec, NULL, 0,
2879 speed_template_8_16, num_mb);
2880 test_mb_skcipher_speed("cbc(cast5)", DECRYPT, sec, NULL, 0,
2881 speed_template_8_16, num_mb);
2882 test_mb_skcipher_speed("ctr(cast5)", ENCRYPT, sec, NULL, 0,
2883 speed_template_8_16, num_mb);
2884 test_mb_skcipher_speed("ctr(cast5)", DECRYPT, sec, NULL, 0,
2885 speed_template_8_16, num_mb);
2886 break;
2887
2888 case 607:
2889 test_mb_skcipher_speed("ecb(cast6)", ENCRYPT, sec, NULL, 0,
2890 speed_template_16_32, num_mb);
2891 test_mb_skcipher_speed("ecb(cast6)", DECRYPT, sec, NULL, 0,
2892 speed_template_16_32, num_mb);
2893 test_mb_skcipher_speed("cbc(cast6)", ENCRYPT, sec, NULL, 0,
2894 speed_template_16_32, num_mb);
2895 test_mb_skcipher_speed("cbc(cast6)", DECRYPT, sec, NULL, 0,
2896 speed_template_16_32, num_mb);
2897 test_mb_skcipher_speed("ctr(cast6)", ENCRYPT, sec, NULL, 0,
2898 speed_template_16_32, num_mb);
2899 test_mb_skcipher_speed("ctr(cast6)", DECRYPT, sec, NULL, 0,
2900 speed_template_16_32, num_mb);
2901 test_mb_skcipher_speed("lrw(cast6)", ENCRYPT, sec, NULL, 0,
2902 speed_template_32_48, num_mb);
2903 test_mb_skcipher_speed("lrw(cast6)", DECRYPT, sec, NULL, 0,
2904 speed_template_32_48, num_mb);
2905 test_mb_skcipher_speed("xts(cast6)", ENCRYPT, sec, NULL, 0,
2906 speed_template_32_64, num_mb);
2907 test_mb_skcipher_speed("xts(cast6)", DECRYPT, sec, NULL, 0,
2908 speed_template_32_64, num_mb);
2909 break;
2910
2911 case 608:
2912 test_mb_skcipher_speed("ecb(camellia)", ENCRYPT, sec, NULL, 0,
2913 speed_template_16_32, num_mb);
2914 test_mb_skcipher_speed("ecb(camellia)", DECRYPT, sec, NULL, 0,
2915 speed_template_16_32, num_mb);
2916 test_mb_skcipher_speed("cbc(camellia)", ENCRYPT, sec, NULL, 0,
2917 speed_template_16_32, num_mb);
2918 test_mb_skcipher_speed("cbc(camellia)", DECRYPT, sec, NULL, 0,
2919 speed_template_16_32, num_mb);
2920 test_mb_skcipher_speed("ctr(camellia)", ENCRYPT, sec, NULL, 0,
2921 speed_template_16_32, num_mb);
2922 test_mb_skcipher_speed("ctr(camellia)", DECRYPT, sec, NULL, 0,
2923 speed_template_16_32, num_mb);
2924 test_mb_skcipher_speed("lrw(camellia)", ENCRYPT, sec, NULL, 0,
2925 speed_template_32_48, num_mb);
2926 test_mb_skcipher_speed("lrw(camellia)", DECRYPT, sec, NULL, 0,
2927 speed_template_32_48, num_mb);
2928 test_mb_skcipher_speed("xts(camellia)", ENCRYPT, sec, NULL, 0,
2929 speed_template_32_64, num_mb);
2930 test_mb_skcipher_speed("xts(camellia)", DECRYPT, sec, NULL, 0,
2931 speed_template_32_64, num_mb);
2932 break;
2933
2934 case 609:
2935 test_mb_skcipher_speed("ecb(blowfish)", ENCRYPT, sec, NULL, 0,
2936 speed_template_8_32, num_mb);
2937 test_mb_skcipher_speed("ecb(blowfish)", DECRYPT, sec, NULL, 0,
2938 speed_template_8_32, num_mb);
2939 test_mb_skcipher_speed("cbc(blowfish)", ENCRYPT, sec, NULL, 0,
2940 speed_template_8_32, num_mb);
2941 test_mb_skcipher_speed("cbc(blowfish)", DECRYPT, sec, NULL, 0,
2942 speed_template_8_32, num_mb);
2943 test_mb_skcipher_speed("ctr(blowfish)", ENCRYPT, sec, NULL, 0,
2944 speed_template_8_32, num_mb);
2945 test_mb_skcipher_speed("ctr(blowfish)", DECRYPT, sec, NULL, 0,
2946 speed_template_8_32, num_mb);
2947 break;
2948
Linus Torvalds1da177e2005-04-16 15:20:36 -07002949 case 1000:
2950 test_available();
2951 break;
Linus Torvalds1da177e2005-04-16 15:20:36 -07002952 }
Jarod Wilson4e033a62009-05-27 15:10:21 +10002953
2954 return ret;
Linus Torvalds1da177e2005-04-16 15:20:36 -07002955}
2956
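/*
 * Module entry point: allocate TVMEMSIZE pages of scratch memory, run
 * whatever test or benchmark the module parameters select via do_test(),
 * then free the pages again.  A typical invocation (the parameter values
 * are only an example) looks like:
 *
 *	modprobe tcrypt mode=600 sec=1 num_mb=8
 *
 * On success, outside of fips mode, the return value is deliberately set
 * to -EAGAIN so the module never stays resident; see the comment near the
 * end of this function.
 */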
Kamalesh Babulal3af5b902008-04-05 21:00:57 +08002957static int __init tcrypt_mod_init(void)
Linus Torvalds1da177e2005-04-16 15:20:36 -07002958{
Mikko Herranene3a4ea42007-11-26 22:12:07 +08002959 int err = -ENOMEM;
Herbert Xuf139cfa2008-07-31 12:23:53 +08002960 int i;
Mikko Herranene3a4ea42007-11-26 22:12:07 +08002961
Herbert Xuf139cfa2008-07-31 12:23:53 +08002962 for (i = 0; i < TVMEMSIZE; i++) {
2963 tvmem[i] = (void *)__get_free_page(GFP_KERNEL);
2964 if (!tvmem[i])
2965 goto err_free_tv;
2966 }
Linus Torvalds1da177e2005-04-16 15:20:36 -07002967
Kees Cook4e234ee2018-04-26 19:57:28 -07002968 err = do_test(alg, type, mask, mode, num_mb);
Steffen Klasserta873a5f2009-06-19 19:46:53 +08002969
Jarod Wilson4e033a62009-05-27 15:10:21 +10002970 if (err) {
2971		pr_err("one or more tests failed!\n");
2972 goto err_free_tv;
Rabin Vincent76512f22017-01-18 14:54:05 +01002973 } else {
2974 pr_debug("all tests passed\n");
Jarod Wilson4e033a62009-05-27 15:10:21 +10002975 }
Linus Torvalds1da177e2005-04-16 15:20:36 -07002976
Jarod Wilson4e033a62009-05-27 15:10:21 +10002977	/* We intentionally return -EAGAIN to prevent keeping the module loaded,
2978 * unless we're running in fips mode. It does all its work from
2979 * init() and doesn't offer any runtime functionality, but in
2980 * the fips case, checking for a successful load is helpful.
Michal Ludvig14fdf472006-05-30 14:49:38 +10002981	 * => we don't need it in memory, do we?
2982 * -- mludvig
2983 */
Jarod Wilson4e033a62009-05-27 15:10:21 +10002984 if (!fips_enabled)
2985 err = -EAGAIN;
Mikko Herranene3a4ea42007-11-26 22:12:07 +08002986
Herbert Xuf139cfa2008-07-31 12:23:53 +08002987err_free_tv:
2988 for (i = 0; i < TVMEMSIZE && tvmem[i]; i++)
2989 free_page((unsigned long)tvmem[i]);
Mikko Herranene3a4ea42007-11-26 22:12:07 +08002990
2991 return err;
Linus Torvalds1da177e2005-04-16 15:20:36 -07002992}
2993
2994/*
2995 * If an init function is provided, an exit function must also be provided
2996 * to allow module unload.
2997 */
Kamalesh Babulal3af5b902008-04-05 21:00:57 +08002998static void __exit tcrypt_mod_fini(void) { }
Linus Torvalds1da177e2005-04-16 15:20:36 -07002999
Kamalesh Babulal3af5b902008-04-05 21:00:57 +08003000module_init(tcrypt_mod_init);
3001module_exit(tcrypt_mod_fini);
Linus Torvalds1da177e2005-04-16 15:20:36 -07003002
Steffen Klasserta873a5f2009-06-19 19:46:53 +08003003module_param(alg, charp, 0);
3004module_param(type, uint, 0);
Herbert Xu7be380f2009-07-14 16:06:54 +08003005module_param(mask, uint, 0);
Linus Torvalds1da177e2005-04-16 15:20:36 -07003006module_param(mode, int, 0);
Harald Welteebfd9bc2005-06-22 13:27:23 -07003007module_param(sec, uint, 0);
Herbert Xu6a179442005-06-22 13:29:03 -07003008MODULE_PARM_DESC(sec, "Length in seconds of speed tests "
3009 "(defaults to zero which uses CPU cycles instead)");
Gilad Ben-Yossef8fcdc862017-12-17 08:29:02 +00003010module_param(num_mb, uint, 0000);
3011MODULE_PARM_DESC(num_mb, "Number of concurrent requests to be used in mb speed tests (defaults to 8)");
Linus Torvalds1da177e2005-04-16 15:20:36 -07003012
3013MODULE_LICENSE("GPL");
3014MODULE_DESCRIPTION("Quick & dirty crypto testing module");
3015MODULE_AUTHOR("James Morris <jmorris@intercode.com.au>");