// SPDX-License-Identifier: GPL-2.0-or-later
/*
 * Quick & dirty crypto testing module.
 *
 * This will only exist until we have a better testing mechanism
 * (e.g. a char device).
 *
 * Copyright (c) 2002 James Morris <jmorris@intercode.com.au>
 * Copyright (c) 2002 Jean-Francois Dive <jef@linuxbe.org>
 * Copyright (c) 2007 Nokia Siemens Networks
 *
 * Updated RFC4106 AES-GCM testing.
 *    Authors: Aidan O'Mahony (aidan.o.mahony@intel.com)
 *             Adrian Hoban <adrian.hoban@intel.com>
 *             Gabriele Paoloni <gabriele.paoloni@intel.com>
 *             Tadeusz Struk (tadeusz.struk@intel.com)
 *             Copyright (c) 2010, Intel Corporation.
 */
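
/*
 * Usage note (editorial): tcrypt is normally exercised by loading it as a
 * module, e.g. "modprobe tcrypt mode=200 sec=1" for a speed run, or
 * "modprobe tcrypt" with no arguments for the full self-test sweep.  By
 * design, module init reports an error after the requested tests have run,
 * so the module never stays loaded and can simply be loaded again.  The
 * mode/sec/alg/num_mb parameters are wired up via module_param() further
 * down in this file.
 */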

#define pr_fmt(fmt) KBUILD_MODNAME ": " fmt

#include <crypto/aead.h>
#include <crypto/hash.h>
#include <crypto/skcipher.h>
#include <linux/err.h>
#include <linux/fips.h>
#include <linux/init.h>
#include <linux/gfp.h>
#include <linux/module.h>
#include <linux/scatterlist.h>
#include <linux/string.h>
#include <linux/moduleparam.h>
#include <linux/jiffies.h>
#include <linux/timex.h>
#include <linux/interrupt.h>
#include "tcrypt.h"

/*
 * Need slab memory for testing (size in number of pages).
 */
#define TVMEMSIZE	4

/*
 * Used by test_cipher_speed()
 */
#define ENCRYPT 1
#define DECRYPT 0

#define MAX_DIGEST_SIZE		64

/*
 * return a string with the driver name
 */
#define get_driver_name(tfm_type, tfm) crypto_tfm_alg_driver_name(tfm_type ## _tfm(tfm))

/*
 * Used by test_cipher_speed()
 */
static unsigned int sec;

static char *alg = NULL;
static u32 type;
static u32 mask;
static int mode;
static u32 num_mb = 8;
static char *tvmem[TVMEMSIZE];

static char *check[] = {
	"des", "md5", "des3_ede", "rot13", "sha1", "sha224", "sha256", "sm3",
	"blowfish", "twofish", "serpent", "sha384", "sha512", "md4", "aes",
	"cast6", "arc4", "michael_mic", "deflate", "crc32c", "tea", "xtea",
	"khazad", "wp512", "wp384", "wp256", "tnepres", "xeta", "fcrypt",
	"camellia", "seed", "salsa20", "rmd128", "rmd160", "rmd256", "rmd320",
	"lzo", "lzo-rle", "cts", "sha3-224", "sha3-256", "sha3-384",
	"sha3-512", "streebog256", "streebog512",
	NULL
};

static u32 block_sizes[] = { 16, 64, 256, 1024, 1472, 8192, 0 };
static u32 aead_sizes[] = { 16, 64, 256, 512, 1024, 2048, 4096, 8192, 0 };

#define XBUFSIZE 8
#define MAX_IVLEN 32

static int testmgr_alloc_buf(char *buf[XBUFSIZE])
{
	int i;

	for (i = 0; i < XBUFSIZE; i++) {
		buf[i] = (void *)__get_free_page(GFP_KERNEL);
		if (!buf[i])
			goto err_free_buf;
	}

	return 0;

err_free_buf:
	while (i-- > 0)
		free_page((unsigned long)buf[i]);

	return -ENOMEM;
}

static void testmgr_free_buf(char *buf[XBUFSIZE])
{
	int i;

	for (i = 0; i < XBUFSIZE; i++)
		free_page((unsigned long)buf[i]);
}

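/*
 * Build the scatterlist for one AEAD request: entry 0 maps the associated
 * data, the remaining entries map the payload across the xbuf pages (at
 * most XBUFSIZE of them), with the final entry holding the tail when buflen
 * is not page aligned.
 */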
static void sg_init_aead(struct scatterlist *sg, char *xbuf[XBUFSIZE],
			 unsigned int buflen, const void *assoc,
			 unsigned int aad_size)
{
	int np = (buflen + PAGE_SIZE - 1)/PAGE_SIZE;
	int k, rem;

	if (np > XBUFSIZE) {
		rem = PAGE_SIZE;
		np = XBUFSIZE;
	} else {
		rem = buflen % PAGE_SIZE;
	}

	sg_init_table(sg, np + 1);

	sg_set_buf(&sg[0], assoc, aad_size);

	if (rem)
		np--;
	for (k = 0; k < np; k++)
		sg_set_buf(&sg[k + 1], xbuf[k], PAGE_SIZE);

	if (rem)
		sg_set_buf(&sg[k + 1], xbuf[k], rem);
}

static inline int do_one_aead_op(struct aead_request *req, int ret)
{
	struct crypto_wait *wait = req->base.data;

	return crypto_wait_req(ret, wait);
}

struct test_mb_aead_data {
	struct scatterlist sg[XBUFSIZE];
	struct scatterlist sgout[XBUFSIZE];
	struct aead_request *req;
	struct crypto_wait wait;
	char *xbuf[XBUFSIZE];
	char *xoutbuf[XBUFSIZE];
	char *axbuf[XBUFSIZE];
};

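/*
 * Issue num_mb requests back to back and then wait for every one of them to
 * complete.  All requests are always waited on; if any of them fails, the
 * error of the last failing request is returned.
 */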
static int do_mult_aead_op(struct test_mb_aead_data *data, int enc,
			   u32 num_mb, int *rc)
{
	int i, err = 0;

	/* Fire up a bunch of concurrent requests */
	for (i = 0; i < num_mb; i++) {
		if (enc == ENCRYPT)
			rc[i] = crypto_aead_encrypt(data[i].req);
		else
			rc[i] = crypto_aead_decrypt(data[i].req);
	}

	/* Wait for all requests to finish */
	for (i = 0; i < num_mb; i++) {
		rc[i] = crypto_wait_req(rc[i], &data[i].wait);

		if (rc[i]) {
			pr_info("concurrent request %d error %d\n", i, rc[i]);
			err = rc[i];
		}
	}

	return err;
}

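/*
 * Two measurement strategies are used throughout this file: the *_jiffies
 * variants run as many operations as fit into a wall-clock interval of
 * "secs" seconds, while the *_cycles variants time a fixed number of
 * operations (4 warm-up rounds followed by 8 measured rounds) with
 * get_cycles().
 */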
static int test_mb_aead_jiffies(struct test_mb_aead_data *data, int enc,
				int blen, int secs, u32 num_mb)
{
	unsigned long start, end;
	int bcount;
	int ret = 0;
	int *rc;

	rc = kcalloc(num_mb, sizeof(*rc), GFP_KERNEL);
	if (!rc)
		return -ENOMEM;

	for (start = jiffies, end = start + secs * HZ, bcount = 0;
	     time_before(jiffies, end); bcount++) {
		ret = do_mult_aead_op(data, enc, num_mb, rc);
		if (ret)
			goto out;
	}

	pr_cont("%d operations in %d seconds (%ld bytes)\n",
		bcount * num_mb, secs, (long)bcount * blen * num_mb);

out:
	kfree(rc);
	return ret;
}

static int test_mb_aead_cycles(struct test_mb_aead_data *data, int enc,
			       int blen, u32 num_mb)
{
	unsigned long cycles = 0;
	int ret = 0;
	int i;
	int *rc;

	rc = kcalloc(num_mb, sizeof(*rc), GFP_KERNEL);
	if (!rc)
		return -ENOMEM;

	/* Warm-up run. */
	for (i = 0; i < 4; i++) {
		ret = do_mult_aead_op(data, enc, num_mb, rc);
		if (ret)
			goto out;
	}

	/* The real thing. */
	for (i = 0; i < 8; i++) {
		cycles_t start, end;

		start = get_cycles();
		ret = do_mult_aead_op(data, enc, num_mb, rc);
		end = get_cycles();

		if (ret)
			goto out;

		cycles += end - start;
	}

	pr_cont("1 operation in %lu cycles (%d bytes)\n",
		(cycles + 4) / (8 * num_mb), blen);

out:
	kfree(rc);
	return ret;
}

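/*
 * Multibuffer AEAD throughput test: allocates num_mb independent request
 * contexts, then walks every (keysize, aead_sizes[]) combination from the
 * template, timing num_mb concurrent requests per iteration either for
 * "secs" seconds or over a fixed cycle-counted run.  For decryption, each
 * buffer is first encrypted once so that a valid authentication tag is in
 * place.
 */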
static void test_mb_aead_speed(const char *algo, int enc, int secs,
			       struct aead_speed_template *template,
			       unsigned int tcount, u8 authsize,
			       unsigned int aad_size, u8 *keysize, u32 num_mb)
{
	struct test_mb_aead_data *data;
	struct crypto_aead *tfm;
	unsigned int i, j, iv_len;
	const char *key;
	const char *e;
	void *assoc;
	u32 *b_size;
	char *iv;
	int ret;

	if (aad_size >= PAGE_SIZE) {
267 pr_err("associate data length (%u) too big\n", aad_size);
		return;
	}

	iv = kzalloc(MAX_IVLEN, GFP_KERNEL);
	if (!iv)
		return;

	if (enc == ENCRYPT)
		e = "encryption";
	else
		e = "decryption";

	data = kcalloc(num_mb, sizeof(*data), GFP_KERNEL);
	if (!data)
		goto out_free_iv;

	tfm = crypto_alloc_aead(algo, 0, 0);
	if (IS_ERR(tfm)) {
		pr_err("failed to load transform for %s: %ld\n",
		       algo, PTR_ERR(tfm));
		goto out_free_data;
	}

	ret = crypto_aead_setauthsize(tfm, authsize);

	for (i = 0; i < num_mb; ++i)
		if (testmgr_alloc_buf(data[i].xbuf)) {
			while (i--)
				testmgr_free_buf(data[i].xbuf);
			goto out_free_tfm;
		}

	for (i = 0; i < num_mb; ++i)
		if (testmgr_alloc_buf(data[i].axbuf)) {
			while (i--)
				testmgr_free_buf(data[i].axbuf);
			goto out_free_xbuf;
		}

	for (i = 0; i < num_mb; ++i)
		if (testmgr_alloc_buf(data[i].xoutbuf)) {
			while (i--)
				testmgr_free_buf(data[i].xoutbuf);
			goto out_free_axbuf;
		}

	for (i = 0; i < num_mb; ++i) {
		data[i].req = aead_request_alloc(tfm, GFP_KERNEL);
		if (!data[i].req) {
317 pr_err("alg: skcipher: Failed to allocate request for %s\n",
318 algo);
			while (i--)
				aead_request_free(data[i].req);
			goto out_free_xoutbuf;
		}
	}

	for (i = 0; i < num_mb; ++i) {
		crypto_init_wait(&data[i].wait);
		aead_request_set_callback(data[i].req,
					  CRYPTO_TFM_REQ_MAY_BACKLOG,
					  crypto_req_done, &data[i].wait);
	}

	pr_info("\ntesting speed of multibuffer %s (%s) %s\n", algo,
		get_driver_name(crypto_aead, tfm), e);

	i = 0;
	do {
		b_size = aead_sizes;
		do {
			if (*b_size + authsize > XBUFSIZE * PAGE_SIZE) {
				pr_err("template (%u) too big for buffer (%lu)\n",
				       authsize + *b_size,
				       XBUFSIZE * PAGE_SIZE);
				goto out;
			}

			pr_info("test %u (%d bit key, %d byte blocks): ", i,
				*keysize * 8, *b_size);

			/* Set up tfm global state, i.e. the key */

			memset(tvmem[0], 0xff, PAGE_SIZE);
			key = tvmem[0];
			for (j = 0; j < tcount; j++) {
				if (template[j].klen == *keysize) {
					key = template[j].key;
					break;
				}
			}

			crypto_aead_clear_flags(tfm, ~0);

			ret = crypto_aead_setkey(tfm, key, *keysize);
			if (ret) {
				pr_err("setkey() failed flags=%x\n",
				       crypto_aead_get_flags(tfm));
				goto out;
			}

			iv_len = crypto_aead_ivsize(tfm);
			if (iv_len)
				memset(iv, 0xff, iv_len);

			/* Now setup per request stuff, i.e. buffers */

			for (j = 0; j < num_mb; ++j) {
				struct test_mb_aead_data *cur = &data[j];

				assoc = cur->axbuf[0];
				memset(assoc, 0xff, aad_size);

				sg_init_aead(cur->sg, cur->xbuf,
					     *b_size + (enc ? 0 : authsize),
					     assoc, aad_size);

				sg_init_aead(cur->sgout, cur->xoutbuf,
					     *b_size + (enc ? authsize : 0),
					     assoc, aad_size);

				aead_request_set_ad(cur->req, aad_size);

				if (!enc) {

					aead_request_set_crypt(cur->req,
							       cur->sgout,
							       cur->sg,
							       *b_size, iv);
					ret = crypto_aead_encrypt(cur->req);
					ret = do_one_aead_op(cur->req, ret);

					if (ret) {
401 pr_err("calculating auth failed failed (%d)\n",
402 ret);
						break;
					}
				}

				aead_request_set_crypt(cur->req, cur->sg,
						       cur->sgout, *b_size +
						       (enc ? 0 : authsize),
						       iv);

			}

			if (secs) {
				ret = test_mb_aead_jiffies(data, enc, *b_size,
							   secs, num_mb);
				cond_resched();
			} else {
				ret = test_mb_aead_cycles(data, enc, *b_size,
							  num_mb);
			}

			if (ret) {
				pr_err("%s() failed return code=%d\n", e, ret);
				break;
			}
			b_size++;
			i++;
		} while (*b_size);
		keysize++;
	} while (*keysize);

out:
	for (i = 0; i < num_mb; ++i)
		aead_request_free(data[i].req);
out_free_xoutbuf:
	for (i = 0; i < num_mb; ++i)
		testmgr_free_buf(data[i].xoutbuf);
out_free_axbuf:
	for (i = 0; i < num_mb; ++i)
		testmgr_free_buf(data[i].axbuf);
out_free_xbuf:
	for (i = 0; i < num_mb; ++i)
		testmgr_free_buf(data[i].xbuf);
out_free_tfm:
	crypto_free_aead(tfm);
out_free_data:
	kfree(data);
out_free_iv:
	kfree(iv);
}

static int test_aead_jiffies(struct aead_request *req, int enc,
			     int blen, int secs)
{
	unsigned long start, end;
	int bcount;
	int ret;

	for (start = jiffies, end = start + secs * HZ, bcount = 0;
	     time_before(jiffies, end); bcount++) {
		if (enc)
			ret = do_one_aead_op(req, crypto_aead_encrypt(req));
		else
			ret = do_one_aead_op(req, crypto_aead_decrypt(req));

		if (ret)
			return ret;
	}

	printk("%d operations in %d seconds (%ld bytes)\n",
	       bcount, secs, (long)bcount * blen);
	return 0;
}

static int test_aead_cycles(struct aead_request *req, int enc, int blen)
{
	unsigned long cycles = 0;
	int ret = 0;
	int i;

	/* Warm-up run. */
	for (i = 0; i < 4; i++) {
		if (enc)
			ret = do_one_aead_op(req, crypto_aead_encrypt(req));
		else
			ret = do_one_aead_op(req, crypto_aead_decrypt(req));

		if (ret)
			goto out;
	}

	/* The real thing. */
	for (i = 0; i < 8; i++) {
		cycles_t start, end;

		start = get_cycles();
		if (enc)
			ret = do_one_aead_op(req, crypto_aead_encrypt(req));
		else
			ret = do_one_aead_op(req, crypto_aead_decrypt(req));
		end = get_cycles();

		if (ret)
			goto out;

		cycles += end - start;
	}

out:
	if (ret == 0)
		printk("1 operation in %lu cycles (%d bytes)\n",
		       (cycles + 4) / 8, blen);

	return ret;
}

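/*
 * Single-request AEAD throughput test: same key/size matrix as the
 * multibuffer variant above, but drives one request at a time.  For
 * decryption, the buffer is encrypted first (with input and output
 * scatterlists swapped) so that the authentication tag is valid.
 */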
static void test_aead_speed(const char *algo, int enc, unsigned int secs,
			    struct aead_speed_template *template,
			    unsigned int tcount, u8 authsize,
			    unsigned int aad_size, u8 *keysize)
{
	unsigned int i, j;
	struct crypto_aead *tfm;
	int ret = -ENOMEM;
	const char *key;
	struct aead_request *req;
	struct scatterlist *sg;
	struct scatterlist *sgout;
	const char *e;
	void *assoc;
	char *iv;
	char *xbuf[XBUFSIZE];
	char *xoutbuf[XBUFSIZE];
	char *axbuf[XBUFSIZE];
	unsigned int *b_size;
	unsigned int iv_len;
	struct crypto_wait wait;

	iv = kzalloc(MAX_IVLEN, GFP_KERNEL);
	if (!iv)
		return;

	if (aad_size >= PAGE_SIZE) {
545 pr_err("associate data length (%u) too big\n", aad_size);
		goto out_noxbuf;
	}

	if (enc == ENCRYPT)
		e = "encryption";
	else
		e = "decryption";

	if (testmgr_alloc_buf(xbuf))
		goto out_noxbuf;
	if (testmgr_alloc_buf(axbuf))
		goto out_noaxbuf;
	if (testmgr_alloc_buf(xoutbuf))
		goto out_nooutbuf;

	sg = kmalloc(sizeof(*sg) * 9 * 2, GFP_KERNEL);
	if (!sg)
		goto out_nosg;
	sgout = &sg[9];

	tfm = crypto_alloc_aead(algo, 0, 0);

	if (IS_ERR(tfm)) {
		pr_err("alg: aead: Failed to load transform for %s: %ld\n", algo,
		       PTR_ERR(tfm));
		goto out_notfm;
	}

	crypto_init_wait(&wait);
	printk(KERN_INFO "\ntesting speed of %s (%s) %s\n", algo,
	       get_driver_name(crypto_aead, tfm), e);

	req = aead_request_alloc(tfm, GFP_KERNEL);
	if (!req) {
		pr_err("alg: aead: Failed to allocate request for %s\n",
		       algo);
		goto out_noreq;
	}

	aead_request_set_callback(req, CRYPTO_TFM_REQ_MAY_BACKLOG,
				  crypto_req_done, &wait);

	i = 0;
	do {
		b_size = aead_sizes;
		do {
			assoc = axbuf[0];
			memset(assoc, 0xff, aad_size);

			if ((*keysize + *b_size) > TVMEMSIZE * PAGE_SIZE) {
				pr_err("template (%u) too big for tvmem (%lu)\n",
				       *keysize + *b_size,
				       TVMEMSIZE * PAGE_SIZE);
				goto out;
			}

			key = tvmem[0];
			for (j = 0; j < tcount; j++) {
				if (template[j].klen == *keysize) {
					key = template[j].key;
					break;
				}
			}
			ret = crypto_aead_setkey(tfm, key, *keysize);
			ret = crypto_aead_setauthsize(tfm, authsize);

			iv_len = crypto_aead_ivsize(tfm);
			if (iv_len)
				memset(iv, 0xff, iv_len);

			crypto_aead_clear_flags(tfm, ~0);
			printk(KERN_INFO "test %u (%d bit key, %d byte blocks): ",
			       i, *keysize * 8, *b_size);

			memset(tvmem[0], 0xff, PAGE_SIZE);

			if (ret) {
				pr_err("setkey() failed flags=%x\n",
				       crypto_aead_get_flags(tfm));
				goto out;
			}

			sg_init_aead(sg, xbuf, *b_size + (enc ? 0 : authsize),
				     assoc, aad_size);

			sg_init_aead(sgout, xoutbuf,
				     *b_size + (enc ? authsize : 0), assoc,
				     aad_size);

			aead_request_set_ad(req, aad_size);

			if (!enc) {

				/*
				 * For decryption we need a proper auth so
				 * we do the encryption path once with buffers
				 * reversed (input <-> output) to calculate it
				 */
				aead_request_set_crypt(req, sgout, sg,
						       *b_size, iv);
				ret = do_one_aead_op(req,
						     crypto_aead_encrypt(req));

				if (ret) {
651 pr_err("calculating auth failed failed (%d)\n",
652 ret);
					break;
				}
			}

			aead_request_set_crypt(req, sg, sgout,
					       *b_size + (enc ? 0 : authsize),
					       iv);

			if (secs) {
				ret = test_aead_jiffies(req, enc, *b_size,
							secs);
				cond_resched();
			} else {
				ret = test_aead_cycles(req, enc, *b_size);
			}

			if (ret) {
				pr_err("%s() failed return code=%d\n", e, ret);
				break;
			}
			b_size++;
			i++;
		} while (*b_size);
		keysize++;
	} while (*keysize);

out:
	aead_request_free(req);
out_noreq:
	crypto_free_aead(tfm);
out_notfm:
	kfree(sg);
out_nosg:
	testmgr_free_buf(xoutbuf);
out_nooutbuf:
	testmgr_free_buf(axbuf);
out_noaxbuf:
	testmgr_free_buf(xbuf);
out_noxbuf:
	kfree(iv);
}

static void test_hash_sg_init(struct scatterlist *sg)
{
	int i;

	sg_init_table(sg, TVMEMSIZE);
	for (i = 0; i < TVMEMSIZE; i++) {
		sg_set_buf(sg + i, tvmem[i], PAGE_SIZE);
		memset(tvmem[i], 0xff, PAGE_SIZE);
	}
}

static inline int do_one_ahash_op(struct ahash_request *req, int ret)
{
	struct crypto_wait *wait = req->base.data;

	return crypto_wait_req(ret, wait);
}

struct test_mb_ahash_data {
	struct scatterlist sg[XBUFSIZE];
	char result[64];
	struct ahash_request *req;
	struct crypto_wait wait;
	char *xbuf[XBUFSIZE];
};

static inline int do_mult_ahash_op(struct test_mb_ahash_data *data, u32 num_mb,
				   int *rc)
{
	int i, err = 0;

	/* Fire up a bunch of concurrent requests */
	for (i = 0; i < num_mb; i++)
		rc[i] = crypto_ahash_digest(data[i].req);

	/* Wait for all requests to finish */
	for (i = 0; i < num_mb; i++) {
		rc[i] = crypto_wait_req(rc[i], &data[i].wait);

		if (rc[i]) {
			pr_info("concurrent request %d error %d\n", i, rc[i]);
			err = rc[i];
		}
	}

	return err;
}

static int test_mb_ahash_jiffies(struct test_mb_ahash_data *data, int blen,
				 int secs, u32 num_mb)
{
	unsigned long start, end;
	int bcount;
	int ret = 0;
	int *rc;

	rc = kcalloc(num_mb, sizeof(*rc), GFP_KERNEL);
	if (!rc)
		return -ENOMEM;

	for (start = jiffies, end = start + secs * HZ, bcount = 0;
	     time_before(jiffies, end); bcount++) {
		ret = do_mult_ahash_op(data, num_mb, rc);
		if (ret)
			goto out;
	}

	pr_cont("%d operations in %d seconds (%ld bytes)\n",
		bcount * num_mb, secs, (long)bcount * blen * num_mb);

out:
	kfree(rc);
	return ret;
}

static int test_mb_ahash_cycles(struct test_mb_ahash_data *data, int blen,
				u32 num_mb)
{
	unsigned long cycles = 0;
	int ret = 0;
	int i;
	int *rc;

	rc = kcalloc(num_mb, sizeof(*rc), GFP_KERNEL);
	if (!rc)
		return -ENOMEM;

	/* Warm-up run. */
	for (i = 0; i < 4; i++) {
		ret = do_mult_ahash_op(data, num_mb, rc);
		if (ret)
			goto out;
	}

	/* The real thing. */
	for (i = 0; i < 8; i++) {
		cycles_t start, end;

		start = get_cycles();
		ret = do_mult_ahash_op(data, num_mb, rc);
		end = get_cycles();

		if (ret)
			goto out;

		cycles += end - start;
	}

	pr_cont("1 operation in %lu cycles (%d bytes)\n",
		(cycles + 4) / (8 * num_mb), blen);

out:
	kfree(rc);
	return ret;
}

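/*
 * Multibuffer hash throughput test: only digest-style entries from the
 * speed template are exercised (blen == plen), with num_mb requests hashed
 * concurrently per measurement.
 */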
static void test_mb_ahash_speed(const char *algo, unsigned int secs,
				struct hash_speed *speed, u32 num_mb)
{
	struct test_mb_ahash_data *data;
	struct crypto_ahash *tfm;
	unsigned int i, j, k;
	int ret;

	data = kcalloc(num_mb, sizeof(*data), GFP_KERNEL);
	if (!data)
		return;

	tfm = crypto_alloc_ahash(algo, 0, 0);
	if (IS_ERR(tfm)) {
		pr_err("failed to load transform for %s: %ld\n",
		       algo, PTR_ERR(tfm));
		goto free_data;
	}

	for (i = 0; i < num_mb; ++i) {
		if (testmgr_alloc_buf(data[i].xbuf))
			goto out;

		crypto_init_wait(&data[i].wait);

		data[i].req = ahash_request_alloc(tfm, GFP_KERNEL);
		if (!data[i].req) {
			pr_err("alg: hash: Failed to allocate request for %s\n",
			       algo);
			goto out;
		}

		ahash_request_set_callback(data[i].req, 0, crypto_req_done,
					   &data[i].wait);

		sg_init_table(data[i].sg, XBUFSIZE);
		for (j = 0; j < XBUFSIZE; j++) {
			sg_set_buf(data[i].sg + j, data[i].xbuf[j], PAGE_SIZE);
			memset(data[i].xbuf[j], 0xff, PAGE_SIZE);
		}
	}

	pr_info("\ntesting speed of multibuffer %s (%s)\n", algo,
		get_driver_name(crypto_ahash, tfm));

	for (i = 0; speed[i].blen != 0; i++) {
		/* For some reason this only tests digests. */
		if (speed[i].blen != speed[i].plen)
			continue;

		if (speed[i].blen > XBUFSIZE * PAGE_SIZE) {
			pr_err("template (%u) too big for tvmem (%lu)\n",
			       speed[i].blen, XBUFSIZE * PAGE_SIZE);
			goto out;
		}

		if (speed[i].klen)
			crypto_ahash_setkey(tfm, tvmem[0], speed[i].klen);

		for (k = 0; k < num_mb; k++)
			ahash_request_set_crypt(data[k].req, data[k].sg,
						data[k].result, speed[i].blen);

		pr_info("test%3u "
			"(%5u byte blocks,%5u bytes per update,%4u updates): ",
			i, speed[i].blen, speed[i].plen,
			speed[i].blen / speed[i].plen);

		if (secs) {
			ret = test_mb_ahash_jiffies(data, speed[i].blen, secs,
						    num_mb);
			cond_resched();
		} else {
			ret = test_mb_ahash_cycles(data, speed[i].blen, num_mb);
		}

		if (ret) {
			pr_err("At least one hashing failed ret=%d\n", ret);
			break;
		}
	}

out:
	for (k = 0; k < num_mb; ++k)
		ahash_request_free(data[k].req);

	for (k = 0; k < num_mb; ++k)
		testmgr_free_buf(data[k].xbuf);

	crypto_free_ahash(tfm);

free_data:
	kfree(data);
}

static int test_ahash_jiffies_digest(struct ahash_request *req, int blen,
				     char *out, int secs)
{
	unsigned long start, end;
	int bcount;
	int ret;

	for (start = jiffies, end = start + secs * HZ, bcount = 0;
	     time_before(jiffies, end); bcount++) {
		ret = do_one_ahash_op(req, crypto_ahash_digest(req));
		if (ret)
			return ret;
	}

	printk("%6u opers/sec, %9lu bytes/sec\n",
	       bcount / secs, ((long)bcount * blen) / secs);

	return 0;
}

static int test_ahash_jiffies(struct ahash_request *req, int blen,
			      int plen, char *out, int secs)
{
	unsigned long start, end;
	int bcount, pcount;
	int ret;

	if (plen == blen)
		return test_ahash_jiffies_digest(req, blen, out, secs);

	for (start = jiffies, end = start + secs * HZ, bcount = 0;
	     time_before(jiffies, end); bcount++) {
		ret = do_one_ahash_op(req, crypto_ahash_init(req));
		if (ret)
			return ret;
		for (pcount = 0; pcount < blen; pcount += plen) {
			ret = do_one_ahash_op(req, crypto_ahash_update(req));
			if (ret)
				return ret;
		}
		/* we assume there is enough space in 'out' for the result */
		ret = do_one_ahash_op(req, crypto_ahash_final(req));
		if (ret)
			return ret;
	}

	pr_cont("%6u opers/sec, %9lu bytes/sec\n",
		bcount / secs, ((long)bcount * blen) / secs);

	return 0;
}

static int test_ahash_cycles_digest(struct ahash_request *req, int blen,
				    char *out)
{
	unsigned long cycles = 0;
	int ret, i;

	/* Warm-up run. */
	for (i = 0; i < 4; i++) {
		ret = do_one_ahash_op(req, crypto_ahash_digest(req));
		if (ret)
			goto out;
	}

	/* The real thing. */
	for (i = 0; i < 8; i++) {
		cycles_t start, end;

		start = get_cycles();

		ret = do_one_ahash_op(req, crypto_ahash_digest(req));
		if (ret)
			goto out;

		end = get_cycles();

		cycles += end - start;
	}

out:
	if (ret)
		return ret;

	pr_cont("%6lu cycles/operation, %4lu cycles/byte\n",
		cycles / 8, cycles / (8 * blen));

	return 0;
}

static int test_ahash_cycles(struct ahash_request *req, int blen,
			     int plen, char *out)
{
	unsigned long cycles = 0;
	int i, pcount, ret;

	if (plen == blen)
		return test_ahash_cycles_digest(req, blen, out);

	/* Warm-up run. */
	for (i = 0; i < 4; i++) {
		ret = do_one_ahash_op(req, crypto_ahash_init(req));
		if (ret)
			goto out;
		for (pcount = 0; pcount < blen; pcount += plen) {
			ret = do_one_ahash_op(req, crypto_ahash_update(req));
			if (ret)
				goto out;
		}
		ret = do_one_ahash_op(req, crypto_ahash_final(req));
		if (ret)
			goto out;
	}

	/* The real thing. */
	for (i = 0; i < 8; i++) {
		cycles_t start, end;

		start = get_cycles();

		ret = do_one_ahash_op(req, crypto_ahash_init(req));
		if (ret)
			goto out;
		for (pcount = 0; pcount < blen; pcount += plen) {
			ret = do_one_ahash_op(req, crypto_ahash_update(req));
			if (ret)
				goto out;
		}
		ret = do_one_ahash_op(req, crypto_ahash_final(req));
		if (ret)
			goto out;

		end = get_cycles();

		cycles += end - start;
	}

out:
	if (ret)
		return ret;

	pr_cont("%6lu cycles/operation, %4lu cycles/byte\n",
		cycles / 8, cycles / (8 * blen));

	return 0;
}

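/*
 * Common driver for the single-request hash speed tests.  "mask" selects the
 * implementation: 0 allows asynchronous drivers (test_ahash_speed), while
 * CRYPTO_ALG_ASYNC restricts the allocation to synchronous ones
 * (test_hash_speed).
 */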
static void test_ahash_speed_common(const char *algo, unsigned int secs,
				    struct hash_speed *speed, unsigned mask)
{
	struct scatterlist sg[TVMEMSIZE];
	struct crypto_wait wait;
	struct ahash_request *req;
	struct crypto_ahash *tfm;
	char *output;
	int i, ret;

	tfm = crypto_alloc_ahash(algo, 0, mask);
	if (IS_ERR(tfm)) {
		pr_err("failed to load transform for %s: %ld\n",
		       algo, PTR_ERR(tfm));
		return;
	}

	printk(KERN_INFO "\ntesting speed of async %s (%s)\n", algo,
	       get_driver_name(crypto_ahash, tfm));

	if (crypto_ahash_digestsize(tfm) > MAX_DIGEST_SIZE) {
		pr_err("digestsize(%u) > %d\n", crypto_ahash_digestsize(tfm),
		       MAX_DIGEST_SIZE);
		goto out;
	}

	test_hash_sg_init(sg);
	req = ahash_request_alloc(tfm, GFP_KERNEL);
	if (!req) {
		pr_err("ahash request allocation failure\n");
		goto out;
	}

	crypto_init_wait(&wait);
	ahash_request_set_callback(req, CRYPTO_TFM_REQ_MAY_BACKLOG,
				   crypto_req_done, &wait);

	output = kmalloc(MAX_DIGEST_SIZE, GFP_KERNEL);
	if (!output)
		goto out_nomem;

	for (i = 0; speed[i].blen != 0; i++) {
		if (speed[i].blen > TVMEMSIZE * PAGE_SIZE) {
			pr_err("template (%u) too big for tvmem (%lu)\n",
			       speed[i].blen, TVMEMSIZE * PAGE_SIZE);
			break;
		}

		if (speed[i].klen)
			crypto_ahash_setkey(tfm, tvmem[0], speed[i].klen);

		pr_info("test%3u "
			"(%5u byte blocks,%5u bytes per update,%4u updates): ",
			i, speed[i].blen, speed[i].plen, speed[i].blen / speed[i].plen);

		ahash_request_set_crypt(req, sg, output, speed[i].plen);

		if (secs) {
			ret = test_ahash_jiffies(req, speed[i].blen,
						 speed[i].plen, output, secs);
			cond_resched();
		} else {
			ret = test_ahash_cycles(req, speed[i].blen,
						speed[i].plen, output);
		}

		if (ret) {
			pr_err("hashing failed ret=%d\n", ret);
			break;
		}
	}

	kfree(output);

out_nomem:
	ahash_request_free(req);

out:
	crypto_free_ahash(tfm);
}

static void test_ahash_speed(const char *algo, unsigned int secs,
			     struct hash_speed *speed)
{
	return test_ahash_speed_common(algo, secs, speed, 0);
}

static void test_hash_speed(const char *algo, unsigned int secs,
			    struct hash_speed *speed)
{
	return test_ahash_speed_common(algo, secs, speed, CRYPTO_ALG_ASYNC);
}

struct test_mb_skcipher_data {
	struct scatterlist sg[XBUFSIZE];
	struct skcipher_request *req;
	struct crypto_wait wait;
	char *xbuf[XBUFSIZE];
};

static int do_mult_acipher_op(struct test_mb_skcipher_data *data, int enc,
			      u32 num_mb, int *rc)
{
	int i, err = 0;

	/* Fire up a bunch of concurrent requests */
	for (i = 0; i < num_mb; i++) {
		if (enc == ENCRYPT)
			rc[i] = crypto_skcipher_encrypt(data[i].req);
		else
			rc[i] = crypto_skcipher_decrypt(data[i].req);
	}

	/* Wait for all requests to finish */
	for (i = 0; i < num_mb; i++) {
		rc[i] = crypto_wait_req(rc[i], &data[i].wait);

		if (rc[i]) {
			pr_info("concurrent request %d error %d\n", i, rc[i]);
			err = rc[i];
		}
	}

	return err;
}

static int test_mb_acipher_jiffies(struct test_mb_skcipher_data *data, int enc,
				   int blen, int secs, u32 num_mb)
{
	unsigned long start, end;
	int bcount;
	int ret = 0;
	int *rc;

	rc = kcalloc(num_mb, sizeof(*rc), GFP_KERNEL);
	if (!rc)
		return -ENOMEM;

	for (start = jiffies, end = start + secs * HZ, bcount = 0;
	     time_before(jiffies, end); bcount++) {
		ret = do_mult_acipher_op(data, enc, num_mb, rc);
		if (ret)
			goto out;
	}

	pr_cont("%d operations in %d seconds (%ld bytes)\n",
		bcount * num_mb, secs, (long)bcount * blen * num_mb);

out:
	kfree(rc);
	return ret;
}

static int test_mb_acipher_cycles(struct test_mb_skcipher_data *data, int enc,
				  int blen, u32 num_mb)
{
	unsigned long cycles = 0;
	int ret = 0;
	int i;
	int *rc;

	rc = kcalloc(num_mb, sizeof(*rc), GFP_KERNEL);
	if (!rc)
		return -ENOMEM;

	/* Warm-up run. */
	for (i = 0; i < 4; i++) {
		ret = do_mult_acipher_op(data, enc, num_mb, rc);
		if (ret)
			goto out;
	}

	/* The real thing. */
	for (i = 0; i < 8; i++) {
		cycles_t start, end;

		start = get_cycles();
		ret = do_mult_acipher_op(data, enc, num_mb, rc);
		end = get_cycles();

		if (ret)
			goto out;

		cycles += end - start;
	}

	pr_cont("1 operation in %lu cycles (%d bytes)\n",
		(cycles + 4) / (8 * num_mb), blen);

out:
	kfree(rc);
	return ret;
}

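/*
 * Multibuffer skcipher throughput test: num_mb in-place requests are driven
 * concurrently for every (keysize, block_sizes[]) combination from the
 * template.
 */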
static void test_mb_skcipher_speed(const char *algo, int enc, int secs,
				   struct cipher_speed_template *template,
				   unsigned int tcount, u8 *keysize, u32 num_mb)
{
	struct test_mb_skcipher_data *data;
	struct crypto_skcipher *tfm;
	unsigned int i, j, iv_len;
	const char *key;
	const char *e;
	u32 *b_size;
	char iv[128];
	int ret;

	if (enc == ENCRYPT)
		e = "encryption";
	else
		e = "decryption";

	data = kcalloc(num_mb, sizeof(*data), GFP_KERNEL);
	if (!data)
		return;

	tfm = crypto_alloc_skcipher(algo, 0, 0);
	if (IS_ERR(tfm)) {
		pr_err("failed to load transform for %s: %ld\n",
		       algo, PTR_ERR(tfm));
		goto out_free_data;
	}

	for (i = 0; i < num_mb; ++i)
		if (testmgr_alloc_buf(data[i].xbuf)) {
			while (i--)
				testmgr_free_buf(data[i].xbuf);
			goto out_free_tfm;
		}
	for (i = 0; i < num_mb; ++i) {
		data[i].req = skcipher_request_alloc(tfm, GFP_KERNEL);
		if (!data[i].req) {
			pr_err("alg: skcipher: Failed to allocate request for %s\n",
			       algo);
			while (i--)
				skcipher_request_free(data[i].req);
			goto out_free_xbuf;
		}
	}

	for (i = 0; i < num_mb; ++i) {
		skcipher_request_set_callback(data[i].req,
					      CRYPTO_TFM_REQ_MAY_BACKLOG,
					      crypto_req_done, &data[i].wait);
		crypto_init_wait(&data[i].wait);
	}

	pr_info("\ntesting speed of multibuffer %s (%s) %s\n", algo,
		get_driver_name(crypto_skcipher, tfm), e);

	i = 0;
	do {
		b_size = block_sizes;
		do {
			if (*b_size > XBUFSIZE * PAGE_SIZE) {
				pr_err("template (%u) too big for buffer (%lu)\n",
				       *b_size, XBUFSIZE * PAGE_SIZE);
				goto out;
			}

			pr_info("test %u (%d bit key, %d byte blocks): ", i,
				*keysize * 8, *b_size);

			/* Set up tfm global state, i.e. the key */

			memset(tvmem[0], 0xff, PAGE_SIZE);
			key = tvmem[0];
			for (j = 0; j < tcount; j++) {
				if (template[j].klen == *keysize) {
					key = template[j].key;
					break;
				}
			}

			crypto_skcipher_clear_flags(tfm, ~0);

			ret = crypto_skcipher_setkey(tfm, key, *keysize);
			if (ret) {
				pr_err("setkey() failed flags=%x\n",
				       crypto_skcipher_get_flags(tfm));
				goto out;
			}

			iv_len = crypto_skcipher_ivsize(tfm);
			if (iv_len)
				memset(&iv, 0xff, iv_len);

			/* Now setup per request stuff, i.e. buffers */

			for (j = 0; j < num_mb; ++j) {
				struct test_mb_skcipher_data *cur = &data[j];
				unsigned int k = *b_size;
				unsigned int pages = DIV_ROUND_UP(k, PAGE_SIZE);
				unsigned int p = 0;

				sg_init_table(cur->sg, pages);

				while (k > PAGE_SIZE) {
					sg_set_buf(cur->sg + p, cur->xbuf[p],
						   PAGE_SIZE);
					memset(cur->xbuf[p], 0xff, PAGE_SIZE);
					p++;
					k -= PAGE_SIZE;
				}

				sg_set_buf(cur->sg + p, cur->xbuf[p], k);
				memset(cur->xbuf[p], 0xff, k);

				skcipher_request_set_crypt(cur->req, cur->sg,
							   cur->sg, *b_size,
							   iv);
			}

			if (secs) {
				ret = test_mb_acipher_jiffies(data, enc,
							      *b_size, secs,
							      num_mb);
				cond_resched();
			} else {
				ret = test_mb_acipher_cycles(data, enc,
							     *b_size, num_mb);
			}

			if (ret) {
				pr_err("%s() failed flags=%x\n", e,
				       crypto_skcipher_get_flags(tfm));
				break;
			}
			b_size++;
			i++;
		} while (*b_size);
		keysize++;
	} while (*keysize);

out:
	for (i = 0; i < num_mb; ++i)
		skcipher_request_free(data[i].req);
out_free_xbuf:
	for (i = 0; i < num_mb; ++i)
		testmgr_free_buf(data[i].xbuf);
out_free_tfm:
	crypto_free_skcipher(tfm);
out_free_data:
	kfree(data);
}

static inline int do_one_acipher_op(struct skcipher_request *req, int ret)
{
	struct crypto_wait *wait = req->base.data;

	return crypto_wait_req(ret, wait);
}

static int test_acipher_jiffies(struct skcipher_request *req, int enc,
				int blen, int secs)
{
	unsigned long start, end;
	int bcount;
	int ret;

	for (start = jiffies, end = start + secs * HZ, bcount = 0;
	     time_before(jiffies, end); bcount++) {
		if (enc)
			ret = do_one_acipher_op(req,
						crypto_skcipher_encrypt(req));
		else
			ret = do_one_acipher_op(req,
						crypto_skcipher_decrypt(req));

		if (ret)
			return ret;
	}

	pr_cont("%d operations in %d seconds (%ld bytes)\n",
		bcount, secs, (long)bcount * blen);
	return 0;
}

static int test_acipher_cycles(struct skcipher_request *req, int enc,
			       int blen)
{
	unsigned long cycles = 0;
	int ret = 0;
	int i;

	/* Warm-up run. */
	for (i = 0; i < 4; i++) {
		if (enc)
			ret = do_one_acipher_op(req,
						crypto_skcipher_encrypt(req));
		else
			ret = do_one_acipher_op(req,
						crypto_skcipher_decrypt(req));

		if (ret)
			goto out;
	}

	/* The real thing. */
	for (i = 0; i < 8; i++) {
		cycles_t start, end;

		start = get_cycles();
		if (enc)
			ret = do_one_acipher_op(req,
						crypto_skcipher_encrypt(req));
		else
			ret = do_one_acipher_op(req,
						crypto_skcipher_decrypt(req));
		end = get_cycles();

		if (ret)
			goto out;

		cycles += end - start;
	}

out:
	if (ret == 0)
		pr_cont("1 operation in %lu cycles (%d bytes)\n",
			(cycles + 4) / 8, blen);

	return ret;
}

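/*
 * Single-request skcipher speed test.  The key and the plaintext share
 * tvmem: the key occupies the start of the first page and the data follows
 * it, spilling into further tvmem pages for larger block sizes.  The
 * "async" flag selects between any implementation (test_acipher_speed) and
 * synchronous-only ones (test_cipher_speed).
 */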
static void test_skcipher_speed(const char *algo, int enc, unsigned int secs,
				struct cipher_speed_template *template,
				unsigned int tcount, u8 *keysize, bool async)
{
	unsigned int ret, i, j, k, iv_len;
	struct crypto_wait wait;
	const char *key;
	char iv[128];
	struct skcipher_request *req;
	struct crypto_skcipher *tfm;
	const char *e;
	u32 *b_size;

	if (enc == ENCRYPT)
		e = "encryption";
	else
		e = "decryption";

	crypto_init_wait(&wait);

	tfm = crypto_alloc_skcipher(algo, 0, async ? 0 : CRYPTO_ALG_ASYNC);

	if (IS_ERR(tfm)) {
		pr_err("failed to load transform for %s: %ld\n", algo,
		       PTR_ERR(tfm));
		return;
	}

	pr_info("\ntesting speed of async %s (%s) %s\n", algo,
		get_driver_name(crypto_skcipher, tfm), e);

	req = skcipher_request_alloc(tfm, GFP_KERNEL);
	if (!req) {
		pr_err("tcrypt: skcipher: Failed to allocate request for %s\n",
		       algo);
		goto out;
	}

	skcipher_request_set_callback(req, CRYPTO_TFM_REQ_MAY_BACKLOG,
				      crypto_req_done, &wait);

	i = 0;
	do {
		b_size = block_sizes;

		do {
			struct scatterlist sg[TVMEMSIZE];

			if ((*keysize + *b_size) > TVMEMSIZE * PAGE_SIZE) {
				pr_err("template (%u) too big for "
				       "tvmem (%lu)\n", *keysize + *b_size,
				       TVMEMSIZE * PAGE_SIZE);
				goto out_free_req;
			}

			pr_info("test %u (%d bit key, %d byte blocks): ", i,
				*keysize * 8, *b_size);

			memset(tvmem[0], 0xff, PAGE_SIZE);

			/* set key, plain text and IV */
			key = tvmem[0];
			for (j = 0; j < tcount; j++) {
				if (template[j].klen == *keysize) {
					key = template[j].key;
					break;
				}
			}

			crypto_skcipher_clear_flags(tfm, ~0);

			ret = crypto_skcipher_setkey(tfm, key, *keysize);
			if (ret) {
				pr_err("setkey() failed flags=%x\n",
				       crypto_skcipher_get_flags(tfm));
				goto out_free_req;
			}

			k = *keysize + *b_size;
			sg_init_table(sg, DIV_ROUND_UP(k, PAGE_SIZE));

			if (k > PAGE_SIZE) {
				sg_set_buf(sg, tvmem[0] + *keysize,
					   PAGE_SIZE - *keysize);
				k -= PAGE_SIZE;
				j = 1;
				while (k > PAGE_SIZE) {
					sg_set_buf(sg + j, tvmem[j], PAGE_SIZE);
					memset(tvmem[j], 0xff, PAGE_SIZE);
					j++;
					k -= PAGE_SIZE;
				}
				sg_set_buf(sg + j, tvmem[j], k);
				memset(tvmem[j], 0xff, k);
			} else {
				sg_set_buf(sg, tvmem[0] + *keysize, *b_size);
			}

			iv_len = crypto_skcipher_ivsize(tfm);
			if (iv_len)
				memset(&iv, 0xff, iv_len);

			skcipher_request_set_crypt(req, sg, sg, *b_size, iv);

			if (secs) {
				ret = test_acipher_jiffies(req, enc,
							   *b_size, secs);
				cond_resched();
			} else {
				ret = test_acipher_cycles(req, enc,
							  *b_size);
			}

			if (ret) {
				pr_err("%s() failed flags=%x\n", e,
				       crypto_skcipher_get_flags(tfm));
				break;
			}
			b_size++;
			i++;
		} while (*b_size);
		keysize++;
	} while (*keysize);

out_free_req:
	skcipher_request_free(req);
out:
	crypto_free_skcipher(tfm);
}

static void test_acipher_speed(const char *algo, int enc, unsigned int secs,
			       struct cipher_speed_template *template,
			       unsigned int tcount, u8 *keysize)
{
	return test_skcipher_speed(algo, enc, secs, template, tcount, keysize,
				   true);
}

static void test_cipher_speed(const char *algo, int enc, unsigned int secs,
			      struct cipher_speed_template *template,
			      unsigned int tcount, u8 *keysize)
{
	return test_skcipher_speed(algo, enc, secs, template, tcount, keysize,
				   false);
}

static void test_available(void)
{
	char **name = check;

	while (*name) {
		printk("alg %s ", *name);
		printk(crypto_has_alg(*name, 0, 0) ?
		       "found\n" : "not found\n");
		name++;
	}
}

Herbert Xu01b32322008-07-31 15:41:55 +08001647static inline int tcrypt_test(const char *alg)
1648{
Jarod Wilson4e033a62009-05-27 15:10:21 +10001649 int ret;
1650
Rabin Vincent76512f22017-01-18 14:54:05 +01001651 pr_debug("testing %s\n", alg);
1652
Jarod Wilson4e033a62009-05-27 15:10:21 +10001653 ret = alg_test(alg, alg, 0, 0);
1654 /* non-fips algs return -EINVAL in fips mode */
1655 if (fips_enabled && ret == -EINVAL)
1656 ret = 0;
1657 return ret;
Herbert Xu01b32322008-07-31 15:41:55 +08001658}
1659
Kees Cook4e234ee2018-04-26 19:57:28 -07001660static int do_test(const char *alg, u32 type, u32 mask, int m, u32 num_mb)
Herbert Xu01b32322008-07-31 15:41:55 +08001661{
1662 int i;
Jarod Wilson4e033a62009-05-27 15:10:21 +10001663 int ret = 0;
Herbert Xu01b32322008-07-31 15:41:55 +08001664
1665 switch (m) {
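	/*
	 * Mode 0: with an explicit alg= just report whether it is available;
	 * otherwise run every self-test mode (1..199) in sequence.
	 */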
Linus Torvalds1da177e2005-04-16 15:20:36 -07001666 case 0:
Herbert Xu86068132014-12-04 16:43:29 +08001667 if (alg) {
1668 if (!crypto_has_alg(alg, type,
1669 mask ?: CRYPTO_ALG_TYPE_MASK))
1670 ret = -ENOENT;
1671 break;
1672 }
1673
Herbert Xu01b32322008-07-31 15:41:55 +08001674 for (i = 1; i < 200; i++)
Kees Cook4e234ee2018-04-26 19:57:28 -07001675 ret += do_test(NULL, 0, 0, i, num_mb);
Linus Torvalds1da177e2005-04-16 15:20:36 -07001676 break;
1677
1678 case 1:
Jarod Wilson4e033a62009-05-27 15:10:21 +10001679 ret += tcrypt_test("md5");
Linus Torvalds1da177e2005-04-16 15:20:36 -07001680 break;
1681
1682 case 2:
Jarod Wilson4e033a62009-05-27 15:10:21 +10001683 ret += tcrypt_test("sha1");
Linus Torvalds1da177e2005-04-16 15:20:36 -07001684 break;
1685
1686 case 3:
Jarod Wilson4e033a62009-05-27 15:10:21 +10001687 ret += tcrypt_test("ecb(des)");
1688 ret += tcrypt_test("cbc(des)");
Jussi Kivilinna8163fc32012-10-20 14:53:07 +03001689 ret += tcrypt_test("ctr(des)");
Linus Torvalds1da177e2005-04-16 15:20:36 -07001690 break;
1691
1692 case 4:
Jarod Wilson4e033a62009-05-27 15:10:21 +10001693 ret += tcrypt_test("ecb(des3_ede)");
1694 ret += tcrypt_test("cbc(des3_ede)");
Jussi Kivilinnae080b172012-10-20 14:53:12 +03001695 ret += tcrypt_test("ctr(des3_ede)");
Linus Torvalds1da177e2005-04-16 15:20:36 -07001696 break;
1697
1698 case 5:
Jarod Wilson4e033a62009-05-27 15:10:21 +10001699 ret += tcrypt_test("md4");
Linus Torvalds1da177e2005-04-16 15:20:36 -07001700 break;
Herbert Xuef2736f2005-06-22 13:26:03 -07001701
Linus Torvalds1da177e2005-04-16 15:20:36 -07001702 case 6:
Jarod Wilson4e033a62009-05-27 15:10:21 +10001703 ret += tcrypt_test("sha256");
Linus Torvalds1da177e2005-04-16 15:20:36 -07001704 break;
Herbert Xuef2736f2005-06-22 13:26:03 -07001705
Linus Torvalds1da177e2005-04-16 15:20:36 -07001706 case 7:
Jarod Wilson4e033a62009-05-27 15:10:21 +10001707 ret += tcrypt_test("ecb(blowfish)");
1708 ret += tcrypt_test("cbc(blowfish)");
Jussi Kivilinna85b63e32011-10-10 23:03:03 +03001709 ret += tcrypt_test("ctr(blowfish)");
Linus Torvalds1da177e2005-04-16 15:20:36 -07001710 break;
1711
1712 case 8:
Jarod Wilson4e033a62009-05-27 15:10:21 +10001713 ret += tcrypt_test("ecb(twofish)");
1714 ret += tcrypt_test("cbc(twofish)");
Jussi Kivilinna573da622011-10-10 23:03:12 +03001715 ret += tcrypt_test("ctr(twofish)");
Jussi Kivilinnabee3a902011-10-18 13:32:56 +03001716 ret += tcrypt_test("lrw(twofish)");
Jussi Kivilinna131f7542011-10-18 13:33:38 +03001717 ret += tcrypt_test("xts(twofish)");
Linus Torvalds1da177e2005-04-16 15:20:36 -07001718 break;
Herbert Xuef2736f2005-06-22 13:26:03 -07001719
Linus Torvalds1da177e2005-04-16 15:20:36 -07001720 case 9:
Jarod Wilson4e033a62009-05-27 15:10:21 +10001721 ret += tcrypt_test("ecb(serpent)");
Jussi Kivilinna9d259172011-10-18 00:02:53 +03001722 ret += tcrypt_test("cbc(serpent)");
1723 ret += tcrypt_test("ctr(serpent)");
Jussi Kivilinna87aae4b2011-10-18 13:32:39 +03001724 ret += tcrypt_test("lrw(serpent)");
Jussi Kivilinna5209c072011-10-18 13:33:22 +03001725 ret += tcrypt_test("xts(serpent)");
Linus Torvalds1da177e2005-04-16 15:20:36 -07001726 break;
1727
1728 case 10:
Jarod Wilson4e033a62009-05-27 15:10:21 +10001729 ret += tcrypt_test("ecb(aes)");
1730 ret += tcrypt_test("cbc(aes)");
1731 ret += tcrypt_test("lrw(aes)");
1732 ret += tcrypt_test("xts(aes)");
1733 ret += tcrypt_test("ctr(aes)");
1734 ret += tcrypt_test("rfc3686(ctr(aes))");
Gilad Ben-Yossefdfb89ab2018-09-20 14:18:40 +01001735 ret += tcrypt_test("ofb(aes)");
Dmitry Eremin-Solenikov7da66672018-10-20 02:01:53 +03001736 ret += tcrypt_test("cfb(aes)");
Linus Torvalds1da177e2005-04-16 15:20:36 -07001737 break;
1738
1739 case 11:
Jarod Wilson4e033a62009-05-27 15:10:21 +10001740 ret += tcrypt_test("sha384");
Linus Torvalds1da177e2005-04-16 15:20:36 -07001741 break;
Herbert Xuef2736f2005-06-22 13:26:03 -07001742
Linus Torvalds1da177e2005-04-16 15:20:36 -07001743 case 12:
Jarod Wilson4e033a62009-05-27 15:10:21 +10001744 ret += tcrypt_test("sha512");
Linus Torvalds1da177e2005-04-16 15:20:36 -07001745 break;
1746
1747 case 13:
Jarod Wilson4e033a62009-05-27 15:10:21 +10001748 ret += tcrypt_test("deflate");
Linus Torvalds1da177e2005-04-16 15:20:36 -07001749 break;
1750
1751 case 14:
Jarod Wilson4e033a62009-05-27 15:10:21 +10001752 ret += tcrypt_test("ecb(cast5)");
Johannes Goetzfrieda2c58262012-07-11 19:37:21 +02001753 ret += tcrypt_test("cbc(cast5)");
1754 ret += tcrypt_test("ctr(cast5)");
Linus Torvalds1da177e2005-04-16 15:20:36 -07001755 break;
1756
1757 case 15:
Jarod Wilson4e033a62009-05-27 15:10:21 +10001758 ret += tcrypt_test("ecb(cast6)");
Johannes Goetzfried9b8b0402012-07-11 19:38:29 +02001759 ret += tcrypt_test("cbc(cast6)");
1760 ret += tcrypt_test("ctr(cast6)");
1761 ret += tcrypt_test("lrw(cast6)");
1762 ret += tcrypt_test("xts(cast6)");
Linus Torvalds1da177e2005-04-16 15:20:36 -07001763 break;
1764
1765 case 16:
Jarod Wilson4e033a62009-05-27 15:10:21 +10001766 ret += tcrypt_test("ecb(arc4)");
Linus Torvalds1da177e2005-04-16 15:20:36 -07001767 break;
1768
1769 case 17:
Jarod Wilson4e033a62009-05-27 15:10:21 +10001770 ret += tcrypt_test("michael_mic");
Linus Torvalds1da177e2005-04-16 15:20:36 -07001771 break;
1772
1773 case 18:
Jarod Wilson4e033a62009-05-27 15:10:21 +10001774 ret += tcrypt_test("crc32c");
Linus Torvalds1da177e2005-04-16 15:20:36 -07001775 break;
1776
1777 case 19:
Jarod Wilson4e033a62009-05-27 15:10:21 +10001778 ret += tcrypt_test("ecb(tea)");
Linus Torvalds1da177e2005-04-16 15:20:36 -07001779 break;
1780
1781 case 20:
Jarod Wilson4e033a62009-05-27 15:10:21 +10001782 ret += tcrypt_test("ecb(xtea)");
Linus Torvalds1da177e2005-04-16 15:20:36 -07001783 break;
1784
1785 case 21:
Jarod Wilson4e033a62009-05-27 15:10:21 +10001786 ret += tcrypt_test("ecb(khazad)");
Linus Torvalds1da177e2005-04-16 15:20:36 -07001787 break;
1788
1789 case 22:
Jarod Wilson4e033a62009-05-27 15:10:21 +10001790 ret += tcrypt_test("wp512");
Linus Torvalds1da177e2005-04-16 15:20:36 -07001791 break;
1792
1793 case 23:
Jarod Wilson4e033a62009-05-27 15:10:21 +10001794 ret += tcrypt_test("wp384");
Linus Torvalds1da177e2005-04-16 15:20:36 -07001795 break;
1796
1797 case 24:
Jarod Wilson4e033a62009-05-27 15:10:21 +10001798 ret += tcrypt_test("wp256");
Linus Torvalds1da177e2005-04-16 15:20:36 -07001799 break;
1800
1801 case 25:
Jarod Wilson4e033a62009-05-27 15:10:21 +10001802 ret += tcrypt_test("ecb(tnepres)");
Linus Torvalds1da177e2005-04-16 15:20:36 -07001803 break;
1804
1805 case 26:
Jarod Wilson4e033a62009-05-27 15:10:21 +10001806 ret += tcrypt_test("ecb(anubis)");
1807 ret += tcrypt_test("cbc(anubis)");
Linus Torvalds1da177e2005-04-16 15:20:36 -07001808 break;
1809
1810 case 27:
Jarod Wilson4e033a62009-05-27 15:10:21 +10001811 ret += tcrypt_test("tgr192");
Linus Torvalds1da177e2005-04-16 15:20:36 -07001812 break;
1813
1814 case 28:
Jarod Wilson4e033a62009-05-27 15:10:21 +10001815 ret += tcrypt_test("tgr160");
Linus Torvalds1da177e2005-04-16 15:20:36 -07001816 break;
1817
1818 case 29:
Jarod Wilson4e033a62009-05-27 15:10:21 +10001819 ret += tcrypt_test("tgr128");
Linus Torvalds1da177e2005-04-16 15:20:36 -07001820 break;
Adrian-Ken Rueegsegger2998db32008-05-09 21:29:35 +08001821
Aaron Grothefb4f10e2005-09-01 17:42:46 -07001822 case 30:
Jarod Wilson4e033a62009-05-27 15:10:21 +10001823 ret += tcrypt_test("ecb(xeta)");
Aaron Grothefb4f10e2005-09-01 17:42:46 -07001824 break;
Linus Torvalds1da177e2005-04-16 15:20:36 -07001825
David Howells90831632006-12-16 12:13:14 +11001826 case 31:
Jarod Wilson4e033a62009-05-27 15:10:21 +10001827 ret += tcrypt_test("pcbc(fcrypt)");
David Howells90831632006-12-16 12:13:14 +11001828 break;
1829
Noriaki TAKAMIYA02ab5a72007-01-24 21:48:19 +11001830 case 32:
Jarod Wilson4e033a62009-05-27 15:10:21 +10001831 ret += tcrypt_test("ecb(camellia)");
1832 ret += tcrypt_test("cbc(camellia)");
Jussi Kivilinna54216bb2012-09-21 10:27:10 +03001833 ret += tcrypt_test("ctr(camellia)");
1834 ret += tcrypt_test("lrw(camellia)");
1835 ret += tcrypt_test("xts(camellia)");
Noriaki TAKAMIYA02ab5a72007-01-24 21:48:19 +11001836 break;
Jussi Kivilinna93b5e862013-04-08 10:48:44 +03001837
Jonathan Lynchcd12fb92007-11-10 20:08:25 +08001838 case 33:
Jarod Wilson4e033a62009-05-27 15:10:21 +10001839 ret += tcrypt_test("sha224");
Jonathan Lynchcd12fb92007-11-10 20:08:25 +08001840 break;
Noriaki TAKAMIYA02ab5a72007-01-24 21:48:19 +11001841
Tan Swee Heng2407d602007-11-23 19:45:00 +08001842 case 34:
Jarod Wilson4e033a62009-05-27 15:10:21 +10001843 ret += tcrypt_test("salsa20");
Tan Swee Heng2407d602007-11-23 19:45:00 +08001844 break;
1845
Herbert Xu8df213d2007-12-02 14:55:47 +11001846 case 35:
Jarod Wilson4e033a62009-05-27 15:10:21 +10001847 ret += tcrypt_test("gcm(aes)");
Herbert Xu8df213d2007-12-02 14:55:47 +11001848 break;
1849
Zoltan Sogor0b77abb2007-12-07 16:53:23 +08001850 case 36:
Jarod Wilson4e033a62009-05-27 15:10:21 +10001851 ret += tcrypt_test("lzo");
Zoltan Sogor0b77abb2007-12-07 16:53:23 +08001852 break;
1853
Joy Latten93cc74e2007-12-12 20:24:22 +08001854 case 37:
Jarod Wilson4e033a62009-05-27 15:10:21 +10001855 ret += tcrypt_test("ccm(aes)");
Joy Latten93cc74e2007-12-12 20:24:22 +08001856 break;
1857
Kevin Coffman76cb9522008-03-24 21:26:16 +08001858 case 38:
Jarod Wilson4e033a62009-05-27 15:10:21 +10001859 ret += tcrypt_test("cts(cbc(aes))");
Kevin Coffman76cb9522008-03-24 21:26:16 +08001860 break;
1861
Adrian-Ken Rueegseggerfd4adf12008-05-07 22:16:36 +08001862 case 39:
Jarod Wilson4e033a62009-05-27 15:10:21 +10001863 ret += tcrypt_test("rmd128");
Adrian-Ken Rueegseggerfd4adf12008-05-07 22:16:36 +08001864 break;
1865
1866 case 40:
Jarod Wilson4e033a62009-05-27 15:10:21 +10001867 ret += tcrypt_test("rmd160");
Adrian-Ken Rueegseggerfd4adf12008-05-07 22:16:36 +08001868 break;
1869
Adrian-Ken Rueegsegger2998db32008-05-09 21:29:35 +08001870 case 41:
Jarod Wilson4e033a62009-05-27 15:10:21 +10001871 ret += tcrypt_test("rmd256");
Adrian-Ken Rueegsegger2998db32008-05-09 21:29:35 +08001872 break;
1873
1874 case 42:
Jarod Wilson4e033a62009-05-27 15:10:21 +10001875 ret += tcrypt_test("rmd320");
Herbert Xu01b32322008-07-31 15:41:55 +08001876 break;
1877
1878 case 43:
Jarod Wilson4e033a62009-05-27 15:10:21 +10001879 ret += tcrypt_test("ecb(seed)");
Adrian-Ken Rueegsegger2998db32008-05-09 21:29:35 +08001880 break;
1881
Jarod Wilson5d667322009-05-04 19:23:40 +08001882 case 45:
Jarod Wilson4e033a62009-05-27 15:10:21 +10001883 ret += tcrypt_test("rfc4309(ccm(aes))");
Jarod Wilson5d667322009-05-04 19:23:40 +08001884 break;
1885
Jussi Kivilinna54216bb2012-09-21 10:27:10 +03001886 case 46:
1887 ret += tcrypt_test("ghash");
1888 break;
1889
Herbert Xu684115212013-09-07 12:56:26 +10001890 case 47:
1891 ret += tcrypt_test("crct10dif");
1892 break;
1893
raveendra padasalagi79cc6ab2016-06-17 10:30:36 +05301894 case 48:
1895 ret += tcrypt_test("sha3-224");
1896 break;
1897
1898 case 49:
1899 ret += tcrypt_test("sha3-256");
1900 break;
1901
1902 case 50:
1903 ret += tcrypt_test("sha3-384");
1904 break;
1905
1906 case 51:
1907 ret += tcrypt_test("sha3-512");
1908 break;
1909
Gilad Ben-Yossefb7e27532017-08-21 13:51:29 +03001910 case 52:
1911 ret += tcrypt_test("sm3");
1912 break;
1913
Vitaly Chikunov25a0b9d2018-11-07 00:00:03 +03001914 case 53:
1915 ret += tcrypt_test("streebog256");
1916 break;
1917
1918 case 54:
1919 ret += tcrypt_test("streebog512");
1920 break;
1921
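	/* 100-series: MAC (keyed hash) self-tests. */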
Linus Torvalds1da177e2005-04-16 15:20:36 -07001922 case 100:
Jarod Wilson4e033a62009-05-27 15:10:21 +10001923 ret += tcrypt_test("hmac(md5)");
Linus Torvalds1da177e2005-04-16 15:20:36 -07001924 break;
Herbert Xuef2736f2005-06-22 13:26:03 -07001925
Linus Torvalds1da177e2005-04-16 15:20:36 -07001926 case 101:
Jarod Wilson4e033a62009-05-27 15:10:21 +10001927 ret += tcrypt_test("hmac(sha1)");
Linus Torvalds1da177e2005-04-16 15:20:36 -07001928 break;
Herbert Xuef2736f2005-06-22 13:26:03 -07001929
Linus Torvalds1da177e2005-04-16 15:20:36 -07001930 case 102:
Jarod Wilson4e033a62009-05-27 15:10:21 +10001931 ret += tcrypt_test("hmac(sha256)");
Linus Torvalds1da177e2005-04-16 15:20:36 -07001932 break;
1933
Andrew Donofrioa28091a2006-12-10 12:10:20 +11001934 case 103:
Jarod Wilson4e033a62009-05-27 15:10:21 +10001935 ret += tcrypt_test("hmac(sha384)");
Andrew Donofrioa28091a2006-12-10 12:10:20 +11001936 break;
1937
1938 case 104:
Jarod Wilson4e033a62009-05-27 15:10:21 +10001939 ret += tcrypt_test("hmac(sha512)");
Andrew Donofrioa28091a2006-12-10 12:10:20 +11001940 break;
Herbert Xu38ed9ab2008-01-01 15:59:28 +11001941
Jonathan Lynchcd12fb92007-11-10 20:08:25 +08001942 case 105:
Jarod Wilson4e033a62009-05-27 15:10:21 +10001943 ret += tcrypt_test("hmac(sha224)");
Jonathan Lynchcd12fb92007-11-10 20:08:25 +08001944 break;
Linus Torvalds1da177e2005-04-16 15:20:36 -07001945
Herbert Xu38ed9ab2008-01-01 15:59:28 +11001946 case 106:
Jarod Wilson4e033a62009-05-27 15:10:21 +10001947 ret += tcrypt_test("xcbc(aes)");
Herbert Xu38ed9ab2008-01-01 15:59:28 +11001948 break;
1949
Adrian-Ken Rueegseggerfd4adf12008-05-07 22:16:36 +08001950 case 107:
Jarod Wilson4e033a62009-05-27 15:10:21 +10001951 ret += tcrypt_test("hmac(rmd128)");
Adrian-Ken Rueegseggerfd4adf12008-05-07 22:16:36 +08001952 break;
1953
1954 case 108:
Jarod Wilson4e033a62009-05-27 15:10:21 +10001955 ret += tcrypt_test("hmac(rmd160)");
Adrian-Ken Rueegseggerfd4adf12008-05-07 22:16:36 +08001956 break;
1957
Shane Wangf1939f72009-09-02 20:05:22 +10001958 case 109:
Eric Biggers0917b872018-06-18 10:22:40 -07001959 ret += tcrypt_test("vmac64(aes)");
Shane Wangf1939f72009-09-02 20:05:22 +10001960 break;
Jussi Kivilinna93b5e862013-04-08 10:48:44 +03001961
raveendra padasalagi98eca722016-07-01 11:16:54 +05301962 case 111:
1963 ret += tcrypt_test("hmac(sha3-224)");
1964 break;
1965
1966 case 112:
1967 ret += tcrypt_test("hmac(sha3-256)");
1968 break;
1969
1970 case 113:
1971 ret += tcrypt_test("hmac(sha3-384)");
1972 break;
1973
1974 case 114:
1975 ret += tcrypt_test("hmac(sha3-512)");
1976 break;
1977
Vitaly Chikunov25a0b9d2018-11-07 00:00:03 +03001978 case 115:
1979 ret += tcrypt_test("hmac(streebog256)");
1980 break;
1981
1982 case 116:
1983 ret += tcrypt_test("hmac(streebog512)");
1984 break;
1985
Jarod Wilsone08ca2d2009-05-04 19:46:29 +08001986 case 150:
Jarod Wilson4e033a62009-05-27 15:10:21 +10001987 ret += tcrypt_test("ansi_cprng");
Jarod Wilsone08ca2d2009-05-04 19:46:29 +08001988 break;
1989
Adrian Hoban69435b92010-11-04 15:02:04 -04001990 case 151:
1991 ret += tcrypt_test("rfc4106(gcm(aes))");
1992 break;
1993
Jussi Kivilinnae9b74412013-04-07 16:43:51 +03001994 case 152:
1995 ret += tcrypt_test("rfc4543(gcm(aes))");
1996 break;
1997
Jussi Kivilinna93b5e862013-04-08 10:48:44 +03001998 case 153:
1999 ret += tcrypt_test("cmac(aes)");
2000 break;
2001
2002 case 154:
2003 ret += tcrypt_test("cmac(des3_ede)");
2004 break;
2005
Horia Geantabbf9c892013-11-28 15:11:16 +02002006 case 155:
2007 ret += tcrypt_test("authenc(hmac(sha1),cbc(aes))");
2008 break;
2009
Horia Geantabca4feb2014-03-14 17:46:51 +02002010 case 156:
2011 ret += tcrypt_test("authenc(hmac(md5),ecb(cipher_null))");
2012 break;
2013
2014 case 157:
2015 ret += tcrypt_test("authenc(hmac(sha1),ecb(cipher_null))");
2016 break;
Nitesh Lal5208ed22014-05-21 17:09:08 +05302017 case 181:
2018 ret += tcrypt_test("authenc(hmac(sha1),cbc(des))");
2019 break;
2020 case 182:
2021 ret += tcrypt_test("authenc(hmac(sha1),cbc(des3_ede))");
2022 break;
2023 case 183:
2024 ret += tcrypt_test("authenc(hmac(sha224),cbc(des))");
2025 break;
2026 case 184:
2027 ret += tcrypt_test("authenc(hmac(sha224),cbc(des3_ede))");
2028 break;
2029 case 185:
2030 ret += tcrypt_test("authenc(hmac(sha256),cbc(des))");
2031 break;
2032 case 186:
2033 ret += tcrypt_test("authenc(hmac(sha256),cbc(des3_ede))");
2034 break;
2035 case 187:
2036 ret += tcrypt_test("authenc(hmac(sha384),cbc(des))");
2037 break;
2038 case 188:
2039 ret += tcrypt_test("authenc(hmac(sha384),cbc(des3_ede))");
2040 break;
2041 case 189:
2042 ret += tcrypt_test("authenc(hmac(sha512),cbc(des))");
2043 break;
2044 case 190:
2045 ret += tcrypt_test("authenc(hmac(sha512),cbc(des3_ede))");
2046 break;
Gilad Ben-Yossefcd83a8a2018-03-06 09:44:43 +00002047 case 191:
2048 ret += tcrypt_test("ecb(sm4)");
Gilad Ben-Yossef95ba5972018-09-20 14:18:38 +01002049 ret += tcrypt_test("cbc(sm4)");
2050 ret += tcrypt_test("ctr(sm4)");
Gilad Ben-Yossefcd83a8a2018-03-06 09:44:43 +00002051 break;
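	/*
	 * 200-series: cipher and AEAD speed tests, typically invoked as e.g.
	 * "modprobe tcrypt mode=200 sec=1".
	 */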
Harald Welteebfd9bc2005-06-22 13:27:23 -07002052 case 200:
Herbert Xucba83562006-08-13 08:26:09 +10002053 test_cipher_speed("ecb(aes)", ENCRYPT, sec, NULL, 0,
Sebastian Siewior477035c2008-03-11 21:24:26 +08002054 speed_template_16_24_32);
Herbert Xucba83562006-08-13 08:26:09 +10002055 test_cipher_speed("ecb(aes)", DECRYPT, sec, NULL, 0,
Sebastian Siewior477035c2008-03-11 21:24:26 +08002056 speed_template_16_24_32);
Herbert Xucba83562006-08-13 08:26:09 +10002057 test_cipher_speed("cbc(aes)", ENCRYPT, sec, NULL, 0,
Sebastian Siewior477035c2008-03-11 21:24:26 +08002058 speed_template_16_24_32);
Herbert Xucba83562006-08-13 08:26:09 +10002059 test_cipher_speed("cbc(aes)", DECRYPT, sec, NULL, 0,
Sebastian Siewior477035c2008-03-11 21:24:26 +08002060 speed_template_16_24_32);
Rik Snelf3d10442006-11-29 19:01:41 +11002061 test_cipher_speed("lrw(aes)", ENCRYPT, sec, NULL, 0,
Sebastian Siewior477035c2008-03-11 21:24:26 +08002062 speed_template_32_40_48);
Rik Snelf3d10442006-11-29 19:01:41 +11002063 test_cipher_speed("lrw(aes)", DECRYPT, sec, NULL, 0,
Sebastian Siewior477035c2008-03-11 21:24:26 +08002064 speed_template_32_40_48);
Rik Snelf19f5112007-09-19 20:23:13 +08002065 test_cipher_speed("xts(aes)", ENCRYPT, sec, NULL, 0,
Horia Geantăb66ad0b2017-07-19 19:40:32 +03002066 speed_template_32_64);
Rik Snelf19f5112007-09-19 20:23:13 +08002067 test_cipher_speed("xts(aes)", DECRYPT, sec, NULL, 0,
Horia Geantăb66ad0b2017-07-19 19:40:32 +03002068 speed_template_32_64);
Herbert Xu1503a242016-06-29 18:04:14 +08002069 test_cipher_speed("cts(cbc(aes))", ENCRYPT, sec, NULL, 0,
2070 speed_template_16_24_32);
2071 test_cipher_speed("cts(cbc(aes))", DECRYPT, sec, NULL, 0,
2072 speed_template_16_24_32);
Jan Glauber9996e342011-04-26 16:34:01 +10002073 test_cipher_speed("ctr(aes)", ENCRYPT, sec, NULL, 0,
2074 speed_template_16_24_32);
2075 test_cipher_speed("ctr(aes)", DECRYPT, sec, NULL, 0,
2076 speed_template_16_24_32);
Dmitry Eremin-Solenikov7da66672018-10-20 02:01:53 +03002077 test_cipher_speed("cfb(aes)", ENCRYPT, sec, NULL, 0,
2078 speed_template_16_24_32);
2079 test_cipher_speed("cfb(aes)", DECRYPT, sec, NULL, 0,
2080 speed_template_16_24_32);
Harald Welteebfd9bc2005-06-22 13:27:23 -07002081 break;
2082
2083 case 201:
Herbert Xucba83562006-08-13 08:26:09 +10002084 test_cipher_speed("ecb(des3_ede)", ENCRYPT, sec,
Herbert Xuda7f0332008-07-31 17:08:25 +08002085 des3_speed_template, DES3_SPEED_VECTORS,
Sebastian Siewior477035c2008-03-11 21:24:26 +08002086 speed_template_24);
Herbert Xucba83562006-08-13 08:26:09 +10002087 test_cipher_speed("ecb(des3_ede)", DECRYPT, sec,
Herbert Xuda7f0332008-07-31 17:08:25 +08002088 des3_speed_template, DES3_SPEED_VECTORS,
Sebastian Siewior477035c2008-03-11 21:24:26 +08002089 speed_template_24);
Herbert Xucba83562006-08-13 08:26:09 +10002090 test_cipher_speed("cbc(des3_ede)", ENCRYPT, sec,
Herbert Xuda7f0332008-07-31 17:08:25 +08002091 des3_speed_template, DES3_SPEED_VECTORS,
Sebastian Siewior477035c2008-03-11 21:24:26 +08002092 speed_template_24);
Herbert Xucba83562006-08-13 08:26:09 +10002093 test_cipher_speed("cbc(des3_ede)", DECRYPT, sec,
Herbert Xuda7f0332008-07-31 17:08:25 +08002094 des3_speed_template, DES3_SPEED_VECTORS,
Sebastian Siewior477035c2008-03-11 21:24:26 +08002095 speed_template_24);
Jussi Kivilinna87131502014-06-09 20:59:49 +03002096 test_cipher_speed("ctr(des3_ede)", ENCRYPT, sec,
2097 des3_speed_template, DES3_SPEED_VECTORS,
2098 speed_template_24);
2099 test_cipher_speed("ctr(des3_ede)", DECRYPT, sec,
2100 des3_speed_template, DES3_SPEED_VECTORS,
2101 speed_template_24);
Harald Welteebfd9bc2005-06-22 13:27:23 -07002102 break;
2103
2104 case 202:
Herbert Xucba83562006-08-13 08:26:09 +10002105 test_cipher_speed("ecb(twofish)", ENCRYPT, sec, NULL, 0,
Sebastian Siewior477035c2008-03-11 21:24:26 +08002106 speed_template_16_24_32);
Herbert Xucba83562006-08-13 08:26:09 +10002107 test_cipher_speed("ecb(twofish)", DECRYPT, sec, NULL, 0,
Sebastian Siewior477035c2008-03-11 21:24:26 +08002108 speed_template_16_24_32);
Herbert Xucba83562006-08-13 08:26:09 +10002109 test_cipher_speed("cbc(twofish)", ENCRYPT, sec, NULL, 0,
Sebastian Siewior477035c2008-03-11 21:24:26 +08002110 speed_template_16_24_32);
Herbert Xucba83562006-08-13 08:26:09 +10002111 test_cipher_speed("cbc(twofish)", DECRYPT, sec, NULL, 0,
Sebastian Siewior477035c2008-03-11 21:24:26 +08002112 speed_template_16_24_32);
Jussi Kivilinnaee5002a2011-09-26 16:47:15 +03002113 test_cipher_speed("ctr(twofish)", ENCRYPT, sec, NULL, 0,
2114 speed_template_16_24_32);
2115 test_cipher_speed("ctr(twofish)", DECRYPT, sec, NULL, 0,
2116 speed_template_16_24_32);
Jussi Kivilinnabee3a902011-10-18 13:32:56 +03002117 test_cipher_speed("lrw(twofish)", ENCRYPT, sec, NULL, 0,
2118 speed_template_32_40_48);
2119 test_cipher_speed("lrw(twofish)", DECRYPT, sec, NULL, 0,
2120 speed_template_32_40_48);
Jussi Kivilinna131f7542011-10-18 13:33:38 +03002121 test_cipher_speed("xts(twofish)", ENCRYPT, sec, NULL, 0,
2122 speed_template_32_48_64);
2123 test_cipher_speed("xts(twofish)", DECRYPT, sec, NULL, 0,
2124 speed_template_32_48_64);
Harald Welteebfd9bc2005-06-22 13:27:23 -07002125 break;
2126
2127 case 203:
Herbert Xucba83562006-08-13 08:26:09 +10002128 test_cipher_speed("ecb(blowfish)", ENCRYPT, sec, NULL, 0,
Sebastian Siewior477035c2008-03-11 21:24:26 +08002129 speed_template_8_32);
Herbert Xucba83562006-08-13 08:26:09 +10002130 test_cipher_speed("ecb(blowfish)", DECRYPT, sec, NULL, 0,
Sebastian Siewior477035c2008-03-11 21:24:26 +08002131 speed_template_8_32);
Herbert Xucba83562006-08-13 08:26:09 +10002132 test_cipher_speed("cbc(blowfish)", ENCRYPT, sec, NULL, 0,
Sebastian Siewior477035c2008-03-11 21:24:26 +08002133 speed_template_8_32);
Herbert Xucba83562006-08-13 08:26:09 +10002134 test_cipher_speed("cbc(blowfish)", DECRYPT, sec, NULL, 0,
Sebastian Siewior477035c2008-03-11 21:24:26 +08002135 speed_template_8_32);
Jussi Kivilinna7d47b862011-09-02 01:45:17 +03002136 test_cipher_speed("ctr(blowfish)", ENCRYPT, sec, NULL, 0,
2137 speed_template_8_32);
2138 test_cipher_speed("ctr(blowfish)", DECRYPT, sec, NULL, 0,
2139 speed_template_8_32);
Harald Welteebfd9bc2005-06-22 13:27:23 -07002140 break;
2141
2142 case 204:
Herbert Xucba83562006-08-13 08:26:09 +10002143 test_cipher_speed("ecb(des)", ENCRYPT, sec, NULL, 0,
Sebastian Siewior477035c2008-03-11 21:24:26 +08002144 speed_template_8);
Herbert Xucba83562006-08-13 08:26:09 +10002145 test_cipher_speed("ecb(des)", DECRYPT, sec, NULL, 0,
Sebastian Siewior477035c2008-03-11 21:24:26 +08002146 speed_template_8);
Herbert Xucba83562006-08-13 08:26:09 +10002147 test_cipher_speed("cbc(des)", ENCRYPT, sec, NULL, 0,
Sebastian Siewior477035c2008-03-11 21:24:26 +08002148 speed_template_8);
Herbert Xucba83562006-08-13 08:26:09 +10002149 test_cipher_speed("cbc(des)", DECRYPT, sec, NULL, 0,
Sebastian Siewior477035c2008-03-11 21:24:26 +08002150 speed_template_8);
Harald Welteebfd9bc2005-06-22 13:27:23 -07002151 break;
2152
Noriaki TAKAMIYA02ab5a72007-01-24 21:48:19 +11002153 case 205:
2154 test_cipher_speed("ecb(camellia)", ENCRYPT, sec, NULL, 0,
Sebastian Siewior477035c2008-03-11 21:24:26 +08002155 speed_template_16_24_32);
Noriaki TAKAMIYA02ab5a72007-01-24 21:48:19 +11002156 test_cipher_speed("ecb(camellia)", DECRYPT, sec, NULL, 0,
Sebastian Siewior477035c2008-03-11 21:24:26 +08002157 speed_template_16_24_32);
Noriaki TAKAMIYA02ab5a72007-01-24 21:48:19 +11002158 test_cipher_speed("cbc(camellia)", ENCRYPT, sec, NULL, 0,
Sebastian Siewior477035c2008-03-11 21:24:26 +08002159 speed_template_16_24_32);
Noriaki TAKAMIYA02ab5a72007-01-24 21:48:19 +11002160 test_cipher_speed("cbc(camellia)", DECRYPT, sec, NULL, 0,
Sebastian Siewior477035c2008-03-11 21:24:26 +08002161 speed_template_16_24_32);
Jussi Kivilinna4de59332012-03-05 20:26:26 +02002162 test_cipher_speed("ctr(camellia)", ENCRYPT, sec, NULL, 0,
2163 speed_template_16_24_32);
2164 test_cipher_speed("ctr(camellia)", DECRYPT, sec, NULL, 0,
2165 speed_template_16_24_32);
2166 test_cipher_speed("lrw(camellia)", ENCRYPT, sec, NULL, 0,
2167 speed_template_32_40_48);
2168 test_cipher_speed("lrw(camellia)", DECRYPT, sec, NULL, 0,
2169 speed_template_32_40_48);
2170 test_cipher_speed("xts(camellia)", ENCRYPT, sec, NULL, 0,
2171 speed_template_32_48_64);
2172 test_cipher_speed("xts(camellia)", DECRYPT, sec, NULL, 0,
2173 speed_template_32_48_64);
Noriaki TAKAMIYA02ab5a72007-01-24 21:48:19 +11002174 break;
2175
Tan Swee Heng5de8f1b2007-12-07 17:17:43 +08002176 case 206:
2177 test_cipher_speed("salsa20", ENCRYPT, sec, NULL, 0,
Sebastian Siewior477035c2008-03-11 21:24:26 +08002178 speed_template_16_32);
Tan Swee Heng5de8f1b2007-12-07 17:17:43 +08002179 break;
2180
Jussi Kivilinna7fb7fe42011-10-18 00:03:03 +03002181 case 207:
2182 test_cipher_speed("ecb(serpent)", ENCRYPT, sec, NULL, 0,
2183 speed_template_16_32);
2184 test_cipher_speed("ecb(serpent)", DECRYPT, sec, NULL, 0,
2185 speed_template_16_32);
2186 test_cipher_speed("cbc(serpent)", ENCRYPT, sec, NULL, 0,
2187 speed_template_16_32);
2188 test_cipher_speed("cbc(serpent)", DECRYPT, sec, NULL, 0,
2189 speed_template_16_32);
2190 test_cipher_speed("ctr(serpent)", ENCRYPT, sec, NULL, 0,
2191 speed_template_16_32);
2192 test_cipher_speed("ctr(serpent)", DECRYPT, sec, NULL, 0,
2193 speed_template_16_32);
Jussi Kivilinna87aae4b2011-10-18 13:32:39 +03002194 test_cipher_speed("lrw(serpent)", ENCRYPT, sec, NULL, 0,
2195 speed_template_32_48);
2196 test_cipher_speed("lrw(serpent)", DECRYPT, sec, NULL, 0,
2197 speed_template_32_48);
Jussi Kivilinna5209c072011-10-18 13:33:22 +03002198 test_cipher_speed("xts(serpent)", ENCRYPT, sec, NULL, 0,
2199 speed_template_32_64);
2200 test_cipher_speed("xts(serpent)", DECRYPT, sec, NULL, 0,
2201 speed_template_32_64);
Jussi Kivilinna7fb7fe42011-10-18 00:03:03 +03002202 break;
2203
Jussi Kivilinna31b4cd292012-06-12 16:52:04 +08002204 case 208:
2205 test_cipher_speed("ecb(arc4)", ENCRYPT, sec, NULL, 0,
2206 speed_template_8);
2207 break;
2208
Johannes Goetzfrieda2c58262012-07-11 19:37:21 +02002209 case 209:
2210 test_cipher_speed("ecb(cast5)", ENCRYPT, sec, NULL, 0,
2211 speed_template_8_16);
2212 test_cipher_speed("ecb(cast5)", DECRYPT, sec, NULL, 0,
2213 speed_template_8_16);
2214 test_cipher_speed("cbc(cast5)", ENCRYPT, sec, NULL, 0,
2215 speed_template_8_16);
2216 test_cipher_speed("cbc(cast5)", DECRYPT, sec, NULL, 0,
2217 speed_template_8_16);
2218 test_cipher_speed("ctr(cast5)", ENCRYPT, sec, NULL, 0,
2219 speed_template_8_16);
2220 test_cipher_speed("ctr(cast5)", DECRYPT, sec, NULL, 0,
2221 speed_template_8_16);
2222 break;
2223
Johannes Goetzfried9b8b0402012-07-11 19:38:29 +02002224 case 210:
2225 test_cipher_speed("ecb(cast6)", ENCRYPT, sec, NULL, 0,
2226 speed_template_16_32);
2227 test_cipher_speed("ecb(cast6)", DECRYPT, sec, NULL, 0,
2228 speed_template_16_32);
2229 test_cipher_speed("cbc(cast6)", ENCRYPT, sec, NULL, 0,
2230 speed_template_16_32);
2231 test_cipher_speed("cbc(cast6)", DECRYPT, sec, NULL, 0,
2232 speed_template_16_32);
2233 test_cipher_speed("ctr(cast6)", ENCRYPT, sec, NULL, 0,
2234 speed_template_16_32);
2235 test_cipher_speed("ctr(cast6)", DECRYPT, sec, NULL, 0,
2236 speed_template_16_32);
2237 test_cipher_speed("lrw(cast6)", ENCRYPT, sec, NULL, 0,
2238 speed_template_32_48);
2239 test_cipher_speed("lrw(cast6)", DECRYPT, sec, NULL, 0,
2240 speed_template_32_48);
2241 test_cipher_speed("xts(cast6)", ENCRYPT, sec, NULL, 0,
2242 speed_template_32_64);
2243 test_cipher_speed("xts(cast6)", DECRYPT, sec, NULL, 0,
2244 speed_template_32_64);
2245 break;
2246
Tim Chen53f52d72013-12-11 14:28:47 -08002247 case 211:
2248 test_aead_speed("rfc4106(gcm(aes))", ENCRYPT, sec,
Herbert Xu34a1c742015-07-09 07:17:26 +08002249 NULL, 0, 16, 16, aead_speed_template_20);
Vutla, Lokesh1425d2d2015-07-07 21:01:49 +05302250 test_aead_speed("gcm(aes)", ENCRYPT, sec,
Cyrille Pitchenf18611d2015-11-17 13:37:10 +01002251 NULL, 0, 16, 8, speed_template_16_24_32);
Gilad Ben-Yossef4431bd42017-12-17 08:29:01 +00002252 test_aead_speed("rfc4106(gcm(aes))", DECRYPT, sec,
2253 NULL, 0, 16, 16, aead_speed_template_20);
2254 test_aead_speed("gcm(aes)", DECRYPT, sec,
2255 NULL, 0, 16, 8, speed_template_16_24_32);
Tim Chen53f52d72013-12-11 14:28:47 -08002256 break;
2257
Herbert Xu4e4aab62015-06-17 14:04:21 +08002258 case 212:
2259 test_aead_speed("rfc4309(ccm(aes))", ENCRYPT, sec,
Herbert Xu34a1c742015-07-09 07:17:26 +08002260 NULL, 0, 16, 16, aead_speed_template_19);
Gilad Ben-Yossef4431bd42017-12-17 08:29:01 +00002261 test_aead_speed("rfc4309(ccm(aes))", DECRYPT, sec,
2262 NULL, 0, 16, 16, aead_speed_template_19);
Herbert Xu4e4aab62015-06-17 14:04:21 +08002263 break;
2264
Martin Willi2dce0632015-07-16 19:13:59 +02002265 case 213:
2266 test_aead_speed("rfc7539esp(chacha20,poly1305)", ENCRYPT, sec,
2267 NULL, 0, 16, 8, aead_speed_template_36);
Gilad Ben-Yossef4431bd42017-12-17 08:29:01 +00002268 test_aead_speed("rfc7539esp(chacha20,poly1305)", DECRYPT, sec,
2269 NULL, 0, 16, 8, aead_speed_template_36);
Martin Willi2dce0632015-07-16 19:13:59 +02002270 break;
2271
2272 case 214:
2273 test_cipher_speed("chacha20", ENCRYPT, sec, NULL, 0,
2274 speed_template_32);
2275 break;
2276
Gilad Ben-Yossef427988d2017-12-17 08:29:05 +00002277 case 215:
2278 test_mb_aead_speed("rfc4106(gcm(aes))", ENCRYPT, sec, NULL,
2279 0, 16, 16, aead_speed_template_20, num_mb);
2280 test_mb_aead_speed("gcm(aes)", ENCRYPT, sec, NULL, 0, 16, 8,
2281 speed_template_16_24_32, num_mb);
2282 test_mb_aead_speed("rfc4106(gcm(aes))", DECRYPT, sec, NULL,
2283 0, 16, 16, aead_speed_template_20, num_mb);
2284 test_mb_aead_speed("gcm(aes)", DECRYPT, sec, NULL, 0, 16, 8,
2285 speed_template_16_24_32, num_mb);
2286 break;
2287
2288 case 216:
2289 test_mb_aead_speed("rfc4309(ccm(aes))", ENCRYPT, sec, NULL, 0,
2290 16, 16, aead_speed_template_19, num_mb);
2291 test_mb_aead_speed("rfc4309(ccm(aes))", DECRYPT, sec, NULL, 0,
2292 16, 16, aead_speed_template_19, num_mb);
2293 break;
2294
2295 case 217:
2296 test_mb_aead_speed("rfc7539esp(chacha20,poly1305)", ENCRYPT,
2297 sec, NULL, 0, 16, 8, aead_speed_template_36,
2298 num_mb);
2299 test_mb_aead_speed("rfc7539esp(chacha20,poly1305)", DECRYPT,
2300 sec, NULL, 0, 16, 8, aead_speed_template_36,
2301 num_mb);
2302 break;
2303
Gilad Ben-Yossef95ba5972018-09-20 14:18:38 +01002304 case 218:
2305 test_cipher_speed("ecb(sm4)", ENCRYPT, sec, NULL, 0,
2306 speed_template_16);
2307 test_cipher_speed("ecb(sm4)", DECRYPT, sec, NULL, 0,
2308 speed_template_16);
2309 test_cipher_speed("cbc(sm4)", ENCRYPT, sec, NULL, 0,
2310 speed_template_16);
2311 test_cipher_speed("cbc(sm4)", DECRYPT, sec, NULL, 0,
2312 speed_template_16);
2313 test_cipher_speed("ctr(sm4)", ENCRYPT, sec, NULL, 0,
2314 speed_template_16);
2315 test_cipher_speed("ctr(sm4)", DECRYPT, sec, NULL, 0,
2316 speed_template_16);
2317 break;
Eric Biggers059c2a42018-11-16 17:26:31 -08002318
2319 case 219:
2320 test_cipher_speed("adiantum(xchacha12,aes)", ENCRYPT, sec, NULL,
2321 0, speed_template_32);
2322 test_cipher_speed("adiantum(xchacha12,aes)", DECRYPT, sec, NULL,
2323 0, speed_template_32);
2324 test_cipher_speed("adiantum(xchacha20,aes)", ENCRYPT, sec, NULL,
2325 0, speed_template_32);
2326 test_cipher_speed("adiantum(xchacha20,aes)", DECRYPT, sec, NULL,
2327 0, speed_template_32);
2328 break;
2329
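	/*
	 * 300-series: hash speed tests via test_hash_speed().  Mode 300 runs
	 * them all (or only alg= when given); a specific 3xx mode runs a
	 * single algorithm.
	 */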
Michal Ludvige8057922006-05-30 22:04:19 +10002330 case 300:
Herbert Xu86068132014-12-04 16:43:29 +08002331 if (alg) {
2332 test_hash_speed(alg, sec, generic_hash_speed_template);
2333 break;
2334 }
Michal Ludvige8057922006-05-30 22:04:19 +10002335 /* fall through */
Michal Ludvige8057922006-05-30 22:04:19 +10002336 case 301:
Herbert Xue9d41162006-08-19 21:38:49 +10002337 test_hash_speed("md4", sec, generic_hash_speed_template);
Michal Ludvige8057922006-05-30 22:04:19 +10002338 if (mode > 300 && mode < 400) break;
Gustavo A. R. Silva59517222017-10-09 14:43:21 -05002339 /* fall through */
Michal Ludvige8057922006-05-30 22:04:19 +10002340 case 302:
Herbert Xue9d41162006-08-19 21:38:49 +10002341 test_hash_speed("md5", sec, generic_hash_speed_template);
Michal Ludvige8057922006-05-30 22:04:19 +10002342 if (mode > 300 && mode < 400) break;
Gustavo A. R. Silva59517222017-10-09 14:43:21 -05002343 /* fall through */
Michal Ludvige8057922006-05-30 22:04:19 +10002344 case 303:
Herbert Xue9d41162006-08-19 21:38:49 +10002345 test_hash_speed("sha1", sec, generic_hash_speed_template);
Michal Ludvige8057922006-05-30 22:04:19 +10002346 if (mode > 300 && mode < 400) break;
Gustavo A. R. Silva59517222017-10-09 14:43:21 -05002347 /* fall through */
Michal Ludvige8057922006-05-30 22:04:19 +10002348 case 304:
Herbert Xue9d41162006-08-19 21:38:49 +10002349 test_hash_speed("sha256", sec, generic_hash_speed_template);
Michal Ludvige8057922006-05-30 22:04:19 +10002350 if (mode > 300 && mode < 400) break;
Gustavo A. R. Silva59517222017-10-09 14:43:21 -05002351 /* fall through */
Michal Ludvige8057922006-05-30 22:04:19 +10002352 case 305:
Herbert Xue9d41162006-08-19 21:38:49 +10002353 test_hash_speed("sha384", sec, generic_hash_speed_template);
Michal Ludvige8057922006-05-30 22:04:19 +10002354 if (mode > 300 && mode < 400) break;
Gustavo A. R. Silva59517222017-10-09 14:43:21 -05002355 /* fall through */
Michal Ludvige8057922006-05-30 22:04:19 +10002356 case 306:
Herbert Xue9d41162006-08-19 21:38:49 +10002357 test_hash_speed("sha512", sec, generic_hash_speed_template);
Michal Ludvige8057922006-05-30 22:04:19 +10002358 if (mode > 300 && mode < 400) break;
Gustavo A. R. Silva59517222017-10-09 14:43:21 -05002359 /* fall through */
Michal Ludvige8057922006-05-30 22:04:19 +10002360 case 307:
Herbert Xue9d41162006-08-19 21:38:49 +10002361 test_hash_speed("wp256", sec, generic_hash_speed_template);
Michal Ludvige8057922006-05-30 22:04:19 +10002362 if (mode > 300 && mode < 400) break;
Gustavo A. R. Silva59517222017-10-09 14:43:21 -05002363 /* fall through */
Michal Ludvige8057922006-05-30 22:04:19 +10002364 case 308:
Herbert Xue9d41162006-08-19 21:38:49 +10002365 test_hash_speed("wp384", sec, generic_hash_speed_template);
Michal Ludvige8057922006-05-30 22:04:19 +10002366 if (mode > 300 && mode < 400) break;
Gustavo A. R. Silva59517222017-10-09 14:43:21 -05002367 /* fall through */
Michal Ludvige8057922006-05-30 22:04:19 +10002368 case 309:
Herbert Xue9d41162006-08-19 21:38:49 +10002369 test_hash_speed("wp512", sec, generic_hash_speed_template);
Michal Ludvige8057922006-05-30 22:04:19 +10002370 if (mode > 300 && mode < 400) break;
Gustavo A. R. Silva59517222017-10-09 14:43:21 -05002371 /* fall through */
Michal Ludvige8057922006-05-30 22:04:19 +10002372 case 310:
Herbert Xue9d41162006-08-19 21:38:49 +10002373 test_hash_speed("tgr128", sec, generic_hash_speed_template);
Michal Ludvige8057922006-05-30 22:04:19 +10002374 if (mode > 300 && mode < 400) break;
Gustavo A. R. Silva59517222017-10-09 14:43:21 -05002375 /* fall through */
Michal Ludvige8057922006-05-30 22:04:19 +10002376 case 311:
Herbert Xue9d41162006-08-19 21:38:49 +10002377 test_hash_speed("tgr160", sec, generic_hash_speed_template);
Michal Ludvige8057922006-05-30 22:04:19 +10002378 if (mode > 300 && mode < 400) break;
Gustavo A. R. Silva59517222017-10-09 14:43:21 -05002379 /* fall through */
Michal Ludvige8057922006-05-30 22:04:19 +10002380 case 312:
Herbert Xue9d41162006-08-19 21:38:49 +10002381 test_hash_speed("tgr192", sec, generic_hash_speed_template);
Michal Ludvige8057922006-05-30 22:04:19 +10002382 if (mode > 300 && mode < 400) break;
Gustavo A. R. Silva59517222017-10-09 14:43:21 -05002383 /* fall through */
Jonathan Lynchcd12fb92007-11-10 20:08:25 +08002384 case 313:
2385 test_hash_speed("sha224", sec, generic_hash_speed_template);
2386 if (mode > 300 && mode < 400) break;
Gustavo A. R. Silva59517222017-10-09 14:43:21 -05002387 /* fall through */
Adrian-Ken Rueegseggerfd4adf12008-05-07 22:16:36 +08002388 case 314:
2389 test_hash_speed("rmd128", sec, generic_hash_speed_template);
2390 if (mode > 300 && mode < 400) break;
Gustavo A. R. Silva59517222017-10-09 14:43:21 -05002391 /* fall through */
Adrian-Ken Rueegseggerfd4adf12008-05-07 22:16:36 +08002392 case 315:
2393 test_hash_speed("rmd160", sec, generic_hash_speed_template);
2394 if (mode > 300 && mode < 400) break;
Gustavo A. R. Silva59517222017-10-09 14:43:21 -05002395 /* fall through */
Adrian-Ken Rueegsegger2998db32008-05-09 21:29:35 +08002396 case 316:
2397 test_hash_speed("rmd256", sec, generic_hash_speed_template);
2398 if (mode > 300 && mode < 400) break;
Gustavo A. R. Silva59517222017-10-09 14:43:21 -05002399 /* fall through */
Adrian-Ken Rueegsegger2998db32008-05-09 21:29:35 +08002400 case 317:
2401 test_hash_speed("rmd320", sec, generic_hash_speed_template);
2402 if (mode > 300 && mode < 400) break;
Gustavo A. R. Silva59517222017-10-09 14:43:21 -05002403 /* fall through */
Huang Ying18bcc912010-03-10 18:30:32 +08002404 case 318:
2405 test_hash_speed("ghash-generic", sec, hash_speed_template_16);
2406 if (mode > 300 && mode < 400) break;
Gustavo A. R. Silva59517222017-10-09 14:43:21 -05002407 /* fall through */
Tim Chene3899e42012-09-27 15:44:24 -07002408 case 319:
2409 test_hash_speed("crc32c", sec, generic_hash_speed_template);
2410 if (mode > 300 && mode < 400) break;
Gustavo A. R. Silva59517222017-10-09 14:43:21 -05002411 /* fall through */
Herbert Xu684115212013-09-07 12:56:26 +10002412 case 320:
2413 test_hash_speed("crct10dif", sec, generic_hash_speed_template);
2414 if (mode > 300 && mode < 400) break;
Gustavo A. R. Silva59517222017-10-09 14:43:21 -05002415 /* fall through */
Martin Willi2dce0632015-07-16 19:13:59 +02002416 case 321:
2417 test_hash_speed("poly1305", sec, poly1305_speed_template);
2418 if (mode > 300 && mode < 400) break;
Gustavo A. R. Silva59517222017-10-09 14:43:21 -05002419 /* fall through */
raveendra padasalagi79cc6ab2016-06-17 10:30:36 +05302420 case 322:
2421 test_hash_speed("sha3-224", sec, generic_hash_speed_template);
2422 if (mode > 300 && mode < 400) break;
Gustavo A. R. Silva59517222017-10-09 14:43:21 -05002423 /* fall through */
raveendra padasalagi79cc6ab2016-06-17 10:30:36 +05302424 case 323:
2425 test_hash_speed("sha3-256", sec, generic_hash_speed_template);
2426 if (mode > 300 && mode < 400) break;
Gustavo A. R. Silva59517222017-10-09 14:43:21 -05002427 /* fall through */
raveendra padasalagi79cc6ab2016-06-17 10:30:36 +05302428 case 324:
2429 test_hash_speed("sha3-384", sec, generic_hash_speed_template);
2430 if (mode > 300 && mode < 400) break;
Gustavo A. R. Silva59517222017-10-09 14:43:21 -05002431 /* fall through */
raveendra padasalagi79cc6ab2016-06-17 10:30:36 +05302432 case 325:
2433 test_hash_speed("sha3-512", sec, generic_hash_speed_template);
2434 if (mode > 300 && mode < 400) break;
Gustavo A. R. Silva59517222017-10-09 14:43:21 -05002435 /* fall through */
Gilad Ben-Yossefb7e27532017-08-21 13:51:29 +03002436 case 326:
2437 test_hash_speed("sm3", sec, generic_hash_speed_template);
2438 if (mode > 300 && mode < 400) break;
Gustavo A. R. Silva59517222017-10-09 14:43:21 -05002439 /* fall through */
Vitaly Chikunov25a0b9d2018-11-07 00:00:03 +03002440 case 327:
2441 test_hash_speed("streebog256", sec,
2442 generic_hash_speed_template);
2443 if (mode > 300 && mode < 400) break;
2444 /* fall through */
2445 case 328:
2446 test_hash_speed("streebog512", sec,
2447 generic_hash_speed_template);
2448 if (mode > 300 && mode < 400) break;
2449 /* fall through */
Michal Ludvige8057922006-05-30 22:04:19 +10002450 case 399:
2451 break;
2452
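	/*
	 * 400-series: async hash (ahash) and multi-buffer hash speed tests,
	 * with the same fall-through pattern as the 300-series.
	 */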
David S. Millerbeb63da72010-05-19 14:11:21 +10002453 case 400:
Herbert Xu86068132014-12-04 16:43:29 +08002454 if (alg) {
2455 test_ahash_speed(alg, sec, generic_hash_speed_template);
2456 break;
2457 }
David S. Millerbeb63da72010-05-19 14:11:21 +10002458 /* fall through */
David S. Millerbeb63da72010-05-19 14:11:21 +10002459 case 401:
2460 test_ahash_speed("md4", sec, generic_hash_speed_template);
2461 if (mode > 400 && mode < 500) break;
Gustavo A. R. Silva59517222017-10-09 14:43:21 -05002462 /* fall through */
David S. Millerbeb63da72010-05-19 14:11:21 +10002463 case 402:
2464 test_ahash_speed("md5", sec, generic_hash_speed_template);
2465 if (mode > 400 && mode < 500) break;
Gustavo A. R. Silva59517222017-10-09 14:43:21 -05002466 /* fall through */
David S. Millerbeb63da72010-05-19 14:11:21 +10002467 case 403:
2468 test_ahash_speed("sha1", sec, generic_hash_speed_template);
2469 if (mode > 400 && mode < 500) break;
Gustavo A. R. Silva59517222017-10-09 14:43:21 -05002470 /* fall through */
David S. Millerbeb63da72010-05-19 14:11:21 +10002471 case 404:
2472 test_ahash_speed("sha256", sec, generic_hash_speed_template);
2473 if (mode > 400 && mode < 500) break;
Gustavo A. R. Silva59517222017-10-09 14:43:21 -05002474 /* fall through */
David S. Millerbeb63da72010-05-19 14:11:21 +10002475 case 405:
2476 test_ahash_speed("sha384", sec, generic_hash_speed_template);
2477 if (mode > 400 && mode < 500) break;
Gustavo A. R. Silva59517222017-10-09 14:43:21 -05002478 /* fall through */
David S. Millerbeb63da72010-05-19 14:11:21 +10002479 case 406:
2480 test_ahash_speed("sha512", sec, generic_hash_speed_template);
2481 if (mode > 400 && mode < 500) break;
Gustavo A. R. Silva59517222017-10-09 14:43:21 -05002482 /* fall through */
David S. Millerbeb63da72010-05-19 14:11:21 +10002483 case 407:
2484 test_ahash_speed("wp256", sec, generic_hash_speed_template);
2485 if (mode > 400 && mode < 500) break;
Gustavo A. R. Silva59517222017-10-09 14:43:21 -05002486 /* fall through */
David S. Millerbeb63da72010-05-19 14:11:21 +10002487 case 408:
2488 test_ahash_speed("wp384", sec, generic_hash_speed_template);
2489 if (mode > 400 && mode < 500) break;
Gustavo A. R. Silva59517222017-10-09 14:43:21 -05002490 /* fall through */
David S. Millerbeb63da72010-05-19 14:11:21 +10002491 case 409:
2492 test_ahash_speed("wp512", sec, generic_hash_speed_template);
2493 if (mode > 400 && mode < 500) break;
Gustavo A. R. Silva59517222017-10-09 14:43:21 -05002494 /* fall through */
David S. Millerbeb63da72010-05-19 14:11:21 +10002495 case 410:
2496 test_ahash_speed("tgr128", sec, generic_hash_speed_template);
2497 if (mode > 400 && mode < 500) break;
Gustavo A. R. Silva59517222017-10-09 14:43:21 -05002498 /* fall through */
David S. Millerbeb63da72010-05-19 14:11:21 +10002499 case 411:
2500 test_ahash_speed("tgr160", sec, generic_hash_speed_template);
2501 if (mode > 400 && mode < 500) break;
Gustavo A. R. Silva59517222017-10-09 14:43:21 -05002502 /* fall through */
David S. Millerbeb63da72010-05-19 14:11:21 +10002503 case 412:
2504 test_ahash_speed("tgr192", sec, generic_hash_speed_template);
2505 if (mode > 400 && mode < 500) break;
Gustavo A. R. Silva59517222017-10-09 14:43:21 -05002506 /* fall through */
David S. Millerbeb63da72010-05-19 14:11:21 +10002507 case 413:
2508 test_ahash_speed("sha224", sec, generic_hash_speed_template);
2509 if (mode > 400 && mode < 500) break;
Gustavo A. R. Silva59517222017-10-09 14:43:21 -05002510 /* fall through */
David S. Millerbeb63da72010-05-19 14:11:21 +10002511 case 414:
2512 test_ahash_speed("rmd128", sec, generic_hash_speed_template);
2513 if (mode > 400 && mode < 500) break;
Gustavo A. R. Silva59517222017-10-09 14:43:21 -05002514 /* fall through */
David S. Millerbeb63da72010-05-19 14:11:21 +10002515 case 415:
2516 test_ahash_speed("rmd160", sec, generic_hash_speed_template);
2517 if (mode > 400 && mode < 500) break;
Gustavo A. R. Silva59517222017-10-09 14:43:21 -05002518 /* fall through */
David S. Millerbeb63da72010-05-19 14:11:21 +10002519 case 416:
2520 test_ahash_speed("rmd256", sec, generic_hash_speed_template);
2521 if (mode > 400 && mode < 500) break;
Gustavo A. R. Silva59517222017-10-09 14:43:21 -05002522 /* fall through */
David S. Millerbeb63da72010-05-19 14:11:21 +10002523 case 417:
2524 test_ahash_speed("rmd320", sec, generic_hash_speed_template);
2525 if (mode > 400 && mode < 500) break;
Gustavo A. R. Silva59517222017-10-09 14:43:21 -05002526 /* fall through */
raveendra padasalagi79cc6ab2016-06-17 10:30:36 +05302527 case 418:
2528 test_ahash_speed("sha3-224", sec, generic_hash_speed_template);
2529 if (mode > 400 && mode < 500) break;
Gustavo A. R. Silva59517222017-10-09 14:43:21 -05002530 /* fall through */
raveendra padasalagi79cc6ab2016-06-17 10:30:36 +05302531 case 419:
2532 test_ahash_speed("sha3-256", sec, generic_hash_speed_template);
2533 if (mode > 400 && mode < 500) break;
Gustavo A. R. Silva59517222017-10-09 14:43:21 -05002534 /* fall through */
raveendra padasalagi79cc6ab2016-06-17 10:30:36 +05302535 case 420:
2536 test_ahash_speed("sha3-384", sec, generic_hash_speed_template);
2537 if (mode > 400 && mode < 500) break;
Gustavo A. R. Silva59517222017-10-09 14:43:21 -05002538 /* fall through */
raveendra padasalagi79cc6ab2016-06-17 10:30:36 +05302539 case 421:
2540 test_ahash_speed("sha3-512", sec, generic_hash_speed_template);
2541 if (mode > 400 && mode < 500) break;
Gustavo A. R. Silva59517222017-10-09 14:43:21 -05002542 /* fall through */
Megha Dey087bcd22016-06-23 18:40:47 -07002543 case 422:
Gilad Ben-Yossef8fcdc862017-12-17 08:29:02 +00002544 test_mb_ahash_speed("sha1", sec, generic_hash_speed_template,
2545 num_mb);
Megha Dey087bcd22016-06-23 18:40:47 -07002546 if (mode > 400 && mode < 500) break;
Gustavo A. R. Silva59517222017-10-09 14:43:21 -05002547 /* fall through */
Megha Dey087bcd22016-06-23 18:40:47 -07002548 case 423:
Gilad Ben-Yossef8fcdc862017-12-17 08:29:02 +00002549 test_mb_ahash_speed("sha256", sec, generic_hash_speed_template,
2550 num_mb);
Megha Dey087bcd22016-06-23 18:40:47 -07002551 if (mode > 400 && mode < 500) break;
Gustavo A. R. Silva59517222017-10-09 14:43:21 -05002552 /* fall through */
Megha Dey14009c42016-06-27 10:20:09 -07002553 case 424:
Gilad Ben-Yossef8fcdc862017-12-17 08:29:02 +00002554 test_mb_ahash_speed("sha512", sec, generic_hash_speed_template,
2555 num_mb);
Megha Dey14009c42016-06-27 10:20:09 -07002556 if (mode > 400 && mode < 500) break;
Gustavo A. R. Silva59517222017-10-09 14:43:21 -05002557 /* fall through */
Gilad Ben-Yossefb7e27532017-08-21 13:51:29 +03002558 case 425:
Gilad Ben-Yossef8fcdc862017-12-17 08:29:02 +00002559 test_mb_ahash_speed("sm3", sec, generic_hash_speed_template,
2560 num_mb);
Gilad Ben-Yossefb7e27532017-08-21 13:51:29 +03002561 if (mode > 400 && mode < 500) break;
Gustavo A. R. Silva59517222017-10-09 14:43:21 -05002562 /* fall through */
Vitaly Chikunov25a0b9d2018-11-07 00:00:03 +03002563 case 426:
2564 test_mb_ahash_speed("streebog256", sec,
2565 generic_hash_speed_template, num_mb);
2566 if (mode > 400 && mode < 500) break;
2567 /* fall through */
2568 case 427:
2569 test_mb_ahash_speed("streebog512", sec,
2570 generic_hash_speed_template, num_mb);
2571 if (mode > 400 && mode < 500) break;
2572 /* fall through */
David S. Millerbeb63da72010-05-19 14:11:21 +10002573 case 499:
2574 break;
2575
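	/* 500-series: async skcipher speed tests via test_acipher_speed(). */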
Jussi Kivilinna3f3baf32011-10-18 00:02:58 +03002576 case 500:
2577 test_acipher_speed("ecb(aes)", ENCRYPT, sec, NULL, 0,
2578 speed_template_16_24_32);
2579 test_acipher_speed("ecb(aes)", DECRYPT, sec, NULL, 0,
2580 speed_template_16_24_32);
2581 test_acipher_speed("cbc(aes)", ENCRYPT, sec, NULL, 0,
2582 speed_template_16_24_32);
2583 test_acipher_speed("cbc(aes)", DECRYPT, sec, NULL, 0,
2584 speed_template_16_24_32);
2585 test_acipher_speed("lrw(aes)", ENCRYPT, sec, NULL, 0,
2586 speed_template_32_40_48);
2587 test_acipher_speed("lrw(aes)", DECRYPT, sec, NULL, 0,
2588 speed_template_32_40_48);
2589 test_acipher_speed("xts(aes)", ENCRYPT, sec, NULL, 0,
Horia Geantăb66ad0b2017-07-19 19:40:32 +03002590 speed_template_32_64);
Jussi Kivilinna3f3baf32011-10-18 00:02:58 +03002591 test_acipher_speed("xts(aes)", DECRYPT, sec, NULL, 0,
Horia Geantăb66ad0b2017-07-19 19:40:32 +03002592 speed_template_32_64);
Herbert Xu1503a242016-06-29 18:04:14 +08002593 test_acipher_speed("cts(cbc(aes))", ENCRYPT, sec, NULL, 0,
2594 speed_template_16_24_32);
2595 test_acipher_speed("cts(cbc(aes))", DECRYPT, sec, NULL, 0,
2596 speed_template_16_24_32);
Jussi Kivilinna3f3baf32011-10-18 00:02:58 +03002597 test_acipher_speed("ctr(aes)", ENCRYPT, sec, NULL, 0,
2598 speed_template_16_24_32);
2599 test_acipher_speed("ctr(aes)", DECRYPT, sec, NULL, 0,
2600 speed_template_16_24_32);
Nicolas Royerde1975332012-07-01 19:19:47 +02002601 test_acipher_speed("cfb(aes)", ENCRYPT, sec, NULL, 0,
2602 speed_template_16_24_32);
2603 test_acipher_speed("cfb(aes)", DECRYPT, sec, NULL, 0,
2604 speed_template_16_24_32);
2605 test_acipher_speed("ofb(aes)", ENCRYPT, sec, NULL, 0,
2606 speed_template_16_24_32);
2607 test_acipher_speed("ofb(aes)", DECRYPT, sec, NULL, 0,
2608 speed_template_16_24_32);
Jussi Kivilinna69d31502012-12-28 12:04:58 +02002609 test_acipher_speed("rfc3686(ctr(aes))", ENCRYPT, sec, NULL, 0,
2610 speed_template_20_28_36);
2611 test_acipher_speed("rfc3686(ctr(aes))", DECRYPT, sec, NULL, 0,
2612 speed_template_20_28_36);
Jussi Kivilinna3f3baf32011-10-18 00:02:58 +03002613 break;
2614
2615 case 501:
2616 test_acipher_speed("ecb(des3_ede)", ENCRYPT, sec,
2617 des3_speed_template, DES3_SPEED_VECTORS,
2618 speed_template_24);
2619 test_acipher_speed("ecb(des3_ede)", DECRYPT, sec,
2620 des3_speed_template, DES3_SPEED_VECTORS,
2621 speed_template_24);
2622 test_acipher_speed("cbc(des3_ede)", ENCRYPT, sec,
2623 des3_speed_template, DES3_SPEED_VECTORS,
2624 speed_template_24);
2625 test_acipher_speed("cbc(des3_ede)", DECRYPT, sec,
2626 des3_speed_template, DES3_SPEED_VECTORS,
2627 speed_template_24);
Nicolas Royerde1975332012-07-01 19:19:47 +02002628 test_acipher_speed("cfb(des3_ede)", ENCRYPT, sec,
2629 des3_speed_template, DES3_SPEED_VECTORS,
2630 speed_template_24);
2631 test_acipher_speed("cfb(des3_ede)", DECRYPT, sec,
2632 des3_speed_template, DES3_SPEED_VECTORS,
2633 speed_template_24);
2634 test_acipher_speed("ofb(des3_ede)", ENCRYPT, sec,
2635 des3_speed_template, DES3_SPEED_VECTORS,
2636 speed_template_24);
2637 test_acipher_speed("ofb(des3_ede)", DECRYPT, sec,
2638 des3_speed_template, DES3_SPEED_VECTORS,
2639 speed_template_24);
Jussi Kivilinna3f3baf32011-10-18 00:02:58 +03002640 break;
2641
2642 case 502:
2643 test_acipher_speed("ecb(des)", ENCRYPT, sec, NULL, 0,
2644 speed_template_8);
2645 test_acipher_speed("ecb(des)", DECRYPT, sec, NULL, 0,
2646 speed_template_8);
2647 test_acipher_speed("cbc(des)", ENCRYPT, sec, NULL, 0,
2648 speed_template_8);
2649 test_acipher_speed("cbc(des)", DECRYPT, sec, NULL, 0,
2650 speed_template_8);
Nicolas Royerde1975332012-07-01 19:19:47 +02002651 test_acipher_speed("cfb(des)", ENCRYPT, sec, NULL, 0,
2652 speed_template_8);
2653 test_acipher_speed("cfb(des)", DECRYPT, sec, NULL, 0,
2654 speed_template_8);
2655 test_acipher_speed("ofb(des)", ENCRYPT, sec, NULL, 0,
2656 speed_template_8);
2657 test_acipher_speed("ofb(des)", DECRYPT, sec, NULL, 0,
2658 speed_template_8);
Jussi Kivilinna3f3baf32011-10-18 00:02:58 +03002659 break;
2660
Jussi Kivilinna7fb7fe42011-10-18 00:03:03 +03002661 case 503:
2662 test_acipher_speed("ecb(serpent)", ENCRYPT, sec, NULL, 0,
2663 speed_template_16_32);
2664 test_acipher_speed("ecb(serpent)", DECRYPT, sec, NULL, 0,
2665 speed_template_16_32);
2666 test_acipher_speed("cbc(serpent)", ENCRYPT, sec, NULL, 0,
2667 speed_template_16_32);
2668 test_acipher_speed("cbc(serpent)", DECRYPT, sec, NULL, 0,
2669 speed_template_16_32);
2670 test_acipher_speed("ctr(serpent)", ENCRYPT, sec, NULL, 0,
2671 speed_template_16_32);
2672 test_acipher_speed("ctr(serpent)", DECRYPT, sec, NULL, 0,
2673 speed_template_16_32);
Jussi Kivilinna87aae4b2011-10-18 13:32:39 +03002674 test_acipher_speed("lrw(serpent)", ENCRYPT, sec, NULL, 0,
2675 speed_template_32_48);
2676 test_acipher_speed("lrw(serpent)", DECRYPT, sec, NULL, 0,
2677 speed_template_32_48);
		test_acipher_speed("xts(serpent)", ENCRYPT, sec, NULL, 0,
				   speed_template_32_64);
		test_acipher_speed("xts(serpent)", DECRYPT, sec, NULL, 0,
				   speed_template_32_64);
		break;

	case 504:
		test_acipher_speed("ecb(twofish)", ENCRYPT, sec, NULL, 0,
				   speed_template_16_24_32);
		test_acipher_speed("ecb(twofish)", DECRYPT, sec, NULL, 0,
				   speed_template_16_24_32);
		test_acipher_speed("cbc(twofish)", ENCRYPT, sec, NULL, 0,
				   speed_template_16_24_32);
		test_acipher_speed("cbc(twofish)", DECRYPT, sec, NULL, 0,
				   speed_template_16_24_32);
		test_acipher_speed("ctr(twofish)", ENCRYPT, sec, NULL, 0,
				   speed_template_16_24_32);
		test_acipher_speed("ctr(twofish)", DECRYPT, sec, NULL, 0,
				   speed_template_16_24_32);
		test_acipher_speed("lrw(twofish)", ENCRYPT, sec, NULL, 0,
				   speed_template_32_40_48);
		test_acipher_speed("lrw(twofish)", DECRYPT, sec, NULL, 0,
				   speed_template_32_40_48);
		test_acipher_speed("xts(twofish)", ENCRYPT, sec, NULL, 0,
				   speed_template_32_48_64);
		test_acipher_speed("xts(twofish)", DECRYPT, sec, NULL, 0,
				   speed_template_32_48_64);
		break;

	case 505:
		test_acipher_speed("ecb(arc4)", ENCRYPT, sec, NULL, 0,
				   speed_template_8);
		break;

	case 506:
		test_acipher_speed("ecb(cast5)", ENCRYPT, sec, NULL, 0,
				   speed_template_8_16);
		test_acipher_speed("ecb(cast5)", DECRYPT, sec, NULL, 0,
				   speed_template_8_16);
		test_acipher_speed("cbc(cast5)", ENCRYPT, sec, NULL, 0,
				   speed_template_8_16);
		test_acipher_speed("cbc(cast5)", DECRYPT, sec, NULL, 0,
				   speed_template_8_16);
		test_acipher_speed("ctr(cast5)", ENCRYPT, sec, NULL, 0,
				   speed_template_8_16);
		test_acipher_speed("ctr(cast5)", DECRYPT, sec, NULL, 0,
				   speed_template_8_16);
		break;

	case 507:
		test_acipher_speed("ecb(cast6)", ENCRYPT, sec, NULL, 0,
				   speed_template_16_32);
		test_acipher_speed("ecb(cast6)", DECRYPT, sec, NULL, 0,
				   speed_template_16_32);
		test_acipher_speed("cbc(cast6)", ENCRYPT, sec, NULL, 0,
				   speed_template_16_32);
		test_acipher_speed("cbc(cast6)", DECRYPT, sec, NULL, 0,
				   speed_template_16_32);
		test_acipher_speed("ctr(cast6)", ENCRYPT, sec, NULL, 0,
				   speed_template_16_32);
		test_acipher_speed("ctr(cast6)", DECRYPT, sec, NULL, 0,
				   speed_template_16_32);
		test_acipher_speed("lrw(cast6)", ENCRYPT, sec, NULL, 0,
				   speed_template_32_48);
		test_acipher_speed("lrw(cast6)", DECRYPT, sec, NULL, 0,
				   speed_template_32_48);
		test_acipher_speed("xts(cast6)", ENCRYPT, sec, NULL, 0,
				   speed_template_32_64);
		test_acipher_speed("xts(cast6)", DECRYPT, sec, NULL, 0,
				   speed_template_32_64);
		break;

	case 508:
		test_acipher_speed("ecb(camellia)", ENCRYPT, sec, NULL, 0,
				   speed_template_16_32);
		test_acipher_speed("ecb(camellia)", DECRYPT, sec, NULL, 0,
				   speed_template_16_32);
		test_acipher_speed("cbc(camellia)", ENCRYPT, sec, NULL, 0,
				   speed_template_16_32);
		test_acipher_speed("cbc(camellia)", DECRYPT, sec, NULL, 0,
				   speed_template_16_32);
		test_acipher_speed("ctr(camellia)", ENCRYPT, sec, NULL, 0,
				   speed_template_16_32);
		test_acipher_speed("ctr(camellia)", DECRYPT, sec, NULL, 0,
				   speed_template_16_32);
		test_acipher_speed("lrw(camellia)", ENCRYPT, sec, NULL, 0,
				   speed_template_32_48);
		test_acipher_speed("lrw(camellia)", DECRYPT, sec, NULL, 0,
				   speed_template_32_48);
		test_acipher_speed("xts(camellia)", ENCRYPT, sec, NULL, 0,
				   speed_template_32_64);
		test_acipher_speed("xts(camellia)", DECRYPT, sec, NULL, 0,
				   speed_template_32_64);
		break;

	case 509:
		test_acipher_speed("ecb(blowfish)", ENCRYPT, sec, NULL, 0,
				   speed_template_8_32);
		test_acipher_speed("ecb(blowfish)", DECRYPT, sec, NULL, 0,
				   speed_template_8_32);
		test_acipher_speed("cbc(blowfish)", ENCRYPT, sec, NULL, 0,
				   speed_template_8_32);
		test_acipher_speed("cbc(blowfish)", DECRYPT, sec, NULL, 0,
				   speed_template_8_32);
		test_acipher_speed("ctr(blowfish)", ENCRYPT, sec, NULL, 0,
				   speed_template_8_32);
		test_acipher_speed("ctr(blowfish)", DECRYPT, sec, NULL, 0,
				   speed_template_8_32);
		break;

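	/*
	 * Modes 600 and up are the multi-buffer variants of the skcipher
	 * speed tests: each algorithm is exercised with num_mb requests
	 * in flight at once (module parameter, defaults to 8).
	 */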
	case 600:
		test_mb_skcipher_speed("ecb(aes)", ENCRYPT, sec, NULL, 0,
				       speed_template_16_24_32, num_mb);
		test_mb_skcipher_speed("ecb(aes)", DECRYPT, sec, NULL, 0,
				       speed_template_16_24_32, num_mb);
		test_mb_skcipher_speed("cbc(aes)", ENCRYPT, sec, NULL, 0,
				       speed_template_16_24_32, num_mb);
		test_mb_skcipher_speed("cbc(aes)", DECRYPT, sec, NULL, 0,
				       speed_template_16_24_32, num_mb);
		test_mb_skcipher_speed("lrw(aes)", ENCRYPT, sec, NULL, 0,
				       speed_template_32_40_48, num_mb);
		test_mb_skcipher_speed("lrw(aes)", DECRYPT, sec, NULL, 0,
				       speed_template_32_40_48, num_mb);
		test_mb_skcipher_speed("xts(aes)", ENCRYPT, sec, NULL, 0,
				       speed_template_32_64, num_mb);
		test_mb_skcipher_speed("xts(aes)", DECRYPT, sec, NULL, 0,
				       speed_template_32_64, num_mb);
		test_mb_skcipher_speed("cts(cbc(aes))", ENCRYPT, sec, NULL, 0,
				       speed_template_16_24_32, num_mb);
		test_mb_skcipher_speed("cts(cbc(aes))", DECRYPT, sec, NULL, 0,
				       speed_template_16_24_32, num_mb);
		test_mb_skcipher_speed("ctr(aes)", ENCRYPT, sec, NULL, 0,
				       speed_template_16_24_32, num_mb);
		test_mb_skcipher_speed("ctr(aes)", DECRYPT, sec, NULL, 0,
				       speed_template_16_24_32, num_mb);
		test_mb_skcipher_speed("cfb(aes)", ENCRYPT, sec, NULL, 0,
				       speed_template_16_24_32, num_mb);
		test_mb_skcipher_speed("cfb(aes)", DECRYPT, sec, NULL, 0,
				       speed_template_16_24_32, num_mb);
		test_mb_skcipher_speed("ofb(aes)", ENCRYPT, sec, NULL, 0,
				       speed_template_16_24_32, num_mb);
		test_mb_skcipher_speed("ofb(aes)", DECRYPT, sec, NULL, 0,
				       speed_template_16_24_32, num_mb);
		test_mb_skcipher_speed("rfc3686(ctr(aes))", ENCRYPT, sec, NULL,
				       0, speed_template_20_28_36, num_mb);
		test_mb_skcipher_speed("rfc3686(ctr(aes))", DECRYPT, sec, NULL,
				       0, speed_template_20_28_36, num_mb);
		break;

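	/*
	 * Unlike the other multi-buffer cases, the DES3 tests pass explicit
	 * key material (des3_speed_template, DES3_SPEED_VECTORS entries)
	 * instead of a NULL template; the intent is presumably to keep the
	 * keys within what the triple-DES setkey checks will accept.
	 */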
	case 601:
		test_mb_skcipher_speed("ecb(des3_ede)", ENCRYPT, sec,
				       des3_speed_template, DES3_SPEED_VECTORS,
				       speed_template_24, num_mb);
		test_mb_skcipher_speed("ecb(des3_ede)", DECRYPT, sec,
				       des3_speed_template, DES3_SPEED_VECTORS,
				       speed_template_24, num_mb);
		test_mb_skcipher_speed("cbc(des3_ede)", ENCRYPT, sec,
				       des3_speed_template, DES3_SPEED_VECTORS,
				       speed_template_24, num_mb);
		test_mb_skcipher_speed("cbc(des3_ede)", DECRYPT, sec,
				       des3_speed_template, DES3_SPEED_VECTORS,
				       speed_template_24, num_mb);
		test_mb_skcipher_speed("cfb(des3_ede)", ENCRYPT, sec,
				       des3_speed_template, DES3_SPEED_VECTORS,
				       speed_template_24, num_mb);
		test_mb_skcipher_speed("cfb(des3_ede)", DECRYPT, sec,
				       des3_speed_template, DES3_SPEED_VECTORS,
				       speed_template_24, num_mb);
		test_mb_skcipher_speed("ofb(des3_ede)", ENCRYPT, sec,
				       des3_speed_template, DES3_SPEED_VECTORS,
				       speed_template_24, num_mb);
		test_mb_skcipher_speed("ofb(des3_ede)", DECRYPT, sec,
				       des3_speed_template, DES3_SPEED_VECTORS,
				       speed_template_24, num_mb);
		break;

	case 602:
		test_mb_skcipher_speed("ecb(des)", ENCRYPT, sec, NULL, 0,
				       speed_template_8, num_mb);
		test_mb_skcipher_speed("ecb(des)", DECRYPT, sec, NULL, 0,
				       speed_template_8, num_mb);
		test_mb_skcipher_speed("cbc(des)", ENCRYPT, sec, NULL, 0,
				       speed_template_8, num_mb);
		test_mb_skcipher_speed("cbc(des)", DECRYPT, sec, NULL, 0,
				       speed_template_8, num_mb);
		test_mb_skcipher_speed("cfb(des)", ENCRYPT, sec, NULL, 0,
				       speed_template_8, num_mb);
		test_mb_skcipher_speed("cfb(des)", DECRYPT, sec, NULL, 0,
				       speed_template_8, num_mb);
		test_mb_skcipher_speed("ofb(des)", ENCRYPT, sec, NULL, 0,
				       speed_template_8, num_mb);
		test_mb_skcipher_speed("ofb(des)", DECRYPT, sec, NULL, 0,
				       speed_template_8, num_mb);
		break;

	case 603:
		test_mb_skcipher_speed("ecb(serpent)", ENCRYPT, sec, NULL, 0,
				       speed_template_16_32, num_mb);
		test_mb_skcipher_speed("ecb(serpent)", DECRYPT, sec, NULL, 0,
				       speed_template_16_32, num_mb);
		test_mb_skcipher_speed("cbc(serpent)", ENCRYPT, sec, NULL, 0,
				       speed_template_16_32, num_mb);
		test_mb_skcipher_speed("cbc(serpent)", DECRYPT, sec, NULL, 0,
				       speed_template_16_32, num_mb);
		test_mb_skcipher_speed("ctr(serpent)", ENCRYPT, sec, NULL, 0,
				       speed_template_16_32, num_mb);
		test_mb_skcipher_speed("ctr(serpent)", DECRYPT, sec, NULL, 0,
				       speed_template_16_32, num_mb);
		test_mb_skcipher_speed("lrw(serpent)", ENCRYPT, sec, NULL, 0,
				       speed_template_32_48, num_mb);
		test_mb_skcipher_speed("lrw(serpent)", DECRYPT, sec, NULL, 0,
				       speed_template_32_48, num_mb);
		test_mb_skcipher_speed("xts(serpent)", ENCRYPT, sec, NULL, 0,
				       speed_template_32_64, num_mb);
		test_mb_skcipher_speed("xts(serpent)", DECRYPT, sec, NULL, 0,
				       speed_template_32_64, num_mb);
		break;

	case 604:
		test_mb_skcipher_speed("ecb(twofish)", ENCRYPT, sec, NULL, 0,
				       speed_template_16_24_32, num_mb);
		test_mb_skcipher_speed("ecb(twofish)", DECRYPT, sec, NULL, 0,
				       speed_template_16_24_32, num_mb);
		test_mb_skcipher_speed("cbc(twofish)", ENCRYPT, sec, NULL, 0,
				       speed_template_16_24_32, num_mb);
		test_mb_skcipher_speed("cbc(twofish)", DECRYPT, sec, NULL, 0,
				       speed_template_16_24_32, num_mb);
		test_mb_skcipher_speed("ctr(twofish)", ENCRYPT, sec, NULL, 0,
				       speed_template_16_24_32, num_mb);
		test_mb_skcipher_speed("ctr(twofish)", DECRYPT, sec, NULL, 0,
				       speed_template_16_24_32, num_mb);
		test_mb_skcipher_speed("lrw(twofish)", ENCRYPT, sec, NULL, 0,
				       speed_template_32_40_48, num_mb);
		test_mb_skcipher_speed("lrw(twofish)", DECRYPT, sec, NULL, 0,
				       speed_template_32_40_48, num_mb);
		test_mb_skcipher_speed("xts(twofish)", ENCRYPT, sec, NULL, 0,
				       speed_template_32_48_64, num_mb);
		test_mb_skcipher_speed("xts(twofish)", DECRYPT, sec, NULL, 0,
				       speed_template_32_48_64, num_mb);
		break;

	case 605:
		test_mb_skcipher_speed("ecb(arc4)", ENCRYPT, sec, NULL, 0,
				       speed_template_8, num_mb);
		break;

	case 606:
		test_mb_skcipher_speed("ecb(cast5)", ENCRYPT, sec, NULL, 0,
				       speed_template_8_16, num_mb);
		test_mb_skcipher_speed("ecb(cast5)", DECRYPT, sec, NULL, 0,
				       speed_template_8_16, num_mb);
		test_mb_skcipher_speed("cbc(cast5)", ENCRYPT, sec, NULL, 0,
				       speed_template_8_16, num_mb);
		test_mb_skcipher_speed("cbc(cast5)", DECRYPT, sec, NULL, 0,
				       speed_template_8_16, num_mb);
		test_mb_skcipher_speed("ctr(cast5)", ENCRYPT, sec, NULL, 0,
				       speed_template_8_16, num_mb);
		test_mb_skcipher_speed("ctr(cast5)", DECRYPT, sec, NULL, 0,
				       speed_template_8_16, num_mb);
		break;

	case 607:
		test_mb_skcipher_speed("ecb(cast6)", ENCRYPT, sec, NULL, 0,
				       speed_template_16_32, num_mb);
		test_mb_skcipher_speed("ecb(cast6)", DECRYPT, sec, NULL, 0,
				       speed_template_16_32, num_mb);
		test_mb_skcipher_speed("cbc(cast6)", ENCRYPT, sec, NULL, 0,
				       speed_template_16_32, num_mb);
		test_mb_skcipher_speed("cbc(cast6)", DECRYPT, sec, NULL, 0,
				       speed_template_16_32, num_mb);
		test_mb_skcipher_speed("ctr(cast6)", ENCRYPT, sec, NULL, 0,
				       speed_template_16_32, num_mb);
		test_mb_skcipher_speed("ctr(cast6)", DECRYPT, sec, NULL, 0,
				       speed_template_16_32, num_mb);
		test_mb_skcipher_speed("lrw(cast6)", ENCRYPT, sec, NULL, 0,
				       speed_template_32_48, num_mb);
		test_mb_skcipher_speed("lrw(cast6)", DECRYPT, sec, NULL, 0,
				       speed_template_32_48, num_mb);
		test_mb_skcipher_speed("xts(cast6)", ENCRYPT, sec, NULL, 0,
				       speed_template_32_64, num_mb);
		test_mb_skcipher_speed("xts(cast6)", DECRYPT, sec, NULL, 0,
				       speed_template_32_64, num_mb);
		break;

	case 608:
		test_mb_skcipher_speed("ecb(camellia)", ENCRYPT, sec, NULL, 0,
				       speed_template_16_32, num_mb);
		test_mb_skcipher_speed("ecb(camellia)", DECRYPT, sec, NULL, 0,
				       speed_template_16_32, num_mb);
		test_mb_skcipher_speed("cbc(camellia)", ENCRYPT, sec, NULL, 0,
				       speed_template_16_32, num_mb);
		test_mb_skcipher_speed("cbc(camellia)", DECRYPT, sec, NULL, 0,
				       speed_template_16_32, num_mb);
		test_mb_skcipher_speed("ctr(camellia)", ENCRYPT, sec, NULL, 0,
				       speed_template_16_32, num_mb);
		test_mb_skcipher_speed("ctr(camellia)", DECRYPT, sec, NULL, 0,
				       speed_template_16_32, num_mb);
		test_mb_skcipher_speed("lrw(camellia)", ENCRYPT, sec, NULL, 0,
				       speed_template_32_48, num_mb);
		test_mb_skcipher_speed("lrw(camellia)", DECRYPT, sec, NULL, 0,
				       speed_template_32_48, num_mb);
		test_mb_skcipher_speed("xts(camellia)", ENCRYPT, sec, NULL, 0,
				       speed_template_32_64, num_mb);
		test_mb_skcipher_speed("xts(camellia)", DECRYPT, sec, NULL, 0,
				       speed_template_32_64, num_mb);
		break;

	case 609:
		test_mb_skcipher_speed("ecb(blowfish)", ENCRYPT, sec, NULL, 0,
				       speed_template_8_32, num_mb);
		test_mb_skcipher_speed("ecb(blowfish)", DECRYPT, sec, NULL, 0,
				       speed_template_8_32, num_mb);
		test_mb_skcipher_speed("cbc(blowfish)", ENCRYPT, sec, NULL, 0,
				       speed_template_8_32, num_mb);
		test_mb_skcipher_speed("cbc(blowfish)", DECRYPT, sec, NULL, 0,
				       speed_template_8_32, num_mb);
		test_mb_skcipher_speed("ctr(blowfish)", ENCRYPT, sec, NULL, 0,
				       speed_template_8_32, num_mb);
		test_mb_skcipher_speed("ctr(blowfish)", DECRYPT, sec, NULL, 0,
				       speed_template_8_32, num_mb);
		break;

	case 1000:
		test_available();
		break;
	}

	return ret;
}

static int __init tcrypt_mod_init(void)
{
	int err = -ENOMEM;
	int i;

	for (i = 0; i < TVMEMSIZE; i++) {
		tvmem[i] = (void *)__get_free_page(GFP_KERNEL);
		if (!tvmem[i])
			goto err_free_tv;
	}

	err = do_test(alg, type, mask, mode, num_mb);

	if (err) {
		printk(KERN_ERR "tcrypt: one or more tests failed!\n");
		goto err_free_tv;
	} else {
		pr_debug("all tests passed\n");
	}

	/* We intentionally return -EAGAIN to prevent keeping the module
	 * loaded, unless we're running in fips mode. It does all its work
	 * from init() and doesn't offer any runtime functionality, but in
	 * the fips case, checking for a successful load is helpful.
	 * => we don't need it in memory, do we?
	 * -- mludvig
	 */
	if (!fips_enabled)
		err = -EAGAIN;

err_free_tv:
	for (i = 0; i < TVMEMSIZE && tvmem[i]; i++)
		free_page((unsigned long)tvmem[i]);

	return err;
}

/*
 * If an init function is provided, an exit function must also be provided
 * to allow module unload.
 */
static void __exit tcrypt_mod_fini(void) { }

subsys_initcall(tcrypt_mod_init);
module_exit(tcrypt_mod_fini);

module_param(alg, charp, 0);
module_param(type, uint, 0);
module_param(mask, uint, 0);
module_param(mode, int, 0);
module_param(sec, uint, 0);
MODULE_PARM_DESC(sec, "Length in seconds of speed tests "
		      "(defaults to zero which uses CPU cycles instead)");
module_param(num_mb, uint, 0000);
MODULE_PARM_DESC(num_mb, "Number of concurrent requests to be used in mb speed tests (defaults to 8)");
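
/*
 * Example usage (illustrative only; see the mode numbers in do_test()):
 *
 *   modprobe tcrypt mode=600 sec=1 num_mb=16
 *
 * runs each multi-buffer AES speed test from case 600 for one second with
 * 16 concurrent requests. Unless fips_enabled is set, the module then
 * returns -EAGAIN on purpose so it never stays loaded between runs.
 */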

MODULE_LICENSE("GPL");
MODULE_DESCRIPTION("Quick & dirty crypto testing module");
MODULE_AUTHOR("James Morris <jmorris@intercode.com.au>");