// SPDX-License-Identifier: GPL-2.0-or-later
/*
 * Quick & dirty crypto testing module.
 *
 * This will only exist until we have a better testing mechanism
 * (e.g. a char device).
 *
 * Copyright (c) 2002 James Morris <jmorris@intercode.com.au>
 * Copyright (c) 2002 Jean-Francois Dive <jef@linuxbe.org>
 * Copyright (c) 2007 Nokia Siemens Networks
 *
 * Updated RFC4106 AES-GCM testing.
 *    Authors: Aidan O'Mahony (aidan.o.mahony@intel.com)
 *             Adrian Hoban <adrian.hoban@intel.com>
 *             Gabriele Paoloni <gabriele.paoloni@intel.com>
 *             Tadeusz Struk (tadeusz.struk@intel.com)
 *    Copyright (c) 2010, Intel Corporation.
 */

#define pr_fmt(fmt) KBUILD_MODNAME ": " fmt

#include <crypto/aead.h>
#include <crypto/hash.h>
#include <crypto/skcipher.h>
#include <linux/err.h>
#include <linux/fips.h>
#include <linux/init.h>
#include <linux/gfp.h>
#include <linux/module.h>
#include <linux/scatterlist.h>
#include <linux/string.h>
#include <linux/moduleparam.h>
#include <linux/jiffies.h>
#include <linux/timex.h>
#include <linux/interrupt.h>
#include "tcrypt.h"

/*
 * Need slab memory for testing (size in number of pages).
 */
#define TVMEMSIZE	4

/*
 * Used by test_cipher_speed()
 */
#define ENCRYPT 1
#define DECRYPT 0

#define MAX_DIGEST_SIZE	64

/*
 * return a string with the driver name
 */
#define get_driver_name(tfm_type, tfm) crypto_tfm_alg_driver_name(tfm_type ## _tfm(tfm))

/*
 * Used by test_cipher_speed()
 */
static unsigned int sec;

static char *alg = NULL;
static u32 type;
static u32 mask;
static int mode;
static u32 num_mb = 8;
static unsigned int klen;
static char *tvmem[TVMEMSIZE];

static const char *check[] = {
	"des", "md5", "des3_ede", "rot13", "sha1", "sha224", "sha256", "sm3",
	"blowfish", "twofish", "serpent", "sha384", "sha512", "md4", "aes",
	"cast6", "arc4", "michael_mic", "deflate", "crc32c", "tea", "xtea",
	"khazad", "wp512", "wp384", "wp256", "xeta", "fcrypt",
	"camellia", "seed", "rmd160",
	"lzo", "lzo-rle", "cts", "sha3-224", "sha3-256", "sha3-384",
	"sha3-512", "streebog256", "streebog512",
	NULL
};

static const int block_sizes[] = { 16, 64, 128, 256, 1024, 1420, 4096, 0 };
static const int aead_sizes[] = { 16, 64, 256, 512, 1024, 1420, 4096, 8192, 0 };

#define XBUFSIZE 8
#define MAX_IVLEN 32

static int testmgr_alloc_buf(char *buf[XBUFSIZE])
{
	int i;

	for (i = 0; i < XBUFSIZE; i++) {
		buf[i] = (void *)__get_free_page(GFP_KERNEL);
		if (!buf[i])
			goto err_free_buf;
	}

	return 0;

err_free_buf:
	while (i-- > 0)
		free_page((unsigned long)buf[i]);

	return -ENOMEM;
}

static void testmgr_free_buf(char *buf[XBUFSIZE])
{
	int i;

	for (i = 0; i < XBUFSIZE; i++)
		free_page((unsigned long)buf[i]);
}

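/*
 * Lay out an AEAD buffer as a scatterlist: entry 0 holds the associated
 * data, followed by up to XBUFSIZE page-sized chunks of payload (the last
 * chunk may be shorter).  Requests larger than XBUFSIZE pages are capped.
 */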
static void sg_init_aead(struct scatterlist *sg, char *xbuf[XBUFSIZE],
			 unsigned int buflen, const void *assoc,
			 unsigned int aad_size)
{
	int np = (buflen + PAGE_SIZE - 1)/PAGE_SIZE;
	int k, rem;

	if (np > XBUFSIZE) {
		rem = PAGE_SIZE;
		np = XBUFSIZE;
	} else {
		rem = buflen % PAGE_SIZE;
	}

	sg_init_table(sg, np + 1);

	sg_set_buf(&sg[0], assoc, aad_size);

	if (rem)
		np--;
	for (k = 0; k < np; k++)
		sg_set_buf(&sg[k + 1], xbuf[k], PAGE_SIZE);

	if (rem)
		sg_set_buf(&sg[k + 1], xbuf[k], rem);
}

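/*
 * Wait for a possibly asynchronous AEAD operation to complete; the request's
 * callback data is expected to point at a crypto_wait set up by the caller.
 */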
static inline int do_one_aead_op(struct aead_request *req, int ret)
{
	struct crypto_wait *wait = req->base.data;

	return crypto_wait_req(ret, wait);
}

struct test_mb_aead_data {
	struct scatterlist sg[XBUFSIZE];
	struct scatterlist sgout[XBUFSIZE];
	struct aead_request *req;
	struct crypto_wait wait;
	char *xbuf[XBUFSIZE];
	char *xoutbuf[XBUFSIZE];
	char *axbuf[XBUFSIZE];
};

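/*
 * Issue num_mb AEAD requests in parallel and wait for all of them; the first
 * non-zero completion status is returned, with per-request codes in rc[].
 */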
static int do_mult_aead_op(struct test_mb_aead_data *data, int enc,
			   u32 num_mb, int *rc)
{
	int i, err = 0;

	/* Fire up a bunch of concurrent requests */
	for (i = 0; i < num_mb; i++) {
		if (enc == ENCRYPT)
			rc[i] = crypto_aead_encrypt(data[i].req);
		else
			rc[i] = crypto_aead_decrypt(data[i].req);
	}

	/* Wait for all requests to finish */
	for (i = 0; i < num_mb; i++) {
		rc[i] = crypto_wait_req(rc[i], &data[i].wait);

		if (rc[i]) {
			pr_info("concurrent request %d error %d\n", i, rc[i]);
			err = rc[i];
		}
	}

	return err;
}

static int test_mb_aead_jiffies(struct test_mb_aead_data *data, int enc,
				int blen, int secs, u32 num_mb)
{
	unsigned long start, end;
	int bcount;
	int ret = 0;
	int *rc;

	rc = kcalloc(num_mb, sizeof(*rc), GFP_KERNEL);
	if (!rc)
		return -ENOMEM;

	for (start = jiffies, end = start + secs * HZ, bcount = 0;
	     time_before(jiffies, end); bcount++) {
		ret = do_mult_aead_op(data, enc, num_mb, rc);
		if (ret)
			goto out;
	}

	pr_cont("%d operations in %d seconds (%llu bytes)\n",
		bcount * num_mb, secs, (u64)bcount * blen * num_mb);

out:
	kfree(rc);
	return ret;
}

static int test_mb_aead_cycles(struct test_mb_aead_data *data, int enc,
			       int blen, u32 num_mb)
{
	unsigned long cycles = 0;
	int ret = 0;
	int i;
	int *rc;

	rc = kcalloc(num_mb, sizeof(*rc), GFP_KERNEL);
	if (!rc)
		return -ENOMEM;

	/* Warm-up run. */
	for (i = 0; i < 4; i++) {
		ret = do_mult_aead_op(data, enc, num_mb, rc);
		if (ret)
			goto out;
	}

	/* The real thing. */
	for (i = 0; i < 8; i++) {
		cycles_t start, end;

		start = get_cycles();
		ret = do_mult_aead_op(data, enc, num_mb, rc);
		end = get_cycles();

		if (ret)
			goto out;

		cycles += end - start;
	}

	pr_cont("1 operation in %lu cycles (%d bytes)\n",
		(cycles + 4) / (8 * num_mb), blen);

out:
	kfree(rc);
	return ret;
}

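/*
 * Multibuffer AEAD speed test: allocate num_mb independent requests, then
 * for every (key size, block size) combination run them concurrently and
 * report either operations per second (sec= set) or cycles per operation.
 */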
static void test_mb_aead_speed(const char *algo, int enc, int secs,
			       struct aead_speed_template *template,
			       unsigned int tcount, u8 authsize,
			       unsigned int aad_size, u8 *keysize, u32 num_mb)
{
	struct test_mb_aead_data *data;
	struct crypto_aead *tfm;
	unsigned int i, j, iv_len;
	const int *b_size;
	const char *key;
	const char *e;
	void *assoc;
	char *iv;
	int ret;

	if (aad_size >= PAGE_SIZE) {
		pr_err("associate data length (%u) too big\n", aad_size);
		return;
	}

	iv = kzalloc(MAX_IVLEN, GFP_KERNEL);
	if (!iv)
		return;

	if (enc == ENCRYPT)
		e = "encryption";
	else
		e = "decryption";

	data = kcalloc(num_mb, sizeof(*data), GFP_KERNEL);
	if (!data)
		goto out_free_iv;

	tfm = crypto_alloc_aead(algo, 0, 0);
	if (IS_ERR(tfm)) {
		pr_err("failed to load transform for %s: %ld\n",
		       algo, PTR_ERR(tfm));
		goto out_free_data;
	}

	ret = crypto_aead_setauthsize(tfm, authsize);
	if (ret) {
		pr_err("alg: aead: Failed to setauthsize for %s: %d\n", algo,
		       ret);
		goto out_free_tfm;
	}

	for (i = 0; i < num_mb; ++i)
		if (testmgr_alloc_buf(data[i].xbuf)) {
			while (i--)
				testmgr_free_buf(data[i].xbuf);
			goto out_free_tfm;
		}

	for (i = 0; i < num_mb; ++i)
		if (testmgr_alloc_buf(data[i].axbuf)) {
			while (i--)
				testmgr_free_buf(data[i].axbuf);
			goto out_free_xbuf;
		}

	for (i = 0; i < num_mb; ++i)
		if (testmgr_alloc_buf(data[i].xoutbuf)) {
			while (i--)
				testmgr_free_buf(data[i].xoutbuf);
			goto out_free_axbuf;
		}

	for (i = 0; i < num_mb; ++i) {
		data[i].req = aead_request_alloc(tfm, GFP_KERNEL);
		if (!data[i].req) {
			pr_err("alg: aead: Failed to allocate request for %s\n",
			       algo);
			while (i--)
				aead_request_free(data[i].req);
			goto out_free_xoutbuf;
		}
	}

	for (i = 0; i < num_mb; ++i) {
		crypto_init_wait(&data[i].wait);
		aead_request_set_callback(data[i].req,
					  CRYPTO_TFM_REQ_MAY_BACKLOG,
					  crypto_req_done, &data[i].wait);
	}

	pr_info("\ntesting speed of multibuffer %s (%s) %s\n", algo,
		get_driver_name(crypto_aead, tfm), e);

	i = 0;
	do {
		b_size = aead_sizes;
		do {
			int bs = round_up(*b_size, crypto_aead_blocksize(tfm));

			if (bs + authsize > XBUFSIZE * PAGE_SIZE) {
				pr_err("template (%u) too big for buffer (%lu)\n",
				       authsize + bs, XBUFSIZE * PAGE_SIZE);
				goto out;
			}

			pr_info("test %u (%d bit key, %d byte blocks): ", i,
				*keysize * 8, bs);

			/* Set up tfm global state, i.e. the key */

			memset(tvmem[0], 0xff, PAGE_SIZE);
			key = tvmem[0];
			for (j = 0; j < tcount; j++) {
				if (template[j].klen == *keysize) {
					key = template[j].key;
					break;
				}
			}

			crypto_aead_clear_flags(tfm, ~0);

			ret = crypto_aead_setkey(tfm, key, *keysize);
			if (ret) {
				pr_err("setkey() failed flags=%x\n",
				       crypto_aead_get_flags(tfm));
				goto out;
			}

			iv_len = crypto_aead_ivsize(tfm);
			if (iv_len)
				memset(iv, 0xff, iv_len);

			/* Now setup per request stuff, i.e. buffers */

			for (j = 0; j < num_mb; ++j) {
				struct test_mb_aead_data *cur = &data[j];

				assoc = cur->axbuf[0];
				memset(assoc, 0xff, aad_size);

				sg_init_aead(cur->sg, cur->xbuf,
					     bs + (enc ? 0 : authsize),
					     assoc, aad_size);

				sg_init_aead(cur->sgout, cur->xoutbuf,
					     bs + (enc ? authsize : 0),
					     assoc, aad_size);

				aead_request_set_ad(cur->req, aad_size);

				if (!enc) {

					aead_request_set_crypt(cur->req,
							       cur->sgout,
							       cur->sg,
							       bs, iv);
					ret = crypto_aead_encrypt(cur->req);
					ret = do_one_aead_op(cur->req, ret);

					if (ret) {
						pr_err("calculating auth failed (%d)\n",
						       ret);
						break;
					}
				}

				aead_request_set_crypt(cur->req, cur->sg,
						       cur->sgout, bs +
						       (enc ? 0 : authsize),
						       iv);

			}

			if (secs) {
				ret = test_mb_aead_jiffies(data, enc, bs,
							   secs, num_mb);
				cond_resched();
			} else {
				ret = test_mb_aead_cycles(data, enc, bs,
							  num_mb);
			}

			if (ret) {
				pr_err("%s() failed return code=%d\n", e, ret);
				break;
			}
			b_size++;
			i++;
		} while (*b_size);
		keysize++;
	} while (*keysize);

out:
	for (i = 0; i < num_mb; ++i)
		aead_request_free(data[i].req);
out_free_xoutbuf:
	for (i = 0; i < num_mb; ++i)
		testmgr_free_buf(data[i].xoutbuf);
out_free_axbuf:
	for (i = 0; i < num_mb; ++i)
		testmgr_free_buf(data[i].axbuf);
out_free_xbuf:
	for (i = 0; i < num_mb; ++i)
		testmgr_free_buf(data[i].xbuf);
out_free_tfm:
	crypto_free_aead(tfm);
out_free_data:
	kfree(data);
out_free_iv:
	kfree(iv);
}

static int test_aead_jiffies(struct aead_request *req, int enc,
			     int blen, int secs)
{
	unsigned long start, end;
	int bcount;
	int ret;

	for (start = jiffies, end = start + secs * HZ, bcount = 0;
	     time_before(jiffies, end); bcount++) {
		if (enc)
			ret = do_one_aead_op(req, crypto_aead_encrypt(req));
		else
			ret = do_one_aead_op(req, crypto_aead_decrypt(req));

		if (ret)
			return ret;
	}

	pr_cont("%d operations in %d seconds (%llu bytes)\n",
		bcount, secs, (u64)bcount * blen);
	return 0;
}

static int test_aead_cycles(struct aead_request *req, int enc, int blen)
{
	unsigned long cycles = 0;
	int ret = 0;
	int i;

	/* Warm-up run. */
	for (i = 0; i < 4; i++) {
		if (enc)
			ret = do_one_aead_op(req, crypto_aead_encrypt(req));
		else
			ret = do_one_aead_op(req, crypto_aead_decrypt(req));

		if (ret)
			goto out;
	}

	/* The real thing. */
	for (i = 0; i < 8; i++) {
		cycles_t start, end;

		start = get_cycles();
		if (enc)
			ret = do_one_aead_op(req, crypto_aead_encrypt(req));
		else
			ret = do_one_aead_op(req, crypto_aead_decrypt(req));
		end = get_cycles();

		if (ret)
			goto out;

		cycles += end - start;
	}

out:
	if (ret == 0)
		printk("1 operation in %lu cycles (%d bytes)\n",
		       (cycles + 4) / 8, blen);

	return ret;
}

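/*
 * Single-request AEAD speed test: for every (key size, block size)
 * combination, run encrypt or decrypt repeatedly and report throughput.
 * For decryption a valid authentication tag is generated first by running
 * the encrypt path with the buffers swapped.
 */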
static void test_aead_speed(const char *algo, int enc, unsigned int secs,
			    struct aead_speed_template *template,
			    unsigned int tcount, u8 authsize,
			    unsigned int aad_size, u8 *keysize)
{
	unsigned int i, j;
	struct crypto_aead *tfm;
	int ret = -ENOMEM;
	const char *key;
	struct aead_request *req;
	struct scatterlist *sg;
	struct scatterlist *sgout;
	const char *e;
	void *assoc;
	char *iv;
	char *xbuf[XBUFSIZE];
	char *xoutbuf[XBUFSIZE];
	char *axbuf[XBUFSIZE];
	const int *b_size;
	unsigned int iv_len;
	struct crypto_wait wait;

	iv = kzalloc(MAX_IVLEN, GFP_KERNEL);
	if (!iv)
		return;

	if (aad_size >= PAGE_SIZE) {
		pr_err("associate data length (%u) too big\n", aad_size);
		goto out_noxbuf;
	}

	if (enc == ENCRYPT)
		e = "encryption";
	else
		e = "decryption";

	if (testmgr_alloc_buf(xbuf))
		goto out_noxbuf;
	if (testmgr_alloc_buf(axbuf))
		goto out_noaxbuf;
	if (testmgr_alloc_buf(xoutbuf))
		goto out_nooutbuf;

	sg = kmalloc(sizeof(*sg) * 9 * 2, GFP_KERNEL);
	if (!sg)
		goto out_nosg;
	sgout = &sg[9];

	tfm = crypto_alloc_aead(algo, 0, 0);
	if (IS_ERR(tfm)) {
		pr_err("alg: aead: Failed to load transform for %s: %ld\n", algo,
		       PTR_ERR(tfm));
		goto out_notfm;
	}

	ret = crypto_aead_setauthsize(tfm, authsize);
	if (ret) {
		pr_err("alg: aead: Failed to setauthsize for %s: %d\n", algo,
		       ret);
		goto out_noreq;
	}

	crypto_init_wait(&wait);
	printk(KERN_INFO "\ntesting speed of %s (%s) %s\n", algo,
	       get_driver_name(crypto_aead, tfm), e);

	req = aead_request_alloc(tfm, GFP_KERNEL);
	if (!req) {
		pr_err("alg: aead: Failed to allocate request for %s\n",
		       algo);
		goto out_noreq;
	}

	aead_request_set_callback(req, CRYPTO_TFM_REQ_MAY_BACKLOG,
				  crypto_req_done, &wait);

	i = 0;
	do {
		b_size = aead_sizes;
		do {
			u32 bs = round_up(*b_size, crypto_aead_blocksize(tfm));

			assoc = axbuf[0];
			memset(assoc, 0xff, aad_size);

			if ((*keysize + bs) > TVMEMSIZE * PAGE_SIZE) {
				pr_err("template (%u) too big for tvmem (%lu)\n",
				       *keysize + bs,
				       TVMEMSIZE * PAGE_SIZE);
				goto out;
			}

			key = tvmem[0];
			for (j = 0; j < tcount; j++) {
				if (template[j].klen == *keysize) {
					key = template[j].key;
					break;
				}
			}

			ret = crypto_aead_setkey(tfm, key, *keysize);
			if (ret) {
				pr_err("setkey() failed flags=%x: %d\n",
				       crypto_aead_get_flags(tfm), ret);
				goto out;
			}

			iv_len = crypto_aead_ivsize(tfm);
			if (iv_len)
				memset(iv, 0xff, iv_len);

			crypto_aead_clear_flags(tfm, ~0);
			printk(KERN_INFO "test %u (%d bit key, %d byte blocks): ",
			       i, *keysize * 8, bs);

			memset(tvmem[0], 0xff, PAGE_SIZE);

			sg_init_aead(sg, xbuf, bs + (enc ? 0 : authsize),
				     assoc, aad_size);

			sg_init_aead(sgout, xoutbuf,
				     bs + (enc ? authsize : 0), assoc,
				     aad_size);

			aead_request_set_ad(req, aad_size);

			if (!enc) {

				/*
				 * For decryption we need a proper auth so
				 * we do the encryption path once with buffers
				 * reversed (input <-> output) to calculate it
				 */
				aead_request_set_crypt(req, sgout, sg,
						       bs, iv);
				ret = do_one_aead_op(req,
						     crypto_aead_encrypt(req));

				if (ret) {
					pr_err("calculating auth failed (%d)\n",
					       ret);
					break;
				}
			}

			aead_request_set_crypt(req, sg, sgout,
					       bs + (enc ? 0 : authsize),
					       iv);

			if (secs) {
				ret = test_aead_jiffies(req, enc, bs,
							secs);
				cond_resched();
			} else {
				ret = test_aead_cycles(req, enc, bs);
			}

			if (ret) {
				pr_err("%s() failed return code=%d\n", e, ret);
				break;
			}
			b_size++;
			i++;
		} while (*b_size);
		keysize++;
	} while (*keysize);

out:
	aead_request_free(req);
out_noreq:
	crypto_free_aead(tfm);
out_notfm:
	kfree(sg);
out_nosg:
	testmgr_free_buf(xoutbuf);
out_nooutbuf:
	testmgr_free_buf(axbuf);
out_noaxbuf:
	testmgr_free_buf(xbuf);
out_noxbuf:
	kfree(iv);
}

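/*
 * Point a TVMEMSIZE-entry scatterlist at the preallocated tvmem pages and
 * fill them with a constant pattern for the hash speed tests below.
 */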
static void test_hash_sg_init(struct scatterlist *sg)
{
	int i;

	sg_init_table(sg, TVMEMSIZE);
	for (i = 0; i < TVMEMSIZE; i++) {
		sg_set_buf(sg + i, tvmem[i], PAGE_SIZE);
		memset(tvmem[i], 0xff, PAGE_SIZE);
	}
}

static inline int do_one_ahash_op(struct ahash_request *req, int ret)
{
	struct crypto_wait *wait = req->base.data;

	return crypto_wait_req(ret, wait);
}

struct test_mb_ahash_data {
	struct scatterlist sg[XBUFSIZE];
	char result[64];
	struct ahash_request *req;
	struct crypto_wait wait;
	char *xbuf[XBUFSIZE];
};

static inline int do_mult_ahash_op(struct test_mb_ahash_data *data, u32 num_mb,
				   int *rc)
{
	int i, err = 0;

	/* Fire up a bunch of concurrent requests */
	for (i = 0; i < num_mb; i++)
		rc[i] = crypto_ahash_digest(data[i].req);

	/* Wait for all requests to finish */
	for (i = 0; i < num_mb; i++) {
		rc[i] = crypto_wait_req(rc[i], &data[i].wait);

		if (rc[i]) {
			pr_info("concurrent request %d error %d\n", i, rc[i]);
			err = rc[i];
		}
	}

	return err;
}

static int test_mb_ahash_jiffies(struct test_mb_ahash_data *data, int blen,
				 int secs, u32 num_mb)
{
	unsigned long start, end;
	int bcount;
	int ret = 0;
	int *rc;

	rc = kcalloc(num_mb, sizeof(*rc), GFP_KERNEL);
	if (!rc)
		return -ENOMEM;

	for (start = jiffies, end = start + secs * HZ, bcount = 0;
	     time_before(jiffies, end); bcount++) {
		ret = do_mult_ahash_op(data, num_mb, rc);
		if (ret)
			goto out;
	}

	pr_cont("%d operations in %d seconds (%llu bytes)\n",
		bcount * num_mb, secs, (u64)bcount * blen * num_mb);

out:
	kfree(rc);
	return ret;
}

static int test_mb_ahash_cycles(struct test_mb_ahash_data *data, int blen,
				u32 num_mb)
{
	unsigned long cycles = 0;
	int ret = 0;
	int i;
	int *rc;

	rc = kcalloc(num_mb, sizeof(*rc), GFP_KERNEL);
	if (!rc)
		return -ENOMEM;

	/* Warm-up run. */
	for (i = 0; i < 4; i++) {
		ret = do_mult_ahash_op(data, num_mb, rc);
		if (ret)
			goto out;
	}

	/* The real thing. */
	for (i = 0; i < 8; i++) {
		cycles_t start, end;

		start = get_cycles();
		ret = do_mult_ahash_op(data, num_mb, rc);
		end = get_cycles();

		if (ret)
			goto out;

		cycles += end - start;
	}

	pr_cont("1 operation in %lu cycles (%d bytes)\n",
		(cycles + 4) / (8 * num_mb), blen);

out:
	kfree(rc);
	return ret;
}

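/*
 * Multibuffer hash speed test: num_mb digest requests are issued in
 * parallel for each template block size (only entries where blen == plen,
 * i.e. one-shot digests, are exercised).
 */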
static void test_mb_ahash_speed(const char *algo, unsigned int secs,
				struct hash_speed *speed, u32 num_mb)
{
	struct test_mb_ahash_data *data;
	struct crypto_ahash *tfm;
	unsigned int i, j, k;
	int ret;

	data = kcalloc(num_mb, sizeof(*data), GFP_KERNEL);
	if (!data)
		return;

	tfm = crypto_alloc_ahash(algo, 0, 0);
	if (IS_ERR(tfm)) {
		pr_err("failed to load transform for %s: %ld\n",
		       algo, PTR_ERR(tfm));
		goto free_data;
	}

	for (i = 0; i < num_mb; ++i) {
		if (testmgr_alloc_buf(data[i].xbuf))
			goto out;

		crypto_init_wait(&data[i].wait);

		data[i].req = ahash_request_alloc(tfm, GFP_KERNEL);
		if (!data[i].req) {
			pr_err("alg: hash: Failed to allocate request for %s\n",
			       algo);
			goto out;
		}

		ahash_request_set_callback(data[i].req, 0, crypto_req_done,
					   &data[i].wait);

		sg_init_table(data[i].sg, XBUFSIZE);
		for (j = 0; j < XBUFSIZE; j++) {
			sg_set_buf(data[i].sg + j, data[i].xbuf[j], PAGE_SIZE);
			memset(data[i].xbuf[j], 0xff, PAGE_SIZE);
		}
	}

	pr_info("\ntesting speed of multibuffer %s (%s)\n", algo,
		get_driver_name(crypto_ahash, tfm));

	for (i = 0; speed[i].blen != 0; i++) {
		/* For some reason this only tests digests. */
		if (speed[i].blen != speed[i].plen)
			continue;

		if (speed[i].blen > XBUFSIZE * PAGE_SIZE) {
			pr_err("template (%u) too big for tvmem (%lu)\n",
			       speed[i].blen, XBUFSIZE * PAGE_SIZE);
			goto out;
		}

		if (klen)
			crypto_ahash_setkey(tfm, tvmem[0], klen);

		for (k = 0; k < num_mb; k++)
			ahash_request_set_crypt(data[k].req, data[k].sg,
						data[k].result, speed[i].blen);

		pr_info("test%3u "
			"(%5u byte blocks,%5u bytes per update,%4u updates): ",
			i, speed[i].blen, speed[i].plen,
			speed[i].blen / speed[i].plen);

		if (secs) {
			ret = test_mb_ahash_jiffies(data, speed[i].blen, secs,
						    num_mb);
			cond_resched();
		} else {
			ret = test_mb_ahash_cycles(data, speed[i].blen, num_mb);
		}

		if (ret) {
			pr_err("At least one hashing failed ret=%d\n", ret);
			break;
		}
	}

out:
	for (k = 0; k < num_mb; ++k)
		ahash_request_free(data[k].req);

	for (k = 0; k < num_mb; ++k)
		testmgr_free_buf(data[k].xbuf);

	crypto_free_ahash(tfm);

free_data:
	kfree(data);
}

static int test_ahash_jiffies_digest(struct ahash_request *req, int blen,
				     char *out, int secs)
{
	unsigned long start, end;
	int bcount;
	int ret;

	for (start = jiffies, end = start + secs * HZ, bcount = 0;
	     time_before(jiffies, end); bcount++) {
		ret = do_one_ahash_op(req, crypto_ahash_digest(req));
		if (ret)
			return ret;
	}

	printk("%6u opers/sec, %9lu bytes/sec\n",
	       bcount / secs, ((long)bcount * blen) / secs);

	return 0;
}

static int test_ahash_jiffies(struct ahash_request *req, int blen,
			      int plen, char *out, int secs)
{
	unsigned long start, end;
	int bcount, pcount;
	int ret;

	if (plen == blen)
		return test_ahash_jiffies_digest(req, blen, out, secs);

	for (start = jiffies, end = start + secs * HZ, bcount = 0;
	     time_before(jiffies, end); bcount++) {
		ret = do_one_ahash_op(req, crypto_ahash_init(req));
		if (ret)
			return ret;
		for (pcount = 0; pcount < blen; pcount += plen) {
			ret = do_one_ahash_op(req, crypto_ahash_update(req));
			if (ret)
				return ret;
		}
		/* we assume there is enough space in 'out' for the result */
		ret = do_one_ahash_op(req, crypto_ahash_final(req));
		if (ret)
			return ret;
	}

	pr_cont("%6u opers/sec, %9lu bytes/sec\n",
		bcount / secs, ((long)bcount * blen) / secs);

	return 0;
}

static int test_ahash_cycles_digest(struct ahash_request *req, int blen,
				    char *out)
{
	unsigned long cycles = 0;
	int ret, i;

	/* Warm-up run. */
	for (i = 0; i < 4; i++) {
		ret = do_one_ahash_op(req, crypto_ahash_digest(req));
		if (ret)
			goto out;
	}

	/* The real thing. */
	for (i = 0; i < 8; i++) {
		cycles_t start, end;

		start = get_cycles();

		ret = do_one_ahash_op(req, crypto_ahash_digest(req));
		if (ret)
			goto out;

		end = get_cycles();

		cycles += end - start;
	}

out:
	if (ret)
		return ret;

	pr_cont("%6lu cycles/operation, %4lu cycles/byte\n",
		cycles / 8, cycles / (8 * blen));

	return 0;
}

static int test_ahash_cycles(struct ahash_request *req, int blen,
			     int plen, char *out)
{
	unsigned long cycles = 0;
	int i, pcount, ret;

	if (plen == blen)
		return test_ahash_cycles_digest(req, blen, out);

	/* Warm-up run. */
	for (i = 0; i < 4; i++) {
		ret = do_one_ahash_op(req, crypto_ahash_init(req));
		if (ret)
			goto out;
		for (pcount = 0; pcount < blen; pcount += plen) {
			ret = do_one_ahash_op(req, crypto_ahash_update(req));
			if (ret)
				goto out;
		}
		ret = do_one_ahash_op(req, crypto_ahash_final(req));
		if (ret)
			goto out;
	}

	/* The real thing. */
	for (i = 0; i < 8; i++) {
		cycles_t start, end;

		start = get_cycles();

		ret = do_one_ahash_op(req, crypto_ahash_init(req));
		if (ret)
			goto out;
		for (pcount = 0; pcount < blen; pcount += plen) {
			ret = do_one_ahash_op(req, crypto_ahash_update(req));
			if (ret)
				goto out;
		}
		ret = do_one_ahash_op(req, crypto_ahash_final(req));
		if (ret)
			goto out;

		end = get_cycles();

		cycles += end - start;
	}

out:
	if (ret)
		return ret;

	pr_cont("%6lu cycles/operation, %4lu cycles/byte\n",
		cycles / 8, cycles / (8 * blen));

	return 0;
}

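/*
 * Core of the single-request hash speed tests: drive one ahash transform
 * over the tvmem scatterlist for every template entry, timing either
 * one-shot digests or init/update/final sequences.  The mask argument is
 * passed through to crypto_alloc_ahash(); test_hash_speed() uses it to
 * restrict the allocation to synchronous implementations.
 */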
static void test_ahash_speed_common(const char *algo, unsigned int secs,
				    struct hash_speed *speed, unsigned mask)
{
	struct scatterlist sg[TVMEMSIZE];
	struct crypto_wait wait;
	struct ahash_request *req;
	struct crypto_ahash *tfm;
	char *output;
	int i, ret;

	tfm = crypto_alloc_ahash(algo, 0, mask);
	if (IS_ERR(tfm)) {
		pr_err("failed to load transform for %s: %ld\n",
		       algo, PTR_ERR(tfm));
		return;
	}

	printk(KERN_INFO "\ntesting speed of async %s (%s)\n", algo,
	       get_driver_name(crypto_ahash, tfm));

	if (crypto_ahash_digestsize(tfm) > MAX_DIGEST_SIZE) {
		pr_err("digestsize(%u) > %d\n", crypto_ahash_digestsize(tfm),
		       MAX_DIGEST_SIZE);
		goto out;
	}

	test_hash_sg_init(sg);
	req = ahash_request_alloc(tfm, GFP_KERNEL);
	if (!req) {
		pr_err("ahash request allocation failure\n");
		goto out;
	}

	crypto_init_wait(&wait);
	ahash_request_set_callback(req, CRYPTO_TFM_REQ_MAY_BACKLOG,
				   crypto_req_done, &wait);

	output = kmalloc(MAX_DIGEST_SIZE, GFP_KERNEL);
	if (!output)
		goto out_nomem;

	for (i = 0; speed[i].blen != 0; i++) {
		if (speed[i].blen > TVMEMSIZE * PAGE_SIZE) {
			pr_err("template (%u) too big for tvmem (%lu)\n",
			       speed[i].blen, TVMEMSIZE * PAGE_SIZE);
			break;
		}

		if (klen)
			crypto_ahash_setkey(tfm, tvmem[0], klen);

		pr_info("test%3u "
			"(%5u byte blocks,%5u bytes per update,%4u updates): ",
			i, speed[i].blen, speed[i].plen, speed[i].blen / speed[i].plen);

		ahash_request_set_crypt(req, sg, output, speed[i].plen);

		if (secs) {
			ret = test_ahash_jiffies(req, speed[i].blen,
						 speed[i].plen, output, secs);
			cond_resched();
		} else {
			ret = test_ahash_cycles(req, speed[i].blen,
						speed[i].plen, output);
		}

		if (ret) {
			pr_err("hashing failed ret=%d\n", ret);
			break;
		}
	}

	kfree(output);

out_nomem:
	ahash_request_free(req);

out:
	crypto_free_ahash(tfm);
}

static void test_ahash_speed(const char *algo, unsigned int secs,
			     struct hash_speed *speed)
{
	return test_ahash_speed_common(algo, secs, speed, 0);
}

static void test_hash_speed(const char *algo, unsigned int secs,
			    struct hash_speed *speed)
{
	return test_ahash_speed_common(algo, secs, speed, CRYPTO_ALG_ASYNC);
}

struct test_mb_skcipher_data {
	struct scatterlist sg[XBUFSIZE];
	struct skcipher_request *req;
	struct crypto_wait wait;
	char *xbuf[XBUFSIZE];
};

static int do_mult_acipher_op(struct test_mb_skcipher_data *data, int enc,
			      u32 num_mb, int *rc)
{
	int i, err = 0;

	/* Fire up a bunch of concurrent requests */
	for (i = 0; i < num_mb; i++) {
		if (enc == ENCRYPT)
			rc[i] = crypto_skcipher_encrypt(data[i].req);
		else
			rc[i] = crypto_skcipher_decrypt(data[i].req);
	}

	/* Wait for all requests to finish */
	for (i = 0; i < num_mb; i++) {
		rc[i] = crypto_wait_req(rc[i], &data[i].wait);

		if (rc[i]) {
			pr_info("concurrent request %d error %d\n", i, rc[i]);
			err = rc[i];
		}
	}

	return err;
}

static int test_mb_acipher_jiffies(struct test_mb_skcipher_data *data, int enc,
				   int blen, int secs, u32 num_mb)
{
	unsigned long start, end;
	int bcount;
	int ret = 0;
	int *rc;

	rc = kcalloc(num_mb, sizeof(*rc), GFP_KERNEL);
	if (!rc)
		return -ENOMEM;

	for (start = jiffies, end = start + secs * HZ, bcount = 0;
	     time_before(jiffies, end); bcount++) {
		ret = do_mult_acipher_op(data, enc, num_mb, rc);
		if (ret)
			goto out;
	}

	pr_cont("%d operations in %d seconds (%llu bytes)\n",
		bcount * num_mb, secs, (u64)bcount * blen * num_mb);

out:
	kfree(rc);
	return ret;
}

static int test_mb_acipher_cycles(struct test_mb_skcipher_data *data, int enc,
				  int blen, u32 num_mb)
{
	unsigned long cycles = 0;
	int ret = 0;
	int i;
	int *rc;

	rc = kcalloc(num_mb, sizeof(*rc), GFP_KERNEL);
	if (!rc)
		return -ENOMEM;

	/* Warm-up run. */
	for (i = 0; i < 4; i++) {
		ret = do_mult_acipher_op(data, enc, num_mb, rc);
		if (ret)
			goto out;
	}

	/* The real thing. */
	for (i = 0; i < 8; i++) {
		cycles_t start, end;

		start = get_cycles();
		ret = do_mult_acipher_op(data, enc, num_mb, rc);
		end = get_cycles();

		if (ret)
			goto out;

		cycles += end - start;
	}

	pr_cont("1 operation in %lu cycles (%d bytes)\n",
		(cycles + 4) / (8 * num_mb), blen);

out:
	kfree(rc);
	return ret;
}

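/*
 * Multibuffer skcipher speed test: num_mb requests are prepared over
 * per-request scatterlists and run concurrently for every
 * (key size, block size) combination.
 */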
static void test_mb_skcipher_speed(const char *algo, int enc, int secs,
				   struct cipher_speed_template *template,
				   unsigned int tcount, u8 *keysize, u32 num_mb)
{
	struct test_mb_skcipher_data *data;
	struct crypto_skcipher *tfm;
	unsigned int i, j, iv_len;
	const int *b_size;
	const char *key;
	const char *e;
	char iv[128];
	int ret;

	if (enc == ENCRYPT)
		e = "encryption";
	else
		e = "decryption";

	data = kcalloc(num_mb, sizeof(*data), GFP_KERNEL);
	if (!data)
		return;

	tfm = crypto_alloc_skcipher(algo, 0, 0);
	if (IS_ERR(tfm)) {
		pr_err("failed to load transform for %s: %ld\n",
		       algo, PTR_ERR(tfm));
		goto out_free_data;
	}

	for (i = 0; i < num_mb; ++i)
		if (testmgr_alloc_buf(data[i].xbuf)) {
			while (i--)
				testmgr_free_buf(data[i].xbuf);
			goto out_free_tfm;
		}

	for (i = 0; i < num_mb; ++i) {
		data[i].req = skcipher_request_alloc(tfm, GFP_KERNEL);
		if (!data[i].req) {
			pr_err("alg: skcipher: Failed to allocate request for %s\n",
			       algo);
			while (i--)
				skcipher_request_free(data[i].req);
			goto out_free_xbuf;
		}
	}

	for (i = 0; i < num_mb; ++i) {
		skcipher_request_set_callback(data[i].req,
					      CRYPTO_TFM_REQ_MAY_BACKLOG,
					      crypto_req_done, &data[i].wait);
		crypto_init_wait(&data[i].wait);
	}

	pr_info("\ntesting speed of multibuffer %s (%s) %s\n", algo,
		get_driver_name(crypto_skcipher, tfm), e);

	i = 0;
	do {
		b_size = block_sizes;
		do {
			u32 bs = round_up(*b_size, crypto_skcipher_blocksize(tfm));

			if (bs > XBUFSIZE * PAGE_SIZE) {
				pr_err("template (%u) too big for buffer (%lu)\n",
				       bs, XBUFSIZE * PAGE_SIZE);
				goto out;
			}

			pr_info("test %u (%d bit key, %d byte blocks): ", i,
				*keysize * 8, bs);

			/* Set up tfm global state, i.e. the key */

			memset(tvmem[0], 0xff, PAGE_SIZE);
			key = tvmem[0];
			for (j = 0; j < tcount; j++) {
				if (template[j].klen == *keysize) {
					key = template[j].key;
					break;
				}
			}

			crypto_skcipher_clear_flags(tfm, ~0);

			ret = crypto_skcipher_setkey(tfm, key, *keysize);
			if (ret) {
				pr_err("setkey() failed flags=%x\n",
				       crypto_skcipher_get_flags(tfm));
				goto out;
			}

			iv_len = crypto_skcipher_ivsize(tfm);
			if (iv_len)
				memset(&iv, 0xff, iv_len);

			/* Now setup per request stuff, i.e. buffers */

			for (j = 0; j < num_mb; ++j) {
				struct test_mb_skcipher_data *cur = &data[j];
				unsigned int k = bs;
				unsigned int pages = DIV_ROUND_UP(k, PAGE_SIZE);
				unsigned int p = 0;

				sg_init_table(cur->sg, pages);

				while (k > PAGE_SIZE) {
					sg_set_buf(cur->sg + p, cur->xbuf[p],
						   PAGE_SIZE);
					memset(cur->xbuf[p], 0xff, PAGE_SIZE);
					p++;
					k -= PAGE_SIZE;
				}

				sg_set_buf(cur->sg + p, cur->xbuf[p], k);
				memset(cur->xbuf[p], 0xff, k);

				skcipher_request_set_crypt(cur->req, cur->sg,
							   cur->sg, bs, iv);
			}

			if (secs) {
				ret = test_mb_acipher_jiffies(data, enc,
							      bs, secs,
							      num_mb);
				cond_resched();
			} else {
				ret = test_mb_acipher_cycles(data, enc,
							     bs, num_mb);
			}

			if (ret) {
				pr_err("%s() failed flags=%x\n", e,
				       crypto_skcipher_get_flags(tfm));
				break;
			}
			b_size++;
			i++;
		} while (*b_size);
		keysize++;
	} while (*keysize);

out:
	for (i = 0; i < num_mb; ++i)
		skcipher_request_free(data[i].req);
out_free_xbuf:
	for (i = 0; i < num_mb; ++i)
		testmgr_free_buf(data[i].xbuf);
out_free_tfm:
	crypto_free_skcipher(tfm);
out_free_data:
	kfree(data);
}

static inline int do_one_acipher_op(struct skcipher_request *req, int ret)
{
	struct crypto_wait *wait = req->base.data;

	return crypto_wait_req(ret, wait);
}

static int test_acipher_jiffies(struct skcipher_request *req, int enc,
				int blen, int secs)
{
	unsigned long start, end;
	int bcount;
	int ret;

	for (start = jiffies, end = start + secs * HZ, bcount = 0;
	     time_before(jiffies, end); bcount++) {
		if (enc)
			ret = do_one_acipher_op(req,
						crypto_skcipher_encrypt(req));
		else
			ret = do_one_acipher_op(req,
						crypto_skcipher_decrypt(req));

		if (ret)
			return ret;
	}

	pr_cont("%d operations in %d seconds (%llu bytes)\n",
		bcount, secs, (u64)bcount * blen);
	return 0;
}

static int test_acipher_cycles(struct skcipher_request *req, int enc,
			       int blen)
{
	unsigned long cycles = 0;
	int ret = 0;
	int i;

	/* Warm-up run. */
	for (i = 0; i < 4; i++) {
		if (enc)
			ret = do_one_acipher_op(req,
						crypto_skcipher_encrypt(req));
		else
			ret = do_one_acipher_op(req,
						crypto_skcipher_decrypt(req));

		if (ret)
			goto out;
	}

	/* The real thing. */
	for (i = 0; i < 8; i++) {
		cycles_t start, end;

		start = get_cycles();
		if (enc)
			ret = do_one_acipher_op(req,
						crypto_skcipher_encrypt(req));
		else
			ret = do_one_acipher_op(req,
						crypto_skcipher_decrypt(req));
		end = get_cycles();

		if (ret)
			goto out;

		cycles += end - start;
	}

out:
	if (ret == 0)
		pr_cont("1 operation in %lu cycles (%d bytes)\n",
			(cycles + 4) / 8, blen);

	return ret;
}

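/*
 * Single-request skcipher speed test over the shared tvmem pages.  The
 * async parameter controls whether asynchronous implementations may be
 * selected: async == false masks them out via CRYPTO_ALG_ASYNC.
 */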
Herbert Xu7166e582016-06-29 18:03:50 +08001504static void test_skcipher_speed(const char *algo, int enc, unsigned int secs,
1505 struct cipher_speed_template *template,
1506 unsigned int tcount, u8 *keysize, bool async)
Jussi Kivilinna3f3baf32011-10-18 00:02:58 +03001507{
Nicolas Royerde1975332012-07-01 19:19:47 +02001508 unsigned int ret, i, j, k, iv_len;
Gilad Ben-Yossef64671042017-10-18 08:00:48 +01001509 struct crypto_wait wait;
Jussi Kivilinna3f3baf32011-10-18 00:02:58 +03001510 const char *key;
1511 char iv[128];
Herbert Xu7166e582016-06-29 18:03:50 +08001512 struct skcipher_request *req;
1513 struct crypto_skcipher *tfm;
Ard Biesheuvelad6d66b2020-11-20 12:04:33 +01001514 const int *b_size;
Jussi Kivilinna3f3baf32011-10-18 00:02:58 +03001515 const char *e;
Jussi Kivilinna3f3baf32011-10-18 00:02:58 +03001516
1517 if (enc == ENCRYPT)
1518 e = "encryption";
1519 else
1520 e = "decryption";
1521
Gilad Ben-Yossef64671042017-10-18 08:00:48 +01001522 crypto_init_wait(&wait);
Jussi Kivilinna3f3baf32011-10-18 00:02:58 +03001523
Herbert Xu7166e582016-06-29 18:03:50 +08001524 tfm = crypto_alloc_skcipher(algo, 0, async ? 0 : CRYPTO_ALG_ASYNC);
Jussi Kivilinna3f3baf32011-10-18 00:02:58 +03001525
1526 if (IS_ERR(tfm)) {
1527 pr_err("failed to load transform for %s: %ld\n", algo,
1528 PTR_ERR(tfm));
1529 return;
1530 }
1531
Horia Geantă8e3b7fd2020-02-05 12:19:58 +02001532 pr_info("\ntesting speed of %s %s (%s) %s\n", async ? "async" : "sync",
1533 algo, get_driver_name(crypto_skcipher, tfm), e);
Luca Clementi263a8df2014-06-25 22:57:42 -07001534
Herbert Xu7166e582016-06-29 18:03:50 +08001535 req = skcipher_request_alloc(tfm, GFP_KERNEL);
Jussi Kivilinna3f3baf32011-10-18 00:02:58 +03001536 if (!req) {
1537 pr_err("tcrypt: skcipher: Failed to allocate request for %s\n",
1538 algo);
1539 goto out;
1540 }
1541
Herbert Xu7166e582016-06-29 18:03:50 +08001542 skcipher_request_set_callback(req, CRYPTO_TFM_REQ_MAY_BACKLOG,
Gilad Ben-Yossef64671042017-10-18 08:00:48 +01001543 crypto_req_done, &wait);
Jussi Kivilinna3f3baf32011-10-18 00:02:58 +03001544
1545 i = 0;
1546 do {
1547 b_size = block_sizes;
1548
1549 do {
Ard Biesheuvelad6d66b2020-11-20 12:04:33 +01001550 u32 bs = round_up(*b_size, crypto_skcipher_blocksize(tfm));
Jussi Kivilinna3f3baf32011-10-18 00:02:58 +03001551 struct scatterlist sg[TVMEMSIZE];
1552
Ard Biesheuvelad6d66b2020-11-20 12:04:33 +01001553 if ((*keysize + bs) > TVMEMSIZE * PAGE_SIZE) {
Jussi Kivilinna3f3baf32011-10-18 00:02:58 +03001554 pr_err("template (%u) too big for "
Ard Biesheuvelad6d66b2020-11-20 12:04:33 +01001555 "tvmem (%lu)\n", *keysize + bs,
Jussi Kivilinna3f3baf32011-10-18 00:02:58 +03001556 TVMEMSIZE * PAGE_SIZE);
1557 goto out_free_req;
1558 }
1559
1560 pr_info("test %u (%d bit key, %d byte blocks): ", i,
Ard Biesheuvelad6d66b2020-11-20 12:04:33 +01001561 *keysize * 8, bs);
Jussi Kivilinna3f3baf32011-10-18 00:02:58 +03001562
1563 memset(tvmem[0], 0xff, PAGE_SIZE);
1564
1565 /* set key, plaintext and IV */
1566 key = tvmem[0];
1567 for (j = 0; j < tcount; j++) {
1568 if (template[j].klen == *keysize) {
1569 key = template[j].key;
1570 break;
1571 }
1572 }
1573
Herbert Xu7166e582016-06-29 18:03:50 +08001574 crypto_skcipher_clear_flags(tfm, ~0);
Jussi Kivilinna3f3baf32011-10-18 00:02:58 +03001575
Herbert Xu7166e582016-06-29 18:03:50 +08001576 ret = crypto_skcipher_setkey(tfm, key, *keysize);
Jussi Kivilinna3f3baf32011-10-18 00:02:58 +03001577 if (ret) {
1578 pr_err("setkey() failed flags=%x\n",
Herbert Xu7166e582016-06-29 18:03:50 +08001579 crypto_skcipher_get_flags(tfm));
Jussi Kivilinna3f3baf32011-10-18 00:02:58 +03001580 goto out_free_req;
1581 }
1582
Ard Biesheuvelad6d66b2020-11-20 12:04:33 +01001583 k = *keysize + bs;
Horia Geantă007ee8d2015-03-09 16:14:58 +02001584 sg_init_table(sg, DIV_ROUND_UP(k, PAGE_SIZE));
1585
Nicolas Royerde1975332012-07-01 19:19:47 +02001586 if (k > PAGE_SIZE) {
1587 sg_set_buf(sg, tvmem[0] + *keysize,
Jussi Kivilinna3f3baf32011-10-18 00:02:58 +03001588 PAGE_SIZE - *keysize);
Nicolas Royerde1975332012-07-01 19:19:47 +02001589 k -= PAGE_SIZE;
1590 j = 1;
1591 while (k > PAGE_SIZE) {
1592 sg_set_buf(sg + j, tvmem[j], PAGE_SIZE);
1593 memset(tvmem[j], 0xff, PAGE_SIZE);
1594 j++;
1595 k -= PAGE_SIZE;
1596 }
1597 sg_set_buf(sg + j, tvmem[j], k);
1598 memset(tvmem[j], 0xff, k);
1599 } else {
Ard Biesheuvelad6d66b2020-11-20 12:04:33 +01001600 sg_set_buf(sg, tvmem[0] + *keysize, bs);
Jussi Kivilinna3f3baf32011-10-18 00:02:58 +03001601 }
1602
Herbert Xu7166e582016-06-29 18:03:50 +08001603 iv_len = crypto_skcipher_ivsize(tfm);
Jussi Kivilinna3f3baf32011-10-18 00:02:58 +03001604 if (iv_len)
1605 memset(&iv, 0xff, iv_len);
1606
Ard Biesheuvelad6d66b2020-11-20 12:04:33 +01001607 skcipher_request_set_crypt(req, sg, sg, bs, iv);
Jussi Kivilinna3f3baf32011-10-18 00:02:58 +03001608
Horia Geantă2af63292018-07-23 17:18:48 +03001609 if (secs) {
Jussi Kivilinna3f3baf32011-10-18 00:02:58 +03001610 ret = test_acipher_jiffies(req, enc,
Ard Biesheuvelad6d66b2020-11-20 12:04:33 +01001611 bs, secs);
Horia Geantă2af63292018-07-23 17:18:48 +03001612 cond_resched();
1613 } else {
Jussi Kivilinna3f3baf32011-10-18 00:02:58 +03001614 ret = test_acipher_cycles(req, enc,
Ard Biesheuvelad6d66b2020-11-20 12:04:33 +01001615 bs);
Horia Geantă2af63292018-07-23 17:18:48 +03001616 }
Jussi Kivilinna3f3baf32011-10-18 00:02:58 +03001617
1618 if (ret) {
1619 pr_err("%s() failed flags=%x\n", e,
Herbert Xu7166e582016-06-29 18:03:50 +08001620 crypto_skcipher_get_flags(tfm));
Jussi Kivilinna3f3baf32011-10-18 00:02:58 +03001621 break;
1622 }
1623 b_size++;
1624 i++;
1625 } while (*b_size);
1626 keysize++;
1627 } while (*keysize);
1628
1629out_free_req:
Herbert Xu7166e582016-06-29 18:03:50 +08001630 skcipher_request_free(req);
Jussi Kivilinna3f3baf32011-10-18 00:02:58 +03001631out:
Herbert Xu7166e582016-06-29 18:03:50 +08001632 crypto_free_skcipher(tfm);
1633}
1634
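/*
 * Thin wrappers: test_acipher_speed() allows asynchronous implementations to
 * be selected, while test_cipher_speed() masks CRYPTO_ALG_ASYNC so that only
 * synchronous skcipher implementations are measured.
 */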
1635static void test_acipher_speed(const char *algo, int enc, unsigned int secs,
1636 struct cipher_speed_template *template,
1637 unsigned int tcount, u8 *keysize)
1638{
1639 return test_skcipher_speed(algo, enc, secs, template, tcount, keysize,
1640 true);
1641}
1642
1643static void test_cipher_speed(const char *algo, int enc, unsigned int secs,
1644 struct cipher_speed_template *template,
1645 unsigned int tcount, u8 *keysize)
1646{
1647 return test_skcipher_speed(algo, enc, secs, template, tcount, keysize,
1648 false);
Jussi Kivilinna3f3baf32011-10-18 00:02:58 +03001649}
1650
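/*
 * test_available() walks the check[] name table (defined elsewhere in
 * tcrypt) and reports, for each entry, whether the running kernel can
 * instantiate the algorithm; no test vectors are run.
 */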
Herbert Xuef2736f2005-06-22 13:26:03 -07001651static void test_available(void)
Linus Torvalds1da177e2005-04-16 15:20:36 -07001652{
Corentin Labbe07d8f182019-11-08 15:42:13 +00001653 const char **name = check;
Herbert Xuef2736f2005-06-22 13:26:03 -07001654
Linus Torvalds1da177e2005-04-16 15:20:36 -07001655 while (*name) {
1656 printk("alg %s ", *name);
Herbert Xu6158efc2007-04-04 17:41:07 +10001657 printk(crypto_has_alg(*name, 0, 0) ?
Herbert Xue4d5b792006-08-26 18:12:40 +10001658 "found\n" : "not found\n");
Linus Torvalds1da177e2005-04-16 15:20:36 -07001659 name++;
Herbert Xuef2736f2005-06-22 13:26:03 -07001660 }
Linus Torvalds1da177e2005-04-16 15:20:36 -07001661}
1662
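/*
 * tcrypt_test() runs the self-test vectors for a single algorithm through
 * alg_test() (the testmgr entry point).  In FIPS mode, algorithms that are
 * not FIPS-approved fail with -EINVAL, which is deliberately not counted as
 * an error here.
 */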
Herbert Xu01b32322008-07-31 15:41:55 +08001663static inline int tcrypt_test(const char *alg)
1664{
Jarod Wilson4e033a62009-05-27 15:10:21 +10001665 int ret;
1666
Rabin Vincent76512f22017-01-18 14:54:05 +01001667 pr_debug("testing %s\n", alg);
1668
Jarod Wilson4e033a62009-05-27 15:10:21 +10001669 ret = alg_test(alg, alg, 0, 0);
1670 /* non-fips algs return -EINVAL in fips mode */
1671 if (fips_enabled && ret == -EINVAL)
1672 ret = 0;
1673 return ret;
Herbert Xu01b32322008-07-31 15:41:55 +08001674}
1675
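/*
 * do_test() dispatches on the 'mode' module parameter: 0 (the default) runs
 * every self-test case from 1 to 199 in turn, 1-199 select individual
 * correctness tests, and the 200+ cases are speed measurements.  When an
 * explicit alg= is given together with mode 0, only the availability of that
 * algorithm is checked.
 *
 * A typical invocation looks something like the lines below (assuming tcrypt
 * is built as a module; results go to the kernel log, and the load is
 * expected to return an error on purpose so the module never stays loaded):
 *
 *	modprobe tcrypt mode=10		# AES self-tests
 *	modprobe tcrypt mode=200 sec=1	# AES speed tests, 1 second per point
 */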
Kees Cook4e234ee2018-04-26 19:57:28 -07001676static int do_test(const char *alg, u32 type, u32 mask, int m, u32 num_mb)
Herbert Xu01b32322008-07-31 15:41:55 +08001677{
1678 int i;
Jarod Wilson4e033a62009-05-27 15:10:21 +10001679 int ret = 0;
Herbert Xu01b32322008-07-31 15:41:55 +08001680
1681 switch (m) {
Linus Torvalds1da177e2005-04-16 15:20:36 -07001682 case 0:
Herbert Xu86068132014-12-04 16:43:29 +08001683 if (alg) {
1684 if (!crypto_has_alg(alg, type,
1685 mask ?: CRYPTO_ALG_TYPE_MASK))
1686 ret = -ENOENT;
1687 break;
1688 }
1689
Herbert Xu01b32322008-07-31 15:41:55 +08001690 for (i = 1; i < 200; i++)
Kees Cook4e234ee2018-04-26 19:57:28 -07001691 ret += do_test(NULL, 0, 0, i, num_mb);
Linus Torvalds1da177e2005-04-16 15:20:36 -07001692 break;
1693
1694 case 1:
Jarod Wilson4e033a62009-05-27 15:10:21 +10001695 ret += tcrypt_test("md5");
Linus Torvalds1da177e2005-04-16 15:20:36 -07001696 break;
1697
1698 case 2:
Jarod Wilson4e033a62009-05-27 15:10:21 +10001699 ret += tcrypt_test("sha1");
Linus Torvalds1da177e2005-04-16 15:20:36 -07001700 break;
1701
1702 case 3:
Jarod Wilson4e033a62009-05-27 15:10:21 +10001703 ret += tcrypt_test("ecb(des)");
1704 ret += tcrypt_test("cbc(des)");
Jussi Kivilinna8163fc32012-10-20 14:53:07 +03001705 ret += tcrypt_test("ctr(des)");
Linus Torvalds1da177e2005-04-16 15:20:36 -07001706 break;
1707
1708 case 4:
Jarod Wilson4e033a62009-05-27 15:10:21 +10001709 ret += tcrypt_test("ecb(des3_ede)");
1710 ret += tcrypt_test("cbc(des3_ede)");
Jussi Kivilinnae080b172012-10-20 14:53:12 +03001711 ret += tcrypt_test("ctr(des3_ede)");
Linus Torvalds1da177e2005-04-16 15:20:36 -07001712 break;
1713
1714 case 5:
Jarod Wilson4e033a62009-05-27 15:10:21 +10001715 ret += tcrypt_test("md4");
Linus Torvalds1da177e2005-04-16 15:20:36 -07001716 break;
Herbert Xuef2736f2005-06-22 13:26:03 -07001717
Linus Torvalds1da177e2005-04-16 15:20:36 -07001718 case 6:
Jarod Wilson4e033a62009-05-27 15:10:21 +10001719 ret += tcrypt_test("sha256");
Linus Torvalds1da177e2005-04-16 15:20:36 -07001720 break;
Herbert Xuef2736f2005-06-22 13:26:03 -07001721
Linus Torvalds1da177e2005-04-16 15:20:36 -07001722 case 7:
Jarod Wilson4e033a62009-05-27 15:10:21 +10001723 ret += tcrypt_test("ecb(blowfish)");
1724 ret += tcrypt_test("cbc(blowfish)");
Jussi Kivilinna85b63e32011-10-10 23:03:03 +03001725 ret += tcrypt_test("ctr(blowfish)");
Linus Torvalds1da177e2005-04-16 15:20:36 -07001726 break;
1727
1728 case 8:
Jarod Wilson4e033a62009-05-27 15:10:21 +10001729 ret += tcrypt_test("ecb(twofish)");
1730 ret += tcrypt_test("cbc(twofish)");
Jussi Kivilinna573da622011-10-10 23:03:12 +03001731 ret += tcrypt_test("ctr(twofish)");
Jussi Kivilinnabee3a902011-10-18 13:32:56 +03001732 ret += tcrypt_test("lrw(twofish)");
Jussi Kivilinna131f7542011-10-18 13:33:38 +03001733 ret += tcrypt_test("xts(twofish)");
Linus Torvalds1da177e2005-04-16 15:20:36 -07001734 break;
Herbert Xuef2736f2005-06-22 13:26:03 -07001735
Linus Torvalds1da177e2005-04-16 15:20:36 -07001736 case 9:
Jarod Wilson4e033a62009-05-27 15:10:21 +10001737 ret += tcrypt_test("ecb(serpent)");
Jussi Kivilinna9d259172011-10-18 00:02:53 +03001738 ret += tcrypt_test("cbc(serpent)");
1739 ret += tcrypt_test("ctr(serpent)");
Jussi Kivilinna87aae4b2011-10-18 13:32:39 +03001740 ret += tcrypt_test("lrw(serpent)");
Jussi Kivilinna5209c072011-10-18 13:33:22 +03001741 ret += tcrypt_test("xts(serpent)");
Linus Torvalds1da177e2005-04-16 15:20:36 -07001742 break;
1743
1744 case 10:
Jarod Wilson4e033a62009-05-27 15:10:21 +10001745 ret += tcrypt_test("ecb(aes)");
1746 ret += tcrypt_test("cbc(aes)");
1747 ret += tcrypt_test("lrw(aes)");
1748 ret += tcrypt_test("xts(aes)");
1749 ret += tcrypt_test("ctr(aes)");
1750 ret += tcrypt_test("rfc3686(ctr(aes))");
Gilad Ben-Yossefdfb89ab2018-09-20 14:18:40 +01001751 ret += tcrypt_test("ofb(aes)");
Dmitry Eremin-Solenikov7da66672018-10-20 02:01:53 +03001752 ret += tcrypt_test("cfb(aes)");
Linus Torvalds1da177e2005-04-16 15:20:36 -07001753 break;
1754
1755 case 11:
Jarod Wilson4e033a62009-05-27 15:10:21 +10001756 ret += tcrypt_test("sha384");
Linus Torvalds1da177e2005-04-16 15:20:36 -07001757 break;
Herbert Xuef2736f2005-06-22 13:26:03 -07001758
Linus Torvalds1da177e2005-04-16 15:20:36 -07001759 case 12:
Jarod Wilson4e033a62009-05-27 15:10:21 +10001760 ret += tcrypt_test("sha512");
Linus Torvalds1da177e2005-04-16 15:20:36 -07001761 break;
1762
1763 case 13:
Jarod Wilson4e033a62009-05-27 15:10:21 +10001764 ret += tcrypt_test("deflate");
Linus Torvalds1da177e2005-04-16 15:20:36 -07001765 break;
1766
1767 case 14:
Jarod Wilson4e033a62009-05-27 15:10:21 +10001768 ret += tcrypt_test("ecb(cast5)");
Johannes Goetzfrieda2c58262012-07-11 19:37:21 +02001769 ret += tcrypt_test("cbc(cast5)");
1770 ret += tcrypt_test("ctr(cast5)");
Linus Torvalds1da177e2005-04-16 15:20:36 -07001771 break;
1772
1773 case 15:
Jarod Wilson4e033a62009-05-27 15:10:21 +10001774 ret += tcrypt_test("ecb(cast6)");
Johannes Goetzfried9b8b0402012-07-11 19:38:29 +02001775 ret += tcrypt_test("cbc(cast6)");
1776 ret += tcrypt_test("ctr(cast6)");
1777 ret += tcrypt_test("lrw(cast6)");
1778 ret += tcrypt_test("xts(cast6)");
Linus Torvalds1da177e2005-04-16 15:20:36 -07001779 break;
1780
1781 case 16:
Jarod Wilson4e033a62009-05-27 15:10:21 +10001782 ret += tcrypt_test("ecb(arc4)");
Linus Torvalds1da177e2005-04-16 15:20:36 -07001783 break;
1784
1785 case 17:
Jarod Wilson4e033a62009-05-27 15:10:21 +10001786 ret += tcrypt_test("michael_mic");
Linus Torvalds1da177e2005-04-16 15:20:36 -07001787 break;
1788
1789 case 18:
Jarod Wilson4e033a62009-05-27 15:10:21 +10001790 ret += tcrypt_test("crc32c");
Linus Torvalds1da177e2005-04-16 15:20:36 -07001791 break;
1792
1793 case 19:
Jarod Wilson4e033a62009-05-27 15:10:21 +10001794 ret += tcrypt_test("ecb(tea)");
Linus Torvalds1da177e2005-04-16 15:20:36 -07001795 break;
1796
1797 case 20:
Jarod Wilson4e033a62009-05-27 15:10:21 +10001798 ret += tcrypt_test("ecb(xtea)");
Linus Torvalds1da177e2005-04-16 15:20:36 -07001799 break;
1800
1801 case 21:
Jarod Wilson4e033a62009-05-27 15:10:21 +10001802 ret += tcrypt_test("ecb(khazad)");
Linus Torvalds1da177e2005-04-16 15:20:36 -07001803 break;
1804
1805 case 22:
Jarod Wilson4e033a62009-05-27 15:10:21 +10001806 ret += tcrypt_test("wp512");
Linus Torvalds1da177e2005-04-16 15:20:36 -07001807 break;
1808
1809 case 23:
Jarod Wilson4e033a62009-05-27 15:10:21 +10001810 ret += tcrypt_test("wp384");
Linus Torvalds1da177e2005-04-16 15:20:36 -07001811 break;
1812
1813 case 24:
Jarod Wilson4e033a62009-05-27 15:10:21 +10001814 ret += tcrypt_test("wp256");
Linus Torvalds1da177e2005-04-16 15:20:36 -07001815 break;
1816
Linus Torvalds1da177e2005-04-16 15:20:36 -07001817 case 26:
Jarod Wilson4e033a62009-05-27 15:10:21 +10001818 ret += tcrypt_test("ecb(anubis)");
1819 ret += tcrypt_test("cbc(anubis)");
Linus Torvalds1da177e2005-04-16 15:20:36 -07001820 break;
1821
Aaron Grothefb4f10e2005-09-01 17:42:46 -07001822 case 30:
Jarod Wilson4e033a62009-05-27 15:10:21 +10001823 ret += tcrypt_test("ecb(xeta)");
Aaron Grothefb4f10e2005-09-01 17:42:46 -07001824 break;
Linus Torvalds1da177e2005-04-16 15:20:36 -07001825
David Howells90831632006-12-16 12:13:14 +11001826 case 31:
Jarod Wilson4e033a62009-05-27 15:10:21 +10001827 ret += tcrypt_test("pcbc(fcrypt)");
David Howells90831632006-12-16 12:13:14 +11001828 break;
1829
Noriaki TAKAMIYA02ab5a72007-01-24 21:48:19 +11001830 case 32:
Jarod Wilson4e033a62009-05-27 15:10:21 +10001831 ret += tcrypt_test("ecb(camellia)");
1832 ret += tcrypt_test("cbc(camellia)");
Jussi Kivilinna54216bb2012-09-21 10:27:10 +03001833 ret += tcrypt_test("ctr(camellia)");
1834 ret += tcrypt_test("lrw(camellia)");
1835 ret += tcrypt_test("xts(camellia)");
Noriaki TAKAMIYA02ab5a72007-01-24 21:48:19 +11001836 break;
Jussi Kivilinna93b5e862013-04-08 10:48:44 +03001837
Jonathan Lynchcd12fb92007-11-10 20:08:25 +08001838 case 33:
Jarod Wilson4e033a62009-05-27 15:10:21 +10001839 ret += tcrypt_test("sha224");
Jonathan Lynchcd12fb92007-11-10 20:08:25 +08001840 break;
Noriaki TAKAMIYA02ab5a72007-01-24 21:48:19 +11001841
Herbert Xu8df213d2007-12-02 14:55:47 +11001842 case 35:
Jarod Wilson4e033a62009-05-27 15:10:21 +10001843 ret += tcrypt_test("gcm(aes)");
Herbert Xu8df213d2007-12-02 14:55:47 +11001844 break;
1845
Zoltan Sogor0b77abb2007-12-07 16:53:23 +08001846 case 36:
Jarod Wilson4e033a62009-05-27 15:10:21 +10001847 ret += tcrypt_test("lzo");
Zoltan Sogor0b77abb2007-12-07 16:53:23 +08001848 break;
1849
Joy Latten93cc74e2007-12-12 20:24:22 +08001850 case 37:
Jarod Wilson4e033a62009-05-27 15:10:21 +10001851 ret += tcrypt_test("ccm(aes)");
Joy Latten93cc74e2007-12-12 20:24:22 +08001852 break;
1853
Kevin Coffman76cb9522008-03-24 21:26:16 +08001854 case 38:
Jarod Wilson4e033a62009-05-27 15:10:21 +10001855 ret += tcrypt_test("cts(cbc(aes))");
Kevin Coffman76cb9522008-03-24 21:26:16 +08001856 break;
1857
Ard Biesheuvelaa22cd72021-05-20 12:40:00 +02001858 case 39:
1859 ret += tcrypt_test("xxhash64");
1860 break;
1861
Adrian-Ken Rueegseggerfd4adf12008-05-07 22:16:36 +08001862 case 40:
Jarod Wilson4e033a62009-05-27 15:10:21 +10001863 ret += tcrypt_test("rmd160");
Adrian-Ken Rueegseggerfd4adf12008-05-07 22:16:36 +08001864 break;
1865
Ard Biesheuvelaa22cd72021-05-20 12:40:00 +02001866 case 41:
1867 ret += tcrypt_test("blake2s-256");
1868 break;
1869
1870 case 42:
1871 ret += tcrypt_test("blake2b-512");
1872 break;
1873
Herbert Xu01b32322008-07-31 15:41:55 +08001874 case 43:
Jarod Wilson4e033a62009-05-27 15:10:21 +10001875 ret += tcrypt_test("ecb(seed)");
Adrian-Ken Rueegsegger2998db32008-05-09 21:29:35 +08001876 break;
1877
Jarod Wilson5d667322009-05-04 19:23:40 +08001878 case 45:
Jarod Wilson4e033a62009-05-27 15:10:21 +10001879 ret += tcrypt_test("rfc4309(ccm(aes))");
Jarod Wilson5d667322009-05-04 19:23:40 +08001880 break;
1881
Jussi Kivilinna54216bb2012-09-21 10:27:10 +03001882 case 46:
1883 ret += tcrypt_test("ghash");
1884 break;
1885
Herbert Xu684115212013-09-07 12:56:26 +10001886 case 47:
1887 ret += tcrypt_test("crct10dif");
1888 break;
1889
raveendra padasalagi79cc6ab2016-06-17 10:30:36 +05301890 case 48:
1891 ret += tcrypt_test("sha3-224");
1892 break;
1893
1894 case 49:
1895 ret += tcrypt_test("sha3-256");
1896 break;
1897
1898 case 50:
1899 ret += tcrypt_test("sha3-384");
1900 break;
1901
1902 case 51:
1903 ret += tcrypt_test("sha3-512");
1904 break;
1905
Gilad Ben-Yossefb7e27532017-08-21 13:51:29 +03001906 case 52:
1907 ret += tcrypt_test("sm3");
1908 break;
1909
Vitaly Chikunov25a0b9d2018-11-07 00:00:03 +03001910 case 53:
1911 ret += tcrypt_test("streebog256");
1912 break;
1913
1914 case 54:
1915 ret += tcrypt_test("streebog512");
1916 break;
1917
Tianjia Zhang357a7532021-08-13 15:55:08 +08001918 case 55:
1919 ret += tcrypt_test("gcm(sm4)");
1920 break;
1921
1922 case 56:
1923 ret += tcrypt_test("ccm(sm4)");
1924 break;
1925
Linus Torvalds1da177e2005-04-16 15:20:36 -07001926 case 100:
Jarod Wilson4e033a62009-05-27 15:10:21 +10001927 ret += tcrypt_test("hmac(md5)");
Linus Torvalds1da177e2005-04-16 15:20:36 -07001928 break;
Herbert Xuef2736f2005-06-22 13:26:03 -07001929
Linus Torvalds1da177e2005-04-16 15:20:36 -07001930 case 101:
Jarod Wilson4e033a62009-05-27 15:10:21 +10001931 ret += tcrypt_test("hmac(sha1)");
Linus Torvalds1da177e2005-04-16 15:20:36 -07001932 break;
Herbert Xuef2736f2005-06-22 13:26:03 -07001933
Linus Torvalds1da177e2005-04-16 15:20:36 -07001934 case 102:
Jarod Wilson4e033a62009-05-27 15:10:21 +10001935 ret += tcrypt_test("hmac(sha256)");
Linus Torvalds1da177e2005-04-16 15:20:36 -07001936 break;
1937
Andrew Donofrioa28091a2006-12-10 12:10:20 +11001938 case 103:
Jarod Wilson4e033a62009-05-27 15:10:21 +10001939 ret += tcrypt_test("hmac(sha384)");
Andrew Donofrioa28091a2006-12-10 12:10:20 +11001940 break;
1941
1942 case 104:
Jarod Wilson4e033a62009-05-27 15:10:21 +10001943 ret += tcrypt_test("hmac(sha512)");
Andrew Donofrioa28091a2006-12-10 12:10:20 +11001944 break;
Herbert Xu38ed9ab2008-01-01 15:59:28 +11001945
Jonathan Lynchcd12fb92007-11-10 20:08:25 +08001946 case 105:
Jarod Wilson4e033a62009-05-27 15:10:21 +10001947 ret += tcrypt_test("hmac(sha224)");
Jonathan Lynchcd12fb92007-11-10 20:08:25 +08001948 break;
Linus Torvalds1da177e2005-04-16 15:20:36 -07001949
Herbert Xu38ed9ab2008-01-01 15:59:28 +11001950 case 106:
Jarod Wilson4e033a62009-05-27 15:10:21 +10001951 ret += tcrypt_test("xcbc(aes)");
Herbert Xu38ed9ab2008-01-01 15:59:28 +11001952 break;
1953
Adrian-Ken Rueegseggerfd4adf12008-05-07 22:16:36 +08001954 case 108:
Jarod Wilson4e033a62009-05-27 15:10:21 +10001955 ret += tcrypt_test("hmac(rmd160)");
Adrian-Ken Rueegseggerfd4adf12008-05-07 22:16:36 +08001956 break;
1957
Shane Wangf1939f72009-09-02 20:05:22 +10001958 case 109:
Eric Biggers0917b872018-06-18 10:22:40 -07001959 ret += tcrypt_test("vmac64(aes)");
Shane Wangf1939f72009-09-02 20:05:22 +10001960 break;
Jussi Kivilinna93b5e862013-04-08 10:48:44 +03001961
raveendra padasalagi98eca722016-07-01 11:16:54 +05301962 case 111:
1963 ret += tcrypt_test("hmac(sha3-224)");
1964 break;
1965
1966 case 112:
1967 ret += tcrypt_test("hmac(sha3-256)");
1968 break;
1969
1970 case 113:
1971 ret += tcrypt_test("hmac(sha3-384)");
1972 break;
1973
1974 case 114:
1975 ret += tcrypt_test("hmac(sha3-512)");
1976 break;
1977
Vitaly Chikunov25a0b9d2018-11-07 00:00:03 +03001978 case 115:
1979 ret += tcrypt_test("hmac(streebog256)");
1980 break;
1981
1982 case 116:
1983 ret += tcrypt_test("hmac(streebog512)");
1984 break;
1985
Jarod Wilsone08ca2d2009-05-04 19:46:29 +08001986 case 150:
Jarod Wilson4e033a62009-05-27 15:10:21 +10001987 ret += tcrypt_test("ansi_cprng");
Jarod Wilsone08ca2d2009-05-04 19:46:29 +08001988 break;
1989
Adrian Hoban69435b92010-11-04 15:02:04 -04001990 case 151:
1991 ret += tcrypt_test("rfc4106(gcm(aes))");
1992 break;
1993
Jussi Kivilinnae9b74412013-04-07 16:43:51 +03001994 case 152:
1995 ret += tcrypt_test("rfc4543(gcm(aes))");
1996 break;
1997
Jussi Kivilinna93b5e862013-04-08 10:48:44 +03001998 case 153:
1999 ret += tcrypt_test("cmac(aes)");
2000 break;
2001
2002 case 154:
2003 ret += tcrypt_test("cmac(des3_ede)");
2004 break;
2005
Horia Geantabbf9c892013-11-28 15:11:16 +02002006 case 155:
2007 ret += tcrypt_test("authenc(hmac(sha1),cbc(aes))");
2008 break;
2009
Horia Geantabca4feb2014-03-14 17:46:51 +02002010 case 156:
2011 ret += tcrypt_test("authenc(hmac(md5),ecb(cipher_null))");
2012 break;
2013
2014 case 157:
2015 ret += tcrypt_test("authenc(hmac(sha1),ecb(cipher_null))");
2016 break;
Tianjia Zhang357a7532021-08-13 15:55:08 +08002017
2018 case 158:
2019 ret += tcrypt_test("cbcmac(sm4)");
2020 break;
2021
2022 case 159:
2023 ret += tcrypt_test("cmac(sm4)");
2024 break;
2025
Nitesh Lal5208ed22014-05-21 17:09:08 +05302026 case 181:
2027 ret += tcrypt_test("authenc(hmac(sha1),cbc(des))");
2028 break;
2029 case 182:
2030 ret += tcrypt_test("authenc(hmac(sha1),cbc(des3_ede))");
2031 break;
2032 case 183:
2033 ret += tcrypt_test("authenc(hmac(sha224),cbc(des))");
2034 break;
2035 case 184:
2036 ret += tcrypt_test("authenc(hmac(sha224),cbc(des3_ede))");
2037 break;
2038 case 185:
2039 ret += tcrypt_test("authenc(hmac(sha256),cbc(des))");
2040 break;
2041 case 186:
2042 ret += tcrypt_test("authenc(hmac(sha256),cbc(des3_ede))");
2043 break;
2044 case 187:
2045 ret += tcrypt_test("authenc(hmac(sha384),cbc(des))");
2046 break;
2047 case 188:
2048 ret += tcrypt_test("authenc(hmac(sha384),cbc(des3_ede))");
2049 break;
2050 case 189:
2051 ret += tcrypt_test("authenc(hmac(sha512),cbc(des))");
2052 break;
2053 case 190:
2054 ret += tcrypt_test("authenc(hmac(sha512),cbc(des3_ede))");
2055 break;
Gilad Ben-Yossefcd83a8a2018-03-06 09:44:43 +00002056 case 191:
2057 ret += tcrypt_test("ecb(sm4)");
Gilad Ben-Yossef95ba5972018-09-20 14:18:38 +01002058 ret += tcrypt_test("cbc(sm4)");
Tianjia Zhanga7fc80b2021-07-20 11:46:42 +08002059 ret += tcrypt_test("cfb(sm4)");
Gilad Ben-Yossef95ba5972018-09-20 14:18:38 +01002060 ret += tcrypt_test("ctr(sm4)");
Gilad Ben-Yossefcd83a8a2018-03-06 09:44:43 +00002061 break;
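	/*
	 * Cases 200 and up are benchmarks rather than correctness tests.  The
	 * 200-series below drives synchronous skcipher implementations through
	 * test_cipher_speed(); with sec=0 (the default) each data point is
	 * reported as an average cycle count per operation, while sec=N times
	 * throughput over N seconds instead.
	 */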
Harald Welteebfd9bc2005-06-22 13:27:23 -07002062 case 200:
Herbert Xucba83562006-08-13 08:26:09 +10002063 test_cipher_speed("ecb(aes)", ENCRYPT, sec, NULL, 0,
Sebastian Siewior477035c2008-03-11 21:24:26 +08002064 speed_template_16_24_32);
Herbert Xucba83562006-08-13 08:26:09 +10002065 test_cipher_speed("ecb(aes)", DECRYPT, sec, NULL, 0,
Sebastian Siewior477035c2008-03-11 21:24:26 +08002066 speed_template_16_24_32);
Herbert Xucba83562006-08-13 08:26:09 +10002067 test_cipher_speed("cbc(aes)", ENCRYPT, sec, NULL, 0,
Sebastian Siewior477035c2008-03-11 21:24:26 +08002068 speed_template_16_24_32);
Herbert Xucba83562006-08-13 08:26:09 +10002069 test_cipher_speed("cbc(aes)", DECRYPT, sec, NULL, 0,
Sebastian Siewior477035c2008-03-11 21:24:26 +08002070 speed_template_16_24_32);
Rik Snelf3d10442006-11-29 19:01:41 +11002071 test_cipher_speed("lrw(aes)", ENCRYPT, sec, NULL, 0,
Sebastian Siewior477035c2008-03-11 21:24:26 +08002072 speed_template_32_40_48);
Rik Snelf3d10442006-11-29 19:01:41 +11002073 test_cipher_speed("lrw(aes)", DECRYPT, sec, NULL, 0,
Sebastian Siewior477035c2008-03-11 21:24:26 +08002074 speed_template_32_40_48);
Rik Snelf19f5112007-09-19 20:23:13 +08002075 test_cipher_speed("xts(aes)", ENCRYPT, sec, NULL, 0,
Horia Geantăb66ad0b2017-07-19 19:40:32 +03002076 speed_template_32_64);
Rik Snelf19f5112007-09-19 20:23:13 +08002077 test_cipher_speed("xts(aes)", DECRYPT, sec, NULL, 0,
Horia Geantăb66ad0b2017-07-19 19:40:32 +03002078 speed_template_32_64);
Herbert Xu1503a242016-06-29 18:04:14 +08002079 test_cipher_speed("cts(cbc(aes))", ENCRYPT, sec, NULL, 0,
2080 speed_template_16_24_32);
2081 test_cipher_speed("cts(cbc(aes))", DECRYPT, sec, NULL, 0,
2082 speed_template_16_24_32);
Jan Glauber9996e342011-04-26 16:34:01 +10002083 test_cipher_speed("ctr(aes)", ENCRYPT, sec, NULL, 0,
2084 speed_template_16_24_32);
2085 test_cipher_speed("ctr(aes)", DECRYPT, sec, NULL, 0,
2086 speed_template_16_24_32);
Dmitry Eremin-Solenikov7da66672018-10-20 02:01:53 +03002087 test_cipher_speed("cfb(aes)", ENCRYPT, sec, NULL, 0,
2088 speed_template_16_24_32);
2089 test_cipher_speed("cfb(aes)", DECRYPT, sec, NULL, 0,
2090 speed_template_16_24_32);
Harald Welteebfd9bc2005-06-22 13:27:23 -07002091 break;
2092
2093 case 201:
Herbert Xucba83562006-08-13 08:26:09 +10002094 test_cipher_speed("ecb(des3_ede)", ENCRYPT, sec,
Herbert Xuda7f0332008-07-31 17:08:25 +08002095 des3_speed_template, DES3_SPEED_VECTORS,
Sebastian Siewior477035c2008-03-11 21:24:26 +08002096 speed_template_24);
Herbert Xucba83562006-08-13 08:26:09 +10002097 test_cipher_speed("ecb(des3_ede)", DECRYPT, sec,
Herbert Xuda7f0332008-07-31 17:08:25 +08002098 des3_speed_template, DES3_SPEED_VECTORS,
Sebastian Siewior477035c2008-03-11 21:24:26 +08002099 speed_template_24);
Herbert Xucba83562006-08-13 08:26:09 +10002100 test_cipher_speed("cbc(des3_ede)", ENCRYPT, sec,
Herbert Xuda7f0332008-07-31 17:08:25 +08002101 des3_speed_template, DES3_SPEED_VECTORS,
Sebastian Siewior477035c2008-03-11 21:24:26 +08002102 speed_template_24);
Herbert Xucba83562006-08-13 08:26:09 +10002103 test_cipher_speed("cbc(des3_ede)", DECRYPT, sec,
Herbert Xuda7f0332008-07-31 17:08:25 +08002104 des3_speed_template, DES3_SPEED_VECTORS,
Sebastian Siewior477035c2008-03-11 21:24:26 +08002105 speed_template_24);
Jussi Kivilinna87131502014-06-09 20:59:49 +03002106 test_cipher_speed("ctr(des3_ede)", ENCRYPT, sec,
2107 des3_speed_template, DES3_SPEED_VECTORS,
2108 speed_template_24);
2109 test_cipher_speed("ctr(des3_ede)", DECRYPT, sec,
2110 des3_speed_template, DES3_SPEED_VECTORS,
2111 speed_template_24);
Harald Welteebfd9bc2005-06-22 13:27:23 -07002112 break;
2113
2114 case 202:
Herbert Xucba83562006-08-13 08:26:09 +10002115 test_cipher_speed("ecb(twofish)", ENCRYPT, sec, NULL, 0,
Sebastian Siewior477035c2008-03-11 21:24:26 +08002116 speed_template_16_24_32);
Herbert Xucba83562006-08-13 08:26:09 +10002117 test_cipher_speed("ecb(twofish)", DECRYPT, sec, NULL, 0,
Sebastian Siewior477035c2008-03-11 21:24:26 +08002118 speed_template_16_24_32);
Herbert Xucba83562006-08-13 08:26:09 +10002119 test_cipher_speed("cbc(twofish)", ENCRYPT, sec, NULL, 0,
Sebastian Siewior477035c2008-03-11 21:24:26 +08002120 speed_template_16_24_32);
Herbert Xucba83562006-08-13 08:26:09 +10002121 test_cipher_speed("cbc(twofish)", DECRYPT, sec, NULL, 0,
Sebastian Siewior477035c2008-03-11 21:24:26 +08002122 speed_template_16_24_32);
Jussi Kivilinnaee5002a2011-09-26 16:47:15 +03002123 test_cipher_speed("ctr(twofish)", ENCRYPT, sec, NULL, 0,
2124 speed_template_16_24_32);
2125 test_cipher_speed("ctr(twofish)", DECRYPT, sec, NULL, 0,
2126 speed_template_16_24_32);
Jussi Kivilinnabee3a902011-10-18 13:32:56 +03002127 test_cipher_speed("lrw(twofish)", ENCRYPT, sec, NULL, 0,
2128 speed_template_32_40_48);
2129 test_cipher_speed("lrw(twofish)", DECRYPT, sec, NULL, 0,
2130 speed_template_32_40_48);
Jussi Kivilinna131f7542011-10-18 13:33:38 +03002131 test_cipher_speed("xts(twofish)", ENCRYPT, sec, NULL, 0,
2132 speed_template_32_48_64);
2133 test_cipher_speed("xts(twofish)", DECRYPT, sec, NULL, 0,
2134 speed_template_32_48_64);
Harald Welteebfd9bc2005-06-22 13:27:23 -07002135 break;
2136
2137 case 203:
Herbert Xucba83562006-08-13 08:26:09 +10002138 test_cipher_speed("ecb(blowfish)", ENCRYPT, sec, NULL, 0,
Sebastian Siewior477035c2008-03-11 21:24:26 +08002139 speed_template_8_32);
Herbert Xucba83562006-08-13 08:26:09 +10002140 test_cipher_speed("ecb(blowfish)", DECRYPT, sec, NULL, 0,
Sebastian Siewior477035c2008-03-11 21:24:26 +08002141 speed_template_8_32);
Herbert Xucba83562006-08-13 08:26:09 +10002142 test_cipher_speed("cbc(blowfish)", ENCRYPT, sec, NULL, 0,
Sebastian Siewior477035c2008-03-11 21:24:26 +08002143 speed_template_8_32);
Herbert Xucba83562006-08-13 08:26:09 +10002144 test_cipher_speed("cbc(blowfish)", DECRYPT, sec, NULL, 0,
Sebastian Siewior477035c2008-03-11 21:24:26 +08002145 speed_template_8_32);
Jussi Kivilinna7d47b862011-09-02 01:45:17 +03002146 test_cipher_speed("ctr(blowfish)", ENCRYPT, sec, NULL, 0,
2147 speed_template_8_32);
2148 test_cipher_speed("ctr(blowfish)", DECRYPT, sec, NULL, 0,
2149 speed_template_8_32);
Harald Welteebfd9bc2005-06-22 13:27:23 -07002150 break;
2151
2152 case 204:
Herbert Xucba83562006-08-13 08:26:09 +10002153 test_cipher_speed("ecb(des)", ENCRYPT, sec, NULL, 0,
Sebastian Siewior477035c2008-03-11 21:24:26 +08002154 speed_template_8);
Herbert Xucba83562006-08-13 08:26:09 +10002155 test_cipher_speed("ecb(des)", DECRYPT, sec, NULL, 0,
Sebastian Siewior477035c2008-03-11 21:24:26 +08002156 speed_template_8);
Herbert Xucba83562006-08-13 08:26:09 +10002157 test_cipher_speed("cbc(des)", ENCRYPT, sec, NULL, 0,
Sebastian Siewior477035c2008-03-11 21:24:26 +08002158 speed_template_8);
Herbert Xucba83562006-08-13 08:26:09 +10002159 test_cipher_speed("cbc(des)", DECRYPT, sec, NULL, 0,
Sebastian Siewior477035c2008-03-11 21:24:26 +08002160 speed_template_8);
Harald Welteebfd9bc2005-06-22 13:27:23 -07002161 break;
2162
Noriaki TAKAMIYA02ab5a72007-01-24 21:48:19 +11002163 case 205:
2164 test_cipher_speed("ecb(camellia)", ENCRYPT, sec, NULL, 0,
Sebastian Siewior477035c2008-03-11 21:24:26 +08002165 speed_template_16_24_32);
Noriaki TAKAMIYA02ab5a72007-01-24 21:48:19 +11002166 test_cipher_speed("ecb(camellia)", DECRYPT, sec, NULL, 0,
Sebastian Siewior477035c2008-03-11 21:24:26 +08002167 speed_template_16_24_32);
Noriaki TAKAMIYA02ab5a72007-01-24 21:48:19 +11002168 test_cipher_speed("cbc(camellia)", ENCRYPT, sec, NULL, 0,
Sebastian Siewior477035c2008-03-11 21:24:26 +08002169 speed_template_16_24_32);
Noriaki TAKAMIYA02ab5a72007-01-24 21:48:19 +11002170 test_cipher_speed("cbc(camellia)", DECRYPT, sec, NULL, 0,
Sebastian Siewior477035c2008-03-11 21:24:26 +08002171 speed_template_16_24_32);
Jussi Kivilinna4de59332012-03-05 20:26:26 +02002172 test_cipher_speed("ctr(camellia)", ENCRYPT, sec, NULL, 0,
2173 speed_template_16_24_32);
2174 test_cipher_speed("ctr(camellia)", DECRYPT, sec, NULL, 0,
2175 speed_template_16_24_32);
2176 test_cipher_speed("lrw(camellia)", ENCRYPT, sec, NULL, 0,
2177 speed_template_32_40_48);
2178 test_cipher_speed("lrw(camellia)", DECRYPT, sec, NULL, 0,
2179 speed_template_32_40_48);
2180 test_cipher_speed("xts(camellia)", ENCRYPT, sec, NULL, 0,
2181 speed_template_32_48_64);
2182 test_cipher_speed("xts(camellia)", DECRYPT, sec, NULL, 0,
2183 speed_template_32_48_64);
Noriaki TAKAMIYA02ab5a72007-01-24 21:48:19 +11002184 break;
2185
Jussi Kivilinna7fb7fe42011-10-18 00:03:03 +03002186 case 207:
2187 test_cipher_speed("ecb(serpent)", ENCRYPT, sec, NULL, 0,
2188 speed_template_16_32);
2189 test_cipher_speed("ecb(serpent)", DECRYPT, sec, NULL, 0,
2190 speed_template_16_32);
2191 test_cipher_speed("cbc(serpent)", ENCRYPT, sec, NULL, 0,
2192 speed_template_16_32);
2193 test_cipher_speed("cbc(serpent)", DECRYPT, sec, NULL, 0,
2194 speed_template_16_32);
2195 test_cipher_speed("ctr(serpent)", ENCRYPT, sec, NULL, 0,
2196 speed_template_16_32);
2197 test_cipher_speed("ctr(serpent)", DECRYPT, sec, NULL, 0,
2198 speed_template_16_32);
Jussi Kivilinna87aae4b2011-10-18 13:32:39 +03002199 test_cipher_speed("lrw(serpent)", ENCRYPT, sec, NULL, 0,
2200 speed_template_32_48);
2201 test_cipher_speed("lrw(serpent)", DECRYPT, sec, NULL, 0,
2202 speed_template_32_48);
Jussi Kivilinna5209c072011-10-18 13:33:22 +03002203 test_cipher_speed("xts(serpent)", ENCRYPT, sec, NULL, 0,
2204 speed_template_32_64);
2205 test_cipher_speed("xts(serpent)", DECRYPT, sec, NULL, 0,
2206 speed_template_32_64);
Jussi Kivilinna7fb7fe42011-10-18 00:03:03 +03002207 break;
2208
Jussi Kivilinna31b4cd292012-06-12 16:52:04 +08002209 case 208:
2210 test_cipher_speed("ecb(arc4)", ENCRYPT, sec, NULL, 0,
2211 speed_template_8);
2212 break;
2213
Johannes Goetzfrieda2c58262012-07-11 19:37:21 +02002214 case 209:
2215 test_cipher_speed("ecb(cast5)", ENCRYPT, sec, NULL, 0,
2216 speed_template_8_16);
2217 test_cipher_speed("ecb(cast5)", DECRYPT, sec, NULL, 0,
2218 speed_template_8_16);
2219 test_cipher_speed("cbc(cast5)", ENCRYPT, sec, NULL, 0,
2220 speed_template_8_16);
2221 test_cipher_speed("cbc(cast5)", DECRYPT, sec, NULL, 0,
2222 speed_template_8_16);
2223 test_cipher_speed("ctr(cast5)", ENCRYPT, sec, NULL, 0,
2224 speed_template_8_16);
2225 test_cipher_speed("ctr(cast5)", DECRYPT, sec, NULL, 0,
2226 speed_template_8_16);
2227 break;
2228
Johannes Goetzfried9b8b0402012-07-11 19:38:29 +02002229 case 210:
2230 test_cipher_speed("ecb(cast6)", ENCRYPT, sec, NULL, 0,
2231 speed_template_16_32);
2232 test_cipher_speed("ecb(cast6)", DECRYPT, sec, NULL, 0,
2233 speed_template_16_32);
2234 test_cipher_speed("cbc(cast6)", ENCRYPT, sec, NULL, 0,
2235 speed_template_16_32);
2236 test_cipher_speed("cbc(cast6)", DECRYPT, sec, NULL, 0,
2237 speed_template_16_32);
2238 test_cipher_speed("ctr(cast6)", ENCRYPT, sec, NULL, 0,
2239 speed_template_16_32);
2240 test_cipher_speed("ctr(cast6)", DECRYPT, sec, NULL, 0,
2241 speed_template_16_32);
2242 test_cipher_speed("lrw(cast6)", ENCRYPT, sec, NULL, 0,
2243 speed_template_32_48);
2244 test_cipher_speed("lrw(cast6)", DECRYPT, sec, NULL, 0,
2245 speed_template_32_48);
2246 test_cipher_speed("xts(cast6)", ENCRYPT, sec, NULL, 0,
2247 speed_template_32_64);
2248 test_cipher_speed("xts(cast6)", DECRYPT, sec, NULL, 0,
2249 speed_template_32_64);
2250 break;
2251
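	/*
	 * Cases 211-213 and 215-217 benchmark AEAD constructions (case 214
	 * covers the bare chacha20 stream cipher).  The test_mb_aead_speed()
	 * variants keep num_mb requests in flight at once (num_mb is a module
	 * parameter, default 8) to exercise asynchronous implementations.
	 */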
Tim Chen53f52d72013-12-11 14:28:47 -08002252 case 211:
2253 test_aead_speed("rfc4106(gcm(aes))", ENCRYPT, sec,
Herbert Xu34a1c742015-07-09 07:17:26 +08002254 NULL, 0, 16, 16, aead_speed_template_20);
Vutla, Lokesh1425d2d2015-07-07 21:01:49 +05302255 test_aead_speed("gcm(aes)", ENCRYPT, sec,
Cyrille Pitchenf18611d2015-11-17 13:37:10 +01002256 NULL, 0, 16, 8, speed_template_16_24_32);
Gilad Ben-Yossef4431bd42017-12-17 08:29:01 +00002257 test_aead_speed("rfc4106(gcm(aes))", DECRYPT, sec,
2258 NULL, 0, 16, 16, aead_speed_template_20);
2259 test_aead_speed("gcm(aes)", DECRYPT, sec,
2260 NULL, 0, 16, 8, speed_template_16_24_32);
Tim Chen53f52d72013-12-11 14:28:47 -08002261 break;
2262
Herbert Xu4e4aab62015-06-17 14:04:21 +08002263 case 212:
2264 test_aead_speed("rfc4309(ccm(aes))", ENCRYPT, sec,
Herbert Xu34a1c742015-07-09 07:17:26 +08002265 NULL, 0, 16, 16, aead_speed_template_19);
Gilad Ben-Yossef4431bd42017-12-17 08:29:01 +00002266 test_aead_speed("rfc4309(ccm(aes))", DECRYPT, sec,
2267 NULL, 0, 16, 16, aead_speed_template_19);
Herbert Xu4e4aab62015-06-17 14:04:21 +08002268 break;
2269
Martin Willi2dce0632015-07-16 19:13:59 +02002270 case 213:
2271 test_aead_speed("rfc7539esp(chacha20,poly1305)", ENCRYPT, sec,
2272 NULL, 0, 16, 8, aead_speed_template_36);
Gilad Ben-Yossef4431bd42017-12-17 08:29:01 +00002273 test_aead_speed("rfc7539esp(chacha20,poly1305)", DECRYPT, sec,
2274 NULL, 0, 16, 8, aead_speed_template_36);
Martin Willi2dce0632015-07-16 19:13:59 +02002275 break;
2276
2277 case 214:
2278 test_cipher_speed("chacha20", ENCRYPT, sec, NULL, 0,
2279 speed_template_32);
2280 break;
2281
Gilad Ben-Yossef427988d2017-12-17 08:29:05 +00002282 case 215:
2283 test_mb_aead_speed("rfc4106(gcm(aes))", ENCRYPT, sec, NULL,
2284 0, 16, 16, aead_speed_template_20, num_mb);
2285 test_mb_aead_speed("gcm(aes)", ENCRYPT, sec, NULL, 0, 16, 8,
2286 speed_template_16_24_32, num_mb);
2287 test_mb_aead_speed("rfc4106(gcm(aes))", DECRYPT, sec, NULL,
2288 0, 16, 16, aead_speed_template_20, num_mb);
2289 test_mb_aead_speed("gcm(aes)", DECRYPT, sec, NULL, 0, 16, 8,
2290 speed_template_16_24_32, num_mb);
2291 break;
2292
2293 case 216:
2294 test_mb_aead_speed("rfc4309(ccm(aes))", ENCRYPT, sec, NULL, 0,
2295 16, 16, aead_speed_template_19, num_mb);
2296 test_mb_aead_speed("rfc4309(ccm(aes))", DECRYPT, sec, NULL, 0,
2297 16, 16, aead_speed_template_19, num_mb);
2298 break;
2299
2300 case 217:
2301 test_mb_aead_speed("rfc7539esp(chacha20,poly1305)", ENCRYPT,
2302 sec, NULL, 0, 16, 8, aead_speed_template_36,
2303 num_mb);
2304 test_mb_aead_speed("rfc7539esp(chacha20,poly1305)", DECRYPT,
2305 sec, NULL, 0, 16, 8, aead_speed_template_36,
2306 num_mb);
2307 break;
2308
Gilad Ben-Yossef95ba5972018-09-20 14:18:38 +01002309 case 218:
2310 test_cipher_speed("ecb(sm4)", ENCRYPT, sec, NULL, 0,
2311 speed_template_16);
2312 test_cipher_speed("ecb(sm4)", DECRYPT, sec, NULL, 0,
2313 speed_template_16);
2314 test_cipher_speed("cbc(sm4)", ENCRYPT, sec, NULL, 0,
2315 speed_template_16);
2316 test_cipher_speed("cbc(sm4)", DECRYPT, sec, NULL, 0,
2317 speed_template_16);
Tianjia Zhanga7fc80b2021-07-20 11:46:42 +08002318 test_cipher_speed("cfb(sm4)", ENCRYPT, sec, NULL, 0,
2319 speed_template_16);
2320 test_cipher_speed("cfb(sm4)", DECRYPT, sec, NULL, 0,
2321 speed_template_16);
Gilad Ben-Yossef95ba5972018-09-20 14:18:38 +01002322 test_cipher_speed("ctr(sm4)", ENCRYPT, sec, NULL, 0,
2323 speed_template_16);
2324 test_cipher_speed("ctr(sm4)", DECRYPT, sec, NULL, 0,
2325 speed_template_16);
2326 break;
Eric Biggers059c2a42018-11-16 17:26:31 -08002327
2328 case 219:
2329 test_cipher_speed("adiantum(xchacha12,aes)", ENCRYPT, sec, NULL,
2330 0, speed_template_32);
2331 test_cipher_speed("adiantum(xchacha12,aes)", DECRYPT, sec, NULL,
2332 0, speed_template_32);
2333 test_cipher_speed("adiantum(xchacha20,aes)", ENCRYPT, sec, NULL,
2334 0, speed_template_32);
2335 test_cipher_speed("adiantum(xchacha20,aes)", DECRYPT, sec, NULL,
2336 0, speed_template_32);
2337 break;
2338
Ard Biesheuvelf975abb2019-08-19 17:17:34 +03002339 case 220:
2340 test_acipher_speed("essiv(cbc(aes),sha256)",
2341 ENCRYPT, sec, NULL, 0,
2342 speed_template_16_24_32);
2343 test_acipher_speed("essiv(cbc(aes),sha256)",
2344 DECRYPT, sec, NULL, 0,
2345 speed_template_16_24_32);
2346 break;
2347
Ard Biesheuvel97bcb162019-07-03 10:55:12 +02002348 case 221:
2349 test_aead_speed("aegis128", ENCRYPT, sec,
2350 NULL, 0, 16, 8, speed_template_16);
2351 test_aead_speed("aegis128", DECRYPT, sec,
2352 NULL, 0, 16, 8, speed_template_16);
2353 break;
2354
Tianjia Zhang357a7532021-08-13 15:55:08 +08002355 case 222:
2356 test_aead_speed("gcm(sm4)", ENCRYPT, sec,
2357 NULL, 0, 16, 8, speed_template_16);
2358 test_aead_speed("gcm(sm4)", DECRYPT, sec,
2359 NULL, 0, 16, 8, speed_template_16);
2360 break;
2361
2362 case 223:
2363 test_aead_speed("rfc4309(ccm(sm4))", ENCRYPT, sec,
2364 NULL, 0, 16, 16, aead_speed_template_19);
2365 test_aead_speed("rfc4309(ccm(sm4))", DECRYPT, sec,
2366 NULL, 0, 16, 16, aead_speed_template_19);
2367 break;
2368
2369 case 224:
2370 test_mb_aead_speed("gcm(sm4)", ENCRYPT, sec, NULL, 0, 16, 8,
2371 speed_template_16, num_mb);
2372 test_mb_aead_speed("gcm(sm4)", DECRYPT, sec, NULL, 0, 16, 8,
2373 speed_template_16, num_mb);
2374 break;
2375
2376 case 225:
2377 test_mb_aead_speed("rfc4309(ccm(sm4))", ENCRYPT, sec, NULL, 0,
2378 16, 16, aead_speed_template_19, num_mb);
2379 test_mb_aead_speed("rfc4309(ccm(sm4))", DECRYPT, sec, NULL, 0,
2380 16, 16, aead_speed_template_19, num_mb);
2381 break;
2382
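	/*
	 * The 300-series measures hash digests through test_hash_speed().
	 * Note the fallthrough chain: mode=300 together with alg= benchmarks
	 * just that one hash, while picking an individual case (say mode=303
	 * for sha1) runs only that digest because of the
	 * "if (mode > 300 && mode < 400) break;" guard after each test.
	 * Case 318 forces klen = 16 before the ghash run, since ghash is a
	 * keyed hash.
	 */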
Michal Ludvige8057922006-05-30 22:04:19 +10002383 case 300:
Herbert Xu86068132014-12-04 16:43:29 +08002384 if (alg) {
2385 test_hash_speed(alg, sec, generic_hash_speed_template);
2386 break;
2387 }
Gustavo A. R. Silvadf561f662020-08-23 17:36:59 -05002388 fallthrough;
Michal Ludvige8057922006-05-30 22:04:19 +10002389 case 301:
Herbert Xue9d41162006-08-19 21:38:49 +10002390 test_hash_speed("md4", sec, generic_hash_speed_template);
Michal Ludvige8057922006-05-30 22:04:19 +10002391 if (mode > 300 && mode < 400) break;
Gustavo A. R. Silvadf561f662020-08-23 17:36:59 -05002392 fallthrough;
Michal Ludvige8057922006-05-30 22:04:19 +10002393 case 302:
Herbert Xue9d41162006-08-19 21:38:49 +10002394 test_hash_speed("md5", sec, generic_hash_speed_template);
Michal Ludvige8057922006-05-30 22:04:19 +10002395 if (mode > 300 && mode < 400) break;
Gustavo A. R. Silvadf561f662020-08-23 17:36:59 -05002396 fallthrough;
Michal Ludvige8057922006-05-30 22:04:19 +10002397 case 303:
Herbert Xue9d41162006-08-19 21:38:49 +10002398 test_hash_speed("sha1", sec, generic_hash_speed_template);
Michal Ludvige8057922006-05-30 22:04:19 +10002399 if (mode > 300 && mode < 400) break;
Gustavo A. R. Silvadf561f662020-08-23 17:36:59 -05002400 fallthrough;
Michal Ludvige8057922006-05-30 22:04:19 +10002401 case 304:
Herbert Xue9d41162006-08-19 21:38:49 +10002402 test_hash_speed("sha256", sec, generic_hash_speed_template);
Michal Ludvige8057922006-05-30 22:04:19 +10002403 if (mode > 300 && mode < 400) break;
Gustavo A. R. Silvadf561f662020-08-23 17:36:59 -05002404 fallthrough;
Michal Ludvige8057922006-05-30 22:04:19 +10002405 case 305:
Herbert Xue9d41162006-08-19 21:38:49 +10002406 test_hash_speed("sha384", sec, generic_hash_speed_template);
Michal Ludvige8057922006-05-30 22:04:19 +10002407 if (mode > 300 && mode < 400) break;
Gustavo A. R. Silvadf561f662020-08-23 17:36:59 -05002408 fallthrough;
Michal Ludvige8057922006-05-30 22:04:19 +10002409 case 306:
Herbert Xue9d41162006-08-19 21:38:49 +10002410 test_hash_speed("sha512", sec, generic_hash_speed_template);
Michal Ludvige8057922006-05-30 22:04:19 +10002411 if (mode > 300 && mode < 400) break;
Gustavo A. R. Silvadf561f662020-08-23 17:36:59 -05002412 fallthrough;
Michal Ludvige8057922006-05-30 22:04:19 +10002413 case 307:
Herbert Xue9d41162006-08-19 21:38:49 +10002414 test_hash_speed("wp256", sec, generic_hash_speed_template);
Michal Ludvige8057922006-05-30 22:04:19 +10002415 if (mode > 300 && mode < 400) break;
Gustavo A. R. Silvadf561f662020-08-23 17:36:59 -05002416 fallthrough;
Michal Ludvige8057922006-05-30 22:04:19 +10002417 case 308:
Herbert Xue9d41162006-08-19 21:38:49 +10002418 test_hash_speed("wp384", sec, generic_hash_speed_template);
Michal Ludvige8057922006-05-30 22:04:19 +10002419 if (mode > 300 && mode < 400) break;
Gustavo A. R. Silvadf561f662020-08-23 17:36:59 -05002420 fallthrough;
Michal Ludvige8057922006-05-30 22:04:19 +10002421 case 309:
Herbert Xue9d41162006-08-19 21:38:49 +10002422 test_hash_speed("wp512", sec, generic_hash_speed_template);
Michal Ludvige8057922006-05-30 22:04:19 +10002423 if (mode > 300 && mode < 400) break;
Gustavo A. R. Silvadf561f662020-08-23 17:36:59 -05002424 fallthrough;
Jonathan Lynchcd12fb92007-11-10 20:08:25 +08002425 case 313:
2426 test_hash_speed("sha224", sec, generic_hash_speed_template);
2427 if (mode > 300 && mode < 400) break;
Gustavo A. R. Silvadf561f662020-08-23 17:36:59 -05002428 fallthrough;
Ard Biesheuvelaa22cd72021-05-20 12:40:00 +02002429 case 314:
2430 test_hash_speed("xxhash64", sec, generic_hash_speed_template);
2431 if (mode > 300 && mode < 400) break;
2432 fallthrough;
Adrian-Ken Rueegseggerfd4adf12008-05-07 22:16:36 +08002433 case 315:
2434 test_hash_speed("rmd160", sec, generic_hash_speed_template);
2435 if (mode > 300 && mode < 400) break;
Gustavo A. R. Silvadf561f662020-08-23 17:36:59 -05002436 fallthrough;
Ard Biesheuvelaa22cd72021-05-20 12:40:00 +02002437 case 316:
2438 test_hash_speed("blake2s-256", sec, generic_hash_speed_template);
2439 if (mode > 300 && mode < 400) break;
2440 fallthrough;
2441 case 317:
2442 test_hash_speed("blake2b-512", sec, generic_hash_speed_template);
2443 if (mode > 300 && mode < 400) break;
2444 fallthrough;
Huang Ying18bcc912010-03-10 18:30:32 +08002445 case 318:
Herbert Xuba974ad2020-08-05 15:57:08 +10002446 klen = 16;
2447 test_hash_speed("ghash", sec, generic_hash_speed_template);
Huang Ying18bcc912010-03-10 18:30:32 +08002448 if (mode > 300 && mode < 400) break;
Gustavo A. R. Silvadf561f662020-08-23 17:36:59 -05002449 fallthrough;
Tim Chene3899e42012-09-27 15:44:24 -07002450 case 319:
2451 test_hash_speed("crc32c", sec, generic_hash_speed_template);
2452 if (mode > 300 && mode < 400) break;
Gustavo A. R. Silvadf561f662020-08-23 17:36:59 -05002453 fallthrough;
Herbert Xu684115212013-09-07 12:56:26 +10002454 case 320:
2455 test_hash_speed("crct10dif", sec, generic_hash_speed_template);
2456 if (mode > 300 && mode < 400) break;
Gustavo A. R. Silvadf561f662020-08-23 17:36:59 -05002457 fallthrough;
Martin Willi2dce0632015-07-16 19:13:59 +02002458 case 321:
2459 test_hash_speed("poly1305", sec, poly1305_speed_template);
2460 if (mode > 300 && mode < 400) break;
Gustavo A. R. Silvadf561f662020-08-23 17:36:59 -05002461 fallthrough;
raveendra padasalagi79cc6ab2016-06-17 10:30:36 +05302462 case 322:
2463 test_hash_speed("sha3-224", sec, generic_hash_speed_template);
2464 if (mode > 300 && mode < 400) break;
Gustavo A. R. Silvadf561f662020-08-23 17:36:59 -05002465 fallthrough;
raveendra padasalagi79cc6ab2016-06-17 10:30:36 +05302466 case 323:
2467 test_hash_speed("sha3-256", sec, generic_hash_speed_template);
2468 if (mode > 300 && mode < 400) break;
Gustavo A. R. Silvadf561f662020-08-23 17:36:59 -05002469 fallthrough;
raveendra padasalagi79cc6ab2016-06-17 10:30:36 +05302470 case 324:
2471 test_hash_speed("sha3-384", sec, generic_hash_speed_template);
2472 if (mode > 300 && mode < 400) break;
Gustavo A. R. Silvadf561f662020-08-23 17:36:59 -05002473 fallthrough;
raveendra padasalagi79cc6ab2016-06-17 10:30:36 +05302474 case 325:
2475 test_hash_speed("sha3-512", sec, generic_hash_speed_template);
2476 if (mode > 300 && mode < 400) break;
Gustavo A. R. Silvadf561f662020-08-23 17:36:59 -05002477 fallthrough;
Gilad Ben-Yossefb7e27532017-08-21 13:51:29 +03002478 case 326:
2479 test_hash_speed("sm3", sec, generic_hash_speed_template);
2480 if (mode > 300 && mode < 400) break;
Gustavo A. R. Silvadf561f662020-08-23 17:36:59 -05002481 fallthrough;
Vitaly Chikunov25a0b9d2018-11-07 00:00:03 +03002482 case 327:
2483 test_hash_speed("streebog256", sec,
2484 generic_hash_speed_template);
2485 if (mode > 300 && mode < 400) break;
Gustavo A. R. Silvadf561f662020-08-23 17:36:59 -05002486 fallthrough;
Vitaly Chikunov25a0b9d2018-11-07 00:00:03 +03002487 case 328:
2488 test_hash_speed("streebog512", sec,
2489 generic_hash_speed_template);
2490 if (mode > 300 && mode < 400) break;
Gustavo A. R. Silvadf561f662020-08-23 17:36:59 -05002491 fallthrough;
Michal Ludvige8057922006-05-30 22:04:19 +10002492 case 399:
2493 break;
2494
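	/*
	 * The 400-series mirrors the 300-series but goes through the ahash
	 * interface via test_ahash_speed(); cases 422-427 additionally use
	 * test_mb_ahash_speed() with num_mb requests outstanding.
	 */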
David S. Millerbeb63da2010-05-19 14:11:21 +10002495 case 400:
Herbert Xu86068132014-12-04 16:43:29 +08002496 if (alg) {
2497 test_ahash_speed(alg, sec, generic_hash_speed_template);
2498 break;
2499 }
Gustavo A. R. Silvadf561f662020-08-23 17:36:59 -05002500 fallthrough;
David S. Millerbeb63da2010-05-19 14:11:21 +10002501 case 401:
2502 test_ahash_speed("md4", sec, generic_hash_speed_template);
2503 if (mode > 400 && mode < 500) break;
Gustavo A. R. Silvadf561f662020-08-23 17:36:59 -05002504 fallthrough;
David S. Millerbeb63da2010-05-19 14:11:21 +10002505 case 402:
2506 test_ahash_speed("md5", sec, generic_hash_speed_template);
2507 if (mode > 400 && mode < 500) break;
Gustavo A. R. Silvadf561f662020-08-23 17:36:59 -05002508 fallthrough;
David S. Millerbeb63da2010-05-19 14:11:21 +10002509 case 403:
2510 test_ahash_speed("sha1", sec, generic_hash_speed_template);
2511 if (mode > 400 && mode < 500) break;
Gustavo A. R. Silvadf561f662020-08-23 17:36:59 -05002512 fallthrough;
David S. Millerbeb63da2010-05-19 14:11:21 +10002513 case 404:
2514 test_ahash_speed("sha256", sec, generic_hash_speed_template);
2515 if (mode > 400 && mode < 500) break;
Gustavo A. R. Silvadf561f662020-08-23 17:36:59 -05002516 fallthrough;
David S. Millerbeb63da2010-05-19 14:11:21 +10002517 case 405:
2518 test_ahash_speed("sha384", sec, generic_hash_speed_template);
2519 if (mode > 400 && mode < 500) break;
Gustavo A. R. Silvadf561f662020-08-23 17:36:59 -05002520 fallthrough;
David S. Millerbeb63da2010-05-19 14:11:21 +10002521 case 406:
2522 test_ahash_speed("sha512", sec, generic_hash_speed_template);
2523 if (mode > 400 && mode < 500) break;
Gustavo A. R. Silvadf561f662020-08-23 17:36:59 -05002524 fallthrough;
David S. Millerbeb63da2010-05-19 14:11:21 +10002525 case 407:
2526 test_ahash_speed("wp256", sec, generic_hash_speed_template);
2527 if (mode > 400 && mode < 500) break;
Gustavo A. R. Silvadf561f662020-08-23 17:36:59 -05002528 fallthrough;
David S. Millerbeb63da2010-05-19 14:11:21 +10002529 case 408:
2530 test_ahash_speed("wp384", sec, generic_hash_speed_template);
2531 if (mode > 400 && mode < 500) break;
Gustavo A. R. Silvadf561f662020-08-23 17:36:59 -05002532 fallthrough;
David S. Millerbeb63da2010-05-19 14:11:21 +10002533 case 409:
2534 test_ahash_speed("wp512", sec, generic_hash_speed_template);
2535 if (mode > 400 && mode < 500) break;
Gustavo A. R. Silvadf561f662020-08-23 17:36:59 -05002536 fallthrough;
David S. Millerbeb63da2010-05-19 14:11:21 +10002537 case 413:
2538 test_ahash_speed("sha224", sec, generic_hash_speed_template);
2539 if (mode > 400 && mode < 500) break;
Gustavo A. R. Silvadf561f662020-08-23 17:36:59 -05002540 fallthrough;
Ard Biesheuvelaa22cd72021-05-20 12:40:00 +02002541 case 414:
2542 test_ahash_speed("xxhash64", sec, generic_hash_speed_template);
2543 if (mode > 400 && mode < 500) break;
2544 fallthrough;
David S. Millerbeb63da2010-05-19 14:11:21 +10002545 case 415:
2546 test_ahash_speed("rmd160", sec, generic_hash_speed_template);
2547 if (mode > 400 && mode < 500) break;
Gustavo A. R. Silvadf561f662020-08-23 17:36:59 -05002548 fallthrough;
Ard Biesheuvelaa22cd72021-05-20 12:40:00 +02002549 case 416:
2550 test_ahash_speed("blake2s-256", sec, generic_hash_speed_template);
2551 if (mode > 400 && mode < 500) break;
2552 fallthrough;
2553 case 417:
2554 test_ahash_speed("blake2b-512", sec, generic_hash_speed_template);
2555 if (mode > 400 && mode < 500) break;
2556 fallthrough;
raveendra padasalagi79cc6ab2016-06-17 10:30:36 +05302557 case 418:
2558 test_ahash_speed("sha3-224", sec, generic_hash_speed_template);
2559 if (mode > 400 && mode < 500) break;
Gustavo A. R. Silvadf561f662020-08-23 17:36:59 -05002560 fallthrough;
raveendra padasalagi79cc6ab2016-06-17 10:30:36 +05302561 case 419:
2562 test_ahash_speed("sha3-256", sec, generic_hash_speed_template);
2563 if (mode > 400 && mode < 500) break;
Gustavo A. R. Silvadf561f662020-08-23 17:36:59 -05002564 fallthrough;
raveendra padasalagi79cc6ab2016-06-17 10:30:36 +05302565 case 420:
2566 test_ahash_speed("sha3-384", sec, generic_hash_speed_template);
2567 if (mode > 400 && mode < 500) break;
Gustavo A. R. Silvadf561f662020-08-23 17:36:59 -05002568 fallthrough;
raveendra padasalagi79cc6ab2016-06-17 10:30:36 +05302569 case 421:
2570 test_ahash_speed("sha3-512", sec, generic_hash_speed_template);
2571 if (mode > 400 && mode < 500) break;
Gustavo A. R. Silvadf561f662020-08-23 17:36:59 -05002572 fallthrough;
Megha Dey087bcd22016-06-23 18:40:47 -07002573 case 422:
Gilad Ben-Yossef8fcdc862017-12-17 08:29:02 +00002574 test_mb_ahash_speed("sha1", sec, generic_hash_speed_template,
2575 num_mb);
Megha Dey087bcd22016-06-23 18:40:47 -07002576 if (mode > 400 && mode < 500) break;
Gustavo A. R. Silvadf561f662020-08-23 17:36:59 -05002577 fallthrough;
Megha Dey087bcd22016-06-23 18:40:47 -07002578 case 423:
Gilad Ben-Yossef8fcdc862017-12-17 08:29:02 +00002579 test_mb_ahash_speed("sha256", sec, generic_hash_speed_template,
2580 num_mb);
Megha Dey087bcd22016-06-23 18:40:47 -07002581 if (mode > 400 && mode < 500) break;
Gustavo A. R. Silvadf561f662020-08-23 17:36:59 -05002582 fallthrough;
Megha Dey14009c42016-06-27 10:20:09 -07002583 case 424:
Gilad Ben-Yossef8fcdc862017-12-17 08:29:02 +00002584 test_mb_ahash_speed("sha512", sec, generic_hash_speed_template,
2585 num_mb);
Megha Dey14009c42016-06-27 10:20:09 -07002586 if (mode > 400 && mode < 500) break;
Gustavo A. R. Silvadf561f662020-08-23 17:36:59 -05002587 fallthrough;
Gilad Ben-Yossefb7e27532017-08-21 13:51:29 +03002588 case 425:
Gilad Ben-Yossef8fcdc862017-12-17 08:29:02 +00002589 test_mb_ahash_speed("sm3", sec, generic_hash_speed_template,
2590 num_mb);
Gilad Ben-Yossefb7e27532017-08-21 13:51:29 +03002591 if (mode > 400 && mode < 500) break;
Gustavo A. R. Silvadf561f662020-08-23 17:36:59 -05002592 fallthrough;
Vitaly Chikunov25a0b9d2018-11-07 00:00:03 +03002593 case 426:
2594 test_mb_ahash_speed("streebog256", sec,
2595 generic_hash_speed_template, num_mb);
2596 if (mode > 400 && mode < 500) break;
Gustavo A. R. Silvadf561f662020-08-23 17:36:59 -05002597 fallthrough;
Vitaly Chikunov25a0b9d2018-11-07 00:00:03 +03002598 case 427:
2599 test_mb_ahash_speed("streebog512", sec,
2600 generic_hash_speed_template, num_mb);
2601 if (mode > 400 && mode < 500) break;
Gustavo A. R. Silvadf561f662020-08-23 17:36:59 -05002602 fallthrough;
David S. Millerbeb63da2010-05-19 14:11:21 +10002603 case 499:
2604 break;
2605
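	/*
	 * The 500-series repeats the cipher benchmarks through
	 * test_acipher_speed(), i.e. with asynchronous skcipher
	 * implementations allowed, complementing the synchronous-only
	 * 200-series above.
	 */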
Jussi Kivilinna3f3baf32011-10-18 00:02:58 +03002606 case 500:
2607 test_acipher_speed("ecb(aes)", ENCRYPT, sec, NULL, 0,
2608 speed_template_16_24_32);
2609 test_acipher_speed("ecb(aes)", DECRYPT, sec, NULL, 0,
2610 speed_template_16_24_32);
2611 test_acipher_speed("cbc(aes)", ENCRYPT, sec, NULL, 0,
2612 speed_template_16_24_32);
2613 test_acipher_speed("cbc(aes)", DECRYPT, sec, NULL, 0,
2614 speed_template_16_24_32);
2615 test_acipher_speed("lrw(aes)", ENCRYPT, sec, NULL, 0,
2616 speed_template_32_40_48);
2617 test_acipher_speed("lrw(aes)", DECRYPT, sec, NULL, 0,
2618 speed_template_32_40_48);
2619 test_acipher_speed("xts(aes)", ENCRYPT, sec, NULL, 0,
Horia Geantăb66ad0b2017-07-19 19:40:32 +03002620 speed_template_32_64);
Jussi Kivilinna3f3baf32011-10-18 00:02:58 +03002621 test_acipher_speed("xts(aes)", DECRYPT, sec, NULL, 0,
Horia Geantăb66ad0b2017-07-19 19:40:32 +03002622 speed_template_32_64);
Herbert Xu1503a242016-06-29 18:04:14 +08002623 test_acipher_speed("cts(cbc(aes))", ENCRYPT, sec, NULL, 0,
2624 speed_template_16_24_32);
2625 test_acipher_speed("cts(cbc(aes))", DECRYPT, sec, NULL, 0,
2626 speed_template_16_24_32);
Jussi Kivilinna3f3baf32011-10-18 00:02:58 +03002627 test_acipher_speed("ctr(aes)", ENCRYPT, sec, NULL, 0,
2628 speed_template_16_24_32);
2629 test_acipher_speed("ctr(aes)", DECRYPT, sec, NULL, 0,
2630 speed_template_16_24_32);
Nicolas Royerde1975332012-07-01 19:19:47 +02002631 test_acipher_speed("cfb(aes)", ENCRYPT, sec, NULL, 0,
2632 speed_template_16_24_32);
2633 test_acipher_speed("cfb(aes)", DECRYPT, sec, NULL, 0,
2634 speed_template_16_24_32);
2635 test_acipher_speed("ofb(aes)", ENCRYPT, sec, NULL, 0,
2636 speed_template_16_24_32);
2637 test_acipher_speed("ofb(aes)", DECRYPT, sec, NULL, 0,
2638 speed_template_16_24_32);
Jussi Kivilinna69d31502012-12-28 12:04:58 +02002639 test_acipher_speed("rfc3686(ctr(aes))", ENCRYPT, sec, NULL, 0,
2640 speed_template_20_28_36);
2641 test_acipher_speed("rfc3686(ctr(aes))", DECRYPT, sec, NULL, 0,
2642 speed_template_20_28_36);
Jussi Kivilinna3f3baf32011-10-18 00:02:58 +03002643 break;
2644
2645 case 501:
2646 test_acipher_speed("ecb(des3_ede)", ENCRYPT, sec,
2647 des3_speed_template, DES3_SPEED_VECTORS,
2648 speed_template_24);
2649 test_acipher_speed("ecb(des3_ede)", DECRYPT, sec,
2650 des3_speed_template, DES3_SPEED_VECTORS,
2651 speed_template_24);
2652 test_acipher_speed("cbc(des3_ede)", ENCRYPT, sec,
2653 des3_speed_template, DES3_SPEED_VECTORS,
2654 speed_template_24);
2655 test_acipher_speed("cbc(des3_ede)", DECRYPT, sec,
2656 des3_speed_template, DES3_SPEED_VECTORS,
2657 speed_template_24);
Nicolas Royerde1975332012-07-01 19:19:47 +02002658 test_acipher_speed("cfb(des3_ede)", ENCRYPT, sec,
2659 des3_speed_template, DES3_SPEED_VECTORS,
2660 speed_template_24);
2661 test_acipher_speed("cfb(des3_ede)", DECRYPT, sec,
2662 des3_speed_template, DES3_SPEED_VECTORS,
2663 speed_template_24);
2664 test_acipher_speed("ofb(des3_ede)", ENCRYPT, sec,
2665 des3_speed_template, DES3_SPEED_VECTORS,
2666 speed_template_24);
2667 test_acipher_speed("ofb(des3_ede)", DECRYPT, sec,
2668 des3_speed_template, DES3_SPEED_VECTORS,
2669 speed_template_24);
Jussi Kivilinna3f3baf32011-10-18 00:02:58 +03002670 break;
2671
2672 case 502:
2673 test_acipher_speed("ecb(des)", ENCRYPT, sec, NULL, 0,
2674 speed_template_8);
2675 test_acipher_speed("ecb(des)", DECRYPT, sec, NULL, 0,
2676 speed_template_8);
2677 test_acipher_speed("cbc(des)", ENCRYPT, sec, NULL, 0,
2678 speed_template_8);
2679 test_acipher_speed("cbc(des)", DECRYPT, sec, NULL, 0,
2680 speed_template_8);
Nicolas Royerde1975332012-07-01 19:19:47 +02002681 test_acipher_speed("cfb(des)", ENCRYPT, sec, NULL, 0,
2682 speed_template_8);
2683 test_acipher_speed("cfb(des)", DECRYPT, sec, NULL, 0,
2684 speed_template_8);
2685 test_acipher_speed("ofb(des)", ENCRYPT, sec, NULL, 0,
2686 speed_template_8);
2687 test_acipher_speed("ofb(des)", DECRYPT, sec, NULL, 0,
2688 speed_template_8);
Jussi Kivilinna3f3baf32011-10-18 00:02:58 +03002689 break;
2690
Jussi Kivilinna7fb7fe42011-10-18 00:03:03 +03002691 case 503:
2692 test_acipher_speed("ecb(serpent)", ENCRYPT, sec, NULL, 0,
2693 speed_template_16_32);
2694 test_acipher_speed("ecb(serpent)", DECRYPT, sec, NULL, 0,
2695 speed_template_16_32);
2696 test_acipher_speed("cbc(serpent)", ENCRYPT, sec, NULL, 0,
2697 speed_template_16_32);
2698 test_acipher_speed("cbc(serpent)", DECRYPT, sec, NULL, 0,
2699 speed_template_16_32);
2700 test_acipher_speed("ctr(serpent)", ENCRYPT, sec, NULL, 0,
2701 speed_template_16_32);
2702 test_acipher_speed("ctr(serpent)", DECRYPT, sec, NULL, 0,
2703 speed_template_16_32);
Jussi Kivilinna87aae4b2011-10-18 13:32:39 +03002704 test_acipher_speed("lrw(serpent)", ENCRYPT, sec, NULL, 0,
2705 speed_template_32_48);
2706 test_acipher_speed("lrw(serpent)", DECRYPT, sec, NULL, 0,
2707 speed_template_32_48);
Jussi Kivilinna5209c072011-10-18 13:33:22 +03002708 test_acipher_speed("xts(serpent)", ENCRYPT, sec, NULL, 0,
2709 speed_template_32_64);
2710 test_acipher_speed("xts(serpent)", DECRYPT, sec, NULL, 0,
2711 speed_template_32_64);
Jussi Kivilinna7fb7fe42011-10-18 00:03:03 +03002712 break;
2713
Johannes Goetzfried107778b52012-05-28 15:54:24 +02002714 case 504:
2715 test_acipher_speed("ecb(twofish)", ENCRYPT, sec, NULL, 0,
2716 speed_template_16_24_32);
2717 test_acipher_speed("ecb(twofish)", DECRYPT, sec, NULL, 0,
2718 speed_template_16_24_32);
2719 test_acipher_speed("cbc(twofish)", ENCRYPT, sec, NULL, 0,
2720 speed_template_16_24_32);
2721 test_acipher_speed("cbc(twofish)", DECRYPT, sec, NULL, 0,
2722 speed_template_16_24_32);
2723 test_acipher_speed("ctr(twofish)", ENCRYPT, sec, NULL, 0,
2724 speed_template_16_24_32);
2725 test_acipher_speed("ctr(twofish)", DECRYPT, sec, NULL, 0,
2726 speed_template_16_24_32);
2727 test_acipher_speed("lrw(twofish)", ENCRYPT, sec, NULL, 0,
2728 speed_template_32_40_48);
2729 test_acipher_speed("lrw(twofish)", DECRYPT, sec, NULL, 0,
2730 speed_template_32_40_48);
2731 test_acipher_speed("xts(twofish)", ENCRYPT, sec, NULL, 0,
2732 speed_template_32_48_64);
2733 test_acipher_speed("xts(twofish)", DECRYPT, sec, NULL, 0,
2734 speed_template_32_48_64);
2735 break;
2736
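	/* "ecb(arc4)" is the legacy name the ARC4 stream cipher is registered under. */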
Jussi Kivilinna31b4cd292012-06-12 16:52:04 +08002737 case 505:
2738 test_acipher_speed("ecb(arc4)", ENCRYPT, sec, NULL, 0,
2739 speed_template_8);
2740 break;
2741
Johannes Goetzfrieda2c58262012-07-11 19:37:21 +02002742 case 506:
2743 test_acipher_speed("ecb(cast5)", ENCRYPT, sec, NULL, 0,
2744 speed_template_8_16);
2745 test_acipher_speed("ecb(cast5)", DECRYPT, sec, NULL, 0,
2746 speed_template_8_16);
2747 test_acipher_speed("cbc(cast5)", ENCRYPT, sec, NULL, 0,
2748 speed_template_8_16);
2749 test_acipher_speed("cbc(cast5)", DECRYPT, sec, NULL, 0,
2750 speed_template_8_16);
2751 test_acipher_speed("ctr(cast5)", ENCRYPT, sec, NULL, 0,
2752 speed_template_8_16);
2753 test_acipher_speed("ctr(cast5)", DECRYPT, sec, NULL, 0,
2754 speed_template_8_16);
2755 break;
2756
Johannes Goetzfried9b8b0402012-07-11 19:38:29 +02002757 case 507:
2758 test_acipher_speed("ecb(cast6)", ENCRYPT, sec, NULL, 0,
2759 speed_template_16_32);
2760 test_acipher_speed("ecb(cast6)", DECRYPT, sec, NULL, 0,
2761 speed_template_16_32);
2762 test_acipher_speed("cbc(cast6)", ENCRYPT, sec, NULL, 0,
2763 speed_template_16_32);
2764 test_acipher_speed("cbc(cast6)", DECRYPT, sec, NULL, 0,
2765 speed_template_16_32);
2766 test_acipher_speed("ctr(cast6)", ENCRYPT, sec, NULL, 0,
2767 speed_template_16_32);
2768 test_acipher_speed("ctr(cast6)", DECRYPT, sec, NULL, 0,
2769 speed_template_16_32);
2770 test_acipher_speed("lrw(cast6)", ENCRYPT, sec, NULL, 0,
2771 speed_template_32_48);
2772 test_acipher_speed("lrw(cast6)", DECRYPT, sec, NULL, 0,
2773 speed_template_32_48);
2774 test_acipher_speed("xts(cast6)", ENCRYPT, sec, NULL, 0,
2775 speed_template_32_64);
2776 test_acipher_speed("xts(cast6)", DECRYPT, sec, NULL, 0,
2777 speed_template_32_64);
2778 break;
2779
Jussi Kivilinnabf9c5182012-10-26 14:48:51 +03002780 case 508:
2781 test_acipher_speed("ecb(camellia)", ENCRYPT, sec, NULL, 0,
2782 speed_template_16_32);
2783 test_acipher_speed("ecb(camellia)", DECRYPT, sec, NULL, 0,
2784 speed_template_16_32);
2785 test_acipher_speed("cbc(camellia)", ENCRYPT, sec, NULL, 0,
2786 speed_template_16_32);
2787 test_acipher_speed("cbc(camellia)", DECRYPT, sec, NULL, 0,
2788 speed_template_16_32);
2789 test_acipher_speed("ctr(camellia)", ENCRYPT, sec, NULL, 0,
2790 speed_template_16_32);
2791 test_acipher_speed("ctr(camellia)", DECRYPT, sec, NULL, 0,
2792 speed_template_16_32);
2793 test_acipher_speed("lrw(camellia)", ENCRYPT, sec, NULL, 0,
2794 speed_template_32_48);
2795 test_acipher_speed("lrw(camellia)", DECRYPT, sec, NULL, 0,
2796 speed_template_32_48);
2797 test_acipher_speed("xts(camellia)", ENCRYPT, sec, NULL, 0,
2798 speed_template_32_64);
2799 test_acipher_speed("xts(camellia)", DECRYPT, sec, NULL, 0,
2800 speed_template_32_64);
2801 break;
2802
Jussi Kivilinnaad8b7c32013-04-13 13:46:40 +03002803 case 509:
2804 test_acipher_speed("ecb(blowfish)", ENCRYPT, sec, NULL, 0,
2805 speed_template_8_32);
2806 test_acipher_speed("ecb(blowfish)", DECRYPT, sec, NULL, 0,
2807 speed_template_8_32);
2808 test_acipher_speed("cbc(blowfish)", ENCRYPT, sec, NULL, 0,
2809 speed_template_8_32);
2810 test_acipher_speed("cbc(blowfish)", DECRYPT, sec, NULL, 0,
2811 speed_template_8_32);
2812 test_acipher_speed("ctr(blowfish)", ENCRYPT, sec, NULL, 0,
2813 speed_template_8_32);
2814 test_acipher_speed("ctr(blowfish)", DECRYPT, sec, NULL, 0,
2815 speed_template_8_32);
2816 break;
2817
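	/* SM4: 128-bit-key block cipher, tested in ECB/CBC/CFB/CTR modes. */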
Tianjia Zhanga7fc80b2021-07-20 11:46:42 +08002818 case 518:
2819 test_acipher_speed("ecb(sm4)", ENCRYPT, sec, NULL, 0,
2820 speed_template_16);
2821 test_acipher_speed("ecb(sm4)", DECRYPT, sec, NULL, 0,
2822 speed_template_16);
2823 test_acipher_speed("cbc(sm4)", ENCRYPT, sec, NULL, 0,
2824 speed_template_16);
2825 test_acipher_speed("cbc(sm4)", DECRYPT, sec, NULL, 0,
2826 speed_template_16);
2827 test_acipher_speed("cfb(sm4)", ENCRYPT, sec, NULL, 0,
2828 speed_template_16);
2829 test_acipher_speed("cfb(sm4)", DECRYPT, sec, NULL, 0,
2830 speed_template_16);
2831 test_acipher_speed("ctr(sm4)", ENCRYPT, sec, NULL, 0,
2832 speed_template_16);
2833 test_acipher_speed("ctr(sm4)", DECRYPT, sec, NULL, 0,
2834 speed_template_16);
2835 break;
2836
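	/*
	 * Cases 600-609 rerun the skcipher benchmarks through
	 * test_mb_skcipher_speed(), which keeps num_mb requests (module
	 * parameter, default 8) in flight to exercise batched and
	 * parallel implementations.
	 */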
Gilad Ben-Yossefe161c592017-12-17 08:29:04 +00002837 case 600:
2838 test_mb_skcipher_speed("ecb(aes)", ENCRYPT, sec, NULL, 0,
2839 speed_template_16_24_32, num_mb);
2840 test_mb_skcipher_speed("ecb(aes)", DECRYPT, sec, NULL, 0,
2841 speed_template_16_24_32, num_mb);
2842 test_mb_skcipher_speed("cbc(aes)", ENCRYPT, sec, NULL, 0,
2843 speed_template_16_24_32, num_mb);
2844 test_mb_skcipher_speed("cbc(aes)", DECRYPT, sec, NULL, 0,
2845 speed_template_16_24_32, num_mb);
2846 test_mb_skcipher_speed("lrw(aes)", ENCRYPT, sec, NULL, 0,
2847 speed_template_32_40_48, num_mb);
2848 test_mb_skcipher_speed("lrw(aes)", DECRYPT, sec, NULL, 0,
2849 speed_template_32_40_48, num_mb);
2850 test_mb_skcipher_speed("xts(aes)", ENCRYPT, sec, NULL, 0,
2851 speed_template_32_64, num_mb);
2852 test_mb_skcipher_speed("xts(aes)", DECRYPT, sec, NULL, 0,
2853 speed_template_32_64, num_mb);
2854 test_mb_skcipher_speed("cts(cbc(aes))", ENCRYPT, sec, NULL, 0,
2855 speed_template_16_24_32, num_mb);
2856 test_mb_skcipher_speed("cts(cbc(aes))", DECRYPT, sec, NULL, 0,
2857 speed_template_16_24_32, num_mb);
2858 test_mb_skcipher_speed("ctr(aes)", ENCRYPT, sec, NULL, 0,
2859 speed_template_16_24_32, num_mb);
2860 test_mb_skcipher_speed("ctr(aes)", DECRYPT, sec, NULL, 0,
2861 speed_template_16_24_32, num_mb);
2862 test_mb_skcipher_speed("cfb(aes)", ENCRYPT, sec, NULL, 0,
2863 speed_template_16_24_32, num_mb);
2864 test_mb_skcipher_speed("cfb(aes)", DECRYPT, sec, NULL, 0,
2865 speed_template_16_24_32, num_mb);
2866 test_mb_skcipher_speed("ofb(aes)", ENCRYPT, sec, NULL, 0,
2867 speed_template_16_24_32, num_mb);
2868 test_mb_skcipher_speed("ofb(aes)", DECRYPT, sec, NULL, 0,
2869 speed_template_16_24_32, num_mb);
2870 test_mb_skcipher_speed("rfc3686(ctr(aes))", ENCRYPT, sec, NULL,
2871 0, speed_template_20_28_36, num_mb);
2872 test_mb_skcipher_speed("rfc3686(ctr(aes))", DECRYPT, sec, NULL,
2873 0, speed_template_20_28_36, num_mb);
2874 break;
2875
2876 case 601:
2877 test_mb_skcipher_speed("ecb(des3_ede)", ENCRYPT, sec,
2878 des3_speed_template, DES3_SPEED_VECTORS,
2879 speed_template_24, num_mb);
2880 test_mb_skcipher_speed("ecb(des3_ede)", DECRYPT, sec,
2881 des3_speed_template, DES3_SPEED_VECTORS,
2882 speed_template_24, num_mb);
2883 test_mb_skcipher_speed("cbc(des3_ede)", ENCRYPT, sec,
2884 des3_speed_template, DES3_SPEED_VECTORS,
2885 speed_template_24, num_mb);
2886 test_mb_skcipher_speed("cbc(des3_ede)", DECRYPT, sec,
2887 des3_speed_template, DES3_SPEED_VECTORS,
2888 speed_template_24, num_mb);
2889 test_mb_skcipher_speed("cfb(des3_ede)", ENCRYPT, sec,
2890 des3_speed_template, DES3_SPEED_VECTORS,
2891 speed_template_24, num_mb);
2892 test_mb_skcipher_speed("cfb(des3_ede)", DECRYPT, sec,
2893 des3_speed_template, DES3_SPEED_VECTORS,
2894 speed_template_24, num_mb);
2895 test_mb_skcipher_speed("ofb(des3_ede)", ENCRYPT, sec,
2896 des3_speed_template, DES3_SPEED_VECTORS,
2897 speed_template_24, num_mb);
2898 test_mb_skcipher_speed("ofb(des3_ede)", DECRYPT, sec,
2899 des3_speed_template, DES3_SPEED_VECTORS,
2900 speed_template_24, num_mb);
2901 break;
2902
2903 case 602:
2904 test_mb_skcipher_speed("ecb(des)", ENCRYPT, sec, NULL, 0,
2905 speed_template_8, num_mb);
2906 test_mb_skcipher_speed("ecb(des)", DECRYPT, sec, NULL, 0,
2907 speed_template_8, num_mb);
2908 test_mb_skcipher_speed("cbc(des)", ENCRYPT, sec, NULL, 0,
2909 speed_template_8, num_mb);
2910 test_mb_skcipher_speed("cbc(des)", DECRYPT, sec, NULL, 0,
2911 speed_template_8, num_mb);
2912 test_mb_skcipher_speed("cfb(des)", ENCRYPT, sec, NULL, 0,
2913 speed_template_8, num_mb);
2914 test_mb_skcipher_speed("cfb(des)", DECRYPT, sec, NULL, 0,
2915 speed_template_8, num_mb);
2916 test_mb_skcipher_speed("ofb(des)", ENCRYPT, sec, NULL, 0,
2917 speed_template_8, num_mb);
2918 test_mb_skcipher_speed("ofb(des)", DECRYPT, sec, NULL, 0,
2919 speed_template_8, num_mb);
2920 break;
2921
2922 case 603:
2923 test_mb_skcipher_speed("ecb(serpent)", ENCRYPT, sec, NULL, 0,
2924 speed_template_16_32, num_mb);
2925 test_mb_skcipher_speed("ecb(serpent)", DECRYPT, sec, NULL, 0,
2926 speed_template_16_32, num_mb);
2927 test_mb_skcipher_speed("cbc(serpent)", ENCRYPT, sec, NULL, 0,
2928 speed_template_16_32, num_mb);
2929 test_mb_skcipher_speed("cbc(serpent)", DECRYPT, sec, NULL, 0,
2930 speed_template_16_32, num_mb);
2931 test_mb_skcipher_speed("ctr(serpent)", ENCRYPT, sec, NULL, 0,
2932 speed_template_16_32, num_mb);
2933 test_mb_skcipher_speed("ctr(serpent)", DECRYPT, sec, NULL, 0,
2934 speed_template_16_32, num_mb);
2935 test_mb_skcipher_speed("lrw(serpent)", ENCRYPT, sec, NULL, 0,
2936 speed_template_32_48, num_mb);
2937 test_mb_skcipher_speed("lrw(serpent)", DECRYPT, sec, NULL, 0,
2938 speed_template_32_48, num_mb);
2939 test_mb_skcipher_speed("xts(serpent)", ENCRYPT, sec, NULL, 0,
2940 speed_template_32_64, num_mb);
2941 test_mb_skcipher_speed("xts(serpent)", DECRYPT, sec, NULL, 0,
2942 speed_template_32_64, num_mb);
2943 break;
2944
2945 case 604:
2946 test_mb_skcipher_speed("ecb(twofish)", ENCRYPT, sec, NULL, 0,
2947 speed_template_16_24_32, num_mb);
2948 test_mb_skcipher_speed("ecb(twofish)", DECRYPT, sec, NULL, 0,
2949 speed_template_16_24_32, num_mb);
2950 test_mb_skcipher_speed("cbc(twofish)", ENCRYPT, sec, NULL, 0,
2951 speed_template_16_24_32, num_mb);
2952 test_mb_skcipher_speed("cbc(twofish)", DECRYPT, sec, NULL, 0,
2953 speed_template_16_24_32, num_mb);
2954 test_mb_skcipher_speed("ctr(twofish)", ENCRYPT, sec, NULL, 0,
2955 speed_template_16_24_32, num_mb);
2956 test_mb_skcipher_speed("ctr(twofish)", DECRYPT, sec, NULL, 0,
2957 speed_template_16_24_32, num_mb);
2958 test_mb_skcipher_speed("lrw(twofish)", ENCRYPT, sec, NULL, 0,
2959 speed_template_32_40_48, num_mb);
2960 test_mb_skcipher_speed("lrw(twofish)", DECRYPT, sec, NULL, 0,
2961 speed_template_32_40_48, num_mb);
2962 test_mb_skcipher_speed("xts(twofish)", ENCRYPT, sec, NULL, 0,
2963 speed_template_32_48_64, num_mb);
2964 test_mb_skcipher_speed("xts(twofish)", DECRYPT, sec, NULL, 0,
2965 speed_template_32_48_64, num_mb);
2966 break;
2967
2968 case 605:
2969 test_mb_skcipher_speed("ecb(arc4)", ENCRYPT, sec, NULL, 0,
2970 speed_template_8, num_mb);
2971 break;
2972
2973 case 606:
2974 test_mb_skcipher_speed("ecb(cast5)", ENCRYPT, sec, NULL, 0,
2975 speed_template_8_16, num_mb);
2976 test_mb_skcipher_speed("ecb(cast5)", DECRYPT, sec, NULL, 0,
2977 speed_template_8_16, num_mb);
2978 test_mb_skcipher_speed("cbc(cast5)", ENCRYPT, sec, NULL, 0,
2979 speed_template_8_16, num_mb);
2980 test_mb_skcipher_speed("cbc(cast5)", DECRYPT, sec, NULL, 0,
2981 speed_template_8_16, num_mb);
2982 test_mb_skcipher_speed("ctr(cast5)", ENCRYPT, sec, NULL, 0,
2983 speed_template_8_16, num_mb);
2984 test_mb_skcipher_speed("ctr(cast5)", DECRYPT, sec, NULL, 0,
2985 speed_template_8_16, num_mb);
2986 break;
2987
2988 case 607:
2989 test_mb_skcipher_speed("ecb(cast6)", ENCRYPT, sec, NULL, 0,
2990 speed_template_16_32, num_mb);
2991 test_mb_skcipher_speed("ecb(cast6)", DECRYPT, sec, NULL, 0,
2992 speed_template_16_32, num_mb);
2993 test_mb_skcipher_speed("cbc(cast6)", ENCRYPT, sec, NULL, 0,
2994 speed_template_16_32, num_mb);
2995 test_mb_skcipher_speed("cbc(cast6)", DECRYPT, sec, NULL, 0,
2996 speed_template_16_32, num_mb);
2997 test_mb_skcipher_speed("ctr(cast6)", ENCRYPT, sec, NULL, 0,
2998 speed_template_16_32, num_mb);
2999 test_mb_skcipher_speed("ctr(cast6)", DECRYPT, sec, NULL, 0,
3000 speed_template_16_32, num_mb);
3001 test_mb_skcipher_speed("lrw(cast6)", ENCRYPT, sec, NULL, 0,
3002 speed_template_32_48, num_mb);
3003 test_mb_skcipher_speed("lrw(cast6)", DECRYPT, sec, NULL, 0,
3004 speed_template_32_48, num_mb);
3005 test_mb_skcipher_speed("xts(cast6)", ENCRYPT, sec, NULL, 0,
3006 speed_template_32_64, num_mb);
3007 test_mb_skcipher_speed("xts(cast6)", DECRYPT, sec, NULL, 0,
3008 speed_template_32_64, num_mb);
3009 break;
3010
3011 case 608:
3012 test_mb_skcipher_speed("ecb(camellia)", ENCRYPT, sec, NULL, 0,
3013 speed_template_16_32, num_mb);
3014 test_mb_skcipher_speed("ecb(camellia)", DECRYPT, sec, NULL, 0,
3015 speed_template_16_32, num_mb);
3016 test_mb_skcipher_speed("cbc(camellia)", ENCRYPT, sec, NULL, 0,
3017 speed_template_16_32, num_mb);
3018 test_mb_skcipher_speed("cbc(camellia)", DECRYPT, sec, NULL, 0,
3019 speed_template_16_32, num_mb);
3020 test_mb_skcipher_speed("ctr(camellia)", ENCRYPT, sec, NULL, 0,
3021 speed_template_16_32, num_mb);
3022 test_mb_skcipher_speed("ctr(camellia)", DECRYPT, sec, NULL, 0,
3023 speed_template_16_32, num_mb);
3024 test_mb_skcipher_speed("lrw(camellia)", ENCRYPT, sec, NULL, 0,
3025 speed_template_32_48, num_mb);
3026 test_mb_skcipher_speed("lrw(camellia)", DECRYPT, sec, NULL, 0,
3027 speed_template_32_48, num_mb);
3028 test_mb_skcipher_speed("xts(camellia)", ENCRYPT, sec, NULL, 0,
3029 speed_template_32_64, num_mb);
3030 test_mb_skcipher_speed("xts(camellia)", DECRYPT, sec, NULL, 0,
3031 speed_template_32_64, num_mb);
3032 break;
3033
3034 case 609:
3035 test_mb_skcipher_speed("ecb(blowfish)", ENCRYPT, sec, NULL, 0,
3036 speed_template_8_32, num_mb);
3037 test_mb_skcipher_speed("ecb(blowfish)", DECRYPT, sec, NULL, 0,
3038 speed_template_8_32, num_mb);
3039 test_mb_skcipher_speed("cbc(blowfish)", ENCRYPT, sec, NULL, 0,
3040 speed_template_8_32, num_mb);
3041 test_mb_skcipher_speed("cbc(blowfish)", DECRYPT, sec, NULL, 0,
3042 speed_template_8_32, num_mb);
3043 test_mb_skcipher_speed("ctr(blowfish)", ENCRYPT, sec, NULL, 0,
3044 speed_template_8_32, num_mb);
3045 test_mb_skcipher_speed("ctr(blowfish)", DECRYPT, sec, NULL, 0,
3046 speed_template_8_32, num_mb);
3047 break;
3048
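	/* Probe and report which algorithms from tcrypt's built-in list are present. */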
Linus Torvalds1da177e2005-04-16 15:20:36 -07003049 case 1000:
3050 test_available();
3051 break;
Linus Torvalds1da177e2005-04-16 15:20:36 -07003052 }
Jarod Wilson4e033a62009-05-27 15:10:21 +10003053
3054 return ret;
Linus Torvalds1da177e2005-04-16 15:20:36 -07003055}
3056
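/*
 * Module init: allocate TVMEMSIZE one-page buffers backing the test data,
 * run the test(s) selected by the module parameters via do_test(), then
 * free the pages again.
 */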
Kamalesh Babulal3af5b902008-04-05 21:00:57 +08003057static int __init tcrypt_mod_init(void)
Linus Torvalds1da177e2005-04-16 15:20:36 -07003058{
Mikko Herranene3a4ea42007-11-26 22:12:07 +08003059 int err = -ENOMEM;
Herbert Xuf139cfa2008-07-31 12:23:53 +08003060 int i;
Mikko Herranene3a4ea42007-11-26 22:12:07 +08003061
Herbert Xuf139cfa2008-07-31 12:23:53 +08003062 for (i = 0; i < TVMEMSIZE; i++) {
3063 tvmem[i] = (void *)__get_free_page(GFP_KERNEL);
3064 if (!tvmem[i])
3065 goto err_free_tv;
3066 }
Linus Torvalds1da177e2005-04-16 15:20:36 -07003067
Kees Cook4e234ee2018-04-26 19:57:28 -07003068 err = do_test(alg, type, mask, mode, num_mb);
Steffen Klasserta873a5f2009-06-19 19:46:53 +08003069
Jarod Wilson4e033a62009-05-27 15:10:21 +10003070 if (err) {
3071		pr_err("one or more tests failed!\n");
3072 goto err_free_tv;
Rabin Vincent76512f22017-01-18 14:54:05 +01003073 } else {
3074 pr_debug("all tests passed\n");
Jarod Wilson4e033a62009-05-27 15:10:21 +10003075 }
Linus Torvalds1da177e2005-04-16 15:20:36 -07003076
Jarod Wilson4e033a62009-05-27 15:10:21 +10003077	/* We intentionally return -EAGAIN to prevent keeping the module,
3078 * unless we're running in fips mode. It does all its work from
3079 * init() and doesn't offer any runtime functionality, but in
3080 * the fips case, checking for a successful load is helpful.
Michal Ludvig14fdf472006-05-30 14:49:38 +10003081	 * => we don't need to keep it in memory, do we?
3082 * -- mludvig
3083 */
Jarod Wilson4e033a62009-05-27 15:10:21 +10003084 if (!fips_enabled)
3085 err = -EAGAIN;
Mikko Herranene3a4ea42007-11-26 22:12:07 +08003086
Herbert Xuf139cfa2008-07-31 12:23:53 +08003087err_free_tv:
3088 for (i = 0; i < TVMEMSIZE && tvmem[i]; i++)
3089 free_page((unsigned long)tvmem[i]);
Mikko Herranene3a4ea42007-11-26 22:12:07 +08003090
3091 return err;
Linus Torvalds1da177e2005-04-16 15:20:36 -07003092}
3093
3094/*
3095 * If an init function is provided, an exit function must also be provided
3096 * to allow module unload.
3097 */
Kamalesh Babulal3af5b902008-04-05 21:00:57 +08003098static void __exit tcrypt_mod_fini(void) { }
Linus Torvalds1da177e2005-04-16 15:20:36 -07003099
Ard Biesheuvel08a7e332020-11-20 12:04:31 +01003100late_initcall(tcrypt_mod_init);
Kamalesh Babulal3af5b902008-04-05 21:00:57 +08003101module_exit(tcrypt_mod_fini);
Linus Torvalds1da177e2005-04-16 15:20:36 -07003102
Steffen Klasserta873a5f2009-06-19 19:46:53 +08003103module_param(alg, charp, 0);
3104module_param(type, uint, 0);
Herbert Xu7be380f2009-07-14 16:06:54 +08003105module_param(mask, uint, 0);
Linus Torvalds1da177e2005-04-16 15:20:36 -07003106module_param(mode, int, 0);
Harald Welteebfd9bc2005-06-22 13:27:23 -07003107module_param(sec, uint, 0);
Herbert Xu6a179442005-06-22 13:29:03 -07003108MODULE_PARM_DESC(sec, "Length in seconds of speed tests "
3109		 "(defaults to zero, which uses CPU cycles instead)");
Gilad Ben-Yossef8fcdc862017-12-17 08:29:02 +00003110module_param(num_mb, uint, 0000);
3111MODULE_PARM_DESC(num_mb, "Number of concurrent requests to be used in mb speed tests (defaults to 8)");
Herbert Xuba974ad2020-08-05 15:57:08 +10003112module_param(klen, uint, 0);
3113MODULE_PARM_DESC(klen, "Key length in bytes (defaults to 0)");
Linus Torvalds1da177e2005-04-16 15:20:36 -07003114
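/*
 * Example invocations (illustrative values only; any mode handled by the
 * switch in do_test() works the same way):
 *
 *   modprobe tcrypt mode=503 sec=1      # serpent skcipher speed tests,
 *                                       # one second per data point
 *   modprobe tcrypt mode=600 num_mb=16  # multibuffer AES skcipher tests,
 *                                       # 16 requests in flight, timed in
 *                                       # CPU cycles since sec defaults to 0
 *
 * Outside FIPS mode the module deliberately returns -EAGAIN, so modprobe
 * reports an error even when every test passed; the measurements appear
 * in the kernel log.
 */
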
3115MODULE_LICENSE("GPL");
3116MODULE_DESCRIPTION("Quick & dirty crypto testing module");
3117MODULE_AUTHOR("James Morris <jmorris@intercode.com.au>");