blob: a0c4e0dbffa4f7949fc465e4bfb2b1325e1dab33 [file] [log] [blame]
Herbert Xuef2736f2005-06-22 13:26:03 -07001/*
Linus Torvalds1da177e2005-04-16 15:20:36 -07002 * Quick & dirty crypto testing module.
3 *
4 * This will only exist until we have a better testing mechanism
5 * (e.g. a char device).
6 *
7 * Copyright (c) 2002 James Morris <jmorris@intercode.com.au>
8 * Copyright (c) 2002 Jean-Francois Dive <jef@linuxbe.org>
Mikko Herranene3a4ea42007-11-26 22:12:07 +08009 * Copyright (c) 2007 Nokia Siemens Networks
Linus Torvalds1da177e2005-04-16 15:20:36 -070010 *
Adrian Hoban69435b92010-11-04 15:02:04 -040011 * Updated RFC4106 AES-GCM testing.
12 * Authors: Aidan O'Mahony (aidan.o.mahony@intel.com)
13 * Adrian Hoban <adrian.hoban@intel.com>
14 * Gabriele Paoloni <gabriele.paoloni@intel.com>
15 * Tadeusz Struk (tadeusz.struk@intel.com)
16 * Copyright (c) 2010, Intel Corporation.
17 *
Linus Torvalds1da177e2005-04-16 15:20:36 -070018 * This program is free software; you can redistribute it and/or modify it
19 * under the terms of the GNU General Public License as published by the Free
Herbert Xuef2736f2005-06-22 13:26:03 -070020 * Software Foundation; either version 2 of the License, or (at your option)
Linus Torvalds1da177e2005-04-16 15:20:36 -070021 * any later version.
22 *
Linus Torvalds1da177e2005-04-16 15:20:36 -070023 */
24
Rabin Vincent76512f22017-01-18 14:54:05 +010025#define pr_fmt(fmt) KBUILD_MODNAME ": " fmt
26
Herbert Xu1ce5a042015-04-22 15:06:30 +080027#include <crypto/aead.h>
Herbert Xu18e33e62008-07-10 16:01:22 +080028#include <crypto/hash.h>
Herbert Xu7166e582016-06-29 18:03:50 +080029#include <crypto/skcipher.h>
Herbert Xucba83562006-08-13 08:26:09 +100030#include <linux/err.h>
Herbert Xudaf09442015-04-22 13:25:57 +080031#include <linux/fips.h>
Linus Torvalds1da177e2005-04-16 15:20:36 -070032#include <linux/init.h>
Tejun Heo5a0e3ad2010-03-24 17:04:11 +090033#include <linux/gfp.h>
Linus Torvalds1da177e2005-04-16 15:20:36 -070034#include <linux/module.h>
David Hardeman378f0582005-09-17 17:55:31 +100035#include <linux/scatterlist.h>
Linus Torvalds1da177e2005-04-16 15:20:36 -070036#include <linux/string.h>
Linus Torvalds1da177e2005-04-16 15:20:36 -070037#include <linux/moduleparam.h>
Harald Welteebfd9bc2005-06-22 13:27:23 -070038#include <linux/jiffies.h>
Herbert Xu6a179442005-06-22 13:29:03 -070039#include <linux/timex.h>
40#include <linux/interrupt.h>
Linus Torvalds1da177e2005-04-16 15:20:36 -070041#include "tcrypt.h"
42
43/*
Herbert Xuf139cfa2008-07-31 12:23:53 +080044 * Need slab memory for testing (size in number of pages).
Linus Torvalds1da177e2005-04-16 15:20:36 -070045 */
Herbert Xuf139cfa2008-07-31 12:23:53 +080046#define TVMEMSIZE 4
Linus Torvalds1da177e2005-04-16 15:20:36 -070047
48/*
Herbert Xuda7f0332008-07-31 17:08:25 +080049* Used by test_cipher_speed()
Linus Torvalds1da177e2005-04-16 15:20:36 -070050*/
51#define ENCRYPT 1
52#define DECRYPT 0
Linus Torvalds1da177e2005-04-16 15:20:36 -070053
Horia Geant?f074f7b2015-08-27 18:38:36 +030054#define MAX_DIGEST_SIZE 64
55
Harald Welteebfd9bc2005-06-22 13:27:23 -070056/*
Luca Clementi263a8df2014-06-25 22:57:42 -070057 * return a string with the driver name
58 */
59#define get_driver_name(tfm_type, tfm) crypto_tfm_alg_driver_name(tfm_type ## _tfm(tfm))
60
61/*
Harald Welteebfd9bc2005-06-22 13:27:23 -070062 * Used by test_cipher_speed()
63 */
Herbert Xu6a179442005-06-22 13:29:03 -070064static unsigned int sec;
Harald Welteebfd9bc2005-06-22 13:27:23 -070065
Steffen Klasserta873a5f2009-06-19 19:46:53 +080066static char *alg = NULL;
67static u32 type;
Herbert Xu7be380f2009-07-14 16:06:54 +080068static u32 mask;
Linus Torvalds1da177e2005-04-16 15:20:36 -070069static int mode;
Herbert Xuf139cfa2008-07-31 12:23:53 +080070static char *tvmem[TVMEMSIZE];
Linus Torvalds1da177e2005-04-16 15:20:36 -070071
72static char *check[] = {
Gilad Ben-Yossefb7e27532017-08-21 13:51:29 +030073 "des", "md5", "des3_ede", "rot13", "sha1", "sha224", "sha256", "sm3",
Jonathan Lynchcd12fb92007-11-10 20:08:25 +080074 "blowfish", "twofish", "serpent", "sha384", "sha512", "md4", "aes",
75 "cast6", "arc4", "michael_mic", "deflate", "crc32c", "tea", "xtea",
David Howells90831632006-12-16 12:13:14 +110076 "khazad", "wp512", "wp384", "wp256", "tnepres", "xeta", "fcrypt",
Adrian-Ken Rueegsegger2998db32008-05-09 21:29:35 +080077 "camellia", "seed", "salsa20", "rmd128", "rmd160", "rmd256", "rmd320",
raveendra padasalagi79cc6ab2016-06-17 10:30:36 +053078 "lzo", "cts", "zlib", "sha3-224", "sha3-256", "sha3-384", "sha3-512",
79 NULL
Linus Torvalds1da177e2005-04-16 15:20:36 -070080};
81
Vutla, Lokesh1425d2d2015-07-07 21:01:49 +053082static inline int do_one_aead_op(struct aead_request *req, int ret)
83{
Gilad Ben-Yossef64671042017-10-18 08:00:48 +010084 struct crypto_wait *wait = req->base.data;
Vutla, Lokesh1425d2d2015-07-07 21:01:49 +053085
Gilad Ben-Yossef64671042017-10-18 08:00:48 +010086 return crypto_wait_req(ret, wait);
Vutla, Lokesh1425d2d2015-07-07 21:01:49 +053087}
88
Tim Chen53f52d72013-12-11 14:28:47 -080089static int test_aead_jiffies(struct aead_request *req, int enc,
Mark Rustad3e3dc252014-07-25 02:53:38 -070090 int blen, int secs)
Tim Chen53f52d72013-12-11 14:28:47 -080091{
92 unsigned long start, end;
93 int bcount;
94 int ret;
95
Mark Rustad3e3dc252014-07-25 02:53:38 -070096 for (start = jiffies, end = start + secs * HZ, bcount = 0;
Tim Chen53f52d72013-12-11 14:28:47 -080097 time_before(jiffies, end); bcount++) {
98 if (enc)
Vutla, Lokesh1425d2d2015-07-07 21:01:49 +053099 ret = do_one_aead_op(req, crypto_aead_encrypt(req));
Tim Chen53f52d72013-12-11 14:28:47 -0800100 else
Vutla, Lokesh1425d2d2015-07-07 21:01:49 +0530101 ret = do_one_aead_op(req, crypto_aead_decrypt(req));
Tim Chen53f52d72013-12-11 14:28:47 -0800102
103 if (ret)
104 return ret;
105 }
106
107 printk("%d operations in %d seconds (%ld bytes)\n",
Mark Rustad3e3dc252014-07-25 02:53:38 -0700108 bcount, secs, (long)bcount * blen);
Tim Chen53f52d72013-12-11 14:28:47 -0800109 return 0;
110}
111
112static int test_aead_cycles(struct aead_request *req, int enc, int blen)
113{
114 unsigned long cycles = 0;
115 int ret = 0;
116 int i;
117
Tim Chen53f52d72013-12-11 14:28:47 -0800118 /* Warm-up run. */
119 for (i = 0; i < 4; i++) {
120 if (enc)
Vutla, Lokesh1425d2d2015-07-07 21:01:49 +0530121 ret = do_one_aead_op(req, crypto_aead_encrypt(req));
Tim Chen53f52d72013-12-11 14:28:47 -0800122 else
Vutla, Lokesh1425d2d2015-07-07 21:01:49 +0530123 ret = do_one_aead_op(req, crypto_aead_decrypt(req));
Tim Chen53f52d72013-12-11 14:28:47 -0800124
125 if (ret)
126 goto out;
127 }
128
129 /* The real thing. */
130 for (i = 0; i < 8; i++) {
131 cycles_t start, end;
132
133 start = get_cycles();
134 if (enc)
Vutla, Lokesh1425d2d2015-07-07 21:01:49 +0530135 ret = do_one_aead_op(req, crypto_aead_encrypt(req));
Tim Chen53f52d72013-12-11 14:28:47 -0800136 else
Vutla, Lokesh1425d2d2015-07-07 21:01:49 +0530137 ret = do_one_aead_op(req, crypto_aead_decrypt(req));
Tim Chen53f52d72013-12-11 14:28:47 -0800138 end = get_cycles();
139
140 if (ret)
141 goto out;
142
143 cycles += end - start;
144 }
145
146out:
Tim Chen53f52d72013-12-11 14:28:47 -0800147 if (ret == 0)
148 printk("1 operation in %lu cycles (%d bytes)\n",
149 (cycles + 4) / 8, blen);
150
151 return ret;
152}
153
Sebastian Siewiord5dc3922008-03-11 21:27:11 +0800154static u32 block_sizes[] = { 16, 64, 256, 1024, 8192, 0 };
Tim Chen53f52d72013-12-11 14:28:47 -0800155static u32 aead_sizes[] = { 16, 64, 256, 512, 1024, 2048, 4096, 8192, 0 };
156
157#define XBUFSIZE 8
158#define MAX_IVLEN 32
159
160static int testmgr_alloc_buf(char *buf[XBUFSIZE])
161{
162 int i;
163
164 for (i = 0; i < XBUFSIZE; i++) {
165 buf[i] = (void *)__get_free_page(GFP_KERNEL);
166 if (!buf[i])
167 goto err_free_buf;
168 }
169
170 return 0;
171
172err_free_buf:
173 while (i-- > 0)
174 free_page((unsigned long)buf[i]);
175
176 return -ENOMEM;
177}
178
179static void testmgr_free_buf(char *buf[XBUFSIZE])
180{
181 int i;
182
183 for (i = 0; i < XBUFSIZE; i++)
184 free_page((unsigned long)buf[i]);
185}
186
187static void sg_init_aead(struct scatterlist *sg, char *xbuf[XBUFSIZE],
Tudor-Dan Ambarus5601e012017-11-14 16:59:15 +0200188 unsigned int buflen, const void *assoc,
189 unsigned int aad_size)
Tim Chen53f52d72013-12-11 14:28:47 -0800190{
191 int np = (buflen + PAGE_SIZE - 1)/PAGE_SIZE;
192 int k, rem;
193
Tim Chen53f52d72013-12-11 14:28:47 -0800194 if (np > XBUFSIZE) {
195 rem = PAGE_SIZE;
196 np = XBUFSIZE;
Cristian Stoicac4768992015-01-27 11:54:27 +0200197 } else {
198 rem = buflen % PAGE_SIZE;
Tim Chen53f52d72013-12-11 14:28:47 -0800199 }
Cristian Stoicac4768992015-01-27 11:54:27 +0200200
Herbert Xu31267272015-06-17 14:05:26 +0800201 sg_init_table(sg, np + 1);
Tudor-Dan Ambarus5601e012017-11-14 16:59:15 +0200202
203 sg_set_buf(&sg[0], assoc, aad_size);
204
Robert Baronescu5c6ac1d2017-10-10 13:21:59 +0300205 if (rem)
206 np--;
Cristian Stoicac4768992015-01-27 11:54:27 +0200207 for (k = 0; k < np; k++)
Herbert Xu31267272015-06-17 14:05:26 +0800208 sg_set_buf(&sg[k + 1], xbuf[k], PAGE_SIZE);
Cristian Stoicac4768992015-01-27 11:54:27 +0200209
Robert Baronescu5c6ac1d2017-10-10 13:21:59 +0300210 if (rem)
211 sg_set_buf(&sg[k + 1], xbuf[k], rem);
Tim Chen53f52d72013-12-11 14:28:47 -0800212}
213
Mark Rustad3e3dc252014-07-25 02:53:38 -0700214static void test_aead_speed(const char *algo, int enc, unsigned int secs,
Tim Chen53f52d72013-12-11 14:28:47 -0800215 struct aead_speed_template *template,
216 unsigned int tcount, u8 authsize,
217 unsigned int aad_size, u8 *keysize)
218{
219 unsigned int i, j;
220 struct crypto_aead *tfm;
221 int ret = -ENOMEM;
222 const char *key;
223 struct aead_request *req;
224 struct scatterlist *sg;
Tim Chen53f52d72013-12-11 14:28:47 -0800225 struct scatterlist *sgout;
226 const char *e;
227 void *assoc;
Cristian Stoica96692a732015-01-28 13:07:32 +0200228 char *iv;
Tim Chen53f52d72013-12-11 14:28:47 -0800229 char *xbuf[XBUFSIZE];
230 char *xoutbuf[XBUFSIZE];
231 char *axbuf[XBUFSIZE];
232 unsigned int *b_size;
233 unsigned int iv_len;
Gilad Ben-Yossef64671042017-10-18 08:00:48 +0100234 struct crypto_wait wait;
Tim Chen53f52d72013-12-11 14:28:47 -0800235
Cristian Stoica96692a732015-01-28 13:07:32 +0200236 iv = kzalloc(MAX_IVLEN, GFP_KERNEL);
237 if (!iv)
238 return;
239
Christian Engelmayerac5f8632014-04-21 20:45:59 +0200240 if (aad_size >= PAGE_SIZE) {
241 pr_err("associate data length (%u) too big\n", aad_size);
Cristian Stoica96692a732015-01-28 13:07:32 +0200242 goto out_noxbuf;
Christian Engelmayerac5f8632014-04-21 20:45:59 +0200243 }
244
Tim Chen53f52d72013-12-11 14:28:47 -0800245 if (enc == ENCRYPT)
246 e = "encryption";
247 else
248 e = "decryption";
249
250 if (testmgr_alloc_buf(xbuf))
251 goto out_noxbuf;
252 if (testmgr_alloc_buf(axbuf))
253 goto out_noaxbuf;
254 if (testmgr_alloc_buf(xoutbuf))
255 goto out_nooutbuf;
256
Herbert Xua3f21852015-05-27 16:03:51 +0800257 sg = kmalloc(sizeof(*sg) * 9 * 2, GFP_KERNEL);
Tim Chen53f52d72013-12-11 14:28:47 -0800258 if (!sg)
259 goto out_nosg;
Herbert Xua3f21852015-05-27 16:03:51 +0800260 sgout = &sg[9];
Tim Chen53f52d72013-12-11 14:28:47 -0800261
Herbert Xu5e4b8c12015-08-13 17:29:06 +0800262 tfm = crypto_alloc_aead(algo, 0, 0);
Tim Chen53f52d72013-12-11 14:28:47 -0800263
264 if (IS_ERR(tfm)) {
265 pr_err("alg: aead: Failed to load transform for %s: %ld\n", algo,
266 PTR_ERR(tfm));
Christian Engelmayera2ea6ed2014-04-21 20:46:40 +0200267 goto out_notfm;
Tim Chen53f52d72013-12-11 14:28:47 -0800268 }
269
Gilad Ben-Yossef64671042017-10-18 08:00:48 +0100270 crypto_init_wait(&wait);
Luca Clementi263a8df2014-06-25 22:57:42 -0700271 printk(KERN_INFO "\ntesting speed of %s (%s) %s\n", algo,
272 get_driver_name(crypto_aead, tfm), e);
273
Tim Chen53f52d72013-12-11 14:28:47 -0800274 req = aead_request_alloc(tfm, GFP_KERNEL);
275 if (!req) {
276 pr_err("alg: aead: Failed to allocate request for %s\n",
277 algo);
Christian Engelmayer6af1f932014-04-21 20:47:05 +0200278 goto out_noreq;
Tim Chen53f52d72013-12-11 14:28:47 -0800279 }
280
Vutla, Lokesh1425d2d2015-07-07 21:01:49 +0530281 aead_request_set_callback(req, CRYPTO_TFM_REQ_MAY_BACKLOG,
Gilad Ben-Yossef64671042017-10-18 08:00:48 +0100282 crypto_req_done, &wait);
Vutla, Lokesh1425d2d2015-07-07 21:01:49 +0530283
Tim Chen53f52d72013-12-11 14:28:47 -0800284 i = 0;
285 do {
286 b_size = aead_sizes;
287 do {
288 assoc = axbuf[0];
Christian Engelmayerac5f8632014-04-21 20:45:59 +0200289 memset(assoc, 0xff, aad_size);
Tim Chen53f52d72013-12-11 14:28:47 -0800290
291 if ((*keysize + *b_size) > TVMEMSIZE * PAGE_SIZE) {
292 pr_err("template (%u) too big for tvmem (%lu)\n",
293 *keysize + *b_size,
294 TVMEMSIZE * PAGE_SIZE);
295 goto out;
296 }
297
298 key = tvmem[0];
299 for (j = 0; j < tcount; j++) {
300 if (template[j].klen == *keysize) {
301 key = template[j].key;
302 break;
303 }
304 }
305 ret = crypto_aead_setkey(tfm, key, *keysize);
306 ret = crypto_aead_setauthsize(tfm, authsize);
307
308 iv_len = crypto_aead_ivsize(tfm);
309 if (iv_len)
Cristian Stoica96692a732015-01-28 13:07:32 +0200310 memset(iv, 0xff, iv_len);
Tim Chen53f52d72013-12-11 14:28:47 -0800311
312 crypto_aead_clear_flags(tfm, ~0);
313 printk(KERN_INFO "test %u (%d bit key, %d byte blocks): ",
314 i, *keysize * 8, *b_size);
315
316
317 memset(tvmem[0], 0xff, PAGE_SIZE);
318
319 if (ret) {
320 pr_err("setkey() failed flags=%x\n",
321 crypto_aead_get_flags(tfm));
322 goto out;
323 }
324
Tudor-Dan Ambarus5601e012017-11-14 16:59:15 +0200325 sg_init_aead(sg, xbuf, *b_size + (enc ? 0 : authsize),
326 assoc, aad_size);
Tim Chen53f52d72013-12-11 14:28:47 -0800327
Herbert Xu31267272015-06-17 14:05:26 +0800328 sg_init_aead(sgout, xoutbuf,
Tudor-Dan Ambarus5601e012017-11-14 16:59:15 +0200329 *b_size + (enc ? authsize : 0), assoc,
330 aad_size);
Herbert Xu31267272015-06-17 14:05:26 +0800331
Robert Baronescu7aacbfc2017-10-10 13:22:00 +0300332 aead_request_set_crypt(req, sg, sgout,
333 *b_size + (enc ? 0 : authsize),
334 iv);
Herbert Xua3f21852015-05-27 16:03:51 +0800335 aead_request_set_ad(req, aad_size);
Tim Chen53f52d72013-12-11 14:28:47 -0800336
Mark Rustad3e3dc252014-07-25 02:53:38 -0700337 if (secs)
338 ret = test_aead_jiffies(req, enc, *b_size,
339 secs);
Tim Chen53f52d72013-12-11 14:28:47 -0800340 else
341 ret = test_aead_cycles(req, enc, *b_size);
342
343 if (ret) {
344 pr_err("%s() failed return code=%d\n", e, ret);
345 break;
346 }
347 b_size++;
348 i++;
349 } while (*b_size);
350 keysize++;
351 } while (*keysize);
352
353out:
Christian Engelmayer6af1f932014-04-21 20:47:05 +0200354 aead_request_free(req);
355out_noreq:
Tim Chen53f52d72013-12-11 14:28:47 -0800356 crypto_free_aead(tfm);
Christian Engelmayera2ea6ed2014-04-21 20:46:40 +0200357out_notfm:
Tim Chen53f52d72013-12-11 14:28:47 -0800358 kfree(sg);
359out_nosg:
360 testmgr_free_buf(xoutbuf);
361out_nooutbuf:
362 testmgr_free_buf(axbuf);
363out_noaxbuf:
364 testmgr_free_buf(xbuf);
365out_noxbuf:
Cristian Stoica96692a732015-01-28 13:07:32 +0200366 kfree(iv);
Tim Chen53f52d72013-12-11 14:28:47 -0800367}
Sebastian Siewiord5dc3922008-03-11 21:27:11 +0800368
David S. Millerbeb63da72010-05-19 14:11:21 +1000369static void test_hash_sg_init(struct scatterlist *sg)
370{
371 int i;
372
373 sg_init_table(sg, TVMEMSIZE);
374 for (i = 0; i < TVMEMSIZE; i++) {
375 sg_set_buf(sg + i, tvmem[i], PAGE_SIZE);
376 memset(tvmem[i], 0xff, PAGE_SIZE);
377 }
378}
379
David S. Millerbeb63da72010-05-19 14:11:21 +1000380static inline int do_one_ahash_op(struct ahash_request *req, int ret)
381{
Gilad Ben-Yossef64671042017-10-18 08:00:48 +0100382 struct crypto_wait *wait = req->base.data;
David S. Millerbeb63da72010-05-19 14:11:21 +1000383
Gilad Ben-Yossef64671042017-10-18 08:00:48 +0100384 return crypto_wait_req(ret, wait);
David S. Millerbeb63da72010-05-19 14:11:21 +1000385}
386
Herbert Xu72259de2016-06-28 20:33:52 +0800387struct test_mb_ahash_data {
Gilad Ben-Yossef7c3f1322017-12-17 08:29:00 +0000388 struct scatterlist sg[XBUFSIZE];
Herbert Xu72259de2016-06-28 20:33:52 +0800389 char result[64];
390 struct ahash_request *req;
Gilad Ben-Yossef64671042017-10-18 08:00:48 +0100391 struct crypto_wait wait;
Herbert Xu72259de2016-06-28 20:33:52 +0800392 char *xbuf[XBUFSIZE];
393};
Megha Dey087bcd22016-06-23 18:40:47 -0700394
395static void test_mb_ahash_speed(const char *algo, unsigned int sec,
Herbert Xu72259de2016-06-28 20:33:52 +0800396 struct hash_speed *speed)
Megha Dey087bcd22016-06-23 18:40:47 -0700397{
Herbert Xu72259de2016-06-28 20:33:52 +0800398 struct test_mb_ahash_data *data;
Megha Dey087bcd22016-06-23 18:40:47 -0700399 struct crypto_ahash *tfm;
Herbert Xu72259de2016-06-28 20:33:52 +0800400 unsigned long start, end;
Herbert Xuf8de55b2016-06-28 16:41:38 +0800401 unsigned long cycles;
Herbert Xu72259de2016-06-28 20:33:52 +0800402 unsigned int i, j, k;
403 int ret;
404
405 data = kzalloc(sizeof(*data) * 8, GFP_KERNEL);
406 if (!data)
407 return;
Megha Dey087bcd22016-06-23 18:40:47 -0700408
409 tfm = crypto_alloc_ahash(algo, 0, 0);
410 if (IS_ERR(tfm)) {
411 pr_err("failed to load transform for %s: %ld\n",
412 algo, PTR_ERR(tfm));
Herbert Xu72259de2016-06-28 20:33:52 +0800413 goto free_data;
Megha Dey087bcd22016-06-23 18:40:47 -0700414 }
Herbert Xu72259de2016-06-28 20:33:52 +0800415
Megha Dey087bcd22016-06-23 18:40:47 -0700416 for (i = 0; i < 8; ++i) {
Herbert Xu72259de2016-06-28 20:33:52 +0800417 if (testmgr_alloc_buf(data[i].xbuf))
418 goto out;
Megha Dey087bcd22016-06-23 18:40:47 -0700419
Gilad Ben-Yossef64671042017-10-18 08:00:48 +0100420 crypto_init_wait(&data[i].wait);
Megha Dey087bcd22016-06-23 18:40:47 -0700421
Herbert Xu72259de2016-06-28 20:33:52 +0800422 data[i].req = ahash_request_alloc(tfm, GFP_KERNEL);
423 if (!data[i].req) {
Krzysztof Kozlowskif83f5b12016-06-28 09:23:06 +0200424 pr_err("alg: hash: Failed to allocate request for %s\n",
425 algo);
Herbert Xu72259de2016-06-28 20:33:52 +0800426 goto out;
Megha Dey087bcd22016-06-23 18:40:47 -0700427 }
Megha Dey087bcd22016-06-23 18:40:47 -0700428
Gilad Ben-Yossef64671042017-10-18 08:00:48 +0100429 ahash_request_set_callback(data[i].req, 0, crypto_req_done,
430 &data[i].wait);
Gilad Ben-Yossef7c3f1322017-12-17 08:29:00 +0000431
432 sg_init_table(data[i].sg, XBUFSIZE);
433 for (j = 0; j < XBUFSIZE; j++) {
434 sg_set_buf(data[i].sg + j, data[i].xbuf[j], PAGE_SIZE);
435 memset(data[i].xbuf[j], 0xff, PAGE_SIZE);
436 }
Megha Dey087bcd22016-06-23 18:40:47 -0700437 }
438
Herbert Xu72259de2016-06-28 20:33:52 +0800439 pr_info("\ntesting speed of multibuffer %s (%s)\n", algo,
440 get_driver_name(crypto_ahash, tfm));
Megha Dey087bcd22016-06-23 18:40:47 -0700441
442 for (i = 0; speed[i].blen != 0; i++) {
Herbert Xu72259de2016-06-28 20:33:52 +0800443 /* For some reason this only tests digests. */
444 if (speed[i].blen != speed[i].plen)
445 continue;
446
Gilad Ben-Yossef7c3f1322017-12-17 08:29:00 +0000447 if (speed[i].blen > XBUFSIZE * PAGE_SIZE) {
Krzysztof Kozlowskif83f5b12016-06-28 09:23:06 +0200448 pr_err("template (%u) too big for tvmem (%lu)\n",
Gilad Ben-Yossef7c3f1322017-12-17 08:29:00 +0000449 speed[i].blen, XBUFSIZE * PAGE_SIZE);
Krzysztof Kozlowskif83f5b12016-06-28 09:23:06 +0200450 goto out;
Megha Dey087bcd22016-06-23 18:40:47 -0700451 }
452
453 if (speed[i].klen)
454 crypto_ahash_setkey(tfm, tvmem[0], speed[i].klen);
455
Herbert Xu72259de2016-06-28 20:33:52 +0800456 for (k = 0; k < 8; k++)
457 ahash_request_set_crypt(data[k].req, data[k].sg,
458 data[k].result, speed[i].blen);
Megha Dey087bcd22016-06-23 18:40:47 -0700459
Herbert Xu72259de2016-06-28 20:33:52 +0800460 pr_info("test%3u "
461 "(%5u byte blocks,%5u bytes per update,%4u updates): ",
Megha Dey087bcd22016-06-23 18:40:47 -0700462 i, speed[i].blen, speed[i].plen,
463 speed[i].blen / speed[i].plen);
464
Herbert Xu72259de2016-06-28 20:33:52 +0800465 start = get_cycles();
466
467 for (k = 0; k < 8; k++) {
468 ret = crypto_ahash_digest(data[k].req);
Herbert Xud13cd112016-06-30 11:00:13 +0800469 if (ret == -EINPROGRESS) {
470 ret = 0;
Megha Dey087bcd22016-06-23 18:40:47 -0700471 continue;
Herbert Xud13cd112016-06-30 11:00:13 +0800472 }
Megha Dey087bcd22016-06-23 18:40:47 -0700473
Megha Dey087bcd22016-06-23 18:40:47 -0700474 if (ret)
Herbert Xu72259de2016-06-28 20:33:52 +0800475 break;
476
Gilad Ben-Yossef64671042017-10-18 08:00:48 +0100477 crypto_req_done(&data[k].req->base, 0);
Megha Dey087bcd22016-06-23 18:40:47 -0700478 }
479
Herbert Xu72259de2016-06-28 20:33:52 +0800480 for (j = 0; j < k; j++) {
Gilad Ben-Yossef64671042017-10-18 08:00:48 +0100481 struct crypto_wait *wait = &data[j].wait;
482 int wait_ret;
Herbert Xu72259de2016-06-28 20:33:52 +0800483
Gilad Ben-Yossef64671042017-10-18 08:00:48 +0100484 wait_ret = crypto_wait_req(-EINPROGRESS, wait);
485 if (wait_ret)
486 ret = wait_ret;
Herbert Xu72259de2016-06-28 20:33:52 +0800487 }
488
489 end = get_cycles();
490 cycles = end - start;
491 pr_cont("%6lu cycles/operation, %4lu cycles/byte\n",
492 cycles, cycles / (8 * speed[i].blen));
493
494 if (ret) {
495 pr_err("At least one hashing failed ret=%d\n", ret);
496 break;
497 }
Megha Dey087bcd22016-06-23 18:40:47 -0700498 }
Megha Dey087bcd22016-06-23 18:40:47 -0700499
500out:
501 for (k = 0; k < 8; ++k)
Herbert Xu72259de2016-06-28 20:33:52 +0800502 ahash_request_free(data[k].req);
503
Megha Dey087bcd22016-06-23 18:40:47 -0700504 for (k = 0; k < 8; ++k)
Herbert Xu72259de2016-06-28 20:33:52 +0800505 testmgr_free_buf(data[k].xbuf);
506
507 crypto_free_ahash(tfm);
508
509free_data:
510 kfree(data);
Megha Dey087bcd22016-06-23 18:40:47 -0700511}
512
David S. Millerbeb63da72010-05-19 14:11:21 +1000513static int test_ahash_jiffies_digest(struct ahash_request *req, int blen,
Mark Rustad3e3dc252014-07-25 02:53:38 -0700514 char *out, int secs)
David S. Millerbeb63da72010-05-19 14:11:21 +1000515{
516 unsigned long start, end;
517 int bcount;
518 int ret;
519
Mark Rustad3e3dc252014-07-25 02:53:38 -0700520 for (start = jiffies, end = start + secs * HZ, bcount = 0;
David S. Millerbeb63da72010-05-19 14:11:21 +1000521 time_before(jiffies, end); bcount++) {
522 ret = do_one_ahash_op(req, crypto_ahash_digest(req));
523 if (ret)
524 return ret;
525 }
526
527 printk("%6u opers/sec, %9lu bytes/sec\n",
Mark Rustad3e3dc252014-07-25 02:53:38 -0700528 bcount / secs, ((long)bcount * blen) / secs);
David S. Millerbeb63da72010-05-19 14:11:21 +1000529
530 return 0;
531}
532
533static int test_ahash_jiffies(struct ahash_request *req, int blen,
Mark Rustad3e3dc252014-07-25 02:53:38 -0700534 int plen, char *out, int secs)
David S. Millerbeb63da72010-05-19 14:11:21 +1000535{
536 unsigned long start, end;
537 int bcount, pcount;
538 int ret;
539
540 if (plen == blen)
Mark Rustad3e3dc252014-07-25 02:53:38 -0700541 return test_ahash_jiffies_digest(req, blen, out, secs);
David S. Millerbeb63da72010-05-19 14:11:21 +1000542
Mark Rustad3e3dc252014-07-25 02:53:38 -0700543 for (start = jiffies, end = start + secs * HZ, bcount = 0;
David S. Millerbeb63da72010-05-19 14:11:21 +1000544 time_before(jiffies, end); bcount++) {
Herbert Xu43a96072015-04-22 11:02:27 +0800545 ret = do_one_ahash_op(req, crypto_ahash_init(req));
David S. Millerbeb63da72010-05-19 14:11:21 +1000546 if (ret)
547 return ret;
548 for (pcount = 0; pcount < blen; pcount += plen) {
549 ret = do_one_ahash_op(req, crypto_ahash_update(req));
550 if (ret)
551 return ret;
552 }
553 /* we assume there is enough space in 'out' for the result */
554 ret = do_one_ahash_op(req, crypto_ahash_final(req));
555 if (ret)
556 return ret;
557 }
558
559 pr_cont("%6u opers/sec, %9lu bytes/sec\n",
Mark Rustad3e3dc252014-07-25 02:53:38 -0700560 bcount / secs, ((long)bcount * blen) / secs);
David S. Millerbeb63da72010-05-19 14:11:21 +1000561
562 return 0;
563}
564
565static int test_ahash_cycles_digest(struct ahash_request *req, int blen,
566 char *out)
567{
568 unsigned long cycles = 0;
569 int ret, i;
570
571 /* Warm-up run. */
572 for (i = 0; i < 4; i++) {
573 ret = do_one_ahash_op(req, crypto_ahash_digest(req));
574 if (ret)
575 goto out;
576 }
577
578 /* The real thing. */
579 for (i = 0; i < 8; i++) {
580 cycles_t start, end;
581
582 start = get_cycles();
583
584 ret = do_one_ahash_op(req, crypto_ahash_digest(req));
585 if (ret)
586 goto out;
587
588 end = get_cycles();
589
590 cycles += end - start;
591 }
592
593out:
594 if (ret)
595 return ret;
596
597 pr_cont("%6lu cycles/operation, %4lu cycles/byte\n",
598 cycles / 8, cycles / (8 * blen));
599
600 return 0;
601}
602
603static int test_ahash_cycles(struct ahash_request *req, int blen,
604 int plen, char *out)
605{
606 unsigned long cycles = 0;
607 int i, pcount, ret;
608
609 if (plen == blen)
610 return test_ahash_cycles_digest(req, blen, out);
611
612 /* Warm-up run. */
613 for (i = 0; i < 4; i++) {
Herbert Xu43a96072015-04-22 11:02:27 +0800614 ret = do_one_ahash_op(req, crypto_ahash_init(req));
David S. Millerbeb63da72010-05-19 14:11:21 +1000615 if (ret)
616 goto out;
617 for (pcount = 0; pcount < blen; pcount += plen) {
618 ret = do_one_ahash_op(req, crypto_ahash_update(req));
619 if (ret)
620 goto out;
621 }
622 ret = do_one_ahash_op(req, crypto_ahash_final(req));
623 if (ret)
624 goto out;
625 }
626
627 /* The real thing. */
628 for (i = 0; i < 8; i++) {
629 cycles_t start, end;
630
631 start = get_cycles();
632
Herbert Xu43a96072015-04-22 11:02:27 +0800633 ret = do_one_ahash_op(req, crypto_ahash_init(req));
David S. Millerbeb63da72010-05-19 14:11:21 +1000634 if (ret)
635 goto out;
636 for (pcount = 0; pcount < blen; pcount += plen) {
637 ret = do_one_ahash_op(req, crypto_ahash_update(req));
638 if (ret)
639 goto out;
640 }
641 ret = do_one_ahash_op(req, crypto_ahash_final(req));
642 if (ret)
643 goto out;
644
645 end = get_cycles();
646
647 cycles += end - start;
648 }
649
650out:
651 if (ret)
652 return ret;
653
654 pr_cont("%6lu cycles/operation, %4lu cycles/byte\n",
655 cycles / 8, cycles / (8 * blen));
656
657 return 0;
658}
659
Herbert Xu06605112016-02-01 21:36:49 +0800660static void test_ahash_speed_common(const char *algo, unsigned int secs,
661 struct hash_speed *speed, unsigned mask)
David S. Millerbeb63da72010-05-19 14:11:21 +1000662{
663 struct scatterlist sg[TVMEMSIZE];
Gilad Ben-Yossef64671042017-10-18 08:00:48 +0100664 struct crypto_wait wait;
David S. Millerbeb63da72010-05-19 14:11:21 +1000665 struct ahash_request *req;
666 struct crypto_ahash *tfm;
Horia Geant?f074f7b2015-08-27 18:38:36 +0300667 char *output;
David S. Millerbeb63da72010-05-19 14:11:21 +1000668 int i, ret;
669
Herbert Xu06605112016-02-01 21:36:49 +0800670 tfm = crypto_alloc_ahash(algo, 0, mask);
David S. Millerbeb63da72010-05-19 14:11:21 +1000671 if (IS_ERR(tfm)) {
672 pr_err("failed to load transform for %s: %ld\n",
673 algo, PTR_ERR(tfm));
674 return;
675 }
676
Luca Clementi263a8df2014-06-25 22:57:42 -0700677 printk(KERN_INFO "\ntesting speed of async %s (%s)\n", algo,
678 get_driver_name(crypto_ahash, tfm));
679
Horia Geant?f074f7b2015-08-27 18:38:36 +0300680 if (crypto_ahash_digestsize(tfm) > MAX_DIGEST_SIZE) {
681 pr_err("digestsize(%u) > %d\n", crypto_ahash_digestsize(tfm),
682 MAX_DIGEST_SIZE);
David S. Millerbeb63da72010-05-19 14:11:21 +1000683 goto out;
684 }
685
686 test_hash_sg_init(sg);
687 req = ahash_request_alloc(tfm, GFP_KERNEL);
688 if (!req) {
689 pr_err("ahash request allocation failure\n");
690 goto out;
691 }
692
Gilad Ben-Yossef64671042017-10-18 08:00:48 +0100693 crypto_init_wait(&wait);
David S. Millerbeb63da72010-05-19 14:11:21 +1000694 ahash_request_set_callback(req, CRYPTO_TFM_REQ_MAY_BACKLOG,
Gilad Ben-Yossef64671042017-10-18 08:00:48 +0100695 crypto_req_done, &wait);
David S. Millerbeb63da72010-05-19 14:11:21 +1000696
Horia Geant?f074f7b2015-08-27 18:38:36 +0300697 output = kmalloc(MAX_DIGEST_SIZE, GFP_KERNEL);
698 if (!output)
699 goto out_nomem;
700
David S. Millerbeb63da72010-05-19 14:11:21 +1000701 for (i = 0; speed[i].blen != 0; i++) {
702 if (speed[i].blen > TVMEMSIZE * PAGE_SIZE) {
703 pr_err("template (%u) too big for tvmem (%lu)\n",
704 speed[i].blen, TVMEMSIZE * PAGE_SIZE);
705 break;
706 }
707
708 pr_info("test%3u "
709 "(%5u byte blocks,%5u bytes per update,%4u updates): ",
710 i, speed[i].blen, speed[i].plen, speed[i].blen / speed[i].plen);
711
712 ahash_request_set_crypt(req, sg, output, speed[i].plen);
713
Mark Rustad3e3dc252014-07-25 02:53:38 -0700714 if (secs)
David S. Millerbeb63da72010-05-19 14:11:21 +1000715 ret = test_ahash_jiffies(req, speed[i].blen,
Mark Rustad3e3dc252014-07-25 02:53:38 -0700716 speed[i].plen, output, secs);
David S. Millerbeb63da72010-05-19 14:11:21 +1000717 else
718 ret = test_ahash_cycles(req, speed[i].blen,
719 speed[i].plen, output);
720
721 if (ret) {
722 pr_err("hashing failed ret=%d\n", ret);
723 break;
724 }
725 }
726
Horia Geant?f074f7b2015-08-27 18:38:36 +0300727 kfree(output);
728
729out_nomem:
David S. Millerbeb63da72010-05-19 14:11:21 +1000730 ahash_request_free(req);
731
732out:
733 crypto_free_ahash(tfm);
734}
735
Herbert Xu06605112016-02-01 21:36:49 +0800736static void test_ahash_speed(const char *algo, unsigned int secs,
737 struct hash_speed *speed)
738{
739 return test_ahash_speed_common(algo, secs, speed, 0);
740}
741
742static void test_hash_speed(const char *algo, unsigned int secs,
743 struct hash_speed *speed)
744{
745 return test_ahash_speed_common(algo, secs, speed, CRYPTO_ALG_ASYNC);
746}
747
Herbert Xu7166e582016-06-29 18:03:50 +0800748static inline int do_one_acipher_op(struct skcipher_request *req, int ret)
Jussi Kivilinna3f3baf32011-10-18 00:02:58 +0300749{
Gilad Ben-Yossef64671042017-10-18 08:00:48 +0100750 struct crypto_wait *wait = req->base.data;
Jussi Kivilinna3f3baf32011-10-18 00:02:58 +0300751
Gilad Ben-Yossef64671042017-10-18 08:00:48 +0100752 return crypto_wait_req(ret, wait);
Jussi Kivilinna3f3baf32011-10-18 00:02:58 +0300753}
754
Herbert Xu7166e582016-06-29 18:03:50 +0800755static int test_acipher_jiffies(struct skcipher_request *req, int enc,
Mark Rustad3e3dc252014-07-25 02:53:38 -0700756 int blen, int secs)
Jussi Kivilinna3f3baf32011-10-18 00:02:58 +0300757{
758 unsigned long start, end;
759 int bcount;
760 int ret;
761
Mark Rustad3e3dc252014-07-25 02:53:38 -0700762 for (start = jiffies, end = start + secs * HZ, bcount = 0;
Jussi Kivilinna3f3baf32011-10-18 00:02:58 +0300763 time_before(jiffies, end); bcount++) {
764 if (enc)
765 ret = do_one_acipher_op(req,
Herbert Xu7166e582016-06-29 18:03:50 +0800766 crypto_skcipher_encrypt(req));
Jussi Kivilinna3f3baf32011-10-18 00:02:58 +0300767 else
768 ret = do_one_acipher_op(req,
Herbert Xu7166e582016-06-29 18:03:50 +0800769 crypto_skcipher_decrypt(req));
Jussi Kivilinna3f3baf32011-10-18 00:02:58 +0300770
771 if (ret)
772 return ret;
773 }
774
775 pr_cont("%d operations in %d seconds (%ld bytes)\n",
Mark Rustad3e3dc252014-07-25 02:53:38 -0700776 bcount, secs, (long)bcount * blen);
Jussi Kivilinna3f3baf32011-10-18 00:02:58 +0300777 return 0;
778}
779
Herbert Xu7166e582016-06-29 18:03:50 +0800780static int test_acipher_cycles(struct skcipher_request *req, int enc,
Jussi Kivilinna3f3baf32011-10-18 00:02:58 +0300781 int blen)
782{
783 unsigned long cycles = 0;
784 int ret = 0;
785 int i;
786
787 /* Warm-up run. */
788 for (i = 0; i < 4; i++) {
789 if (enc)
790 ret = do_one_acipher_op(req,
Herbert Xu7166e582016-06-29 18:03:50 +0800791 crypto_skcipher_encrypt(req));
Jussi Kivilinna3f3baf32011-10-18 00:02:58 +0300792 else
793 ret = do_one_acipher_op(req,
Herbert Xu7166e582016-06-29 18:03:50 +0800794 crypto_skcipher_decrypt(req));
Jussi Kivilinna3f3baf32011-10-18 00:02:58 +0300795
796 if (ret)
797 goto out;
798 }
799
800 /* The real thing. */
801 for (i = 0; i < 8; i++) {
802 cycles_t start, end;
803
804 start = get_cycles();
805 if (enc)
806 ret = do_one_acipher_op(req,
Herbert Xu7166e582016-06-29 18:03:50 +0800807 crypto_skcipher_encrypt(req));
Jussi Kivilinna3f3baf32011-10-18 00:02:58 +0300808 else
809 ret = do_one_acipher_op(req,
Herbert Xu7166e582016-06-29 18:03:50 +0800810 crypto_skcipher_decrypt(req));
Jussi Kivilinna3f3baf32011-10-18 00:02:58 +0300811 end = get_cycles();
812
813 if (ret)
814 goto out;
815
816 cycles += end - start;
817 }
818
819out:
820 if (ret == 0)
821 pr_cont("1 operation in %lu cycles (%d bytes)\n",
822 (cycles + 4) / 8, blen);
823
824 return ret;
825}
826
Herbert Xu7166e582016-06-29 18:03:50 +0800827static void test_skcipher_speed(const char *algo, int enc, unsigned int secs,
828 struct cipher_speed_template *template,
829 unsigned int tcount, u8 *keysize, bool async)
Jussi Kivilinna3f3baf32011-10-18 00:02:58 +0300830{
Nicolas Royerde1975332012-07-01 19:19:47 +0200831 unsigned int ret, i, j, k, iv_len;
Gilad Ben-Yossef64671042017-10-18 08:00:48 +0100832 struct crypto_wait wait;
Jussi Kivilinna3f3baf32011-10-18 00:02:58 +0300833 const char *key;
834 char iv[128];
Herbert Xu7166e582016-06-29 18:03:50 +0800835 struct skcipher_request *req;
836 struct crypto_skcipher *tfm;
Jussi Kivilinna3f3baf32011-10-18 00:02:58 +0300837 const char *e;
838 u32 *b_size;
839
840 if (enc == ENCRYPT)
841 e = "encryption";
842 else
843 e = "decryption";
844
Gilad Ben-Yossef64671042017-10-18 08:00:48 +0100845 crypto_init_wait(&wait);
Jussi Kivilinna3f3baf32011-10-18 00:02:58 +0300846
Herbert Xu7166e582016-06-29 18:03:50 +0800847 tfm = crypto_alloc_skcipher(algo, 0, async ? 0 : CRYPTO_ALG_ASYNC);
Jussi Kivilinna3f3baf32011-10-18 00:02:58 +0300848
849 if (IS_ERR(tfm)) {
850 pr_err("failed to load transform for %s: %ld\n", algo,
851 PTR_ERR(tfm));
852 return;
853 }
854
Luca Clementi263a8df2014-06-25 22:57:42 -0700855 pr_info("\ntesting speed of async %s (%s) %s\n", algo,
Herbert Xu7166e582016-06-29 18:03:50 +0800856 get_driver_name(crypto_skcipher, tfm), e);
Luca Clementi263a8df2014-06-25 22:57:42 -0700857
Herbert Xu7166e582016-06-29 18:03:50 +0800858 req = skcipher_request_alloc(tfm, GFP_KERNEL);
Jussi Kivilinna3f3baf32011-10-18 00:02:58 +0300859 if (!req) {
860 pr_err("tcrypt: skcipher: Failed to allocate request for %s\n",
861 algo);
862 goto out;
863 }
864
Herbert Xu7166e582016-06-29 18:03:50 +0800865 skcipher_request_set_callback(req, CRYPTO_TFM_REQ_MAY_BACKLOG,
Gilad Ben-Yossef64671042017-10-18 08:00:48 +0100866 crypto_req_done, &wait);
Jussi Kivilinna3f3baf32011-10-18 00:02:58 +0300867
868 i = 0;
869 do {
870 b_size = block_sizes;
871
872 do {
873 struct scatterlist sg[TVMEMSIZE];
874
875 if ((*keysize + *b_size) > TVMEMSIZE * PAGE_SIZE) {
876 pr_err("template (%u) too big for "
877 "tvmem (%lu)\n", *keysize + *b_size,
878 TVMEMSIZE * PAGE_SIZE);
879 goto out_free_req;
880 }
881
882 pr_info("test %u (%d bit key, %d byte blocks): ", i,
883 *keysize * 8, *b_size);
884
885 memset(tvmem[0], 0xff, PAGE_SIZE);
886
887 /* set key, plain text and IV */
888 key = tvmem[0];
889 for (j = 0; j < tcount; j++) {
890 if (template[j].klen == *keysize) {
891 key = template[j].key;
892 break;
893 }
894 }
895
Herbert Xu7166e582016-06-29 18:03:50 +0800896 crypto_skcipher_clear_flags(tfm, ~0);
Jussi Kivilinna3f3baf32011-10-18 00:02:58 +0300897
Herbert Xu7166e582016-06-29 18:03:50 +0800898 ret = crypto_skcipher_setkey(tfm, key, *keysize);
Jussi Kivilinna3f3baf32011-10-18 00:02:58 +0300899 if (ret) {
900 pr_err("setkey() failed flags=%x\n",
Herbert Xu7166e582016-06-29 18:03:50 +0800901 crypto_skcipher_get_flags(tfm));
Jussi Kivilinna3f3baf32011-10-18 00:02:58 +0300902 goto out_free_req;
903 }
904
Nicolas Royerde1975332012-07-01 19:19:47 +0200905 k = *keysize + *b_size;
Horia Geant?007ee8d2015-03-09 16:14:58 +0200906 sg_init_table(sg, DIV_ROUND_UP(k, PAGE_SIZE));
907
Nicolas Royerde1975332012-07-01 19:19:47 +0200908 if (k > PAGE_SIZE) {
909 sg_set_buf(sg, tvmem[0] + *keysize,
Jussi Kivilinna3f3baf32011-10-18 00:02:58 +0300910 PAGE_SIZE - *keysize);
Nicolas Royerde1975332012-07-01 19:19:47 +0200911 k -= PAGE_SIZE;
912 j = 1;
913 while (k > PAGE_SIZE) {
914 sg_set_buf(sg + j, tvmem[j], PAGE_SIZE);
915 memset(tvmem[j], 0xff, PAGE_SIZE);
916 j++;
917 k -= PAGE_SIZE;
918 }
919 sg_set_buf(sg + j, tvmem[j], k);
920 memset(tvmem[j], 0xff, k);
921 } else {
922 sg_set_buf(sg, tvmem[0] + *keysize, *b_size);
Jussi Kivilinna3f3baf32011-10-18 00:02:58 +0300923 }
924
Herbert Xu7166e582016-06-29 18:03:50 +0800925 iv_len = crypto_skcipher_ivsize(tfm);
Jussi Kivilinna3f3baf32011-10-18 00:02:58 +0300926 if (iv_len)
927 memset(&iv, 0xff, iv_len);
928
Herbert Xu7166e582016-06-29 18:03:50 +0800929 skcipher_request_set_crypt(req, sg, sg, *b_size, iv);
Jussi Kivilinna3f3baf32011-10-18 00:02:58 +0300930
Mark Rustad3e3dc252014-07-25 02:53:38 -0700931 if (secs)
Jussi Kivilinna3f3baf32011-10-18 00:02:58 +0300932 ret = test_acipher_jiffies(req, enc,
Mark Rustad3e3dc252014-07-25 02:53:38 -0700933 *b_size, secs);
Jussi Kivilinna3f3baf32011-10-18 00:02:58 +0300934 else
935 ret = test_acipher_cycles(req, enc,
936 *b_size);
937
938 if (ret) {
939 pr_err("%s() failed flags=%x\n", e,
Herbert Xu7166e582016-06-29 18:03:50 +0800940 crypto_skcipher_get_flags(tfm));
Jussi Kivilinna3f3baf32011-10-18 00:02:58 +0300941 break;
942 }
943 b_size++;
944 i++;
945 } while (*b_size);
946 keysize++;
947 } while (*keysize);
948
949out_free_req:
Herbert Xu7166e582016-06-29 18:03:50 +0800950 skcipher_request_free(req);
Jussi Kivilinna3f3baf32011-10-18 00:02:58 +0300951out:
Herbert Xu7166e582016-06-29 18:03:50 +0800952 crypto_free_skcipher(tfm);
953}
954
955static void test_acipher_speed(const char *algo, int enc, unsigned int secs,
956 struct cipher_speed_template *template,
957 unsigned int tcount, u8 *keysize)
958{
959 return test_skcipher_speed(algo, enc, secs, template, tcount, keysize,
960 true);
961}
962
963static void test_cipher_speed(const char *algo, int enc, unsigned int secs,
964 struct cipher_speed_template *template,
965 unsigned int tcount, u8 *keysize)
966{
967 return test_skcipher_speed(algo, enc, secs, template, tcount, keysize,
968 false);
Jussi Kivilinna3f3baf32011-10-18 00:02:58 +0300969}
970
Herbert Xuef2736f2005-06-22 13:26:03 -0700971static void test_available(void)
Linus Torvalds1da177e2005-04-16 15:20:36 -0700972{
973 char **name = check;
Herbert Xuef2736f2005-06-22 13:26:03 -0700974
Linus Torvalds1da177e2005-04-16 15:20:36 -0700975 while (*name) {
976 printk("alg %s ", *name);
Herbert Xu6158efc2007-04-04 17:41:07 +1000977 printk(crypto_has_alg(*name, 0, 0) ?
Herbert Xue4d5b792006-08-26 18:12:40 +1000978 "found\n" : "not found\n");
Linus Torvalds1da177e2005-04-16 15:20:36 -0700979 name++;
Herbert Xuef2736f2005-06-22 13:26:03 -0700980 }
Linus Torvalds1da177e2005-04-16 15:20:36 -0700981}
982
Herbert Xu01b32322008-07-31 15:41:55 +0800983static inline int tcrypt_test(const char *alg)
984{
Jarod Wilson4e033a62009-05-27 15:10:21 +1000985 int ret;
986
Rabin Vincent76512f22017-01-18 14:54:05 +0100987 pr_debug("testing %s\n", alg);
988
Jarod Wilson4e033a62009-05-27 15:10:21 +1000989 ret = alg_test(alg, alg, 0, 0);
990 /* non-fips algs return -EINVAL in fips mode */
991 if (fips_enabled && ret == -EINVAL)
992 ret = 0;
993 return ret;
Herbert Xu01b32322008-07-31 15:41:55 +0800994}
995
Herbert Xu86068132014-12-04 16:43:29 +0800996static int do_test(const char *alg, u32 type, u32 mask, int m)
Herbert Xu01b32322008-07-31 15:41:55 +0800997{
998 int i;
Jarod Wilson4e033a62009-05-27 15:10:21 +1000999 int ret = 0;
Herbert Xu01b32322008-07-31 15:41:55 +08001000
1001 switch (m) {
Linus Torvalds1da177e2005-04-16 15:20:36 -07001002 case 0:
Herbert Xu86068132014-12-04 16:43:29 +08001003 if (alg) {
1004 if (!crypto_has_alg(alg, type,
1005 mask ?: CRYPTO_ALG_TYPE_MASK))
1006 ret = -ENOENT;
1007 break;
1008 }
1009
Herbert Xu01b32322008-07-31 15:41:55 +08001010 for (i = 1; i < 200; i++)
Herbert Xu86068132014-12-04 16:43:29 +08001011 ret += do_test(NULL, 0, 0, i);
Linus Torvalds1da177e2005-04-16 15:20:36 -07001012 break;
1013
1014 case 1:
Jarod Wilson4e033a62009-05-27 15:10:21 +10001015 ret += tcrypt_test("md5");
Linus Torvalds1da177e2005-04-16 15:20:36 -07001016 break;
1017
1018 case 2:
Jarod Wilson4e033a62009-05-27 15:10:21 +10001019 ret += tcrypt_test("sha1");
Linus Torvalds1da177e2005-04-16 15:20:36 -07001020 break;
1021
1022 case 3:
Jarod Wilson4e033a62009-05-27 15:10:21 +10001023 ret += tcrypt_test("ecb(des)");
1024 ret += tcrypt_test("cbc(des)");
Jussi Kivilinna8163fc32012-10-20 14:53:07 +03001025 ret += tcrypt_test("ctr(des)");
Linus Torvalds1da177e2005-04-16 15:20:36 -07001026 break;
1027
1028 case 4:
Jarod Wilson4e033a62009-05-27 15:10:21 +10001029 ret += tcrypt_test("ecb(des3_ede)");
1030 ret += tcrypt_test("cbc(des3_ede)");
Jussi Kivilinnae080b172012-10-20 14:53:12 +03001031 ret += tcrypt_test("ctr(des3_ede)");
Linus Torvalds1da177e2005-04-16 15:20:36 -07001032 break;
1033
1034 case 5:
Jarod Wilson4e033a62009-05-27 15:10:21 +10001035 ret += tcrypt_test("md4");
Linus Torvalds1da177e2005-04-16 15:20:36 -07001036 break;
Herbert Xuef2736f2005-06-22 13:26:03 -07001037
Linus Torvalds1da177e2005-04-16 15:20:36 -07001038 case 6:
Jarod Wilson4e033a62009-05-27 15:10:21 +10001039 ret += tcrypt_test("sha256");
Linus Torvalds1da177e2005-04-16 15:20:36 -07001040 break;
Herbert Xuef2736f2005-06-22 13:26:03 -07001041
Linus Torvalds1da177e2005-04-16 15:20:36 -07001042 case 7:
Jarod Wilson4e033a62009-05-27 15:10:21 +10001043 ret += tcrypt_test("ecb(blowfish)");
1044 ret += tcrypt_test("cbc(blowfish)");
Jussi Kivilinna85b63e32011-10-10 23:03:03 +03001045 ret += tcrypt_test("ctr(blowfish)");
Linus Torvalds1da177e2005-04-16 15:20:36 -07001046 break;
1047
1048 case 8:
Jarod Wilson4e033a62009-05-27 15:10:21 +10001049 ret += tcrypt_test("ecb(twofish)");
1050 ret += tcrypt_test("cbc(twofish)");
Jussi Kivilinna573da622011-10-10 23:03:12 +03001051 ret += tcrypt_test("ctr(twofish)");
Jussi Kivilinnabee3a902011-10-18 13:32:56 +03001052 ret += tcrypt_test("lrw(twofish)");
Jussi Kivilinna131f7542011-10-18 13:33:38 +03001053 ret += tcrypt_test("xts(twofish)");
Linus Torvalds1da177e2005-04-16 15:20:36 -07001054 break;
Herbert Xuef2736f2005-06-22 13:26:03 -07001055
Linus Torvalds1da177e2005-04-16 15:20:36 -07001056 case 9:
Jarod Wilson4e033a62009-05-27 15:10:21 +10001057 ret += tcrypt_test("ecb(serpent)");
Jussi Kivilinna9d259172011-10-18 00:02:53 +03001058 ret += tcrypt_test("cbc(serpent)");
1059 ret += tcrypt_test("ctr(serpent)");
Jussi Kivilinna87aae4b2011-10-18 13:32:39 +03001060 ret += tcrypt_test("lrw(serpent)");
Jussi Kivilinna5209c072011-10-18 13:33:22 +03001061 ret += tcrypt_test("xts(serpent)");
Linus Torvalds1da177e2005-04-16 15:20:36 -07001062 break;
1063
1064 case 10:
Jarod Wilson4e033a62009-05-27 15:10:21 +10001065 ret += tcrypt_test("ecb(aes)");
1066 ret += tcrypt_test("cbc(aes)");
1067 ret += tcrypt_test("lrw(aes)");
1068 ret += tcrypt_test("xts(aes)");
1069 ret += tcrypt_test("ctr(aes)");
1070 ret += tcrypt_test("rfc3686(ctr(aes))");
Linus Torvalds1da177e2005-04-16 15:20:36 -07001071 break;
1072
1073 case 11:
Jarod Wilson4e033a62009-05-27 15:10:21 +10001074 ret += tcrypt_test("sha384");
Linus Torvalds1da177e2005-04-16 15:20:36 -07001075 break;
Herbert Xuef2736f2005-06-22 13:26:03 -07001076
Linus Torvalds1da177e2005-04-16 15:20:36 -07001077 case 12:
Jarod Wilson4e033a62009-05-27 15:10:21 +10001078 ret += tcrypt_test("sha512");
Linus Torvalds1da177e2005-04-16 15:20:36 -07001079 break;
1080
1081 case 13:
Jarod Wilson4e033a62009-05-27 15:10:21 +10001082 ret += tcrypt_test("deflate");
Linus Torvalds1da177e2005-04-16 15:20:36 -07001083 break;
1084
1085 case 14:
Jarod Wilson4e033a62009-05-27 15:10:21 +10001086 ret += tcrypt_test("ecb(cast5)");
Johannes Goetzfrieda2c58262012-07-11 19:37:21 +02001087 ret += tcrypt_test("cbc(cast5)");
1088 ret += tcrypt_test("ctr(cast5)");
Linus Torvalds1da177e2005-04-16 15:20:36 -07001089 break;
1090
1091 case 15:
Jarod Wilson4e033a62009-05-27 15:10:21 +10001092 ret += tcrypt_test("ecb(cast6)");
Johannes Goetzfried9b8b0402012-07-11 19:38:29 +02001093 ret += tcrypt_test("cbc(cast6)");
1094 ret += tcrypt_test("ctr(cast6)");
1095 ret += tcrypt_test("lrw(cast6)");
1096 ret += tcrypt_test("xts(cast6)");
Linus Torvalds1da177e2005-04-16 15:20:36 -07001097 break;
1098
1099 case 16:
Jarod Wilson4e033a62009-05-27 15:10:21 +10001100 ret += tcrypt_test("ecb(arc4)");
Linus Torvalds1da177e2005-04-16 15:20:36 -07001101 break;
1102
1103 case 17:
Jarod Wilson4e033a62009-05-27 15:10:21 +10001104 ret += tcrypt_test("michael_mic");
Linus Torvalds1da177e2005-04-16 15:20:36 -07001105 break;
1106
1107 case 18:
Jarod Wilson4e033a62009-05-27 15:10:21 +10001108 ret += tcrypt_test("crc32c");
Linus Torvalds1da177e2005-04-16 15:20:36 -07001109 break;
1110
1111 case 19:
Jarod Wilson4e033a62009-05-27 15:10:21 +10001112 ret += tcrypt_test("ecb(tea)");
Linus Torvalds1da177e2005-04-16 15:20:36 -07001113 break;
1114
1115 case 20:
Jarod Wilson4e033a62009-05-27 15:10:21 +10001116 ret += tcrypt_test("ecb(xtea)");
Linus Torvalds1da177e2005-04-16 15:20:36 -07001117 break;
1118
1119 case 21:
Jarod Wilson4e033a62009-05-27 15:10:21 +10001120 ret += tcrypt_test("ecb(khazad)");
Linus Torvalds1da177e2005-04-16 15:20:36 -07001121 break;
1122
1123 case 22:
Jarod Wilson4e033a62009-05-27 15:10:21 +10001124 ret += tcrypt_test("wp512");
Linus Torvalds1da177e2005-04-16 15:20:36 -07001125 break;
1126
1127 case 23:
Jarod Wilson4e033a62009-05-27 15:10:21 +10001128 ret += tcrypt_test("wp384");
Linus Torvalds1da177e2005-04-16 15:20:36 -07001129 break;
1130
1131 case 24:
Jarod Wilson4e033a62009-05-27 15:10:21 +10001132 ret += tcrypt_test("wp256");
Linus Torvalds1da177e2005-04-16 15:20:36 -07001133 break;
1134
1135 case 25:
Jarod Wilson4e033a62009-05-27 15:10:21 +10001136 ret += tcrypt_test("ecb(tnepres)");
Linus Torvalds1da177e2005-04-16 15:20:36 -07001137 break;
1138
1139 case 26:
Jarod Wilson4e033a62009-05-27 15:10:21 +10001140 ret += tcrypt_test("ecb(anubis)");
1141 ret += tcrypt_test("cbc(anubis)");
Linus Torvalds1da177e2005-04-16 15:20:36 -07001142 break;
1143
1144 case 27:
Jarod Wilson4e033a62009-05-27 15:10:21 +10001145 ret += tcrypt_test("tgr192");
Linus Torvalds1da177e2005-04-16 15:20:36 -07001146 break;
1147
1148 case 28:
Jarod Wilson4e033a62009-05-27 15:10:21 +10001149 ret += tcrypt_test("tgr160");
Linus Torvalds1da177e2005-04-16 15:20:36 -07001150 break;
1151
1152 case 29:
Jarod Wilson4e033a62009-05-27 15:10:21 +10001153 ret += tcrypt_test("tgr128");
Linus Torvalds1da177e2005-04-16 15:20:36 -07001154 break;
Adrian-Ken Rueegsegger2998db32008-05-09 21:29:35 +08001155
Aaron Grothefb4f10e2005-09-01 17:42:46 -07001156 case 30:
Jarod Wilson4e033a62009-05-27 15:10:21 +10001157 ret += tcrypt_test("ecb(xeta)");
Aaron Grothefb4f10e2005-09-01 17:42:46 -07001158 break;
Linus Torvalds1da177e2005-04-16 15:20:36 -07001159
David Howells90831632006-12-16 12:13:14 +11001160 case 31:
Jarod Wilson4e033a62009-05-27 15:10:21 +10001161 ret += tcrypt_test("pcbc(fcrypt)");
David Howells90831632006-12-16 12:13:14 +11001162 break;
1163
Noriaki TAKAMIYA02ab5a72007-01-24 21:48:19 +11001164 case 32:
Jarod Wilson4e033a62009-05-27 15:10:21 +10001165 ret += tcrypt_test("ecb(camellia)");
1166 ret += tcrypt_test("cbc(camellia)");
Jussi Kivilinna54216bb2012-09-21 10:27:10 +03001167 ret += tcrypt_test("ctr(camellia)");
1168 ret += tcrypt_test("lrw(camellia)");
1169 ret += tcrypt_test("xts(camellia)");
Noriaki TAKAMIYA02ab5a72007-01-24 21:48:19 +11001170 break;
Jussi Kivilinna93b5e862013-04-08 10:48:44 +03001171
Jonathan Lynchcd12fb92007-11-10 20:08:25 +08001172 case 33:
Jarod Wilson4e033a62009-05-27 15:10:21 +10001173 ret += tcrypt_test("sha224");
Jonathan Lynchcd12fb92007-11-10 20:08:25 +08001174 break;
Noriaki TAKAMIYA02ab5a72007-01-24 21:48:19 +11001175
Tan Swee Heng2407d602007-11-23 19:45:00 +08001176 case 34:
Jarod Wilson4e033a62009-05-27 15:10:21 +10001177 ret += tcrypt_test("salsa20");
Tan Swee Heng2407d602007-11-23 19:45:00 +08001178 break;
1179
Herbert Xu8df213d2007-12-02 14:55:47 +11001180 case 35:
Jarod Wilson4e033a62009-05-27 15:10:21 +10001181 ret += tcrypt_test("gcm(aes)");
Herbert Xu8df213d2007-12-02 14:55:47 +11001182 break;
1183
Zoltan Sogor0b77abb2007-12-07 16:53:23 +08001184 case 36:
Jarod Wilson4e033a62009-05-27 15:10:21 +10001185 ret += tcrypt_test("lzo");
Zoltan Sogor0b77abb2007-12-07 16:53:23 +08001186 break;
1187
Joy Latten93cc74e2007-12-12 20:24:22 +08001188 case 37:
Jarod Wilson4e033a62009-05-27 15:10:21 +10001189 ret += tcrypt_test("ccm(aes)");
Joy Latten93cc74e2007-12-12 20:24:22 +08001190 break;
1191
Kevin Coffman76cb9522008-03-24 21:26:16 +08001192 case 38:
Jarod Wilson4e033a62009-05-27 15:10:21 +10001193 ret += tcrypt_test("cts(cbc(aes))");
Kevin Coffman76cb9522008-03-24 21:26:16 +08001194 break;
1195
Adrian-Ken Rueegseggerfd4adf12008-05-07 22:16:36 +08001196 case 39:
Jarod Wilson4e033a62009-05-27 15:10:21 +10001197 ret += tcrypt_test("rmd128");
Adrian-Ken Rueegseggerfd4adf12008-05-07 22:16:36 +08001198 break;
1199
1200 case 40:
Jarod Wilson4e033a62009-05-27 15:10:21 +10001201 ret += tcrypt_test("rmd160");
Adrian-Ken Rueegseggerfd4adf12008-05-07 22:16:36 +08001202 break;
1203
Adrian-Ken Rueegsegger2998db32008-05-09 21:29:35 +08001204 case 41:
Jarod Wilson4e033a62009-05-27 15:10:21 +10001205 ret += tcrypt_test("rmd256");
Adrian-Ken Rueegsegger2998db32008-05-09 21:29:35 +08001206 break;
1207
1208 case 42:
Jarod Wilson4e033a62009-05-27 15:10:21 +10001209 ret += tcrypt_test("rmd320");
Herbert Xu01b32322008-07-31 15:41:55 +08001210 break;
1211
1212 case 43:
Jarod Wilson4e033a62009-05-27 15:10:21 +10001213 ret += tcrypt_test("ecb(seed)");
Adrian-Ken Rueegsegger2998db32008-05-09 21:29:35 +08001214 break;
1215
Geert Uytterhoeven0c01aed2009-03-04 15:42:15 +08001216 case 44:
Jarod Wilson4e033a62009-05-27 15:10:21 +10001217 ret += tcrypt_test("zlib");
Geert Uytterhoeven0c01aed2009-03-04 15:42:15 +08001218 break;
1219
Jarod Wilson5d667322009-05-04 19:23:40 +08001220 case 45:
Jarod Wilson4e033a62009-05-27 15:10:21 +10001221 ret += tcrypt_test("rfc4309(ccm(aes))");
Jarod Wilson5d667322009-05-04 19:23:40 +08001222 break;
1223
Jussi Kivilinna54216bb2012-09-21 10:27:10 +03001224 case 46:
1225 ret += tcrypt_test("ghash");
1226 break;
1227
Herbert Xu684115212013-09-07 12:56:26 +10001228 case 47:
1229 ret += tcrypt_test("crct10dif");
1230 break;
1231
raveendra padasalagi79cc6ab2016-06-17 10:30:36 +05301232 case 48:
1233 ret += tcrypt_test("sha3-224");
1234 break;
1235
1236 case 49:
1237 ret += tcrypt_test("sha3-256");
1238 break;
1239
1240 case 50:
1241 ret += tcrypt_test("sha3-384");
1242 break;
1243
1244 case 51:
1245 ret += tcrypt_test("sha3-512");
1246 break;
1247
Gilad Ben-Yossefb7e27532017-08-21 13:51:29 +03001248 case 52:
1249 ret += tcrypt_test("sm3");
1250 break;
1251
Linus Torvalds1da177e2005-04-16 15:20:36 -07001252 case 100:
Jarod Wilson4e033a62009-05-27 15:10:21 +10001253 ret += tcrypt_test("hmac(md5)");
Linus Torvalds1da177e2005-04-16 15:20:36 -07001254 break;
Herbert Xuef2736f2005-06-22 13:26:03 -07001255
Linus Torvalds1da177e2005-04-16 15:20:36 -07001256 case 101:
Jarod Wilson4e033a62009-05-27 15:10:21 +10001257 ret += tcrypt_test("hmac(sha1)");
Linus Torvalds1da177e2005-04-16 15:20:36 -07001258 break;
Herbert Xuef2736f2005-06-22 13:26:03 -07001259
Linus Torvalds1da177e2005-04-16 15:20:36 -07001260 case 102:
Jarod Wilson4e033a62009-05-27 15:10:21 +10001261 ret += tcrypt_test("hmac(sha256)");
Linus Torvalds1da177e2005-04-16 15:20:36 -07001262 break;
1263
Andrew Donofrioa28091a2006-12-10 12:10:20 +11001264 case 103:
Jarod Wilson4e033a62009-05-27 15:10:21 +10001265 ret += tcrypt_test("hmac(sha384)");
Andrew Donofrioa28091a2006-12-10 12:10:20 +11001266 break;
1267
1268 case 104:
Jarod Wilson4e033a62009-05-27 15:10:21 +10001269 ret += tcrypt_test("hmac(sha512)");
Andrew Donofrioa28091a2006-12-10 12:10:20 +11001270 break;
Herbert Xu38ed9ab2008-01-01 15:59:28 +11001271
Jonathan Lynchcd12fb92007-11-10 20:08:25 +08001272 case 105:
Jarod Wilson4e033a62009-05-27 15:10:21 +10001273 ret += tcrypt_test("hmac(sha224)");
Jonathan Lynchcd12fb92007-11-10 20:08:25 +08001274 break;
Linus Torvalds1da177e2005-04-16 15:20:36 -07001275
Herbert Xu38ed9ab2008-01-01 15:59:28 +11001276 case 106:
Jarod Wilson4e033a62009-05-27 15:10:21 +10001277 ret += tcrypt_test("xcbc(aes)");
Herbert Xu38ed9ab2008-01-01 15:59:28 +11001278 break;
1279
Adrian-Ken Rueegseggerfd4adf12008-05-07 22:16:36 +08001280 case 107:
Jarod Wilson4e033a62009-05-27 15:10:21 +10001281 ret += tcrypt_test("hmac(rmd128)");
Adrian-Ken Rueegseggerfd4adf12008-05-07 22:16:36 +08001282 break;
1283
1284 case 108:
Jarod Wilson4e033a62009-05-27 15:10:21 +10001285 ret += tcrypt_test("hmac(rmd160)");
Adrian-Ken Rueegseggerfd4adf12008-05-07 22:16:36 +08001286 break;
1287
Shane Wangf1939f72009-09-02 20:05:22 +10001288 case 109:
1289 ret += tcrypt_test("vmac(aes)");
1290 break;
Jussi Kivilinna93b5e862013-04-08 10:48:44 +03001291
Sonic Zhanga482b082012-05-25 17:54:13 +08001292 case 110:
1293 ret += tcrypt_test("hmac(crc32)");
1294 break;
Shane Wangf1939f72009-09-02 20:05:22 +10001295
raveendra padasalagi98eca722016-07-01 11:16:54 +05301296 case 111:
1297 ret += tcrypt_test("hmac(sha3-224)");
1298 break;
1299
1300 case 112:
1301 ret += tcrypt_test("hmac(sha3-256)");
1302 break;
1303
1304 case 113:
1305 ret += tcrypt_test("hmac(sha3-384)");
1306 break;
1307
1308 case 114:
1309 ret += tcrypt_test("hmac(sha3-512)");
1310 break;
1311
Jarod Wilsone08ca2d2009-05-04 19:46:29 +08001312 case 150:
Jarod Wilson4e033a62009-05-27 15:10:21 +10001313 ret += tcrypt_test("ansi_cprng");
Jarod Wilsone08ca2d2009-05-04 19:46:29 +08001314 break;
1315
Adrian Hoban69435b92010-11-04 15:02:04 -04001316 case 151:
1317 ret += tcrypt_test("rfc4106(gcm(aes))");
1318 break;
1319
Jussi Kivilinnae9b74412013-04-07 16:43:51 +03001320 case 152:
1321 ret += tcrypt_test("rfc4543(gcm(aes))");
1322 break;
1323
Jussi Kivilinna93b5e862013-04-08 10:48:44 +03001324 case 153:
1325 ret += tcrypt_test("cmac(aes)");
1326 break;
1327
1328 case 154:
1329 ret += tcrypt_test("cmac(des3_ede)");
1330 break;
1331
Horia Geantabbf9c892013-11-28 15:11:16 +02001332 case 155:
1333 ret += tcrypt_test("authenc(hmac(sha1),cbc(aes))");
1334 break;
1335
Horia Geantabca4feb2014-03-14 17:46:51 +02001336 case 156:
1337 ret += tcrypt_test("authenc(hmac(md5),ecb(cipher_null))");
1338 break;
1339
1340 case 157:
1341 ret += tcrypt_test("authenc(hmac(sha1),ecb(cipher_null))");
1342 break;
Nitesh Lal5208ed22014-05-21 17:09:08 +05301343 case 181:
1344 ret += tcrypt_test("authenc(hmac(sha1),cbc(des))");
1345 break;
1346 case 182:
1347 ret += tcrypt_test("authenc(hmac(sha1),cbc(des3_ede))");
1348 break;
1349 case 183:
1350 ret += tcrypt_test("authenc(hmac(sha224),cbc(des))");
1351 break;
1352 case 184:
1353 ret += tcrypt_test("authenc(hmac(sha224),cbc(des3_ede))");
1354 break;
1355 case 185:
1356 ret += tcrypt_test("authenc(hmac(sha256),cbc(des))");
1357 break;
1358 case 186:
1359 ret += tcrypt_test("authenc(hmac(sha256),cbc(des3_ede))");
1360 break;
1361 case 187:
1362 ret += tcrypt_test("authenc(hmac(sha384),cbc(des))");
1363 break;
1364 case 188:
1365 ret += tcrypt_test("authenc(hmac(sha384),cbc(des3_ede))");
1366 break;
1367 case 189:
1368 ret += tcrypt_test("authenc(hmac(sha512),cbc(des))");
1369 break;
1370 case 190:
1371 ret += tcrypt_test("authenc(hmac(sha512),cbc(des3_ede))");
1372 break;
Harald Welteebfd9bc2005-06-22 13:27:23 -07001373 case 200:
Herbert Xucba83562006-08-13 08:26:09 +10001374 test_cipher_speed("ecb(aes)", ENCRYPT, sec, NULL, 0,
Sebastian Siewior477035c2008-03-11 21:24:26 +08001375 speed_template_16_24_32);
Herbert Xucba83562006-08-13 08:26:09 +10001376 test_cipher_speed("ecb(aes)", DECRYPT, sec, NULL, 0,
Sebastian Siewior477035c2008-03-11 21:24:26 +08001377 speed_template_16_24_32);
Herbert Xucba83562006-08-13 08:26:09 +10001378 test_cipher_speed("cbc(aes)", ENCRYPT, sec, NULL, 0,
Sebastian Siewior477035c2008-03-11 21:24:26 +08001379 speed_template_16_24_32);
Herbert Xucba83562006-08-13 08:26:09 +10001380 test_cipher_speed("cbc(aes)", DECRYPT, sec, NULL, 0,
Sebastian Siewior477035c2008-03-11 21:24:26 +08001381 speed_template_16_24_32);
Rik Snelf3d10442006-11-29 19:01:41 +11001382 test_cipher_speed("lrw(aes)", ENCRYPT, sec, NULL, 0,
Sebastian Siewior477035c2008-03-11 21:24:26 +08001383 speed_template_32_40_48);
Rik Snelf3d10442006-11-29 19:01:41 +11001384 test_cipher_speed("lrw(aes)", DECRYPT, sec, NULL, 0,
Sebastian Siewior477035c2008-03-11 21:24:26 +08001385 speed_template_32_40_48);
Rik Snelf19f5112007-09-19 20:23:13 +08001386 test_cipher_speed("xts(aes)", ENCRYPT, sec, NULL, 0,
Horia Geantăb66ad0b2017-07-19 19:40:32 +03001387 speed_template_32_64);
Rik Snelf19f5112007-09-19 20:23:13 +08001388 test_cipher_speed("xts(aes)", DECRYPT, sec, NULL, 0,
Horia Geantăb66ad0b2017-07-19 19:40:32 +03001389 speed_template_32_64);
Herbert Xu1503a242016-06-29 18:04:14 +08001390 test_cipher_speed("cts(cbc(aes))", ENCRYPT, sec, NULL, 0,
1391 speed_template_16_24_32);
1392 test_cipher_speed("cts(cbc(aes))", DECRYPT, sec, NULL, 0,
1393 speed_template_16_24_32);
Jan Glauber9996e342011-04-26 16:34:01 +10001394 test_cipher_speed("ctr(aes)", ENCRYPT, sec, NULL, 0,
1395 speed_template_16_24_32);
1396 test_cipher_speed("ctr(aes)", DECRYPT, sec, NULL, 0,
1397 speed_template_16_24_32);
Harald Welteebfd9bc2005-06-22 13:27:23 -07001398 break;
1399
1400 case 201:
Herbert Xucba83562006-08-13 08:26:09 +10001401 test_cipher_speed("ecb(des3_ede)", ENCRYPT, sec,
Herbert Xuda7f0332008-07-31 17:08:25 +08001402 des3_speed_template, DES3_SPEED_VECTORS,
Sebastian Siewior477035c2008-03-11 21:24:26 +08001403 speed_template_24);
Herbert Xucba83562006-08-13 08:26:09 +10001404 test_cipher_speed("ecb(des3_ede)", DECRYPT, sec,
Herbert Xuda7f0332008-07-31 17:08:25 +08001405 des3_speed_template, DES3_SPEED_VECTORS,
Sebastian Siewior477035c2008-03-11 21:24:26 +08001406 speed_template_24);
Herbert Xucba83562006-08-13 08:26:09 +10001407 test_cipher_speed("cbc(des3_ede)", ENCRYPT, sec,
Herbert Xuda7f0332008-07-31 17:08:25 +08001408 des3_speed_template, DES3_SPEED_VECTORS,
Sebastian Siewior477035c2008-03-11 21:24:26 +08001409 speed_template_24);
Herbert Xucba83562006-08-13 08:26:09 +10001410 test_cipher_speed("cbc(des3_ede)", DECRYPT, sec,
Herbert Xuda7f0332008-07-31 17:08:25 +08001411 des3_speed_template, DES3_SPEED_VECTORS,
Sebastian Siewior477035c2008-03-11 21:24:26 +08001412 speed_template_24);
Jussi Kivilinna87131502014-06-09 20:59:49 +03001413 test_cipher_speed("ctr(des3_ede)", ENCRYPT, sec,
1414 des3_speed_template, DES3_SPEED_VECTORS,
1415 speed_template_24);
1416 test_cipher_speed("ctr(des3_ede)", DECRYPT, sec,
1417 des3_speed_template, DES3_SPEED_VECTORS,
1418 speed_template_24);
Harald Welteebfd9bc2005-06-22 13:27:23 -07001419 break;
1420
1421 case 202:
Herbert Xucba83562006-08-13 08:26:09 +10001422 test_cipher_speed("ecb(twofish)", ENCRYPT, sec, NULL, 0,
Sebastian Siewior477035c2008-03-11 21:24:26 +08001423 speed_template_16_24_32);
Herbert Xucba83562006-08-13 08:26:09 +10001424 test_cipher_speed("ecb(twofish)", DECRYPT, sec, NULL, 0,
Sebastian Siewior477035c2008-03-11 21:24:26 +08001425 speed_template_16_24_32);
Herbert Xucba83562006-08-13 08:26:09 +10001426 test_cipher_speed("cbc(twofish)", ENCRYPT, sec, NULL, 0,
Sebastian Siewior477035c2008-03-11 21:24:26 +08001427 speed_template_16_24_32);
Herbert Xucba83562006-08-13 08:26:09 +10001428 test_cipher_speed("cbc(twofish)", DECRYPT, sec, NULL, 0,
Sebastian Siewior477035c2008-03-11 21:24:26 +08001429 speed_template_16_24_32);
Jussi Kivilinnaee5002a2011-09-26 16:47:15 +03001430 test_cipher_speed("ctr(twofish)", ENCRYPT, sec, NULL, 0,
1431 speed_template_16_24_32);
1432 test_cipher_speed("ctr(twofish)", DECRYPT, sec, NULL, 0,
1433 speed_template_16_24_32);
Jussi Kivilinnabee3a902011-10-18 13:32:56 +03001434 test_cipher_speed("lrw(twofish)", ENCRYPT, sec, NULL, 0,
1435 speed_template_32_40_48);
1436 test_cipher_speed("lrw(twofish)", DECRYPT, sec, NULL, 0,
1437 speed_template_32_40_48);
Jussi Kivilinna131f7542011-10-18 13:33:38 +03001438 test_cipher_speed("xts(twofish)", ENCRYPT, sec, NULL, 0,
1439 speed_template_32_48_64);
1440 test_cipher_speed("xts(twofish)", DECRYPT, sec, NULL, 0,
1441 speed_template_32_48_64);
Harald Welteebfd9bc2005-06-22 13:27:23 -07001442 break;
1443
1444 case 203:
Herbert Xucba83562006-08-13 08:26:09 +10001445 test_cipher_speed("ecb(blowfish)", ENCRYPT, sec, NULL, 0,
Sebastian Siewior477035c2008-03-11 21:24:26 +08001446 speed_template_8_32);
Herbert Xucba83562006-08-13 08:26:09 +10001447 test_cipher_speed("ecb(blowfish)", DECRYPT, sec, NULL, 0,
Sebastian Siewior477035c2008-03-11 21:24:26 +08001448 speed_template_8_32);
Herbert Xucba83562006-08-13 08:26:09 +10001449 test_cipher_speed("cbc(blowfish)", ENCRYPT, sec, NULL, 0,
Sebastian Siewior477035c2008-03-11 21:24:26 +08001450 speed_template_8_32);
Herbert Xucba83562006-08-13 08:26:09 +10001451 test_cipher_speed("cbc(blowfish)", DECRYPT, sec, NULL, 0,
Sebastian Siewior477035c2008-03-11 21:24:26 +08001452 speed_template_8_32);
Jussi Kivilinna7d47b862011-09-02 01:45:17 +03001453 test_cipher_speed("ctr(blowfish)", ENCRYPT, sec, NULL, 0,
1454 speed_template_8_32);
1455 test_cipher_speed("ctr(blowfish)", DECRYPT, sec, NULL, 0,
1456 speed_template_8_32);
Harald Welteebfd9bc2005-06-22 13:27:23 -07001457 break;
1458
1459 case 204:
Herbert Xucba83562006-08-13 08:26:09 +10001460 test_cipher_speed("ecb(des)", ENCRYPT, sec, NULL, 0,
Sebastian Siewior477035c2008-03-11 21:24:26 +08001461 speed_template_8);
Herbert Xucba83562006-08-13 08:26:09 +10001462 test_cipher_speed("ecb(des)", DECRYPT, sec, NULL, 0,
Sebastian Siewior477035c2008-03-11 21:24:26 +08001463 speed_template_8);
Herbert Xucba83562006-08-13 08:26:09 +10001464 test_cipher_speed("cbc(des)", ENCRYPT, sec, NULL, 0,
Sebastian Siewior477035c2008-03-11 21:24:26 +08001465 speed_template_8);
Herbert Xucba83562006-08-13 08:26:09 +10001466 test_cipher_speed("cbc(des)", DECRYPT, sec, NULL, 0,
Sebastian Siewior477035c2008-03-11 21:24:26 +08001467 speed_template_8);
Harald Welteebfd9bc2005-06-22 13:27:23 -07001468 break;
1469
Noriaki TAKAMIYA02ab5a72007-01-24 21:48:19 +11001470 case 205:
1471 test_cipher_speed("ecb(camellia)", ENCRYPT, sec, NULL, 0,
Sebastian Siewior477035c2008-03-11 21:24:26 +08001472 speed_template_16_24_32);
Noriaki TAKAMIYA02ab5a72007-01-24 21:48:19 +11001473 test_cipher_speed("ecb(camellia)", DECRYPT, sec, NULL, 0,
Sebastian Siewior477035c2008-03-11 21:24:26 +08001474 speed_template_16_24_32);
Noriaki TAKAMIYA02ab5a72007-01-24 21:48:19 +11001475 test_cipher_speed("cbc(camellia)", ENCRYPT, sec, NULL, 0,
Sebastian Siewior477035c2008-03-11 21:24:26 +08001476 speed_template_16_24_32);
Noriaki TAKAMIYA02ab5a72007-01-24 21:48:19 +11001477 test_cipher_speed("cbc(camellia)", DECRYPT, sec, NULL, 0,
Sebastian Siewior477035c2008-03-11 21:24:26 +08001478 speed_template_16_24_32);
Jussi Kivilinna4de59332012-03-05 20:26:26 +02001479 test_cipher_speed("ctr(camellia)", ENCRYPT, sec, NULL, 0,
1480 speed_template_16_24_32);
1481 test_cipher_speed("ctr(camellia)", DECRYPT, sec, NULL, 0,
1482 speed_template_16_24_32);
1483 test_cipher_speed("lrw(camellia)", ENCRYPT, sec, NULL, 0,
1484 speed_template_32_40_48);
1485 test_cipher_speed("lrw(camellia)", DECRYPT, sec, NULL, 0,
1486 speed_template_32_40_48);
1487 test_cipher_speed("xts(camellia)", ENCRYPT, sec, NULL, 0,
1488 speed_template_32_48_64);
1489 test_cipher_speed("xts(camellia)", DECRYPT, sec, NULL, 0,
1490 speed_template_32_48_64);
Noriaki TAKAMIYA02ab5a72007-01-24 21:48:19 +11001491 break;
1492
Tan Swee Heng5de8f1b2007-12-07 17:17:43 +08001493 case 206:
1494 test_cipher_speed("salsa20", ENCRYPT, sec, NULL, 0,
Sebastian Siewior477035c2008-03-11 21:24:26 +08001495 speed_template_16_32);
Tan Swee Heng5de8f1b2007-12-07 17:17:43 +08001496 break;
1497
Jussi Kivilinna7fb7fe42011-10-18 00:03:03 +03001498 case 207:
1499 test_cipher_speed("ecb(serpent)", ENCRYPT, sec, NULL, 0,
1500 speed_template_16_32);
1501 test_cipher_speed("ecb(serpent)", DECRYPT, sec, NULL, 0,
1502 speed_template_16_32);
1503 test_cipher_speed("cbc(serpent)", ENCRYPT, sec, NULL, 0,
1504 speed_template_16_32);
1505 test_cipher_speed("cbc(serpent)", DECRYPT, sec, NULL, 0,
1506 speed_template_16_32);
1507 test_cipher_speed("ctr(serpent)", ENCRYPT, sec, NULL, 0,
1508 speed_template_16_32);
1509 test_cipher_speed("ctr(serpent)", DECRYPT, sec, NULL, 0,
1510 speed_template_16_32);
Jussi Kivilinna87aae4b2011-10-18 13:32:39 +03001511 test_cipher_speed("lrw(serpent)", ENCRYPT, sec, NULL, 0,
1512 speed_template_32_48);
1513 test_cipher_speed("lrw(serpent)", DECRYPT, sec, NULL, 0,
1514 speed_template_32_48);
Jussi Kivilinna5209c072011-10-18 13:33:22 +03001515 test_cipher_speed("xts(serpent)", ENCRYPT, sec, NULL, 0,
1516 speed_template_32_64);
1517 test_cipher_speed("xts(serpent)", DECRYPT, sec, NULL, 0,
1518 speed_template_32_64);
Jussi Kivilinna7fb7fe42011-10-18 00:03:03 +03001519 break;
1520
Jussi Kivilinna31b4cd292012-06-12 16:52:04 +08001521 case 208:
1522 test_cipher_speed("ecb(arc4)", ENCRYPT, sec, NULL, 0,
1523 speed_template_8);
1524 break;
1525
Johannes Goetzfrieda2c58262012-07-11 19:37:21 +02001526 case 209:
1527 test_cipher_speed("ecb(cast5)", ENCRYPT, sec, NULL, 0,
1528 speed_template_8_16);
1529 test_cipher_speed("ecb(cast5)", DECRYPT, sec, NULL, 0,
1530 speed_template_8_16);
1531 test_cipher_speed("cbc(cast5)", ENCRYPT, sec, NULL, 0,
1532 speed_template_8_16);
1533 test_cipher_speed("cbc(cast5)", DECRYPT, sec, NULL, 0,
1534 speed_template_8_16);
1535 test_cipher_speed("ctr(cast5)", ENCRYPT, sec, NULL, 0,
1536 speed_template_8_16);
1537 test_cipher_speed("ctr(cast5)", DECRYPT, sec, NULL, 0,
1538 speed_template_8_16);
1539 break;
1540
Johannes Goetzfried9b8b0402012-07-11 19:38:29 +02001541 case 210:
1542 test_cipher_speed("ecb(cast6)", ENCRYPT, sec, NULL, 0,
1543 speed_template_16_32);
1544 test_cipher_speed("ecb(cast6)", DECRYPT, sec, NULL, 0,
1545 speed_template_16_32);
1546 test_cipher_speed("cbc(cast6)", ENCRYPT, sec, NULL, 0,
1547 speed_template_16_32);
1548 test_cipher_speed("cbc(cast6)", DECRYPT, sec, NULL, 0,
1549 speed_template_16_32);
1550 test_cipher_speed("ctr(cast6)", ENCRYPT, sec, NULL, 0,
1551 speed_template_16_32);
1552 test_cipher_speed("ctr(cast6)", DECRYPT, sec, NULL, 0,
1553 speed_template_16_32);
1554 test_cipher_speed("lrw(cast6)", ENCRYPT, sec, NULL, 0,
1555 speed_template_32_48);
1556 test_cipher_speed("lrw(cast6)", DECRYPT, sec, NULL, 0,
1557 speed_template_32_48);
1558 test_cipher_speed("xts(cast6)", ENCRYPT, sec, NULL, 0,
1559 speed_template_32_64);
1560 test_cipher_speed("xts(cast6)", DECRYPT, sec, NULL, 0,
1561 speed_template_32_64);
1562 break;
1563
Tim Chen53f52d72013-12-11 14:28:47 -08001564 case 211:
1565 test_aead_speed("rfc4106(gcm(aes))", ENCRYPT, sec,
Herbert Xu34a1c742015-07-09 07:17:26 +08001566 NULL, 0, 16, 16, aead_speed_template_20);
Vutla, Lokesh1425d2d2015-07-07 21:01:49 +05301567 test_aead_speed("gcm(aes)", ENCRYPT, sec,
Cyrille Pitchenf18611d2015-11-17 13:37:10 +01001568 NULL, 0, 16, 8, speed_template_16_24_32);
Tim Chen53f52d72013-12-11 14:28:47 -08001569 break;
1570
Herbert Xu4e4aab62015-06-17 14:04:21 +08001571 case 212:
1572 test_aead_speed("rfc4309(ccm(aes))", ENCRYPT, sec,
Herbert Xu34a1c742015-07-09 07:17:26 +08001573 NULL, 0, 16, 16, aead_speed_template_19);
Herbert Xu4e4aab62015-06-17 14:04:21 +08001574 break;
1575
Martin Willi2dce0632015-07-16 19:13:59 +02001576 case 213:
1577 test_aead_speed("rfc7539esp(chacha20,poly1305)", ENCRYPT, sec,
1578 NULL, 0, 16, 8, aead_speed_template_36);
1579 break;
1580
1581 case 214:
1582 test_cipher_speed("chacha20", ENCRYPT, sec, NULL, 0,
1583 speed_template_32);
1584 break;
1585
Michal Ludvige8057922006-05-30 22:04:19 +10001586 case 300:
Herbert Xu86068132014-12-04 16:43:29 +08001587 if (alg) {
1588 test_hash_speed(alg, sec, generic_hash_speed_template);
1589 break;
1590 }
Michal Ludvige8057922006-05-30 22:04:19 +10001591 /* fall through */
Michal Ludvige8057922006-05-30 22:04:19 +10001592 case 301:
Herbert Xue9d41162006-08-19 21:38:49 +10001593 test_hash_speed("md4", sec, generic_hash_speed_template);
Michal Ludvige8057922006-05-30 22:04:19 +10001594 if (mode > 300 && mode < 400) break;
Gustavo A. R. Silva59517222017-10-09 14:43:21 -05001595 /* fall through */
Michal Ludvige8057922006-05-30 22:04:19 +10001596 case 302:
Herbert Xue9d41162006-08-19 21:38:49 +10001597 test_hash_speed("md5", sec, generic_hash_speed_template);
Michal Ludvige8057922006-05-30 22:04:19 +10001598 if (mode > 300 && mode < 400) break;
Gustavo A. R. Silva59517222017-10-09 14:43:21 -05001599 /* fall through */
Michal Ludvige8057922006-05-30 22:04:19 +10001600 case 303:
Herbert Xue9d41162006-08-19 21:38:49 +10001601 test_hash_speed("sha1", sec, generic_hash_speed_template);
Michal Ludvige8057922006-05-30 22:04:19 +10001602 if (mode > 300 && mode < 400) break;
Gustavo A. R. Silva59517222017-10-09 14:43:21 -05001603 /* fall through */
Michal Ludvige8057922006-05-30 22:04:19 +10001604 case 304:
Herbert Xue9d41162006-08-19 21:38:49 +10001605 test_hash_speed("sha256", sec, generic_hash_speed_template);
Michal Ludvige8057922006-05-30 22:04:19 +10001606 if (mode > 300 && mode < 400) break;
Gustavo A. R. Silva59517222017-10-09 14:43:21 -05001607 /* fall through */
Michal Ludvige8057922006-05-30 22:04:19 +10001608 case 305:
Herbert Xue9d41162006-08-19 21:38:49 +10001609 test_hash_speed("sha384", sec, generic_hash_speed_template);
Michal Ludvige8057922006-05-30 22:04:19 +10001610 if (mode > 300 && mode < 400) break;
Gustavo A. R. Silva59517222017-10-09 14:43:21 -05001611 /* fall through */
Michal Ludvige8057922006-05-30 22:04:19 +10001612 case 306:
Herbert Xue9d41162006-08-19 21:38:49 +10001613 test_hash_speed("sha512", sec, generic_hash_speed_template);
Michal Ludvige8057922006-05-30 22:04:19 +10001614 if (mode > 300 && mode < 400) break;
Gustavo A. R. Silva59517222017-10-09 14:43:21 -05001615 /* fall through */
Michal Ludvige8057922006-05-30 22:04:19 +10001616 case 307:
Herbert Xue9d41162006-08-19 21:38:49 +10001617 test_hash_speed("wp256", sec, generic_hash_speed_template);
Michal Ludvige8057922006-05-30 22:04:19 +10001618 if (mode > 300 && mode < 400) break;
Gustavo A. R. Silva59517222017-10-09 14:43:21 -05001619 /* fall through */
Michal Ludvige8057922006-05-30 22:04:19 +10001620 case 308:
Herbert Xue9d41162006-08-19 21:38:49 +10001621 test_hash_speed("wp384", sec, generic_hash_speed_template);
Michal Ludvige8057922006-05-30 22:04:19 +10001622 if (mode > 300 && mode < 400) break;
Gustavo A. R. Silva59517222017-10-09 14:43:21 -05001623 /* fall through */
Michal Ludvige8057922006-05-30 22:04:19 +10001624 case 309:
Herbert Xue9d41162006-08-19 21:38:49 +10001625 test_hash_speed("wp512", sec, generic_hash_speed_template);
Michal Ludvige8057922006-05-30 22:04:19 +10001626 if (mode > 300 && mode < 400) break;
Gustavo A. R. Silva59517222017-10-09 14:43:21 -05001627 /* fall through */
Michal Ludvige8057922006-05-30 22:04:19 +10001628 case 310:
Herbert Xue9d41162006-08-19 21:38:49 +10001629 test_hash_speed("tgr128", sec, generic_hash_speed_template);
Michal Ludvige8057922006-05-30 22:04:19 +10001630 if (mode > 300 && mode < 400) break;
Gustavo A. R. Silva59517222017-10-09 14:43:21 -05001631 /* fall through */
Michal Ludvige8057922006-05-30 22:04:19 +10001632 case 311:
Herbert Xue9d41162006-08-19 21:38:49 +10001633 test_hash_speed("tgr160", sec, generic_hash_speed_template);
Michal Ludvige8057922006-05-30 22:04:19 +10001634 if (mode > 300 && mode < 400) break;
Gustavo A. R. Silva59517222017-10-09 14:43:21 -05001635 /* fall through */
Michal Ludvige8057922006-05-30 22:04:19 +10001636 case 312:
Herbert Xue9d41162006-08-19 21:38:49 +10001637 test_hash_speed("tgr192", sec, generic_hash_speed_template);
Michal Ludvige8057922006-05-30 22:04:19 +10001638 if (mode > 300 && mode < 400) break;
Gustavo A. R. Silva59517222017-10-09 14:43:21 -05001639 /* fall through */
Jonathan Lynchcd12fb92007-11-10 20:08:25 +08001640 case 313:
1641 test_hash_speed("sha224", sec, generic_hash_speed_template);
1642 if (mode > 300 && mode < 400) break;
Gustavo A. R. Silva59517222017-10-09 14:43:21 -05001643 /* fall through */
Adrian-Ken Rueegseggerfd4adf12008-05-07 22:16:36 +08001644 case 314:
1645 test_hash_speed("rmd128", sec, generic_hash_speed_template);
1646 if (mode > 300 && mode < 400) break;
Gustavo A. R. Silva59517222017-10-09 14:43:21 -05001647 /* fall through */
Adrian-Ken Rueegseggerfd4adf12008-05-07 22:16:36 +08001648 case 315:
1649 test_hash_speed("rmd160", sec, generic_hash_speed_template);
1650 if (mode > 300 && mode < 400) break;
Gustavo A. R. Silva59517222017-10-09 14:43:21 -05001651 /* fall through */
Adrian-Ken Rueegsegger2998db32008-05-09 21:29:35 +08001652 case 316:
1653 test_hash_speed("rmd256", sec, generic_hash_speed_template);
1654 if (mode > 300 && mode < 400) break;
Gustavo A. R. Silva59517222017-10-09 14:43:21 -05001655 /* fall through */
Adrian-Ken Rueegsegger2998db32008-05-09 21:29:35 +08001656 case 317:
1657 test_hash_speed("rmd320", sec, generic_hash_speed_template);
1658 if (mode > 300 && mode < 400) break;
Gustavo A. R. Silva59517222017-10-09 14:43:21 -05001659 /* fall through */
Huang Ying18bcc912010-03-10 18:30:32 +08001660 case 318:
1661 test_hash_speed("ghash-generic", sec, hash_speed_template_16);
1662 if (mode > 300 && mode < 400) break;
Gustavo A. R. Silva59517222017-10-09 14:43:21 -05001663 /* fall through */
Tim Chene3899e42012-09-27 15:44:24 -07001664 case 319:
1665 test_hash_speed("crc32c", sec, generic_hash_speed_template);
1666 if (mode > 300 && mode < 400) break;
Gustavo A. R. Silva59517222017-10-09 14:43:21 -05001667 /* fall through */
Herbert Xu684115212013-09-07 12:56:26 +10001668 case 320:
1669 test_hash_speed("crct10dif", sec, generic_hash_speed_template);
1670 if (mode > 300 && mode < 400) break;
Gustavo A. R. Silva59517222017-10-09 14:43:21 -05001671 /* fall through */
Martin Willi2dce0632015-07-16 19:13:59 +02001672 case 321:
1673 test_hash_speed("poly1305", sec, poly1305_speed_template);
1674 if (mode > 300 && mode < 400) break;
Gustavo A. R. Silva59517222017-10-09 14:43:21 -05001675 /* fall through */
raveendra padasalagi79cc6ab2016-06-17 10:30:36 +05301676 case 322:
1677 test_hash_speed("sha3-224", sec, generic_hash_speed_template);
1678 if (mode > 300 && mode < 400) break;
Gustavo A. R. Silva59517222017-10-09 14:43:21 -05001679 /* fall through */
raveendra padasalagi79cc6ab2016-06-17 10:30:36 +05301680 case 323:
1681 test_hash_speed("sha3-256", sec, generic_hash_speed_template);
1682 if (mode > 300 && mode < 400) break;
Gustavo A. R. Silva59517222017-10-09 14:43:21 -05001683 /* fall through */
raveendra padasalagi79cc6ab2016-06-17 10:30:36 +05301684 case 324:
1685 test_hash_speed("sha3-384", sec, generic_hash_speed_template);
1686 if (mode > 300 && mode < 400) break;
Gustavo A. R. Silva59517222017-10-09 14:43:21 -05001687 /* fall through */
raveendra padasalagi79cc6ab2016-06-17 10:30:36 +05301688 case 325:
1689 test_hash_speed("sha3-512", sec, generic_hash_speed_template);
1690 if (mode > 300 && mode < 400) break;
Gustavo A. R. Silva59517222017-10-09 14:43:21 -05001691 /* fall through */
Gilad Ben-Yossefb7e27532017-08-21 13:51:29 +03001692 case 326:
1693 test_hash_speed("sm3", sec, generic_hash_speed_template);
1694 if (mode > 300 && mode < 400) break;
Gustavo A. R. Silva59517222017-10-09 14:43:21 -05001695 /* fall through */
Michal Ludvige8057922006-05-30 22:04:19 +10001696 case 399:
1697 break;
1698
David S. Millerbeb63da72010-05-19 14:11:21 +10001699 case 400:
Herbert Xu86068132014-12-04 16:43:29 +08001700 if (alg) {
1701 test_ahash_speed(alg, sec, generic_hash_speed_template);
1702 break;
1703 }
David S. Millerbeb63da72010-05-19 14:11:21 +10001704 /* fall through */
David S. Millerbeb63da72010-05-19 14:11:21 +10001705 case 401:
1706 test_ahash_speed("md4", sec, generic_hash_speed_template);
1707 if (mode > 400 && mode < 500) break;
Gustavo A. R. Silva59517222017-10-09 14:43:21 -05001708 /* fall through */
David S. Millerbeb63da72010-05-19 14:11:21 +10001709 case 402:
1710 test_ahash_speed("md5", sec, generic_hash_speed_template);
1711 if (mode > 400 && mode < 500) break;
Gustavo A. R. Silva59517222017-10-09 14:43:21 -05001712 /* fall through */
David S. Millerbeb63da72010-05-19 14:11:21 +10001713 case 403:
1714 test_ahash_speed("sha1", sec, generic_hash_speed_template);
1715 if (mode > 400 && mode < 500) break;
Gustavo A. R. Silva59517222017-10-09 14:43:21 -05001716 /* fall through */
David S. Millerbeb63da72010-05-19 14:11:21 +10001717 case 404:
1718 test_ahash_speed("sha256", sec, generic_hash_speed_template);
1719 if (mode > 400 && mode < 500) break;
Gustavo A. R. Silva59517222017-10-09 14:43:21 -05001720 /* fall through */
David S. Millerbeb63da72010-05-19 14:11:21 +10001721 case 405:
1722 test_ahash_speed("sha384", sec, generic_hash_speed_template);
1723 if (mode > 400 && mode < 500) break;
Gustavo A. R. Silva59517222017-10-09 14:43:21 -05001724 /* fall through */
David S. Millerbeb63da72010-05-19 14:11:21 +10001725 case 406:
1726 test_ahash_speed("sha512", sec, generic_hash_speed_template);
1727 if (mode > 400 && mode < 500) break;
Gustavo A. R. Silva59517222017-10-09 14:43:21 -05001728 /* fall through */
David S. Millerbeb63da72010-05-19 14:11:21 +10001729 case 407:
1730 test_ahash_speed("wp256", sec, generic_hash_speed_template);
1731 if (mode > 400 && mode < 500) break;
Gustavo A. R. Silva59517222017-10-09 14:43:21 -05001732 /* fall through */
David S. Millerbeb63da72010-05-19 14:11:21 +10001733 case 408:
1734 test_ahash_speed("wp384", sec, generic_hash_speed_template);
1735 if (mode > 400 && mode < 500) break;
Gustavo A. R. Silva59517222017-10-09 14:43:21 -05001736 /* fall through */
David S. Millerbeb63da72010-05-19 14:11:21 +10001737 case 409:
1738 test_ahash_speed("wp512", sec, generic_hash_speed_template);
1739 if (mode > 400 && mode < 500) break;
Gustavo A. R. Silva59517222017-10-09 14:43:21 -05001740 /* fall through */
David S. Millerbeb63da72010-05-19 14:11:21 +10001741 case 410:
1742 test_ahash_speed("tgr128", sec, generic_hash_speed_template);
1743 if (mode > 400 && mode < 500) break;
Gustavo A. R. Silva59517222017-10-09 14:43:21 -05001744 /* fall through */
David S. Millerbeb63da72010-05-19 14:11:21 +10001745 case 411:
1746 test_ahash_speed("tgr160", sec, generic_hash_speed_template);
1747 if (mode > 400 && mode < 500) break;
Gustavo A. R. Silva59517222017-10-09 14:43:21 -05001748 /* fall through */
David S. Millerbeb63da72010-05-19 14:11:21 +10001749 case 412:
1750 test_ahash_speed("tgr192", sec, generic_hash_speed_template);
1751 if (mode > 400 && mode < 500) break;
Gustavo A. R. Silva59517222017-10-09 14:43:21 -05001752 /* fall through */
David S. Millerbeb63da72010-05-19 14:11:21 +10001753 case 413:
1754 test_ahash_speed("sha224", sec, generic_hash_speed_template);
1755 if (mode > 400 && mode < 500) break;
Gustavo A. R. Silva59517222017-10-09 14:43:21 -05001756 /* fall through */
David S. Millerbeb63da72010-05-19 14:11:21 +10001757 case 414:
1758 test_ahash_speed("rmd128", sec, generic_hash_speed_template);
1759 if (mode > 400 && mode < 500) break;
Gustavo A. R. Silva59517222017-10-09 14:43:21 -05001760 /* fall through */
David S. Millerbeb63da72010-05-19 14:11:21 +10001761 case 415:
1762 test_ahash_speed("rmd160", sec, generic_hash_speed_template);
1763 if (mode > 400 && mode < 500) break;
Gustavo A. R. Silva59517222017-10-09 14:43:21 -05001764 /* fall through */
David S. Millerbeb63da72010-05-19 14:11:21 +10001765 case 416:
1766 test_ahash_speed("rmd256", sec, generic_hash_speed_template);
1767 if (mode > 400 && mode < 500) break;
Gustavo A. R. Silva59517222017-10-09 14:43:21 -05001768 /* fall through */
David S. Millerbeb63da72010-05-19 14:11:21 +10001769 case 417:
1770 test_ahash_speed("rmd320", sec, generic_hash_speed_template);
1771 if (mode > 400 && mode < 500) break;
Gustavo A. R. Silva59517222017-10-09 14:43:21 -05001772 /* fall through */
raveendra padasalagi79cc6ab2016-06-17 10:30:36 +05301773 case 418:
1774 test_ahash_speed("sha3-224", sec, generic_hash_speed_template);
1775 if (mode > 400 && mode < 500) break;
Gustavo A. R. Silva59517222017-10-09 14:43:21 -05001776 /* fall through */
raveendra padasalagi79cc6ab2016-06-17 10:30:36 +05301777 case 419:
1778 test_ahash_speed("sha3-256", sec, generic_hash_speed_template);
1779 if (mode > 400 && mode < 500) break;
Gustavo A. R. Silva59517222017-10-09 14:43:21 -05001780 /* fall through */
raveendra padasalagi79cc6ab2016-06-17 10:30:36 +05301781 case 420:
1782 test_ahash_speed("sha3-384", sec, generic_hash_speed_template);
1783 if (mode > 400 && mode < 500) break;
Gustavo A. R. Silva59517222017-10-09 14:43:21 -05001784 /* fall through */
raveendra padasalagi79cc6ab2016-06-17 10:30:36 +05301785 case 421:
1786 test_ahash_speed("sha3-512", sec, generic_hash_speed_template);
1787 if (mode > 400 && mode < 500) break;
Gustavo A. R. Silva59517222017-10-09 14:43:21 -05001788 /* fall through */
Megha Dey087bcd22016-06-23 18:40:47 -07001789 case 422:
1790 test_mb_ahash_speed("sha1", sec, generic_hash_speed_template);
1791 if (mode > 400 && mode < 500) break;
Gustavo A. R. Silva59517222017-10-09 14:43:21 -05001792 /* fall through */
Megha Dey087bcd22016-06-23 18:40:47 -07001793 case 423:
1794 test_mb_ahash_speed("sha256", sec, generic_hash_speed_template);
1795 if (mode > 400 && mode < 500) break;
Gustavo A. R. Silva59517222017-10-09 14:43:21 -05001796 /* fall through */
Megha Dey14009c42016-06-27 10:20:09 -07001797 case 424:
1798 test_mb_ahash_speed("sha512", sec, generic_hash_speed_template);
1799 if (mode > 400 && mode < 500) break;
Gustavo A. R. Silva59517222017-10-09 14:43:21 -05001800 /* fall through */
Gilad Ben-Yossefb7e27532017-08-21 13:51:29 +03001801 case 425:
1802 test_mb_ahash_speed("sm3", sec, generic_hash_speed_template);
1803 if (mode > 400 && mode < 500) break;
Gustavo A. R. Silva59517222017-10-09 14:43:21 -05001804 /* fall through */
David S. Millerbeb63da72010-05-19 14:11:21 +10001805 case 499:
1806 break;
1807
Jussi Kivilinna3f3baf32011-10-18 00:02:58 +03001808 case 500:
1809 test_acipher_speed("ecb(aes)", ENCRYPT, sec, NULL, 0,
1810 speed_template_16_24_32);
1811 test_acipher_speed("ecb(aes)", DECRYPT, sec, NULL, 0,
1812 speed_template_16_24_32);
1813 test_acipher_speed("cbc(aes)", ENCRYPT, sec, NULL, 0,
1814 speed_template_16_24_32);
1815 test_acipher_speed("cbc(aes)", DECRYPT, sec, NULL, 0,
1816 speed_template_16_24_32);
1817 test_acipher_speed("lrw(aes)", ENCRYPT, sec, NULL, 0,
1818 speed_template_32_40_48);
1819 test_acipher_speed("lrw(aes)", DECRYPT, sec, NULL, 0,
1820 speed_template_32_40_48);
1821 test_acipher_speed("xts(aes)", ENCRYPT, sec, NULL, 0,
Horia Geantăb66ad0b2017-07-19 19:40:32 +03001822 speed_template_32_64);
Jussi Kivilinna3f3baf32011-10-18 00:02:58 +03001823 test_acipher_speed("xts(aes)", DECRYPT, sec, NULL, 0,
Horia Geantăb66ad0b2017-07-19 19:40:32 +03001824 speed_template_32_64);
Herbert Xu1503a242016-06-29 18:04:14 +08001825 test_acipher_speed("cts(cbc(aes))", ENCRYPT, sec, NULL, 0,
1826 speed_template_16_24_32);
1827 test_acipher_speed("cts(cbc(aes))", DECRYPT, sec, NULL, 0,
1828 speed_template_16_24_32);
Jussi Kivilinna3f3baf32011-10-18 00:02:58 +03001829 test_acipher_speed("ctr(aes)", ENCRYPT, sec, NULL, 0,
1830 speed_template_16_24_32);
1831 test_acipher_speed("ctr(aes)", DECRYPT, sec, NULL, 0,
1832 speed_template_16_24_32);
Nicolas Royerde1975332012-07-01 19:19:47 +02001833 test_acipher_speed("cfb(aes)", ENCRYPT, sec, NULL, 0,
1834 speed_template_16_24_32);
1835 test_acipher_speed("cfb(aes)", DECRYPT, sec, NULL, 0,
1836 speed_template_16_24_32);
1837 test_acipher_speed("ofb(aes)", ENCRYPT, sec, NULL, 0,
1838 speed_template_16_24_32);
1839 test_acipher_speed("ofb(aes)", DECRYPT, sec, NULL, 0,
1840 speed_template_16_24_32);
Jussi Kivilinna69d31502012-12-28 12:04:58 +02001841 test_acipher_speed("rfc3686(ctr(aes))", ENCRYPT, sec, NULL, 0,
1842 speed_template_20_28_36);
1843 test_acipher_speed("rfc3686(ctr(aes))", DECRYPT, sec, NULL, 0,
1844 speed_template_20_28_36);
Jussi Kivilinna3f3baf32011-10-18 00:02:58 +03001845 break;
1846
1847 case 501:
1848 test_acipher_speed("ecb(des3_ede)", ENCRYPT, sec,
1849 des3_speed_template, DES3_SPEED_VECTORS,
1850 speed_template_24);
1851 test_acipher_speed("ecb(des3_ede)", DECRYPT, sec,
1852 des3_speed_template, DES3_SPEED_VECTORS,
1853 speed_template_24);
1854 test_acipher_speed("cbc(des3_ede)", ENCRYPT, sec,
1855 des3_speed_template, DES3_SPEED_VECTORS,
1856 speed_template_24);
1857 test_acipher_speed("cbc(des3_ede)", DECRYPT, sec,
1858 des3_speed_template, DES3_SPEED_VECTORS,
1859 speed_template_24);
Nicolas Royerde1975332012-07-01 19:19:47 +02001860 test_acipher_speed("cfb(des3_ede)", ENCRYPT, sec,
1861 des3_speed_template, DES3_SPEED_VECTORS,
1862 speed_template_24);
1863 test_acipher_speed("cfb(des3_ede)", DECRYPT, sec,
1864 des3_speed_template, DES3_SPEED_VECTORS,
1865 speed_template_24);
1866 test_acipher_speed("ofb(des3_ede)", ENCRYPT, sec,
1867 des3_speed_template, DES3_SPEED_VECTORS,
1868 speed_template_24);
1869 test_acipher_speed("ofb(des3_ede)", DECRYPT, sec,
1870 des3_speed_template, DES3_SPEED_VECTORS,
1871 speed_template_24);
Jussi Kivilinna3f3baf32011-10-18 00:02:58 +03001872 break;
1873
1874 case 502:
1875 test_acipher_speed("ecb(des)", ENCRYPT, sec, NULL, 0,
1876 speed_template_8);
1877 test_acipher_speed("ecb(des)", DECRYPT, sec, NULL, 0,
1878 speed_template_8);
1879 test_acipher_speed("cbc(des)", ENCRYPT, sec, NULL, 0,
1880 speed_template_8);
1881 test_acipher_speed("cbc(des)", DECRYPT, sec, NULL, 0,
1882 speed_template_8);
Nicolas Royerde1975332012-07-01 19:19:47 +02001883 test_acipher_speed("cfb(des)", ENCRYPT, sec, NULL, 0,
1884 speed_template_8);
1885 test_acipher_speed("cfb(des)", DECRYPT, sec, NULL, 0,
1886 speed_template_8);
1887 test_acipher_speed("ofb(des)", ENCRYPT, sec, NULL, 0,
1888 speed_template_8);
1889 test_acipher_speed("ofb(des)", DECRYPT, sec, NULL, 0,
1890 speed_template_8);
Jussi Kivilinna3f3baf32011-10-18 00:02:58 +03001891 break;
1892
Jussi Kivilinna7fb7fe42011-10-18 00:03:03 +03001893 case 503:
1894 test_acipher_speed("ecb(serpent)", ENCRYPT, sec, NULL, 0,
1895 speed_template_16_32);
1896 test_acipher_speed("ecb(serpent)", DECRYPT, sec, NULL, 0,
1897 speed_template_16_32);
1898 test_acipher_speed("cbc(serpent)", ENCRYPT, sec, NULL, 0,
1899 speed_template_16_32);
1900 test_acipher_speed("cbc(serpent)", DECRYPT, sec, NULL, 0,
1901 speed_template_16_32);
1902 test_acipher_speed("ctr(serpent)", ENCRYPT, sec, NULL, 0,
1903 speed_template_16_32);
1904 test_acipher_speed("ctr(serpent)", DECRYPT, sec, NULL, 0,
1905 speed_template_16_32);
Jussi Kivilinna87aae4b2011-10-18 13:32:39 +03001906 test_acipher_speed("lrw(serpent)", ENCRYPT, sec, NULL, 0,
1907 speed_template_32_48);
1908 test_acipher_speed("lrw(serpent)", DECRYPT, sec, NULL, 0,
1909 speed_template_32_48);
Jussi Kivilinna5209c072011-10-18 13:33:22 +03001910 test_acipher_speed("xts(serpent)", ENCRYPT, sec, NULL, 0,
1911 speed_template_32_64);
1912 test_acipher_speed("xts(serpent)", DECRYPT, sec, NULL, 0,
1913 speed_template_32_64);
Jussi Kivilinna7fb7fe42011-10-18 00:03:03 +03001914 break;
1915
Johannes Goetzfried107778b52012-05-28 15:54:24 +02001916 case 504:
1917 test_acipher_speed("ecb(twofish)", ENCRYPT, sec, NULL, 0,
1918 speed_template_16_24_32);
1919 test_acipher_speed("ecb(twofish)", DECRYPT, sec, NULL, 0,
1920 speed_template_16_24_32);
1921 test_acipher_speed("cbc(twofish)", ENCRYPT, sec, NULL, 0,
1922 speed_template_16_24_32);
1923 test_acipher_speed("cbc(twofish)", DECRYPT, sec, NULL, 0,
1924 speed_template_16_24_32);
1925 test_acipher_speed("ctr(twofish)", ENCRYPT, sec, NULL, 0,
1926 speed_template_16_24_32);
1927 test_acipher_speed("ctr(twofish)", DECRYPT, sec, NULL, 0,
1928 speed_template_16_24_32);
1929 test_acipher_speed("lrw(twofish)", ENCRYPT, sec, NULL, 0,
1930 speed_template_32_40_48);
1931 test_acipher_speed("lrw(twofish)", DECRYPT, sec, NULL, 0,
1932 speed_template_32_40_48);
1933 test_acipher_speed("xts(twofish)", ENCRYPT, sec, NULL, 0,
1934 speed_template_32_48_64);
1935 test_acipher_speed("xts(twofish)", DECRYPT, sec, NULL, 0,
1936 speed_template_32_48_64);
1937 break;
1938
Jussi Kivilinna31b4cd292012-06-12 16:52:04 +08001939 case 505:
1940 test_acipher_speed("ecb(arc4)", ENCRYPT, sec, NULL, 0,
1941 speed_template_8);
1942 break;
1943
Johannes Goetzfrieda2c58262012-07-11 19:37:21 +02001944 case 506:
1945 test_acipher_speed("ecb(cast5)", ENCRYPT, sec, NULL, 0,
1946 speed_template_8_16);
1947 test_acipher_speed("ecb(cast5)", DECRYPT, sec, NULL, 0,
1948 speed_template_8_16);
1949 test_acipher_speed("cbc(cast5)", ENCRYPT, sec, NULL, 0,
1950 speed_template_8_16);
1951 test_acipher_speed("cbc(cast5)", DECRYPT, sec, NULL, 0,
1952 speed_template_8_16);
1953 test_acipher_speed("ctr(cast5)", ENCRYPT, sec, NULL, 0,
1954 speed_template_8_16);
1955 test_acipher_speed("ctr(cast5)", DECRYPT, sec, NULL, 0,
1956 speed_template_8_16);
1957 break;
1958
Johannes Goetzfried9b8b0402012-07-11 19:38:29 +02001959 case 507:
1960 test_acipher_speed("ecb(cast6)", ENCRYPT, sec, NULL, 0,
1961 speed_template_16_32);
1962 test_acipher_speed("ecb(cast6)", DECRYPT, sec, NULL, 0,
1963 speed_template_16_32);
1964 test_acipher_speed("cbc(cast6)", ENCRYPT, sec, NULL, 0,
1965 speed_template_16_32);
1966 test_acipher_speed("cbc(cast6)", DECRYPT, sec, NULL, 0,
1967 speed_template_16_32);
1968 test_acipher_speed("ctr(cast6)", ENCRYPT, sec, NULL, 0,
1969 speed_template_16_32);
1970 test_acipher_speed("ctr(cast6)", DECRYPT, sec, NULL, 0,
1971 speed_template_16_32);
1972 test_acipher_speed("lrw(cast6)", ENCRYPT, sec, NULL, 0,
1973 speed_template_32_48);
1974 test_acipher_speed("lrw(cast6)", DECRYPT, sec, NULL, 0,
1975 speed_template_32_48);
1976 test_acipher_speed("xts(cast6)", ENCRYPT, sec, NULL, 0,
1977 speed_template_32_64);
1978 test_acipher_speed("xts(cast6)", DECRYPT, sec, NULL, 0,
1979 speed_template_32_64);
1980 break;
1981
Jussi Kivilinnabf9c5182012-10-26 14:48:51 +03001982 case 508:
1983 test_acipher_speed("ecb(camellia)", ENCRYPT, sec, NULL, 0,
1984 speed_template_16_32);
1985 test_acipher_speed("ecb(camellia)", DECRYPT, sec, NULL, 0,
1986 speed_template_16_32);
1987 test_acipher_speed("cbc(camellia)", ENCRYPT, sec, NULL, 0,
1988 speed_template_16_32);
1989 test_acipher_speed("cbc(camellia)", DECRYPT, sec, NULL, 0,
1990 speed_template_16_32);
1991 test_acipher_speed("ctr(camellia)", ENCRYPT, sec, NULL, 0,
1992 speed_template_16_32);
1993 test_acipher_speed("ctr(camellia)", DECRYPT, sec, NULL, 0,
1994 speed_template_16_32);
1995 test_acipher_speed("lrw(camellia)", ENCRYPT, sec, NULL, 0,
1996 speed_template_32_48);
1997 test_acipher_speed("lrw(camellia)", DECRYPT, sec, NULL, 0,
1998 speed_template_32_48);
1999 test_acipher_speed("xts(camellia)", ENCRYPT, sec, NULL, 0,
2000 speed_template_32_64);
2001 test_acipher_speed("xts(camellia)", DECRYPT, sec, NULL, 0,
2002 speed_template_32_64);
2003 break;
2004
Jussi Kivilinnaad8b7c32013-04-13 13:46:40 +03002005 case 509:
2006 test_acipher_speed("ecb(blowfish)", ENCRYPT, sec, NULL, 0,
2007 speed_template_8_32);
2008 test_acipher_speed("ecb(blowfish)", DECRYPT, sec, NULL, 0,
2009 speed_template_8_32);
2010 test_acipher_speed("cbc(blowfish)", ENCRYPT, sec, NULL, 0,
2011 speed_template_8_32);
2012 test_acipher_speed("cbc(blowfish)", DECRYPT, sec, NULL, 0,
2013 speed_template_8_32);
2014 test_acipher_speed("ctr(blowfish)", ENCRYPT, sec, NULL, 0,
2015 speed_template_8_32);
2016 test_acipher_speed("ctr(blowfish)", DECRYPT, sec, NULL, 0,
2017 speed_template_8_32);
2018 break;
2019
Linus Torvalds1da177e2005-04-16 15:20:36 -07002020 case 1000:
2021 test_available();
2022 break;
Linus Torvalds1da177e2005-04-16 15:20:36 -07002023 }
Jarod Wilson4e033a62009-05-27 15:10:21 +10002024
2025 return ret;
Linus Torvalds1da177e2005-04-16 15:20:36 -07002026}
2027
Kamalesh Babulal3af5b902008-04-05 21:00:57 +08002028static int __init tcrypt_mod_init(void)
Linus Torvalds1da177e2005-04-16 15:20:36 -07002029{
Mikko Herranene3a4ea42007-11-26 22:12:07 +08002030 int err = -ENOMEM;
Herbert Xuf139cfa2008-07-31 12:23:53 +08002031 int i;
Mikko Herranene3a4ea42007-11-26 22:12:07 +08002032
Herbert Xuf139cfa2008-07-31 12:23:53 +08002033 for (i = 0; i < TVMEMSIZE; i++) {
2034 tvmem[i] = (void *)__get_free_page(GFP_KERNEL);
2035 if (!tvmem[i])
2036 goto err_free_tv;
2037 }
Linus Torvalds1da177e2005-04-16 15:20:36 -07002038
Herbert Xu86068132014-12-04 16:43:29 +08002039 err = do_test(alg, type, mask, mode);
Steffen Klasserta873a5f2009-06-19 19:46:53 +08002040
Jarod Wilson4e033a62009-05-27 15:10:21 +10002041 if (err) {
2042 printk(KERN_ERR "tcrypt: one or more tests failed!\n");
2043 goto err_free_tv;
Rabin Vincent76512f22017-01-18 14:54:05 +01002044 } else {
2045 pr_debug("all tests passed\n");
Jarod Wilson4e033a62009-05-27 15:10:21 +10002046 }
Linus Torvalds1da177e2005-04-16 15:20:36 -07002047
Jarod Wilson4e033a62009-05-27 15:10:21 +10002048 /* We intentionaly return -EAGAIN to prevent keeping the module,
2049 * unless we're running in fips mode. It does all its work from
2050 * init() and doesn't offer any runtime functionality, but in
2051 * the fips case, checking for a successful load is helpful.
Michal Ludvig14fdf472006-05-30 14:49:38 +10002052 * => we don't need it in the memory, do we?
2053 * -- mludvig
2054 */
Jarod Wilson4e033a62009-05-27 15:10:21 +10002055 if (!fips_enabled)
2056 err = -EAGAIN;
Mikko Herranene3a4ea42007-11-26 22:12:07 +08002057
Herbert Xuf139cfa2008-07-31 12:23:53 +08002058err_free_tv:
2059 for (i = 0; i < TVMEMSIZE && tvmem[i]; i++)
2060 free_page((unsigned long)tvmem[i]);
Mikko Herranene3a4ea42007-11-26 22:12:07 +08002061
2062 return err;
Linus Torvalds1da177e2005-04-16 15:20:36 -07002063}
2064
2065/*
2066 * If an init function is provided, an exit function must also be provided
2067 * to allow module unload.
2068 */
Kamalesh Babulal3af5b902008-04-05 21:00:57 +08002069static void __exit tcrypt_mod_fini(void) { }
Linus Torvalds1da177e2005-04-16 15:20:36 -07002070
Kamalesh Babulal3af5b902008-04-05 21:00:57 +08002071module_init(tcrypt_mod_init);
2072module_exit(tcrypt_mod_fini);
Linus Torvalds1da177e2005-04-16 15:20:36 -07002073
Steffen Klasserta873a5f2009-06-19 19:46:53 +08002074module_param(alg, charp, 0);
2075module_param(type, uint, 0);
Herbert Xu7be380f2009-07-14 16:06:54 +08002076module_param(mask, uint, 0);
Linus Torvalds1da177e2005-04-16 15:20:36 -07002077module_param(mode, int, 0);
Harald Welteebfd9bc2005-06-22 13:27:23 -07002078module_param(sec, uint, 0);
Herbert Xu6a179442005-06-22 13:29:03 -07002079MODULE_PARM_DESC(sec, "Length in seconds of speed tests "
2080 "(defaults to zero which uses CPU cycles instead)");
Linus Torvalds1da177e2005-04-16 15:20:36 -07002081
2082MODULE_LICENSE("GPL");
2083MODULE_DESCRIPTION("Quick & dirty crypto testing module");
2084MODULE_AUTHOR("James Morris <jmorris@intercode.com.au>");