/*
 * Quick & dirty crypto testing module.
 *
 * This will only exist until we have a better testing mechanism
 * (e.g. a char device).
 *
 * Copyright (c) 2002 James Morris <jmorris@intercode.com.au>
 * Copyright (c) 2002 Jean-Francois Dive <jef@linuxbe.org>
 * Copyright (c) 2007 Nokia Siemens Networks
 *
 * Updated RFC4106 AES-GCM testing.
 *    Authors: Aidan O'Mahony (aidan.o.mahony@intel.com)
 *             Adrian Hoban <adrian.hoban@intel.com>
 *             Gabriele Paoloni <gabriele.paoloni@intel.com>
 *             Tadeusz Struk (tadeusz.struk@intel.com)
 *    Copyright (c) 2010, Intel Corporation.
 *
 * This program is free software; you can redistribute it and/or modify it
 * under the terms of the GNU General Public License as published by the Free
 * Software Foundation; either version 2 of the License, or (at your option)
 * any later version.
 *
 */

#include <crypto/aead.h>
#include <crypto/hash.h>
#include <linux/err.h>
#include <linux/fips.h>
#include <linux/init.h>
#include <linux/gfp.h>
#include <linux/module.h>
#include <linux/scatterlist.h>
#include <linux/string.h>
#include <linux/moduleparam.h>
#include <linux/jiffies.h>
#include <linux/timex.h>
#include <linux/interrupt.h>
#include "tcrypt.h"

/*
 * Need slab memory for testing (size in number of pages).
 */
#define TVMEMSIZE	4

/*
 * Used by test_cipher_speed()
 */
#define ENCRYPT 1
#define DECRYPT 0

#define MAX_DIGEST_SIZE		64

/*
 * return a string with the driver name
 */
#define get_driver_name(tfm_type, tfm) crypto_tfm_alg_driver_name(tfm_type ## _tfm(tfm))

/*
 * Used by test_cipher_speed()
 */
static unsigned int sec;

static char *alg = NULL;
static u32 type;
static u32 mask;
static int mode;
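/*
 * Note: the variables above are set via module parameters (the
 * module_param() declarations live further down in this file); a typical
 * speed run looks like, e.g., "modprobe tcrypt mode=200 sec=1"
 * (illustrative invocation only).
 */
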
static char *tvmem[TVMEMSIZE];

static char *check[] = {
	"des", "md5", "des3_ede", "rot13", "sha1", "sha224", "sha256",
	"blowfish", "twofish", "serpent", "sha384", "sha512", "md4", "aes",
	"cast6", "arc4", "michael_mic", "deflate", "crc32c", "tea", "xtea",
	"khazad", "wp512", "wp384", "wp256", "tnepres", "xeta", "fcrypt",
	"camellia", "seed", "salsa20", "rmd128", "rmd160", "rmd256", "rmd320",
	"lzo", "cts", "zlib", "sha3-224", "sha3-256", "sha3-384", "sha3-512",
	NULL
};

struct tcrypt_result {
	struct completion completion;
	int err;
};

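/*
 * Completion callback shared by the asynchronous speed tests below: ignore
 * the -EINPROGRESS notification, record the final status and wake the waiter.
 */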
static void tcrypt_complete(struct crypto_async_request *req, int err)
{
	struct tcrypt_result *res = req->data;

	if (err == -EINPROGRESS)
		return;

	res->err = err;
	complete(&res->completion);
}

static int test_cipher_jiffies(struct blkcipher_desc *desc, int enc,
			       struct scatterlist *sg, int blen, int secs)
{
	unsigned long start, end;
	int bcount;
	int ret;

	for (start = jiffies, end = start + secs * HZ, bcount = 0;
	     time_before(jiffies, end); bcount++) {
		if (enc)
			ret = crypto_blkcipher_encrypt(desc, sg, sg, blen);
		else
			ret = crypto_blkcipher_decrypt(desc, sg, sg, blen);

		if (ret)
			return ret;
	}

	printk("%d operations in %d seconds (%ld bytes)\n",
	       bcount, secs, (long)bcount * blen);
	return 0;
}

static int test_cipher_cycles(struct blkcipher_desc *desc, int enc,
			      struct scatterlist *sg, int blen)
{
	unsigned long cycles = 0;
	int ret = 0;
	int i;

	local_irq_disable();

	/* Warm-up run. */
	for (i = 0; i < 4; i++) {
		if (enc)
			ret = crypto_blkcipher_encrypt(desc, sg, sg, blen);
		else
			ret = crypto_blkcipher_decrypt(desc, sg, sg, blen);

		if (ret)
			goto out;
	}

	/* The real thing. */
	for (i = 0; i < 8; i++) {
		cycles_t start, end;

		start = get_cycles();
		if (enc)
			ret = crypto_blkcipher_encrypt(desc, sg, sg, blen);
		else
			ret = crypto_blkcipher_decrypt(desc, sg, sg, blen);
		end = get_cycles();

		if (ret)
			goto out;

		cycles += end - start;
	}

out:
	local_irq_enable();

	if (ret == 0)
		printk("1 operation in %lu cycles (%d bytes)\n",
		       (cycles + 4) / 8, blen);

	return ret;
}

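/*
 * Run one AEAD operation to completion: if the request was queued
 * (-EINPROGRESS or -EBUSY), wait for the callback and pick up the real
 * status from the shared tcrypt_result.
 */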
static inline int do_one_aead_op(struct aead_request *req, int ret)
{
	if (ret == -EINPROGRESS || ret == -EBUSY) {
		struct tcrypt_result *tr = req->base.data;

		ret = wait_for_completion_interruptible(&tr->completion);
		if (!ret)
			ret = tr->err;
		reinit_completion(&tr->completion);
	}

	return ret;
}

static int test_aead_jiffies(struct aead_request *req, int enc,
			     int blen, int secs)
{
	unsigned long start, end;
	int bcount;
	int ret;

	for (start = jiffies, end = start + secs * HZ, bcount = 0;
	     time_before(jiffies, end); bcount++) {
		if (enc)
			ret = do_one_aead_op(req, crypto_aead_encrypt(req));
		else
			ret = do_one_aead_op(req, crypto_aead_decrypt(req));

		if (ret)
			return ret;
	}

	printk("%d operations in %d seconds (%ld bytes)\n",
	       bcount, secs, (long)bcount * blen);
	return 0;
}

static int test_aead_cycles(struct aead_request *req, int enc, int blen)
{
	unsigned long cycles = 0;
	int ret = 0;
	int i;

	local_irq_disable();

	/* Warm-up run. */
	for (i = 0; i < 4; i++) {
		if (enc)
			ret = do_one_aead_op(req, crypto_aead_encrypt(req));
		else
			ret = do_one_aead_op(req, crypto_aead_decrypt(req));

		if (ret)
			goto out;
	}

	/* The real thing. */
	for (i = 0; i < 8; i++) {
		cycles_t start, end;

		start = get_cycles();
		if (enc)
			ret = do_one_aead_op(req, crypto_aead_encrypt(req));
		else
			ret = do_one_aead_op(req, crypto_aead_decrypt(req));
		end = get_cycles();

		if (ret)
			goto out;

		cycles += end - start;
	}

out:
	local_irq_enable();

	if (ret == 0)
		printk("1 operation in %lu cycles (%d bytes)\n",
		       (cycles + 4) / 8, blen);

	return ret;
}

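/* Buffer sizes exercised by the speed tests; both lists are zero-terminated. */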
static u32 block_sizes[] = { 16, 64, 256, 1024, 8192, 0 };
static u32 aead_sizes[] = { 16, 64, 256, 512, 1024, 2048, 4096, 8192, 0 };

#define XBUFSIZE 8
#define MAX_IVLEN 32

static int testmgr_alloc_buf(char *buf[XBUFSIZE])
{
	int i;

	for (i = 0; i < XBUFSIZE; i++) {
		buf[i] = (void *)__get_free_page(GFP_KERNEL);
		if (!buf[i])
			goto err_free_buf;
	}

	return 0;

err_free_buf:
	while (i-- > 0)
		free_page((unsigned long)buf[i]);

	return -ENOMEM;
}

static void testmgr_free_buf(char *buf[XBUFSIZE])
{
	int i;

	for (i = 0; i < XBUFSIZE; i++)
		free_page((unsigned long)buf[i]);
}

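/*
 * Map buflen bytes of the xbuf pages into sg[1] onwards; sg[0] is left for
 * the caller, which uses it for the associated data.
 */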
static void sg_init_aead(struct scatterlist *sg, char *xbuf[XBUFSIZE],
			 unsigned int buflen)
{
	int np = (buflen + PAGE_SIZE - 1)/PAGE_SIZE;
	int k, rem;

	if (np > XBUFSIZE) {
		rem = PAGE_SIZE;
		np = XBUFSIZE;
	} else {
		rem = buflen % PAGE_SIZE;
	}

	sg_init_table(sg, np + 1);
	np--;
	for (k = 0; k < np; k++)
		sg_set_buf(&sg[k + 1], xbuf[k], PAGE_SIZE);

	sg_set_buf(&sg[k + 1], xbuf[k], rem);
}

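/*
 * AEAD speed test: for every (key size, block size) combination, run either
 * a timed loop over 'secs' seconds or, when secs is 0, a cycle-counted run.
 */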
static void test_aead_speed(const char *algo, int enc, unsigned int secs,
			    struct aead_speed_template *template,
			    unsigned int tcount, u8 authsize,
			    unsigned int aad_size, u8 *keysize)
{
	unsigned int i, j;
	struct crypto_aead *tfm;
	int ret = -ENOMEM;
	const char *key;
	struct aead_request *req;
	struct scatterlist *sg;
	struct scatterlist *sgout;
	const char *e;
	void *assoc;
	char *iv;
	char *xbuf[XBUFSIZE];
	char *xoutbuf[XBUFSIZE];
	char *axbuf[XBUFSIZE];
	unsigned int *b_size;
	unsigned int iv_len;
	struct tcrypt_result result;

	iv = kzalloc(MAX_IVLEN, GFP_KERNEL);
	if (!iv)
		return;

	if (aad_size >= PAGE_SIZE) {
		pr_err("associate data length (%u) too big\n", aad_size);
		goto out_noxbuf;
	}

	if (enc == ENCRYPT)
		e = "encryption";
	else
		e = "decryption";

	if (testmgr_alloc_buf(xbuf))
		goto out_noxbuf;
	if (testmgr_alloc_buf(axbuf))
		goto out_noaxbuf;
	if (testmgr_alloc_buf(xoutbuf))
		goto out_nooutbuf;

	sg = kmalloc(sizeof(*sg) * 9 * 2, GFP_KERNEL);
	if (!sg)
		goto out_nosg;
	sgout = &sg[9];

	tfm = crypto_alloc_aead(algo, 0, 0);

	if (IS_ERR(tfm)) {
		pr_err("alg: aead: Failed to load transform for %s: %ld\n", algo,
		       PTR_ERR(tfm));
		goto out_notfm;
	}

	init_completion(&result.completion);
	printk(KERN_INFO "\ntesting speed of %s (%s) %s\n", algo,
	       get_driver_name(crypto_aead, tfm), e);

	req = aead_request_alloc(tfm, GFP_KERNEL);
	if (!req) {
		pr_err("alg: aead: Failed to allocate request for %s\n",
		       algo);
		goto out_noreq;
	}

	aead_request_set_callback(req, CRYPTO_TFM_REQ_MAY_BACKLOG,
				  tcrypt_complete, &result);

	i = 0;
	do {
		b_size = aead_sizes;
		do {
			assoc = axbuf[0];
			memset(assoc, 0xff, aad_size);

			if ((*keysize + *b_size) > TVMEMSIZE * PAGE_SIZE) {
				pr_err("template (%u) too big for tvmem (%lu)\n",
				       *keysize + *b_size,
				       TVMEMSIZE * PAGE_SIZE);
				goto out;
			}

			key = tvmem[0];
			for (j = 0; j < tcount; j++) {
				if (template[j].klen == *keysize) {
					key = template[j].key;
					break;
				}
			}
			ret = crypto_aead_setkey(tfm, key, *keysize);
			ret = crypto_aead_setauthsize(tfm, authsize);

			iv_len = crypto_aead_ivsize(tfm);
			if (iv_len)
				memset(iv, 0xff, iv_len);

			crypto_aead_clear_flags(tfm, ~0);
			printk(KERN_INFO "test %u (%d bit key, %d byte blocks): ",
			       i, *keysize * 8, *b_size);

			memset(tvmem[0], 0xff, PAGE_SIZE);

			if (ret) {
				pr_err("setkey() failed flags=%x\n",
				       crypto_aead_get_flags(tfm));
				goto out;
			}

			sg_init_aead(sg, xbuf,
				     *b_size + (enc ? authsize : 0));

			sg_init_aead(sgout, xoutbuf,
				     *b_size + (enc ? authsize : 0));

			sg_set_buf(&sg[0], assoc, aad_size);
			sg_set_buf(&sgout[0], assoc, aad_size);

			aead_request_set_crypt(req, sg, sgout, *b_size, iv);
			aead_request_set_ad(req, aad_size);

			if (secs)
				ret = test_aead_jiffies(req, enc, *b_size,
							secs);
			else
				ret = test_aead_cycles(req, enc, *b_size);

			if (ret) {
				pr_err("%s() failed return code=%d\n", e, ret);
				break;
			}
			b_size++;
			i++;
		} while (*b_size);
		keysize++;
	} while (*keysize);

out:
	aead_request_free(req);
out_noreq:
	crypto_free_aead(tfm);
out_notfm:
	kfree(sg);
out_nosg:
	testmgr_free_buf(xoutbuf);
out_nooutbuf:
	testmgr_free_buf(axbuf);
out_noaxbuf:
	testmgr_free_buf(xbuf);
out_noxbuf:
	kfree(iv);
	return;
}

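/*
 * Synchronous cipher speed test driven through the blkcipher interface;
 * the overall structure mirrors test_aead_speed() above.
 */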
static void test_cipher_speed(const char *algo, int enc, unsigned int secs,
			      struct cipher_speed_template *template,
			      unsigned int tcount, u8 *keysize)
{
	unsigned int ret, i, j, iv_len;
	const char *key;
	char iv[128];
	struct crypto_blkcipher *tfm;
	struct blkcipher_desc desc;
	const char *e;
	u32 *b_size;

	if (enc == ENCRYPT)
		e = "encryption";
	else
		e = "decryption";

	tfm = crypto_alloc_blkcipher(algo, 0, CRYPTO_ALG_ASYNC);

	if (IS_ERR(tfm)) {
		printk("failed to load transform for %s: %ld\n", algo,
		       PTR_ERR(tfm));
		return;
	}
	desc.tfm = tfm;
	desc.flags = 0;

	printk(KERN_INFO "\ntesting speed of %s (%s) %s\n", algo,
	       get_driver_name(crypto_blkcipher, tfm), e);

	i = 0;
	do {
		b_size = block_sizes;
		do {
			struct scatterlist sg[TVMEMSIZE];

			if ((*keysize + *b_size) > TVMEMSIZE * PAGE_SIZE) {
				printk("template (%u) too big for "
				       "tvmem (%lu)\n", *keysize + *b_size,
				       TVMEMSIZE * PAGE_SIZE);
				goto out;
			}

			printk("test %u (%d bit key, %d byte blocks): ", i,
			       *keysize * 8, *b_size);

			memset(tvmem[0], 0xff, PAGE_SIZE);

			/* set key, plain text and IV */
			key = tvmem[0];
			for (j = 0; j < tcount; j++) {
				if (template[j].klen == *keysize) {
					key = template[j].key;
					break;
				}
			}

			ret = crypto_blkcipher_setkey(tfm, key, *keysize);
			if (ret) {
				printk("setkey() failed flags=%x\n",
				       crypto_blkcipher_get_flags(tfm));
				goto out;
			}

			sg_init_table(sg, TVMEMSIZE);
			sg_set_buf(sg, tvmem[0] + *keysize,
				   PAGE_SIZE - *keysize);
			for (j = 1; j < TVMEMSIZE; j++) {
				sg_set_buf(sg + j, tvmem[j], PAGE_SIZE);
				memset(tvmem[j], 0xff, PAGE_SIZE);
			}

			iv_len = crypto_blkcipher_ivsize(tfm);
			if (iv_len) {
				memset(&iv, 0xff, iv_len);
				crypto_blkcipher_set_iv(tfm, iv, iv_len);
			}

			if (secs)
				ret = test_cipher_jiffies(&desc, enc, sg,
							  *b_size, secs);
			else
				ret = test_cipher_cycles(&desc, enc, sg,
							 *b_size);

			if (ret) {
				printk("%s() failed flags=%x\n", e, desc.flags);
				break;
			}
			b_size++;
			i++;
		} while (*b_size);
		keysize++;
	} while (*keysize);

out:
	crypto_free_blkcipher(tfm);
}

static void test_hash_sg_init(struct scatterlist *sg)
{
	int i;

	sg_init_table(sg, TVMEMSIZE);
	for (i = 0; i < TVMEMSIZE; i++) {
		sg_set_buf(sg + i, tvmem[i], PAGE_SIZE);
		memset(tvmem[i], 0xff, PAGE_SIZE);
	}
}

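/* As do_one_aead_op(), but the wait for completion is not interruptible. */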
static inline int do_one_ahash_op(struct ahash_request *req, int ret)
{
	if (ret == -EINPROGRESS || ret == -EBUSY) {
		struct tcrypt_result *tr = req->base.data;

		wait_for_completion(&tr->completion);
		reinit_completion(&tr->completion);
		ret = tr->err;
	}
	return ret;
}

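/*
 * State for the multibuffer ("mb") hash speed test below: eight requests
 * are issued back to back before any completion is waited for, which lets
 * multi-buffer hash implementations batch them.
 */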
char ptext[4096];
struct scatterlist sg[8][8];
char result[8][64];
struct ahash_request *req[8];
struct tcrypt_result tresult[8];
char *xbuf[8][XBUFSIZE];
cycles_t start[8], end[8], mid;

static void test_mb_ahash_speed(const char *algo, unsigned int sec,
				struct hash_speed *speed)
{
	unsigned int i, j, k;
	void *hash_buff;
	int ret = -ENOMEM;
	struct crypto_ahash *tfm;

	tfm = crypto_alloc_ahash(algo, 0, 0);
	if (IS_ERR(tfm)) {
		pr_err("failed to load transform for %s: %ld\n",
		       algo, PTR_ERR(tfm));
		return;
	}

	for (i = 0; i < 8; ++i) {
		if (testmgr_alloc_buf(xbuf[i]))
			goto out_nobuf;

		init_completion(&tresult[i].completion);

		req[i] = ahash_request_alloc(tfm, GFP_KERNEL);
		if (!req[i]) {
			pr_err("alg: hash: Failed to allocate request for %s\n",
			       algo);
			goto out_noreq;
		}
		ahash_request_set_callback(req[i], CRYPTO_TFM_REQ_MAY_BACKLOG,
					   tcrypt_complete, &tresult[i]);

		hash_buff = xbuf[i][0];
		memcpy(hash_buff, ptext, 4096);
	}

	j = 0;

	pr_err("\ntesting speed of %s (%s)\n", algo,
	       get_driver_name(crypto_ahash, tfm));

	for (i = 0; speed[i].blen != 0; i++) {
		if (speed[i].blen > TVMEMSIZE * PAGE_SIZE) {
			pr_err("template (%u) too big for tvmem (%lu)\n",
			       speed[i].blen, TVMEMSIZE * PAGE_SIZE);
			goto out;
		}

		if (speed[i].klen)
			crypto_ahash_setkey(tfm, tvmem[0], speed[i].klen);

		for (k = 0; k < 8; ++k) {
			sg_init_one(&sg[k][0], (void *) xbuf[k][0],
				    speed[i].blen);
			ahash_request_set_crypt(req[k], sg[k],
						result[k], speed[i].blen);
		}

		pr_err("test%3u (%5u byte blocks,%5u bytes per update,%4u updates): ",
		       i, speed[i].blen, speed[i].plen,
		       speed[i].blen / speed[i].plen);

		for (k = 0; k < 8; ++k) {
			start[k] = get_cycles();
			ret = crypto_ahash_digest(req[k]);
			if (ret == -EBUSY || ret == -EINPROGRESS)
				continue;
			if (ret) {
				pr_err("alg (%s) something wrong, ret = %d ...\n",
				       algo, ret);
				goto out;
			}
		}
		mid = get_cycles();

		for (k = 0; k < 8; ++k) {
			struct tcrypt_result *tr = &tresult[k];

			ret = wait_for_completion_interruptible(&tr->completion);
			if (ret)
				pr_err("alg(%s): hash: digest failed\n", algo);
			end[k] = get_cycles();
		}

		printk("\nBlock: %lld cycles (%lld cycles/byte), %d bytes\n",
		       (s64) (end[7] - start[0]) / 1,
		       (s64) (end[7] - start[0]) / (8 * speed[i].blen),
		       8 * speed[i].blen);
	}
	ret = 0;

out:
	for (k = 0; k < 8; ++k)
		ahash_request_free(req[k]);
out_noreq:
	for (k = 0; k < 8; ++k)
		testmgr_free_buf(xbuf[k]);
out_nobuf:
	return;
}

static int test_ahash_jiffies_digest(struct ahash_request *req, int blen,
				     char *out, int secs)
{
	unsigned long start, end;
	int bcount;
	int ret;

	for (start = jiffies, end = start + secs * HZ, bcount = 0;
	     time_before(jiffies, end); bcount++) {
		ret = do_one_ahash_op(req, crypto_ahash_digest(req));
		if (ret)
			return ret;
	}

	printk("%6u opers/sec, %9lu bytes/sec\n",
	       bcount / secs, ((long)bcount * blen) / secs);

	return 0;
}

static int test_ahash_jiffies(struct ahash_request *req, int blen,
			      int plen, char *out, int secs)
{
	unsigned long start, end;
	int bcount, pcount;
	int ret;

	if (plen == blen)
		return test_ahash_jiffies_digest(req, blen, out, secs);

	for (start = jiffies, end = start + secs * HZ, bcount = 0;
	     time_before(jiffies, end); bcount++) {
		ret = do_one_ahash_op(req, crypto_ahash_init(req));
		if (ret)
			return ret;
		for (pcount = 0; pcount < blen; pcount += plen) {
			ret = do_one_ahash_op(req, crypto_ahash_update(req));
			if (ret)
				return ret;
		}
		/* we assume there is enough space in 'out' for the result */
		ret = do_one_ahash_op(req, crypto_ahash_final(req));
		if (ret)
			return ret;
	}

	pr_cont("%6u opers/sec, %9lu bytes/sec\n",
		bcount / secs, ((long)bcount * blen) / secs);

	return 0;
}

static int test_ahash_cycles_digest(struct ahash_request *req, int blen,
				    char *out)
{
	unsigned long cycles = 0;
	int ret, i;

	/* Warm-up run. */
	for (i = 0; i < 4; i++) {
		ret = do_one_ahash_op(req, crypto_ahash_digest(req));
		if (ret)
			goto out;
	}

	/* The real thing. */
	for (i = 0; i < 8; i++) {
		cycles_t start, end;

		start = get_cycles();

		ret = do_one_ahash_op(req, crypto_ahash_digest(req));
		if (ret)
			goto out;

		end = get_cycles();

		cycles += end - start;
	}

out:
	if (ret)
		return ret;

	pr_cont("%6lu cycles/operation, %4lu cycles/byte\n",
		cycles / 8, cycles / (8 * blen));

	return 0;
}

static int test_ahash_cycles(struct ahash_request *req, int blen,
			     int plen, char *out)
{
	unsigned long cycles = 0;
	int i, pcount, ret;

	if (plen == blen)
		return test_ahash_cycles_digest(req, blen, out);

	/* Warm-up run. */
	for (i = 0; i < 4; i++) {
		ret = do_one_ahash_op(req, crypto_ahash_init(req));
		if (ret)
			goto out;
		for (pcount = 0; pcount < blen; pcount += plen) {
			ret = do_one_ahash_op(req, crypto_ahash_update(req));
			if (ret)
				goto out;
		}
		ret = do_one_ahash_op(req, crypto_ahash_final(req));
		if (ret)
			goto out;
	}

	/* The real thing. */
	for (i = 0; i < 8; i++) {
		cycles_t start, end;

		start = get_cycles();

		ret = do_one_ahash_op(req, crypto_ahash_init(req));
		if (ret)
			goto out;
		for (pcount = 0; pcount < blen; pcount += plen) {
			ret = do_one_ahash_op(req, crypto_ahash_update(req));
			if (ret)
				goto out;
		}
		ret = do_one_ahash_op(req, crypto_ahash_final(req));
		if (ret)
			goto out;

		end = get_cycles();

		cycles += end - start;
	}

out:
	if (ret)
		return ret;

	pr_cont("%6lu cycles/operation, %4lu cycles/byte\n",
		cycles / 8, cycles / (8 * blen));

	return 0;
}

static void test_ahash_speed_common(const char *algo, unsigned int secs,
				    struct hash_speed *speed, unsigned mask)
{
	struct scatterlist sg[TVMEMSIZE];
	struct tcrypt_result tresult;
	struct ahash_request *req;
	struct crypto_ahash *tfm;
	char *output;
	int i, ret;

	tfm = crypto_alloc_ahash(algo, 0, mask);
	if (IS_ERR(tfm)) {
		pr_err("failed to load transform for %s: %ld\n",
		       algo, PTR_ERR(tfm));
		return;
	}

	printk(KERN_INFO "\ntesting speed of async %s (%s)\n", algo,
	       get_driver_name(crypto_ahash, tfm));

	if (crypto_ahash_digestsize(tfm) > MAX_DIGEST_SIZE) {
		pr_err("digestsize(%u) > %d\n", crypto_ahash_digestsize(tfm),
		       MAX_DIGEST_SIZE);
		goto out;
	}

	test_hash_sg_init(sg);
	req = ahash_request_alloc(tfm, GFP_KERNEL);
	if (!req) {
		pr_err("ahash request allocation failure\n");
		goto out;
	}

	init_completion(&tresult.completion);
	ahash_request_set_callback(req, CRYPTO_TFM_REQ_MAY_BACKLOG,
				   tcrypt_complete, &tresult);

	output = kmalloc(MAX_DIGEST_SIZE, GFP_KERNEL);
	if (!output)
		goto out_nomem;

	for (i = 0; speed[i].blen != 0; i++) {
		if (speed[i].blen > TVMEMSIZE * PAGE_SIZE) {
			pr_err("template (%u) too big for tvmem (%lu)\n",
			       speed[i].blen, TVMEMSIZE * PAGE_SIZE);
			break;
		}

		pr_info("test%3u "
			"(%5u byte blocks,%5u bytes per update,%4u updates): ",
			i, speed[i].blen, speed[i].plen, speed[i].blen / speed[i].plen);

		ahash_request_set_crypt(req, sg, output, speed[i].plen);

		if (secs)
			ret = test_ahash_jiffies(req, speed[i].blen,
						 speed[i].plen, output, secs);
		else
			ret = test_ahash_cycles(req, speed[i].blen,
						speed[i].plen, output);

		if (ret) {
			pr_err("hashing failed ret=%d\n", ret);
			break;
		}
	}

	kfree(output);

out_nomem:
	ahash_request_free(req);

out:
	crypto_free_ahash(tfm);
}

static void test_ahash_speed(const char *algo, unsigned int secs,
			     struct hash_speed *speed)
{
	return test_ahash_speed_common(algo, secs, speed, 0);
}

static void test_hash_speed(const char *algo, unsigned int secs,
			    struct hash_speed *speed)
{
	return test_ahash_speed_common(algo, secs, speed, CRYPTO_ALG_ASYNC);
}

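/* Same completion handling as do_one_ahash_op(), for ablkcipher requests. */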
static inline int do_one_acipher_op(struct ablkcipher_request *req, int ret)
{
	if (ret == -EINPROGRESS || ret == -EBUSY) {
		struct tcrypt_result *tr = req->base.data;

		wait_for_completion(&tr->completion);
		reinit_completion(&tr->completion);
		ret = tr->err;
	}

	return ret;
}

static int test_acipher_jiffies(struct ablkcipher_request *req, int enc,
				int blen, int secs)
{
	unsigned long start, end;
	int bcount;
	int ret;

	for (start = jiffies, end = start + secs * HZ, bcount = 0;
	     time_before(jiffies, end); bcount++) {
		if (enc)
			ret = do_one_acipher_op(req,
						crypto_ablkcipher_encrypt(req));
		else
			ret = do_one_acipher_op(req,
						crypto_ablkcipher_decrypt(req));

		if (ret)
			return ret;
	}

	pr_cont("%d operations in %d seconds (%ld bytes)\n",
		bcount, secs, (long)bcount * blen);
	return 0;
}

static int test_acipher_cycles(struct ablkcipher_request *req, int enc,
			       int blen)
{
	unsigned long cycles = 0;
	int ret = 0;
	int i;

	/* Warm-up run. */
	for (i = 0; i < 4; i++) {
		if (enc)
			ret = do_one_acipher_op(req,
						crypto_ablkcipher_encrypt(req));
		else
			ret = do_one_acipher_op(req,
						crypto_ablkcipher_decrypt(req));

		if (ret)
			goto out;
	}

	/* The real thing. */
	for (i = 0; i < 8; i++) {
		cycles_t start, end;

		start = get_cycles();
		if (enc)
			ret = do_one_acipher_op(req,
						crypto_ablkcipher_encrypt(req));
		else
			ret = do_one_acipher_op(req,
						crypto_ablkcipher_decrypt(req));
		end = get_cycles();

		if (ret)
			goto out;

		cycles += end - start;
	}

out:
	if (ret == 0)
		pr_cont("1 operation in %lu cycles (%d bytes)\n",
			(cycles + 4) / 8, blen);

	return ret;
}

static void test_acipher_speed(const char *algo, int enc, unsigned int secs,
			       struct cipher_speed_template *template,
			       unsigned int tcount, u8 *keysize)
{
	unsigned int ret, i, j, k, iv_len;
	struct tcrypt_result tresult;
	const char *key;
	char iv[128];
	struct ablkcipher_request *req;
	struct crypto_ablkcipher *tfm;
	const char *e;
	u32 *b_size;

	if (enc == ENCRYPT)
		e = "encryption";
	else
		e = "decryption";

	init_completion(&tresult.completion);

	tfm = crypto_alloc_ablkcipher(algo, 0, 0);

	if (IS_ERR(tfm)) {
		pr_err("failed to load transform for %s: %ld\n", algo,
		       PTR_ERR(tfm));
		return;
	}

	pr_info("\ntesting speed of async %s (%s) %s\n", algo,
		get_driver_name(crypto_ablkcipher, tfm), e);

	req = ablkcipher_request_alloc(tfm, GFP_KERNEL);
	if (!req) {
		pr_err("tcrypt: skcipher: Failed to allocate request for %s\n",
		       algo);
		goto out;
	}

	ablkcipher_request_set_callback(req, CRYPTO_TFM_REQ_MAY_BACKLOG,
					tcrypt_complete, &tresult);

	i = 0;
	do {
		b_size = block_sizes;

		do {
			struct scatterlist sg[TVMEMSIZE];

			if ((*keysize + *b_size) > TVMEMSIZE * PAGE_SIZE) {
				pr_err("template (%u) too big for "
				       "tvmem (%lu)\n", *keysize + *b_size,
				       TVMEMSIZE * PAGE_SIZE);
				goto out_free_req;
			}

			pr_info("test %u (%d bit key, %d byte blocks): ", i,
				*keysize * 8, *b_size);

			memset(tvmem[0], 0xff, PAGE_SIZE);

			/* set key, plain text and IV */
			key = tvmem[0];
			for (j = 0; j < tcount; j++) {
				if (template[j].klen == *keysize) {
					key = template[j].key;
					break;
				}
			}

			crypto_ablkcipher_clear_flags(tfm, ~0);

			ret = crypto_ablkcipher_setkey(tfm, key, *keysize);
			if (ret) {
				pr_err("setkey() failed flags=%x\n",
				       crypto_ablkcipher_get_flags(tfm));
				goto out_free_req;
			}

			k = *keysize + *b_size;
			sg_init_table(sg, DIV_ROUND_UP(k, PAGE_SIZE));

			if (k > PAGE_SIZE) {
				sg_set_buf(sg, tvmem[0] + *keysize,
					   PAGE_SIZE - *keysize);
				k -= PAGE_SIZE;
				j = 1;
				while (k > PAGE_SIZE) {
					sg_set_buf(sg + j, tvmem[j], PAGE_SIZE);
					memset(tvmem[j], 0xff, PAGE_SIZE);
					j++;
					k -= PAGE_SIZE;
				}
				sg_set_buf(sg + j, tvmem[j], k);
				memset(tvmem[j], 0xff, k);
			} else {
				sg_set_buf(sg, tvmem[0] + *keysize, *b_size);
			}

			iv_len = crypto_ablkcipher_ivsize(tfm);
			if (iv_len)
				memset(&iv, 0xff, iv_len);

			ablkcipher_request_set_crypt(req, sg, sg, *b_size, iv);

			if (secs)
				ret = test_acipher_jiffies(req, enc,
							   *b_size, secs);
			else
				ret = test_acipher_cycles(req, enc,
							  *b_size);

			if (ret) {
				pr_err("%s() failed flags=%x\n", e,
				       crypto_ablkcipher_get_flags(tfm));
				break;
			}
			b_size++;
			i++;
		} while (*b_size);
		keysize++;
	} while (*keysize);

out_free_req:
	ablkcipher_request_free(req);
out:
	crypto_free_ablkcipher(tfm);
}

static void test_available(void)
{
	char **name = check;

	while (*name) {
		printk("alg %s ", *name);
		printk(crypto_has_alg(*name, 0, 0) ?
		       "found\n" : "not found\n");
		name++;
	}
}

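/*
 * Run the testmgr correctness tests for an algorithm by name; in FIPS mode
 * a non-approved algorithm reports -EINVAL, which is not counted as a
 * failure here.
 */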
static inline int tcrypt_test(const char *alg)
{
	int ret;

	ret = alg_test(alg, alg, 0, 0);
	/* non-fips algs return -EINVAL in fips mode */
	if (fips_enabled && ret == -EINVAL)
		ret = 0;
	return ret;
}

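/*
 * Test dispatcher: mode 0 either checks for the presence of a named
 * algorithm (when alg= is set) or runs every correctness test from 1 to
 * 199; modes 200 and up select speed tests.
 */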
static int do_test(const char *alg, u32 type, u32 mask, int m)
{
	int i;
	int ret = 0;

	switch (m) {
	case 0:
		if (alg) {
			if (!crypto_has_alg(alg, type,
					    mask ?: CRYPTO_ALG_TYPE_MASK))
				ret = -ENOENT;
			break;
		}

		for (i = 1; i < 200; i++)
			ret += do_test(NULL, 0, 0, i);
		break;

	case 1:
		ret += tcrypt_test("md5");
		break;

	case 2:
		ret += tcrypt_test("sha1");
		break;

	case 3:
		ret += tcrypt_test("ecb(des)");
		ret += tcrypt_test("cbc(des)");
		ret += tcrypt_test("ctr(des)");
		break;

	case 4:
		ret += tcrypt_test("ecb(des3_ede)");
		ret += tcrypt_test("cbc(des3_ede)");
		ret += tcrypt_test("ctr(des3_ede)");
		break;

	case 5:
		ret += tcrypt_test("md4");
		break;

	case 6:
		ret += tcrypt_test("sha256");
		break;

	case 7:
		ret += tcrypt_test("ecb(blowfish)");
		ret += tcrypt_test("cbc(blowfish)");
		ret += tcrypt_test("ctr(blowfish)");
		break;

	case 8:
		ret += tcrypt_test("ecb(twofish)");
		ret += tcrypt_test("cbc(twofish)");
		ret += tcrypt_test("ctr(twofish)");
		ret += tcrypt_test("lrw(twofish)");
		ret += tcrypt_test("xts(twofish)");
		break;

	case 9:
		ret += tcrypt_test("ecb(serpent)");
		ret += tcrypt_test("cbc(serpent)");
		ret += tcrypt_test("ctr(serpent)");
		ret += tcrypt_test("lrw(serpent)");
		ret += tcrypt_test("xts(serpent)");
		break;

	case 10:
		ret += tcrypt_test("ecb(aes)");
		ret += tcrypt_test("cbc(aes)");
		ret += tcrypt_test("lrw(aes)");
		ret += tcrypt_test("xts(aes)");
		ret += tcrypt_test("ctr(aes)");
		ret += tcrypt_test("rfc3686(ctr(aes))");
		break;

	case 11:
		ret += tcrypt_test("sha384");
		break;

	case 12:
		ret += tcrypt_test("sha512");
		break;

	case 13:
		ret += tcrypt_test("deflate");
		break;

	case 14:
		ret += tcrypt_test("ecb(cast5)");
		ret += tcrypt_test("cbc(cast5)");
		ret += tcrypt_test("ctr(cast5)");
		break;

	case 15:
		ret += tcrypt_test("ecb(cast6)");
		ret += tcrypt_test("cbc(cast6)");
		ret += tcrypt_test("ctr(cast6)");
		ret += tcrypt_test("lrw(cast6)");
		ret += tcrypt_test("xts(cast6)");
		break;

	case 16:
		ret += tcrypt_test("ecb(arc4)");
		break;

	case 17:
		ret += tcrypt_test("michael_mic");
		break;

	case 18:
		ret += tcrypt_test("crc32c");
		break;

	case 19:
		ret += tcrypt_test("ecb(tea)");
		break;

	case 20:
		ret += tcrypt_test("ecb(xtea)");
		break;

	case 21:
		ret += tcrypt_test("ecb(khazad)");
		break;

	case 22:
		ret += tcrypt_test("wp512");
		break;

	case 23:
		ret += tcrypt_test("wp384");
		break;

	case 24:
		ret += tcrypt_test("wp256");
		break;

	case 25:
		ret += tcrypt_test("ecb(tnepres)");
		break;

	case 26:
		ret += tcrypt_test("ecb(anubis)");
		ret += tcrypt_test("cbc(anubis)");
		break;

	case 27:
		ret += tcrypt_test("tgr192");
		break;

	case 28:
		ret += tcrypt_test("tgr160");
		break;

	case 29:
		ret += tcrypt_test("tgr128");
		break;

	case 30:
		ret += tcrypt_test("ecb(xeta)");
		break;

	case 31:
		ret += tcrypt_test("pcbc(fcrypt)");
		break;

	case 32:
		ret += tcrypt_test("ecb(camellia)");
		ret += tcrypt_test("cbc(camellia)");
		ret += tcrypt_test("ctr(camellia)");
		ret += tcrypt_test("lrw(camellia)");
		ret += tcrypt_test("xts(camellia)");
		break;

	case 33:
		ret += tcrypt_test("sha224");
		break;

	case 34:
		ret += tcrypt_test("salsa20");
		break;

	case 35:
		ret += tcrypt_test("gcm(aes)");
		break;

	case 36:
		ret += tcrypt_test("lzo");
		break;

	case 37:
		ret += tcrypt_test("ccm(aes)");
		break;

	case 38:
		ret += tcrypt_test("cts(cbc(aes))");
		break;

	case 39:
		ret += tcrypt_test("rmd128");
		break;

	case 40:
		ret += tcrypt_test("rmd160");
		break;

	case 41:
		ret += tcrypt_test("rmd256");
		break;

	case 42:
		ret += tcrypt_test("rmd320");
		break;

	case 43:
		ret += tcrypt_test("ecb(seed)");
		break;

	case 44:
		ret += tcrypt_test("zlib");
		break;

	case 45:
		ret += tcrypt_test("rfc4309(ccm(aes))");
		break;

	case 46:
		ret += tcrypt_test("ghash");
		break;

	case 47:
		ret += tcrypt_test("crct10dif");
		break;

	case 48:
		ret += tcrypt_test("sha3-224");
		break;

	case 49:
		ret += tcrypt_test("sha3-256");
		break;

	case 50:
		ret += tcrypt_test("sha3-384");
		break;

	case 51:
		ret += tcrypt_test("sha3-512");
		break;

	case 100:
		ret += tcrypt_test("hmac(md5)");
		break;

	case 101:
		ret += tcrypt_test("hmac(sha1)");
		break;

	case 102:
		ret += tcrypt_test("hmac(sha256)");
		break;

	case 103:
		ret += tcrypt_test("hmac(sha384)");
		break;

	case 104:
		ret += tcrypt_test("hmac(sha512)");
		break;

	case 105:
		ret += tcrypt_test("hmac(sha224)");
		break;

	case 106:
		ret += tcrypt_test("xcbc(aes)");
		break;

	case 107:
		ret += tcrypt_test("hmac(rmd128)");
		break;

	case 108:
		ret += tcrypt_test("hmac(rmd160)");
		break;

	case 109:
		ret += tcrypt_test("vmac(aes)");
		break;

	case 110:
		ret += tcrypt_test("hmac(crc32)");
		break;

	case 150:
		ret += tcrypt_test("ansi_cprng");
		break;

	case 151:
		ret += tcrypt_test("rfc4106(gcm(aes))");
		break;

	case 152:
		ret += tcrypt_test("rfc4543(gcm(aes))");
		break;

	case 153:
		ret += tcrypt_test("cmac(aes)");
		break;

	case 154:
		ret += tcrypt_test("cmac(des3_ede)");
		break;

	case 155:
		ret += tcrypt_test("authenc(hmac(sha1),cbc(aes))");
		break;

	case 156:
		ret += tcrypt_test("authenc(hmac(md5),ecb(cipher_null))");
		break;

	case 157:
		ret += tcrypt_test("authenc(hmac(sha1),ecb(cipher_null))");
		break;

	case 181:
		ret += tcrypt_test("authenc(hmac(sha1),cbc(des))");
		break;

	case 182:
		ret += tcrypt_test("authenc(hmac(sha1),cbc(des3_ede))");
		break;

	case 183:
		ret += tcrypt_test("authenc(hmac(sha224),cbc(des))");
		break;

	case 184:
		ret += tcrypt_test("authenc(hmac(sha224),cbc(des3_ede))");
		break;

	case 185:
		ret += tcrypt_test("authenc(hmac(sha256),cbc(des))");
		break;

	case 186:
		ret += tcrypt_test("authenc(hmac(sha256),cbc(des3_ede))");
		break;

	case 187:
		ret += tcrypt_test("authenc(hmac(sha384),cbc(des))");
		break;

	case 188:
		ret += tcrypt_test("authenc(hmac(sha384),cbc(des3_ede))");
		break;

	case 189:
		ret += tcrypt_test("authenc(hmac(sha512),cbc(des))");
		break;

	case 190:
		ret += tcrypt_test("authenc(hmac(sha512),cbc(des3_ede))");
		break;
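
	/* Speed tests start here: modes 200 and up measure throughput only. */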
	case 200:
		test_cipher_speed("ecb(aes)", ENCRYPT, sec, NULL, 0,
				  speed_template_16_24_32);
		test_cipher_speed("ecb(aes)", DECRYPT, sec, NULL, 0,
				  speed_template_16_24_32);
		test_cipher_speed("cbc(aes)", ENCRYPT, sec, NULL, 0,
				  speed_template_16_24_32);
		test_cipher_speed("cbc(aes)", DECRYPT, sec, NULL, 0,
				  speed_template_16_24_32);
		test_cipher_speed("lrw(aes)", ENCRYPT, sec, NULL, 0,
				  speed_template_32_40_48);
		test_cipher_speed("lrw(aes)", DECRYPT, sec, NULL, 0,
				  speed_template_32_40_48);
		test_cipher_speed("xts(aes)", ENCRYPT, sec, NULL, 0,
				  speed_template_32_48_64);
		test_cipher_speed("xts(aes)", DECRYPT, sec, NULL, 0,
				  speed_template_32_48_64);
		test_cipher_speed("ctr(aes)", ENCRYPT, sec, NULL, 0,
				  speed_template_16_24_32);
		test_cipher_speed("ctr(aes)", DECRYPT, sec, NULL, 0,
				  speed_template_16_24_32);
		break;

	case 201:
		test_cipher_speed("ecb(des3_ede)", ENCRYPT, sec,
				  des3_speed_template, DES3_SPEED_VECTORS,
				  speed_template_24);
		test_cipher_speed("ecb(des3_ede)", DECRYPT, sec,
				  des3_speed_template, DES3_SPEED_VECTORS,
				  speed_template_24);
		test_cipher_speed("cbc(des3_ede)", ENCRYPT, sec,
				  des3_speed_template, DES3_SPEED_VECTORS,
				  speed_template_24);
		test_cipher_speed("cbc(des3_ede)", DECRYPT, sec,
				  des3_speed_template, DES3_SPEED_VECTORS,
				  speed_template_24);
		test_cipher_speed("ctr(des3_ede)", ENCRYPT, sec,
				  des3_speed_template, DES3_SPEED_VECTORS,
				  speed_template_24);
		test_cipher_speed("ctr(des3_ede)", DECRYPT, sec,
				  des3_speed_template, DES3_SPEED_VECTORS,
				  speed_template_24);
		break;

	case 202:
		test_cipher_speed("ecb(twofish)", ENCRYPT, sec, NULL, 0,
				  speed_template_16_24_32);
		test_cipher_speed("ecb(twofish)", DECRYPT, sec, NULL, 0,
				  speed_template_16_24_32);
		test_cipher_speed("cbc(twofish)", ENCRYPT, sec, NULL, 0,
				  speed_template_16_24_32);
		test_cipher_speed("cbc(twofish)", DECRYPT, sec, NULL, 0,
				  speed_template_16_24_32);
		test_cipher_speed("ctr(twofish)", ENCRYPT, sec, NULL, 0,
				  speed_template_16_24_32);
		test_cipher_speed("ctr(twofish)", DECRYPT, sec, NULL, 0,
				  speed_template_16_24_32);
Jussi Kivilinnabee3a902011-10-18 13:32:56 +03001572 test_cipher_speed("lrw(twofish)", ENCRYPT, sec, NULL, 0,
1573 speed_template_32_40_48);
1574 test_cipher_speed("lrw(twofish)", DECRYPT, sec, NULL, 0,
1575 speed_template_32_40_48);
Jussi Kivilinna131f7542011-10-18 13:33:38 +03001576 test_cipher_speed("xts(twofish)", ENCRYPT, sec, NULL, 0,
1577 speed_template_32_48_64);
1578 test_cipher_speed("xts(twofish)", DECRYPT, sec, NULL, 0,
1579 speed_template_32_48_64);
Harald Welteebfd9bc2005-06-22 13:27:23 -07001580 break;
1581
1582 case 203:
Herbert Xucba83562006-08-13 08:26:09 +10001583 test_cipher_speed("ecb(blowfish)", ENCRYPT, sec, NULL, 0,
Sebastian Siewior477035c2008-03-11 21:24:26 +08001584 speed_template_8_32);
Herbert Xucba83562006-08-13 08:26:09 +10001585 test_cipher_speed("ecb(blowfish)", DECRYPT, sec, NULL, 0,
Sebastian Siewior477035c2008-03-11 21:24:26 +08001586 speed_template_8_32);
Herbert Xucba83562006-08-13 08:26:09 +10001587 test_cipher_speed("cbc(blowfish)", ENCRYPT, sec, NULL, 0,
Sebastian Siewior477035c2008-03-11 21:24:26 +08001588 speed_template_8_32);
Herbert Xucba83562006-08-13 08:26:09 +10001589 test_cipher_speed("cbc(blowfish)", DECRYPT, sec, NULL, 0,
Sebastian Siewior477035c2008-03-11 21:24:26 +08001590 speed_template_8_32);
Jussi Kivilinna7d47b862011-09-02 01:45:17 +03001591 test_cipher_speed("ctr(blowfish)", ENCRYPT, sec, NULL, 0,
1592 speed_template_8_32);
1593 test_cipher_speed("ctr(blowfish)", DECRYPT, sec, NULL, 0,
1594 speed_template_8_32);
Harald Welteebfd9bc2005-06-22 13:27:23 -07001595 break;
1596
1597 case 204:
Herbert Xucba83562006-08-13 08:26:09 +10001598 test_cipher_speed("ecb(des)", ENCRYPT, sec, NULL, 0,
Sebastian Siewior477035c2008-03-11 21:24:26 +08001599 speed_template_8);
Herbert Xucba83562006-08-13 08:26:09 +10001600 test_cipher_speed("ecb(des)", DECRYPT, sec, NULL, 0,
Sebastian Siewior477035c2008-03-11 21:24:26 +08001601 speed_template_8);
Herbert Xucba83562006-08-13 08:26:09 +10001602 test_cipher_speed("cbc(des)", ENCRYPT, sec, NULL, 0,
Sebastian Siewior477035c2008-03-11 21:24:26 +08001603 speed_template_8);
Herbert Xucba83562006-08-13 08:26:09 +10001604 test_cipher_speed("cbc(des)", DECRYPT, sec, NULL, 0,
Sebastian Siewior477035c2008-03-11 21:24:26 +08001605 speed_template_8);
Harald Welteebfd9bc2005-06-22 13:27:23 -07001606 break;
1607
Noriaki TAKAMIYA02ab5a72007-01-24 21:48:19 +11001608 case 205:
1609 test_cipher_speed("ecb(camellia)", ENCRYPT, sec, NULL, 0,
Sebastian Siewior477035c2008-03-11 21:24:26 +08001610 speed_template_16_24_32);
Noriaki TAKAMIYA02ab5a72007-01-24 21:48:19 +11001611 test_cipher_speed("ecb(camellia)", DECRYPT, sec, NULL, 0,
Sebastian Siewior477035c2008-03-11 21:24:26 +08001612 speed_template_16_24_32);
Noriaki TAKAMIYA02ab5a72007-01-24 21:48:19 +11001613 test_cipher_speed("cbc(camellia)", ENCRYPT, sec, NULL, 0,
Sebastian Siewior477035c2008-03-11 21:24:26 +08001614 speed_template_16_24_32);
Noriaki TAKAMIYA02ab5a72007-01-24 21:48:19 +11001615 test_cipher_speed("cbc(camellia)", DECRYPT, sec, NULL, 0,
Sebastian Siewior477035c2008-03-11 21:24:26 +08001616 speed_template_16_24_32);
Jussi Kivilinna4de59332012-03-05 20:26:26 +02001617 test_cipher_speed("ctr(camellia)", ENCRYPT, sec, NULL, 0,
1618 speed_template_16_24_32);
1619 test_cipher_speed("ctr(camellia)", DECRYPT, sec, NULL, 0,
1620 speed_template_16_24_32);
1621 test_cipher_speed("lrw(camellia)", ENCRYPT, sec, NULL, 0,
1622 speed_template_32_40_48);
1623 test_cipher_speed("lrw(camellia)", DECRYPT, sec, NULL, 0,
1624 speed_template_32_40_48);
1625 test_cipher_speed("xts(camellia)", ENCRYPT, sec, NULL, 0,
1626 speed_template_32_48_64);
1627 test_cipher_speed("xts(camellia)", DECRYPT, sec, NULL, 0,
1628 speed_template_32_48_64);
Noriaki TAKAMIYA02ab5a72007-01-24 21:48:19 +11001629 break;
1630
Tan Swee Heng5de8f1b2007-12-07 17:17:43 +08001631 case 206:
1632 test_cipher_speed("salsa20", ENCRYPT, sec, NULL, 0,
Sebastian Siewior477035c2008-03-11 21:24:26 +08001633 speed_template_16_32);
Tan Swee Heng5de8f1b2007-12-07 17:17:43 +08001634 break;
1635
Jussi Kivilinna7fb7fe42011-10-18 00:03:03 +03001636 case 207:
1637 test_cipher_speed("ecb(serpent)", ENCRYPT, sec, NULL, 0,
1638 speed_template_16_32);
1639 test_cipher_speed("ecb(serpent)", DECRYPT, sec, NULL, 0,
1640 speed_template_16_32);
1641 test_cipher_speed("cbc(serpent)", ENCRYPT, sec, NULL, 0,
1642 speed_template_16_32);
1643 test_cipher_speed("cbc(serpent)", DECRYPT, sec, NULL, 0,
1644 speed_template_16_32);
1645 test_cipher_speed("ctr(serpent)", ENCRYPT, sec, NULL, 0,
1646 speed_template_16_32);
1647 test_cipher_speed("ctr(serpent)", DECRYPT, sec, NULL, 0,
1648 speed_template_16_32);
Jussi Kivilinna87aae4b2011-10-18 13:32:39 +03001649 test_cipher_speed("lrw(serpent)", ENCRYPT, sec, NULL, 0,
1650 speed_template_32_48);
1651 test_cipher_speed("lrw(serpent)", DECRYPT, sec, NULL, 0,
1652 speed_template_32_48);
Jussi Kivilinna5209c072011-10-18 13:33:22 +03001653 test_cipher_speed("xts(serpent)", ENCRYPT, sec, NULL, 0,
1654 speed_template_32_64);
1655 test_cipher_speed("xts(serpent)", DECRYPT, sec, NULL, 0,
1656 speed_template_32_64);
Jussi Kivilinna7fb7fe42011-10-18 00:03:03 +03001657 break;
1658
Jussi Kivilinna31b4cd292012-06-12 16:52:04 +08001659 case 208:
1660 test_cipher_speed("ecb(arc4)", ENCRYPT, sec, NULL, 0,
1661 speed_template_8);
1662 break;
1663
Johannes Goetzfrieda2c58262012-07-11 19:37:21 +02001664 case 209:
1665 test_cipher_speed("ecb(cast5)", ENCRYPT, sec, NULL, 0,
1666 speed_template_8_16);
1667 test_cipher_speed("ecb(cast5)", DECRYPT, sec, NULL, 0,
1668 speed_template_8_16);
1669 test_cipher_speed("cbc(cast5)", ENCRYPT, sec, NULL, 0,
1670 speed_template_8_16);
1671 test_cipher_speed("cbc(cast5)", DECRYPT, sec, NULL, 0,
1672 speed_template_8_16);
1673 test_cipher_speed("ctr(cast5)", ENCRYPT, sec, NULL, 0,
1674 speed_template_8_16);
1675 test_cipher_speed("ctr(cast5)", DECRYPT, sec, NULL, 0,
1676 speed_template_8_16);
1677 break;
1678
Johannes Goetzfried9b8b0402012-07-11 19:38:29 +02001679 case 210:
1680 test_cipher_speed("ecb(cast6)", ENCRYPT, sec, NULL, 0,
1681 speed_template_16_32);
1682 test_cipher_speed("ecb(cast6)", DECRYPT, sec, NULL, 0,
1683 speed_template_16_32);
1684 test_cipher_speed("cbc(cast6)", ENCRYPT, sec, NULL, 0,
1685 speed_template_16_32);
1686 test_cipher_speed("cbc(cast6)", DECRYPT, sec, NULL, 0,
1687 speed_template_16_32);
1688 test_cipher_speed("ctr(cast6)", ENCRYPT, sec, NULL, 0,
1689 speed_template_16_32);
1690 test_cipher_speed("ctr(cast6)", DECRYPT, sec, NULL, 0,
1691 speed_template_16_32);
1692 test_cipher_speed("lrw(cast6)", ENCRYPT, sec, NULL, 0,
1693 speed_template_32_48);
1694 test_cipher_speed("lrw(cast6)", DECRYPT, sec, NULL, 0,
1695 speed_template_32_48);
1696 test_cipher_speed("xts(cast6)", ENCRYPT, sec, NULL, 0,
1697 speed_template_32_64);
1698 test_cipher_speed("xts(cast6)", DECRYPT, sec, NULL, 0,
1699 speed_template_32_64);
1700 break;
1701
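	/*
	 * AEAD speed tests.  Besides the usual key-size template, the extra
	 * numeric arguments select the authentication-tag and associated-data
	 * sizes handed to test_aead_speed(); see that helper earlier in this
	 * file for the exact parameter order.
	 */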
Tim Chen53f52d72013-12-11 14:28:47 -08001702 case 211:
1703 test_aead_speed("rfc4106(gcm(aes))", ENCRYPT, sec,
Herbert Xu34a1c742015-07-09 07:17:26 +08001704 NULL, 0, 16, 16, aead_speed_template_20);
Vutla, Lokesh1425d2d2015-07-07 21:01:49 +05301705 test_aead_speed("gcm(aes)", ENCRYPT, sec,
Cyrille Pitchenf18611d2015-11-17 13:37:10 +01001706 NULL, 0, 16, 8, speed_template_16_24_32);
Tim Chen53f52d72013-12-11 14:28:47 -08001707 break;
1708
Herbert Xu4e4aab62015-06-17 14:04:21 +08001709 case 212:
1710 test_aead_speed("rfc4309(ccm(aes))", ENCRYPT, sec,
Herbert Xu34a1c742015-07-09 07:17:26 +08001711 NULL, 0, 16, 16, aead_speed_template_19);
Herbert Xu4e4aab62015-06-17 14:04:21 +08001712 break;
1713
Martin Willi2dce0632015-07-16 19:13:59 +02001714 case 213:
1715 test_aead_speed("rfc7539esp(chacha20,poly1305)", ENCRYPT, sec,
1716 NULL, 0, 16, 8, aead_speed_template_36);
1717 break;
1718
1719 case 214:
1720 test_cipher_speed("chacha20", ENCRYPT, sec, NULL, 0,
1721 speed_template_32);
1722 break;
1723
1724
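	/*
	 * Hash speed tests.  mode=300 together with alg="<driver name>"
	 * times a single driver; without alg the cases below fall through,
	 * so mode=300 covers every digest, while an individual mode
	 * (301-325) stops after its own algorithm because of the
	 * "mode > 300 && mode < 400" breaks.
	 */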
Michal Ludvige8057922006-05-30 22:04:19 +10001725 case 300:
Herbert Xu86068132014-12-04 16:43:29 +08001726 if (alg) {
1727 test_hash_speed(alg, sec, generic_hash_speed_template);
1728 break;
1729 }
1730
Michal Ludvige8057922006-05-30 22:04:19 +10001731 /* fall through */
1732
1733 case 301:
Herbert Xue9d41162006-08-19 21:38:49 +10001734 test_hash_speed("md4", sec, generic_hash_speed_template);
Michal Ludvige8057922006-05-30 22:04:19 +10001735 if (mode > 300 && mode < 400) break;
1736
1737 case 302:
Herbert Xue9d41162006-08-19 21:38:49 +10001738 test_hash_speed("md5", sec, generic_hash_speed_template);
Michal Ludvige8057922006-05-30 22:04:19 +10001739 if (mode > 300 && mode < 400) break;
1740
1741 case 303:
Herbert Xue9d41162006-08-19 21:38:49 +10001742 test_hash_speed("sha1", sec, generic_hash_speed_template);
Michal Ludvige8057922006-05-30 22:04:19 +10001743 if (mode > 300 && mode < 400) break;
1744
1745 case 304:
Herbert Xue9d41162006-08-19 21:38:49 +10001746 test_hash_speed("sha256", sec, generic_hash_speed_template);
Michal Ludvige8057922006-05-30 22:04:19 +10001747 if (mode > 300 && mode < 400) break;
1748
1749 case 305:
Herbert Xue9d41162006-08-19 21:38:49 +10001750 test_hash_speed("sha384", sec, generic_hash_speed_template);
Michal Ludvige8057922006-05-30 22:04:19 +10001751 if (mode > 300 && mode < 400) break;
1752
1753 case 306:
Herbert Xue9d41162006-08-19 21:38:49 +10001754 test_hash_speed("sha512", sec, generic_hash_speed_template);
Michal Ludvige8057922006-05-30 22:04:19 +10001755 if (mode > 300 && mode < 400) break;
1756
1757 case 307:
Herbert Xue9d41162006-08-19 21:38:49 +10001758 test_hash_speed("wp256", sec, generic_hash_speed_template);
Michal Ludvige8057922006-05-30 22:04:19 +10001759 if (mode > 300 && mode < 400) break;
1760
1761 case 308:
Herbert Xue9d41162006-08-19 21:38:49 +10001762 test_hash_speed("wp384", sec, generic_hash_speed_template);
Michal Ludvige8057922006-05-30 22:04:19 +10001763 if (mode > 300 && mode < 400) break;
1764
1765 case 309:
Herbert Xue9d41162006-08-19 21:38:49 +10001766 test_hash_speed("wp512", sec, generic_hash_speed_template);
Michal Ludvige8057922006-05-30 22:04:19 +10001767 if (mode > 300 && mode < 400) break;
1768
1769 case 310:
Herbert Xue9d41162006-08-19 21:38:49 +10001770 test_hash_speed("tgr128", sec, generic_hash_speed_template);
Michal Ludvige8057922006-05-30 22:04:19 +10001771 if (mode > 300 && mode < 400) break;
1772
1773 case 311:
Herbert Xue9d41162006-08-19 21:38:49 +10001774 test_hash_speed("tgr160", sec, generic_hash_speed_template);
Michal Ludvige8057922006-05-30 22:04:19 +10001775 if (mode > 300 && mode < 400) break;
1776
1777 case 312:
Herbert Xue9d41162006-08-19 21:38:49 +10001778 test_hash_speed("tgr192", sec, generic_hash_speed_template);
Michal Ludvige8057922006-05-30 22:04:19 +10001779 if (mode > 300 && mode < 400) break;
1780
Jonathan Lynchcd12fb92007-11-10 20:08:25 +08001781 case 313:
1782 test_hash_speed("sha224", sec, generic_hash_speed_template);
1783 if (mode > 300 && mode < 400) break;
1784
Adrian-Ken Rueegseggerfd4adf12008-05-07 22:16:36 +08001785 case 314:
1786 test_hash_speed("rmd128", sec, generic_hash_speed_template);
1787 if (mode > 300 && mode < 400) break;
1788
1789 case 315:
1790 test_hash_speed("rmd160", sec, generic_hash_speed_template);
1791 if (mode > 300 && mode < 400) break;
1792
Adrian-Ken Rueegsegger2998db32008-05-09 21:29:35 +08001793 case 316:
1794 test_hash_speed("rmd256", sec, generic_hash_speed_template);
1795 if (mode > 300 && mode < 400) break;
1796
1797 case 317:
1798 test_hash_speed("rmd320", sec, generic_hash_speed_template);
1799 if (mode > 300 && mode < 400) break;
1800
Huang Ying18bcc912010-03-10 18:30:32 +08001801 case 318:
1802 test_hash_speed("ghash-generic", sec, hash_speed_template_16);
1803 if (mode > 300 && mode < 400) break;
1804
Tim Chene3899e42012-09-27 15:44:24 -07001805 case 319:
1806 test_hash_speed("crc32c", sec, generic_hash_speed_template);
1807 if (mode > 300 && mode < 400) break;
1808
Herbert Xu684115212013-09-07 12:56:26 +10001809 case 320:
1810 test_hash_speed("crct10dif", sec, generic_hash_speed_template);
1811 if (mode > 300 && mode < 400) break;
1812
Martin Willi2dce0632015-07-16 19:13:59 +02001813 case 321:
1814 test_hash_speed("poly1305", sec, poly1305_speed_template);
1815 if (mode > 300 && mode < 400) break;
1816
raveendra padasalagi79cc6ab2016-06-17 10:30:36 +05301817 case 322:
1818 test_hash_speed("sha3-224", sec, generic_hash_speed_template);
1819 if (mode > 300 && mode < 400) break;
1820
1821 case 323:
1822 test_hash_speed("sha3-256", sec, generic_hash_speed_template);
1823 if (mode > 300 && mode < 400) break;
1824
1825 case 324:
1826 test_hash_speed("sha3-384", sec, generic_hash_speed_template);
1827 if (mode > 300 && mode < 400) break;
1828
1829 case 325:
1830 test_hash_speed("sha3-512", sec, generic_hash_speed_template);
1831 if (mode > 300 && mode < 400) break;
1832
Michal Ludvige8057922006-05-30 22:04:19 +10001833 case 399:
1834 break;
1835
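	/*
	 * Asynchronous hash (ahash) speed tests, organised like the 300
	 * series: mode=400 with alg= times one driver, plain mode=400 falls
	 * through every case, and modes 401-424 each stop after their own
	 * algorithm.  Cases 422-424 go through test_mb_ahash_speed() to
	 * exercise the multi-buffer hash implementations.
	 */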
David S. Millerbeb63da2010-05-19 14:11:21 +10001836 case 400:
Herbert Xu86068132014-12-04 16:43:29 +08001837 if (alg) {
1838 test_ahash_speed(alg, sec, generic_hash_speed_template);
1839 break;
1840 }
1841
David S. Millerbeb63da2010-05-19 14:11:21 +10001842 /* fall through */
1843
1844 case 401:
1845 test_ahash_speed("md4", sec, generic_hash_speed_template);
1846 if (mode > 400 && mode < 500) break;
1847
1848 case 402:
1849 test_ahash_speed("md5", sec, generic_hash_speed_template);
1850 if (mode > 400 && mode < 500) break;
1851
1852 case 403:
1853 test_ahash_speed("sha1", sec, generic_hash_speed_template);
1854 if (mode > 400 && mode < 500) break;
1855
1856 case 404:
1857 test_ahash_speed("sha256", sec, generic_hash_speed_template);
1858 if (mode > 400 && mode < 500) break;
1859
1860 case 405:
1861 test_ahash_speed("sha384", sec, generic_hash_speed_template);
1862 if (mode > 400 && mode < 500) break;
1863
1864 case 406:
1865 test_ahash_speed("sha512", sec, generic_hash_speed_template);
1866 if (mode > 400 && mode < 500) break;
1867
1868 case 407:
1869 test_ahash_speed("wp256", sec, generic_hash_speed_template);
1870 if (mode > 400 && mode < 500) break;
1871
1872 case 408:
1873 test_ahash_speed("wp384", sec, generic_hash_speed_template);
1874 if (mode > 400 && mode < 500) break;
1875
1876 case 409:
1877 test_ahash_speed("wp512", sec, generic_hash_speed_template);
1878 if (mode > 400 && mode < 500) break;
1879
1880 case 410:
1881 test_ahash_speed("tgr128", sec, generic_hash_speed_template);
1882 if (mode > 400 && mode < 500) break;
1883
1884 case 411:
1885 test_ahash_speed("tgr160", sec, generic_hash_speed_template);
1886 if (mode > 400 && mode < 500) break;
1887
1888 case 412:
1889 test_ahash_speed("tgr192", sec, generic_hash_speed_template);
1890 if (mode > 400 && mode < 500) break;
1891
1892 case 413:
1893 test_ahash_speed("sha224", sec, generic_hash_speed_template);
1894 if (mode > 400 && mode < 500) break;
1895
1896 case 414:
1897 test_ahash_speed("rmd128", sec, generic_hash_speed_template);
1898 if (mode > 400 && mode < 500) break;
1899
1900 case 415:
1901 test_ahash_speed("rmd160", sec, generic_hash_speed_template);
1902 if (mode > 400 && mode < 500) break;
1903
1904 case 416:
1905 test_ahash_speed("rmd256", sec, generic_hash_speed_template);
1906 if (mode > 400 && mode < 500) break;
1907
1908 case 417:
1909 test_ahash_speed("rmd320", sec, generic_hash_speed_template);
1910 if (mode > 400 && mode < 500) break;
1911
raveendra padasalagi79cc6ab2016-06-17 10:30:36 +05301912 case 418:
1913 test_ahash_speed("sha3-224", sec, generic_hash_speed_template);
1914 if (mode > 400 && mode < 500) break;
1915
1916 case 419:
1917 test_ahash_speed("sha3-256", sec, generic_hash_speed_template);
1918 if (mode > 400 && mode < 500) break;
1919
1920 case 420:
1921 test_ahash_speed("sha3-384", sec, generic_hash_speed_template);
1922 if (mode > 400 && mode < 500) break;
1923
1924
1925 case 421:
1926 test_ahash_speed("sha3-512", sec, generic_hash_speed_template);
1927 if (mode > 400 && mode < 500) break;
1928
Megha Dey087bcd22016-06-23 18:40:47 -07001929 case 422:
1930 test_mb_ahash_speed("sha1", sec, generic_hash_speed_template);
1931 if (mode > 400 && mode < 500) break;
1932
1933 case 423:
1934 test_mb_ahash_speed("sha256", sec, generic_hash_speed_template);
1935 if (mode > 400 && mode < 500) break;
raveendra padasalagi79cc6ab2016-06-17 10:30:36 +05301936
Megha Dey14009c42016-06-27 10:20:09 -07001937 case 424:
1938 test_mb_ahash_speed("sha512", sec, generic_hash_speed_template);
1939 if (mode > 400 && mode < 500) break;
1940
David S. Millerbeb63da2010-05-19 14:11:21 +10001941 case 499:
1942 break;
1943
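	/*
	 * Asynchronous cipher speed tests (test_acipher_speed).  These mirror
	 * the synchronous 200-series cases but issue requests through the
	 * async cipher interface, so they also cover drivers that only
	 * provide an asynchronous implementation.
	 */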
Jussi Kivilinna3f3baf32011-10-18 00:02:58 +03001944 case 500:
1945 test_acipher_speed("ecb(aes)", ENCRYPT, sec, NULL, 0,
1946 speed_template_16_24_32);
1947 test_acipher_speed("ecb(aes)", DECRYPT, sec, NULL, 0,
1948 speed_template_16_24_32);
1949 test_acipher_speed("cbc(aes)", ENCRYPT, sec, NULL, 0,
1950 speed_template_16_24_32);
1951 test_acipher_speed("cbc(aes)", DECRYPT, sec, NULL, 0,
1952 speed_template_16_24_32);
1953 test_acipher_speed("lrw(aes)", ENCRYPT, sec, NULL, 0,
1954 speed_template_32_40_48);
1955 test_acipher_speed("lrw(aes)", DECRYPT, sec, NULL, 0,
1956 speed_template_32_40_48);
1957 test_acipher_speed("xts(aes)", ENCRYPT, sec, NULL, 0,
1958 speed_template_32_48_64);
1959 test_acipher_speed("xts(aes)", DECRYPT, sec, NULL, 0,
1960 speed_template_32_48_64);
1961 test_acipher_speed("ctr(aes)", ENCRYPT, sec, NULL, 0,
1962 speed_template_16_24_32);
1963 test_acipher_speed("ctr(aes)", DECRYPT, sec, NULL, 0,
1964 speed_template_16_24_32);
Nicolas Royerde1975332012-07-01 19:19:47 +02001965 test_acipher_speed("cfb(aes)", ENCRYPT, sec, NULL, 0,
1966 speed_template_16_24_32);
1967 test_acipher_speed("cfb(aes)", DECRYPT, sec, NULL, 0,
1968 speed_template_16_24_32);
1969 test_acipher_speed("ofb(aes)", ENCRYPT, sec, NULL, 0,
1970 speed_template_16_24_32);
1971 test_acipher_speed("ofb(aes)", DECRYPT, sec, NULL, 0,
1972 speed_template_16_24_32);
Jussi Kivilinna69d31502012-12-28 12:04:58 +02001973 test_acipher_speed("rfc3686(ctr(aes))", ENCRYPT, sec, NULL, 0,
1974 speed_template_20_28_36);
1975 test_acipher_speed("rfc3686(ctr(aes))", DECRYPT, sec, NULL, 0,
1976 speed_template_20_28_36);
Jussi Kivilinna3f3baf32011-10-18 00:02:58 +03001977 break;
1978
1979 case 501:
1980 test_acipher_speed("ecb(des3_ede)", ENCRYPT, sec,
1981 des3_speed_template, DES3_SPEED_VECTORS,
1982 speed_template_24);
1983 test_acipher_speed("ecb(des3_ede)", DECRYPT, sec,
1984 des3_speed_template, DES3_SPEED_VECTORS,
1985 speed_template_24);
1986 test_acipher_speed("cbc(des3_ede)", ENCRYPT, sec,
1987 des3_speed_template, DES3_SPEED_VECTORS,
1988 speed_template_24);
1989 test_acipher_speed("cbc(des3_ede)", DECRYPT, sec,
1990 des3_speed_template, DES3_SPEED_VECTORS,
1991 speed_template_24);
Nicolas Royerde1975332012-07-01 19:19:47 +02001992 test_acipher_speed("cfb(des3_ede)", ENCRYPT, sec,
1993 des3_speed_template, DES3_SPEED_VECTORS,
1994 speed_template_24);
1995 test_acipher_speed("cfb(des3_ede)", DECRYPT, sec,
1996 des3_speed_template, DES3_SPEED_VECTORS,
1997 speed_template_24);
1998 test_acipher_speed("ofb(des3_ede)", ENCRYPT, sec,
1999 des3_speed_template, DES3_SPEED_VECTORS,
2000 speed_template_24);
2001 test_acipher_speed("ofb(des3_ede)", DECRYPT, sec,
2002 des3_speed_template, DES3_SPEED_VECTORS,
2003 speed_template_24);
Jussi Kivilinna3f3baf32011-10-18 00:02:58 +03002004 break;
2005
2006 case 502:
2007 test_acipher_speed("ecb(des)", ENCRYPT, sec, NULL, 0,
2008 speed_template_8);
2009 test_acipher_speed("ecb(des)", DECRYPT, sec, NULL, 0,
2010 speed_template_8);
2011 test_acipher_speed("cbc(des)", ENCRYPT, sec, NULL, 0,
2012 speed_template_8);
2013 test_acipher_speed("cbc(des)", DECRYPT, sec, NULL, 0,
2014 speed_template_8);
Nicolas Royerde1975332012-07-01 19:19:47 +02002015 test_acipher_speed("cfb(des)", ENCRYPT, sec, NULL, 0,
2016 speed_template_8);
2017 test_acipher_speed("cfb(des)", DECRYPT, sec, NULL, 0,
2018 speed_template_8);
2019 test_acipher_speed("ofb(des)", ENCRYPT, sec, NULL, 0,
2020 speed_template_8);
2021 test_acipher_speed("ofb(des)", DECRYPT, sec, NULL, 0,
2022 speed_template_8);
Jussi Kivilinna3f3baf32011-10-18 00:02:58 +03002023 break;
2024
Jussi Kivilinna7fb7fe42011-10-18 00:03:03 +03002025 case 503:
2026 test_acipher_speed("ecb(serpent)", ENCRYPT, sec, NULL, 0,
2027 speed_template_16_32);
2028 test_acipher_speed("ecb(serpent)", DECRYPT, sec, NULL, 0,
2029 speed_template_16_32);
2030 test_acipher_speed("cbc(serpent)", ENCRYPT, sec, NULL, 0,
2031 speed_template_16_32);
2032 test_acipher_speed("cbc(serpent)", DECRYPT, sec, NULL, 0,
2033 speed_template_16_32);
2034 test_acipher_speed("ctr(serpent)", ENCRYPT, sec, NULL, 0,
2035 speed_template_16_32);
2036 test_acipher_speed("ctr(serpent)", DECRYPT, sec, NULL, 0,
2037 speed_template_16_32);
Jussi Kivilinna87aae4b2011-10-18 13:32:39 +03002038 test_acipher_speed("lrw(serpent)", ENCRYPT, sec, NULL, 0,
2039 speed_template_32_48);
2040 test_acipher_speed("lrw(serpent)", DECRYPT, sec, NULL, 0,
2041 speed_template_32_48);
Jussi Kivilinna5209c072011-10-18 13:33:22 +03002042 test_acipher_speed("xts(serpent)", ENCRYPT, sec, NULL, 0,
2043 speed_template_32_64);
2044 test_acipher_speed("xts(serpent)", DECRYPT, sec, NULL, 0,
2045 speed_template_32_64);
Jussi Kivilinna7fb7fe42011-10-18 00:03:03 +03002046 break;
2047
Johannes Goetzfried107778b52012-05-28 15:54:24 +02002048 case 504:
2049 test_acipher_speed("ecb(twofish)", ENCRYPT, sec, NULL, 0,
2050 speed_template_16_24_32);
2051 test_acipher_speed("ecb(twofish)", DECRYPT, sec, NULL, 0,
2052 speed_template_16_24_32);
2053 test_acipher_speed("cbc(twofish)", ENCRYPT, sec, NULL, 0,
2054 speed_template_16_24_32);
2055 test_acipher_speed("cbc(twofish)", DECRYPT, sec, NULL, 0,
2056 speed_template_16_24_32);
2057 test_acipher_speed("ctr(twofish)", ENCRYPT, sec, NULL, 0,
2058 speed_template_16_24_32);
2059 test_acipher_speed("ctr(twofish)", DECRYPT, sec, NULL, 0,
2060 speed_template_16_24_32);
2061 test_acipher_speed("lrw(twofish)", ENCRYPT, sec, NULL, 0,
2062 speed_template_32_40_48);
2063 test_acipher_speed("lrw(twofish)", DECRYPT, sec, NULL, 0,
2064 speed_template_32_40_48);
2065 test_acipher_speed("xts(twofish)", ENCRYPT, sec, NULL, 0,
2066 speed_template_32_48_64);
2067 test_acipher_speed("xts(twofish)", DECRYPT, sec, NULL, 0,
2068 speed_template_32_48_64);
2069 break;
2070
Jussi Kivilinna31b4cd292012-06-12 16:52:04 +08002071 case 505:
2072 test_acipher_speed("ecb(arc4)", ENCRYPT, sec, NULL, 0,
2073 speed_template_8);
2074 break;
2075
Johannes Goetzfrieda2c58262012-07-11 19:37:21 +02002076 case 506:
2077 test_acipher_speed("ecb(cast5)", ENCRYPT, sec, NULL, 0,
2078 speed_template_8_16);
2079 test_acipher_speed("ecb(cast5)", DECRYPT, sec, NULL, 0,
2080 speed_template_8_16);
2081 test_acipher_speed("cbc(cast5)", ENCRYPT, sec, NULL, 0,
2082 speed_template_8_16);
2083 test_acipher_speed("cbc(cast5)", DECRYPT, sec, NULL, 0,
2084 speed_template_8_16);
2085 test_acipher_speed("ctr(cast5)", ENCRYPT, sec, NULL, 0,
2086 speed_template_8_16);
2087 test_acipher_speed("ctr(cast5)", DECRYPT, sec, NULL, 0,
2088 speed_template_8_16);
2089 break;
2090
Johannes Goetzfried9b8b0402012-07-11 19:38:29 +02002091 case 507:
2092 test_acipher_speed("ecb(cast6)", ENCRYPT, sec, NULL, 0,
2093 speed_template_16_32);
2094 test_acipher_speed("ecb(cast6)", DECRYPT, sec, NULL, 0,
2095 speed_template_16_32);
2096 test_acipher_speed("cbc(cast6)", ENCRYPT, sec, NULL, 0,
2097 speed_template_16_32);
2098 test_acipher_speed("cbc(cast6)", DECRYPT, sec, NULL, 0,
2099 speed_template_16_32);
2100 test_acipher_speed("ctr(cast6)", ENCRYPT, sec, NULL, 0,
2101 speed_template_16_32);
2102 test_acipher_speed("ctr(cast6)", DECRYPT, sec, NULL, 0,
2103 speed_template_16_32);
2104 test_acipher_speed("lrw(cast6)", ENCRYPT, sec, NULL, 0,
2105 speed_template_32_48);
2106 test_acipher_speed("lrw(cast6)", DECRYPT, sec, NULL, 0,
2107 speed_template_32_48);
2108 test_acipher_speed("xts(cast6)", ENCRYPT, sec, NULL, 0,
2109 speed_template_32_64);
2110 test_acipher_speed("xts(cast6)", DECRYPT, sec, NULL, 0,
2111 speed_template_32_64);
2112 break;
2113
Jussi Kivilinnabf9c5182012-10-26 14:48:51 +03002114 case 508:
2115 test_acipher_speed("ecb(camellia)", ENCRYPT, sec, NULL, 0,
2116 speed_template_16_32);
2117 test_acipher_speed("ecb(camellia)", DECRYPT, sec, NULL, 0,
2118 speed_template_16_32);
2119 test_acipher_speed("cbc(camellia)", ENCRYPT, sec, NULL, 0,
2120 speed_template_16_32);
2121 test_acipher_speed("cbc(camellia)", DECRYPT, sec, NULL, 0,
2122 speed_template_16_32);
2123 test_acipher_speed("ctr(camellia)", ENCRYPT, sec, NULL, 0,
2124 speed_template_16_32);
2125 test_acipher_speed("ctr(camellia)", DECRYPT, sec, NULL, 0,
2126 speed_template_16_32);
2127 test_acipher_speed("lrw(camellia)", ENCRYPT, sec, NULL, 0,
2128 speed_template_32_48);
2129 test_acipher_speed("lrw(camellia)", DECRYPT, sec, NULL, 0,
2130 speed_template_32_48);
2131 test_acipher_speed("xts(camellia)", ENCRYPT, sec, NULL, 0,
2132 speed_template_32_64);
2133 test_acipher_speed("xts(camellia)", DECRYPT, sec, NULL, 0,
2134 speed_template_32_64);
2135 break;
2136
Jussi Kivilinnaad8b7c32013-04-13 13:46:40 +03002137 case 509:
2138 test_acipher_speed("ecb(blowfish)", ENCRYPT, sec, NULL, 0,
2139 speed_template_8_32);
2140 test_acipher_speed("ecb(blowfish)", DECRYPT, sec, NULL, 0,
2141 speed_template_8_32);
2142 test_acipher_speed("cbc(blowfish)", ENCRYPT, sec, NULL, 0,
2143 speed_template_8_32);
2144 test_acipher_speed("cbc(blowfish)", DECRYPT, sec, NULL, 0,
2145 speed_template_8_32);
2146 test_acipher_speed("ctr(blowfish)", ENCRYPT, sec, NULL, 0,
2147 speed_template_8_32);
2148 test_acipher_speed("ctr(blowfish)", DECRYPT, sec, NULL, 0,
2149 speed_template_8_32);
2150 break;
2151
Linus Torvalds1da177e2005-04-16 15:20:36 -07002152 case 1000:
2153 test_available();
2154 break;
Linus Torvalds1da177e2005-04-16 15:20:36 -07002155 }
Jarod Wilson4e033a62009-05-27 15:10:21 +10002156
2157 return ret;
Linus Torvalds1da177e2005-04-16 15:20:36 -07002158}
2159
Kamalesh Babulal3af5b902008-04-05 21:00:57 +08002160static int __init tcrypt_mod_init(void)
Linus Torvalds1da177e2005-04-16 15:20:36 -07002161{
Mikko Herranene3a4ea42007-11-26 22:12:07 +08002162 int err = -ENOMEM;
Herbert Xuf139cfa2008-07-31 12:23:53 +08002163 int i;
Mikko Herranene3a4ea42007-11-26 22:12:07 +08002164
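	/* Scratch pages used as input/output buffers by the speed tests. */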
Herbert Xuf139cfa2008-07-31 12:23:53 +08002165 for (i = 0; i < TVMEMSIZE; i++) {
2166 tvmem[i] = (void *)__get_free_page(GFP_KERNEL);
2167 if (!tvmem[i])
2168 goto err_free_tv;
2169 }
Linus Torvalds1da177e2005-04-16 15:20:36 -07002170
Herbert Xu86068132014-12-04 16:43:29 +08002171 err = do_test(alg, type, mask, mode);
Steffen Klasserta873a5f2009-06-19 19:46:53 +08002172
Jarod Wilson4e033a62009-05-27 15:10:21 +10002173 if (err) {
2174 printk(KERN_ERR "tcrypt: one or more tests failed!\n");
2175 goto err_free_tv;
2176 }
Linus Torvalds1da177e2005-04-16 15:20:36 -07002177
Jarod Wilson4e033a62009-05-27 15:10:21 +10002178 /* We intentionally return -EAGAIN to prevent the module from staying
2179 * loaded, unless we're running in fips mode. It does all its work from
2180 * init() and doesn't offer any runtime functionality, but in
2181 * the fips case, checking for a successful load is helpful.
Michal Ludvig14fdf472006-05-30 14:49:38 +10002182 * => we don't need it in the memory, do we?
2183 * -- mludvig
2184 */
Jarod Wilson4e033a62009-05-27 15:10:21 +10002185 if (!fips_enabled)
2186 err = -EAGAIN;
Mikko Herranene3a4ea42007-11-26 22:12:07 +08002187
Herbert Xuf139cfa2008-07-31 12:23:53 +08002188err_free_tv:
2189 for (i = 0; i < TVMEMSIZE && tvmem[i]; i++)
2190 free_page((unsigned long)tvmem[i]);
Mikko Herranene3a4ea42007-11-26 22:12:07 +08002191
2192 return err;
Linus Torvalds1da177e2005-04-16 15:20:36 -07002193}
2194
2195/*
2196 * If an init function is provided, an exit function must also be provided
2197 * to allow module unload.
2198 */
Kamalesh Babulal3af5b902008-04-05 21:00:57 +08002199static void __exit tcrypt_mod_fini(void) { }
Linus Torvalds1da177e2005-04-16 15:20:36 -07002200
Kamalesh Babulal3af5b902008-04-05 21:00:57 +08002201module_init(tcrypt_mod_init);
2202module_exit(tcrypt_mod_fini);
Linus Torvalds1da177e2005-04-16 15:20:36 -07002203
Steffen Klasserta873a5f2009-06-19 19:46:53 +08002204module_param(alg, charp, 0);
2205module_param(type, uint, 0);
Herbert Xu7be380f2009-07-14 16:06:54 +08002206module_param(mask, uint, 0);
Linus Torvalds1da177e2005-04-16 15:20:36 -07002207module_param(mode, int, 0);
Harald Welteebfd9bc2005-06-22 13:27:23 -07002208module_param(sec, uint, 0);
Herbert Xu6a179442005-06-22 13:29:03 -07002209MODULE_PARM_DESC(sec, "Length in seconds of speed tests "
2210 "(defaults to zero which uses CPU cycles instead)");
Linus Torvalds1da177e2005-04-16 15:20:36 -07002211
2212MODULE_LICENSE("GPL");
2213MODULE_DESCRIPTION("Quick & dirty crypto testing module");
2214MODULE_AUTHOR("James Morris <jmorris@intercode.com.au>");