/*
 * Quick & dirty crypto testing module.
 *
 * This will only exist until we have a better testing mechanism
 * (e.g. a char device).
 *
 * Copyright (c) 2002 James Morris <jmorris@intercode.com.au>
 * Copyright (c) 2002 Jean-Francois Dive <jef@linuxbe.org>
 * Copyright (c) 2007 Nokia Siemens Networks
 *
 * Updated RFC4106 AES-GCM testing.
 *    Authors: Aidan O'Mahony (aidan.o.mahony@intel.com)
 *             Adrian Hoban <adrian.hoban@intel.com>
 *             Gabriele Paoloni <gabriele.paoloni@intel.com>
 *             Tadeusz Struk (tadeusz.struk@intel.com)
 *             Copyright (c) 2010, Intel Corporation.
 *
 * This program is free software; you can redistribute it and/or modify it
 * under the terms of the GNU General Public License as published by the Free
 * Software Foundation; either version 2 of the License, or (at your option)
 * any later version.
 *
 */

#include <crypto/aead.h>
#include <crypto/hash.h>
#include <linux/err.h>
#include <linux/fips.h>
#include <linux/init.h>
#include <linux/gfp.h>
#include <linux/module.h>
#include <linux/scatterlist.h>
#include <linux/string.h>
#include <linux/moduleparam.h>
#include <linux/jiffies.h>
#include <linux/timex.h>
#include <linux/interrupt.h>
#include "tcrypt.h"

/*
 * Need slab memory for testing (size in number of pages).
 */
#define TVMEMSIZE	4

/*
 * Used by test_cipher_speed()
 */
#define ENCRYPT 1
#define DECRYPT 0

/*
 * return a string with the driver name
 */
#define get_driver_name(tfm_type, tfm) crypto_tfm_alg_driver_name(tfm_type ## _tfm(tfm))

/*
 * Used by test_cipher_speed()
 */
static unsigned int sec;

static char *alg = NULL;
static u32 type;
static u32 mask;
static int mode;
static char *tvmem[TVMEMSIZE];

static char *check[] = {
	"des", "md5", "des3_ede", "rot13", "sha1", "sha224", "sha256",
	"blowfish", "twofish", "serpent", "sha384", "sha512", "md4", "aes",
	"cast6", "arc4", "michael_mic", "deflate", "crc32c", "tea", "xtea",
	"khazad", "wp512", "wp384", "wp256", "tnepres", "xeta", "fcrypt",
	"camellia", "seed", "salsa20", "rmd128", "rmd160", "rmd256", "rmd320",
	"lzo", "cts", "zlib", NULL
};

struct tcrypt_result {
	struct completion completion;
	int err;
};

static void tcrypt_complete(struct crypto_async_request *req, int err)
{
	struct tcrypt_result *res = req->data;

	if (err == -EINPROGRESS)
		return;

	res->err = err;
	complete(&res->completion);
}

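/*
 * Throughput mode: run the blkcipher encrypt/decrypt back to back for
 * "secs" seconds and report the number of operations and bytes processed.
 */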
static int test_cipher_jiffies(struct blkcipher_desc *desc, int enc,
			       struct scatterlist *sg, int blen, int secs)
{
	unsigned long start, end;
	int bcount;
	int ret;

	for (start = jiffies, end = start + secs * HZ, bcount = 0;
	     time_before(jiffies, end); bcount++) {
		if (enc)
			ret = crypto_blkcipher_encrypt(desc, sg, sg, blen);
		else
			ret = crypto_blkcipher_decrypt(desc, sg, sg, blen);

		if (ret)
			return ret;
	}

	printk("%d operations in %d seconds (%ld bytes)\n",
	       bcount, secs, (long)bcount * blen);
	return 0;
}

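/*
 * Cycle-count mode: four untimed warm-up calls, then the average cost of
 * eight timed calls, with local interrupts disabled to reduce jitter.
 */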
static int test_cipher_cycles(struct blkcipher_desc *desc, int enc,
			      struct scatterlist *sg, int blen)
{
	unsigned long cycles = 0;
	int ret = 0;
	int i;

	local_irq_disable();

	/* Warm-up run. */
	for (i = 0; i < 4; i++) {
		if (enc)
			ret = crypto_blkcipher_encrypt(desc, sg, sg, blen);
		else
			ret = crypto_blkcipher_decrypt(desc, sg, sg, blen);

		if (ret)
			goto out;
	}

	/* The real thing. */
	for (i = 0; i < 8; i++) {
		cycles_t start, end;

		start = get_cycles();
		if (enc)
			ret = crypto_blkcipher_encrypt(desc, sg, sg, blen);
		else
			ret = crypto_blkcipher_decrypt(desc, sg, sg, blen);
		end = get_cycles();

		if (ret)
			goto out;

		cycles += end - start;
	}

out:
	local_irq_enable();

	if (ret == 0)
		printk("1 operation in %lu cycles (%d bytes)\n",
		       (cycles + 4) / 8, blen);

	return ret;
}

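/*
 * Complete a single (possibly asynchronous) AEAD operation: if the request
 * was queued (-EINPROGRESS or -EBUSY), wait for the tcrypt_complete()
 * callback and return the final status from the completion context.
 */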
static inline int do_one_aead_op(struct aead_request *req, int ret)
{
	if (ret == -EINPROGRESS || ret == -EBUSY) {
		struct tcrypt_result *tr = req->base.data;

		ret = wait_for_completion_interruptible(&tr->completion);
		if (!ret)
			ret = tr->err;
		reinit_completion(&tr->completion);
	}

	return ret;
}

static int test_aead_jiffies(struct aead_request *req, int enc,
			     int blen, int secs)
{
	unsigned long start, end;
	int bcount;
	int ret;

	for (start = jiffies, end = start + secs * HZ, bcount = 0;
	     time_before(jiffies, end); bcount++) {
		if (enc)
			ret = do_one_aead_op(req, crypto_aead_encrypt(req));
		else
			ret = do_one_aead_op(req, crypto_aead_decrypt(req));

		if (ret)
			return ret;
	}

	printk("%d operations in %d seconds (%ld bytes)\n",
	       bcount, secs, (long)bcount * blen);
	return 0;
}

static int test_aead_cycles(struct aead_request *req, int enc, int blen)
{
	unsigned long cycles = 0;
	int ret = 0;
	int i;

	local_irq_disable();

	/* Warm-up run. */
	for (i = 0; i < 4; i++) {
		if (enc)
			ret = do_one_aead_op(req, crypto_aead_encrypt(req));
		else
			ret = do_one_aead_op(req, crypto_aead_decrypt(req));

		if (ret)
			goto out;
	}

	/* The real thing. */
	for (i = 0; i < 8; i++) {
		cycles_t start, end;

		start = get_cycles();
		if (enc)
			ret = do_one_aead_op(req, crypto_aead_encrypt(req));
		else
			ret = do_one_aead_op(req, crypto_aead_decrypt(req));
		end = get_cycles();

		if (ret)
			goto out;

		cycles += end - start;
	}

out:
	local_irq_enable();

	if (ret == 0)
		printk("1 operation in %lu cycles (%d bytes)\n",
		       (cycles + 4) / 8, blen);

	return ret;
}

static u32 block_sizes[] = { 16, 64, 256, 1024, 8192, 0 };
static u32 aead_sizes[] = { 16, 64, 256, 512, 1024, 2048, 4096, 8192, 0 };

#define XBUFSIZE 8
#define MAX_IVLEN 32

static int testmgr_alloc_buf(char *buf[XBUFSIZE])
{
	int i;

	for (i = 0; i < XBUFSIZE; i++) {
		buf[i] = (void *)__get_free_page(GFP_KERNEL);
		if (!buf[i])
			goto err_free_buf;
	}

	return 0;

err_free_buf:
	while (i-- > 0)
		free_page((unsigned long)buf[i]);

	return -ENOMEM;
}

static void testmgr_free_buf(char *buf[XBUFSIZE])
{
	int i;

	for (i = 0; i < XBUFSIZE; i++)
		free_page((unsigned long)buf[i]);
}

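/*
 * Map "buflen" bytes of the xbuf[] pages into sg[1..n]; sg[0] is left for
 * the caller to point at the associated data.  Requests larger than
 * XBUFSIZE pages are silently capped to XBUFSIZE pages.
 */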
static void sg_init_aead(struct scatterlist *sg, char *xbuf[XBUFSIZE],
			 unsigned int buflen)
{
	int np = (buflen + PAGE_SIZE - 1)/PAGE_SIZE;
	int k, rem;

	if (np > XBUFSIZE) {
		rem = PAGE_SIZE;
		np = XBUFSIZE;
	} else {
		rem = buflen % PAGE_SIZE;
	}

	sg_init_table(sg, np + 1);
	np--;
	for (k = 0; k < np; k++)
		sg_set_buf(&sg[k + 1], xbuf[k], PAGE_SIZE);

	sg_set_buf(&sg[k + 1], xbuf[k], rem);
}

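/*
 * AEAD speed test: for every (key size, block size) combination build
 * source and destination scatterlists plus associated data, then measure
 * either throughput (secs != 0) or per-operation cycles.
 */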
static void test_aead_speed(const char *algo, int enc, unsigned int secs,
			    struct aead_speed_template *template,
			    unsigned int tcount, u8 authsize,
			    unsigned int aad_size, u8 *keysize)
{
	unsigned int i, j;
	struct crypto_aead *tfm;
	int ret = -ENOMEM;
	const char *key;
	struct aead_request *req;
	struct scatterlist *sg;
	struct scatterlist *sgout;
	const char *e;
	void *assoc;
	char *iv;
	char *xbuf[XBUFSIZE];
	char *xoutbuf[XBUFSIZE];
	char *axbuf[XBUFSIZE];
	unsigned int *b_size;
	unsigned int iv_len;
	struct tcrypt_result result;

	iv = kzalloc(MAX_IVLEN, GFP_KERNEL);
	if (!iv)
		return;

	if (aad_size >= PAGE_SIZE) {
		pr_err("associate data length (%u) too big\n", aad_size);
		goto out_noxbuf;
	}

	if (enc == ENCRYPT)
		e = "encryption";
	else
		e = "decryption";

	if (testmgr_alloc_buf(xbuf))
		goto out_noxbuf;
	if (testmgr_alloc_buf(axbuf))
		goto out_noaxbuf;
	if (testmgr_alloc_buf(xoutbuf))
		goto out_nooutbuf;

	sg = kmalloc(sizeof(*sg) * 9 * 2, GFP_KERNEL);
	if (!sg)
		goto out_nosg;
	sgout = &sg[9];

	tfm = crypto_alloc_aead(algo, 0, 0);

	if (IS_ERR(tfm)) {
		pr_err("alg: aead: Failed to load transform for %s: %ld\n", algo,
		       PTR_ERR(tfm));
		goto out_notfm;
	}

	init_completion(&result.completion);
	printk(KERN_INFO "\ntesting speed of %s (%s) %s\n", algo,
	       get_driver_name(crypto_aead, tfm), e);

	req = aead_request_alloc(tfm, GFP_KERNEL);
	if (!req) {
		pr_err("alg: aead: Failed to allocate request for %s\n",
		       algo);
		goto out_noreq;
	}

	aead_request_set_callback(req, CRYPTO_TFM_REQ_MAY_BACKLOG,
				  tcrypt_complete, &result);

	i = 0;
	do {
		b_size = aead_sizes;
		do {
			assoc = axbuf[0];
			memset(assoc, 0xff, aad_size);

			if ((*keysize + *b_size) > TVMEMSIZE * PAGE_SIZE) {
				pr_err("template (%u) too big for tvmem (%lu)\n",
				       *keysize + *b_size,
				       TVMEMSIZE * PAGE_SIZE);
				goto out;
			}

			key = tvmem[0];
			for (j = 0; j < tcount; j++) {
				if (template[j].klen == *keysize) {
					key = template[j].key;
					break;
				}
			}
			ret = crypto_aead_setkey(tfm, key, *keysize);
			ret = crypto_aead_setauthsize(tfm, authsize);

			iv_len = crypto_aead_ivsize(tfm);
			if (iv_len)
				memset(iv, 0xff, iv_len);

			crypto_aead_clear_flags(tfm, ~0);
			printk(KERN_INFO "test %u (%d bit key, %d byte blocks): ",
			       i, *keysize * 8, *b_size);


			memset(tvmem[0], 0xff, PAGE_SIZE);

			if (ret) {
				pr_err("setkey() failed flags=%x\n",
				       crypto_aead_get_flags(tfm));
				goto out;
			}

			sg_init_aead(sg, xbuf,
				     *b_size + (enc ? authsize : 0));

			sg_init_aead(sgout, xoutbuf,
				     *b_size + (enc ? authsize : 0));

			sg_set_buf(&sg[0], assoc, aad_size);
			sg_set_buf(&sgout[0], assoc, aad_size);

			aead_request_set_crypt(req, sg, sgout, *b_size, iv);
			aead_request_set_ad(req, aad_size);

			if (secs)
				ret = test_aead_jiffies(req, enc, *b_size,
							secs);
			else
				ret = test_aead_cycles(req, enc, *b_size);

			if (ret) {
				pr_err("%s() failed return code=%d\n", e, ret);
				break;
			}
			b_size++;
			i++;
		} while (*b_size);
		keysize++;
	} while (*keysize);

out:
	aead_request_free(req);
out_noreq:
	crypto_free_aead(tfm);
out_notfm:
	kfree(sg);
out_nosg:
	testmgr_free_buf(xoutbuf);
out_nooutbuf:
	testmgr_free_buf(axbuf);
out_noaxbuf:
	testmgr_free_buf(xbuf);
out_noxbuf:
	kfree(iv);
	return;
}

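/*
 * Synchronous blkcipher speed test.  The plaintext is spread over the
 * TVMEMSIZE tvmem pages so that multi-page scatterlists get exercised.
 */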
static void test_cipher_speed(const char *algo, int enc, unsigned int secs,
			      struct cipher_speed_template *template,
			      unsigned int tcount, u8 *keysize)
{
	unsigned int ret, i, j, iv_len;
	const char *key;
	char iv[128];
	struct crypto_blkcipher *tfm;
	struct blkcipher_desc desc;
	const char *e;
	u32 *b_size;

	if (enc == ENCRYPT)
		e = "encryption";
	else
		e = "decryption";

	tfm = crypto_alloc_blkcipher(algo, 0, CRYPTO_ALG_ASYNC);

	if (IS_ERR(tfm)) {
		printk("failed to load transform for %s: %ld\n", algo,
		       PTR_ERR(tfm));
		return;
	}
	desc.tfm = tfm;
	desc.flags = 0;

	printk(KERN_INFO "\ntesting speed of %s (%s) %s\n", algo,
	       get_driver_name(crypto_blkcipher, tfm), e);

	i = 0;
	do {

		b_size = block_sizes;
		do {
			struct scatterlist sg[TVMEMSIZE];

			if ((*keysize + *b_size) > TVMEMSIZE * PAGE_SIZE) {
				printk("template (%u) too big for "
				       "tvmem (%lu)\n", *keysize + *b_size,
				       TVMEMSIZE * PAGE_SIZE);
				goto out;
			}

			printk("test %u (%d bit key, %d byte blocks): ", i,
			       *keysize * 8, *b_size);

			memset(tvmem[0], 0xff, PAGE_SIZE);

			/* set key, plain text and IV */
			key = tvmem[0];
			for (j = 0; j < tcount; j++) {
				if (template[j].klen == *keysize) {
					key = template[j].key;
					break;
				}
			}

			ret = crypto_blkcipher_setkey(tfm, key, *keysize);
			if (ret) {
				printk("setkey() failed flags=%x\n",
				       crypto_blkcipher_get_flags(tfm));
				goto out;
			}

			sg_init_table(sg, TVMEMSIZE);
			sg_set_buf(sg, tvmem[0] + *keysize,
				   PAGE_SIZE - *keysize);
			for (j = 1; j < TVMEMSIZE; j++) {
				sg_set_buf(sg + j, tvmem[j], PAGE_SIZE);
				memset(tvmem[j], 0xff, PAGE_SIZE);
			}

			iv_len = crypto_blkcipher_ivsize(tfm);
			if (iv_len) {
				memset(&iv, 0xff, iv_len);
				crypto_blkcipher_set_iv(tfm, iv, iv_len);
			}

			if (secs)
				ret = test_cipher_jiffies(&desc, enc, sg,
							  *b_size, secs);
			else
				ret = test_cipher_cycles(&desc, enc, sg,
							 *b_size);

			if (ret) {
				printk("%s() failed flags=%x\n", e, desc.flags);
				break;
			}
			b_size++;
			i++;
		} while (*b_size);
		keysize++;
	} while (*keysize);

out:
	crypto_free_blkcipher(tfm);
}

static int test_hash_jiffies_digest(struct hash_desc *desc,
				    struct scatterlist *sg, int blen,
				    char *out, int secs)
{
	unsigned long start, end;
	int bcount;
	int ret;

	for (start = jiffies, end = start + secs * HZ, bcount = 0;
	     time_before(jiffies, end); bcount++) {
		ret = crypto_hash_digest(desc, sg, blen, out);
		if (ret)
			return ret;
	}

	printk("%6u opers/sec, %9lu bytes/sec\n",
	       bcount / secs, ((long)bcount * blen) / secs);

	return 0;
}

static int test_hash_jiffies(struct hash_desc *desc, struct scatterlist *sg,
			     int blen, int plen, char *out, int secs)
{
	unsigned long start, end;
	int bcount, pcount;
	int ret;

	if (plen == blen)
		return test_hash_jiffies_digest(desc, sg, blen, out, secs);

	for (start = jiffies, end = start + secs * HZ, bcount = 0;
	     time_before(jiffies, end); bcount++) {
		ret = crypto_hash_init(desc);
		if (ret)
			return ret;
		for (pcount = 0; pcount < blen; pcount += plen) {
			ret = crypto_hash_update(desc, sg, plen);
			if (ret)
				return ret;
		}
		/* we assume there is enough space in 'out' for the result */
		ret = crypto_hash_final(desc, out);
		if (ret)
			return ret;
	}

	printk("%6u opers/sec, %9lu bytes/sec\n",
	       bcount / secs, ((long)bcount * blen) / secs);

	return 0;
}

static int test_hash_cycles_digest(struct hash_desc *desc,
				   struct scatterlist *sg, int blen, char *out)
{
	unsigned long cycles = 0;
	int i;
	int ret;

	local_irq_disable();

	/* Warm-up run. */
	for (i = 0; i < 4; i++) {
		ret = crypto_hash_digest(desc, sg, blen, out);
		if (ret)
			goto out;
	}

	/* The real thing. */
	for (i = 0; i < 8; i++) {
		cycles_t start, end;

		start = get_cycles();

		ret = crypto_hash_digest(desc, sg, blen, out);
		if (ret)
			goto out;

		end = get_cycles();

		cycles += end - start;
	}

out:
	local_irq_enable();

	if (ret)
		return ret;

	printk("%6lu cycles/operation, %4lu cycles/byte\n",
	       cycles / 8, cycles / (8 * blen));

	return 0;
}

static int test_hash_cycles(struct hash_desc *desc, struct scatterlist *sg,
			    int blen, int plen, char *out)
{
	unsigned long cycles = 0;
	int i, pcount;
	int ret;

	if (plen == blen)
		return test_hash_cycles_digest(desc, sg, blen, out);

	local_irq_disable();

	/* Warm-up run. */
	for (i = 0; i < 4; i++) {
		ret = crypto_hash_init(desc);
		if (ret)
			goto out;
		for (pcount = 0; pcount < blen; pcount += plen) {
			ret = crypto_hash_update(desc, sg, plen);
			if (ret)
				goto out;
		}
		ret = crypto_hash_final(desc, out);
		if (ret)
			goto out;
	}

	/* The real thing. */
	for (i = 0; i < 8; i++) {
		cycles_t start, end;

		start = get_cycles();

		ret = crypto_hash_init(desc);
		if (ret)
			goto out;
		for (pcount = 0; pcount < blen; pcount += plen) {
			ret = crypto_hash_update(desc, sg, plen);
			if (ret)
				goto out;
		}
		ret = crypto_hash_final(desc, out);
		if (ret)
			goto out;

		end = get_cycles();

		cycles += end - start;
	}

out:
	local_irq_enable();

	if (ret)
		return ret;

	printk("%6lu cycles/operation, %4lu cycles/byte\n",
	       cycles / 8, cycles / (8 * blen));

	return 0;
}

static void test_hash_sg_init(struct scatterlist *sg)
{
	int i;

	sg_init_table(sg, TVMEMSIZE);
	for (i = 0; i < TVMEMSIZE; i++) {
		sg_set_buf(sg + i, tvmem[i], PAGE_SIZE);
		memset(tvmem[i], 0xff, PAGE_SIZE);
	}
}

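/*
 * Synchronous hash speed test, driven by a hash_speed table of
 * (block length, update length) pairs.
 */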
static void test_hash_speed(const char *algo, unsigned int secs,
			    struct hash_speed *speed)
{
	struct scatterlist sg[TVMEMSIZE];
	struct crypto_hash *tfm;
	struct hash_desc desc;
	static char output[1024];
	int i;
	int ret;

	tfm = crypto_alloc_hash(algo, 0, CRYPTO_ALG_ASYNC);

	if (IS_ERR(tfm)) {
		printk(KERN_ERR "failed to load transform for %s: %ld\n", algo,
		       PTR_ERR(tfm));
		return;
	}

	printk(KERN_INFO "\ntesting speed of %s (%s)\n", algo,
	       get_driver_name(crypto_hash, tfm));

	desc.tfm = tfm;
	desc.flags = 0;

	if (crypto_hash_digestsize(tfm) > sizeof(output)) {
		printk(KERN_ERR "digestsize(%u) > outputbuffer(%zu)\n",
		       crypto_hash_digestsize(tfm), sizeof(output));
		goto out;
	}

	test_hash_sg_init(sg);
	for (i = 0; speed[i].blen != 0; i++) {
		if (speed[i].blen > TVMEMSIZE * PAGE_SIZE) {
			printk(KERN_ERR
			       "template (%u) too big for tvmem (%lu)\n",
			       speed[i].blen, TVMEMSIZE * PAGE_SIZE);
			goto out;
		}

		if (speed[i].klen)
			crypto_hash_setkey(tfm, tvmem[0], speed[i].klen);

		printk(KERN_INFO "test%3u "
		       "(%5u byte blocks,%5u bytes per update,%4u updates): ",
		       i, speed[i].blen, speed[i].plen, speed[i].blen / speed[i].plen);

		if (secs)
			ret = test_hash_jiffies(&desc, sg, speed[i].blen,
						speed[i].plen, output, secs);
		else
			ret = test_hash_cycles(&desc, sg, speed[i].blen,
					       speed[i].plen, output);

		if (ret) {
			printk(KERN_ERR "hashing failed ret=%d\n", ret);
			break;
		}
	}

out:
	crypto_free_hash(tfm);
}

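/*
 * Like do_one_aead_op(), but for ahash requests: wait for asynchronous
 * completion and pick the final status out of the tcrypt_result.
 */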
static inline int do_one_ahash_op(struct ahash_request *req, int ret)
{
	if (ret == -EINPROGRESS || ret == -EBUSY) {
		struct tcrypt_result *tr = req->base.data;

		wait_for_completion(&tr->completion);
		reinit_completion(&tr->completion);
		ret = tr->err;
	}
	return ret;
}

static int test_ahash_jiffies_digest(struct ahash_request *req, int blen,
				     char *out, int secs)
{
	unsigned long start, end;
	int bcount;
	int ret;

	for (start = jiffies, end = start + secs * HZ, bcount = 0;
	     time_before(jiffies, end); bcount++) {
		ret = do_one_ahash_op(req, crypto_ahash_digest(req));
		if (ret)
			return ret;
	}

	printk("%6u opers/sec, %9lu bytes/sec\n",
	       bcount / secs, ((long)bcount * blen) / secs);

	return 0;
}

static int test_ahash_jiffies(struct ahash_request *req, int blen,
			      int plen, char *out, int secs)
{
	unsigned long start, end;
	int bcount, pcount;
	int ret;

	if (plen == blen)
		return test_ahash_jiffies_digest(req, blen, out, secs);

	for (start = jiffies, end = start + secs * HZ, bcount = 0;
	     time_before(jiffies, end); bcount++) {
		ret = do_one_ahash_op(req, crypto_ahash_init(req));
		if (ret)
			return ret;
		for (pcount = 0; pcount < blen; pcount += plen) {
			ret = do_one_ahash_op(req, crypto_ahash_update(req));
			if (ret)
				return ret;
		}
		/* we assume there is enough space in 'out' for the result */
		ret = do_one_ahash_op(req, crypto_ahash_final(req));
		if (ret)
			return ret;
	}

	pr_cont("%6u opers/sec, %9lu bytes/sec\n",
		bcount / secs, ((long)bcount * blen) / secs);

	return 0;
}

static int test_ahash_cycles_digest(struct ahash_request *req, int blen,
				    char *out)
{
	unsigned long cycles = 0;
	int ret, i;

	/* Warm-up run. */
	for (i = 0; i < 4; i++) {
		ret = do_one_ahash_op(req, crypto_ahash_digest(req));
		if (ret)
			goto out;
	}

	/* The real thing. */
	for (i = 0; i < 8; i++) {
		cycles_t start, end;

		start = get_cycles();

		ret = do_one_ahash_op(req, crypto_ahash_digest(req));
		if (ret)
			goto out;

		end = get_cycles();

		cycles += end - start;
	}

out:
	if (ret)
		return ret;

	pr_cont("%6lu cycles/operation, %4lu cycles/byte\n",
		cycles / 8, cycles / (8 * blen));

	return 0;
}

static int test_ahash_cycles(struct ahash_request *req, int blen,
			     int plen, char *out)
{
	unsigned long cycles = 0;
	int i, pcount, ret;

	if (plen == blen)
		return test_ahash_cycles_digest(req, blen, out);

	/* Warm-up run. */
	for (i = 0; i < 4; i++) {
		ret = do_one_ahash_op(req, crypto_ahash_init(req));
		if (ret)
			goto out;
		for (pcount = 0; pcount < blen; pcount += plen) {
			ret = do_one_ahash_op(req, crypto_ahash_update(req));
			if (ret)
				goto out;
		}
		ret = do_one_ahash_op(req, crypto_ahash_final(req));
		if (ret)
			goto out;
	}

	/* The real thing. */
	for (i = 0; i < 8; i++) {
		cycles_t start, end;

		start = get_cycles();

		ret = do_one_ahash_op(req, crypto_ahash_init(req));
		if (ret)
			goto out;
		for (pcount = 0; pcount < blen; pcount += plen) {
			ret = do_one_ahash_op(req, crypto_ahash_update(req));
			if (ret)
				goto out;
		}
		ret = do_one_ahash_op(req, crypto_ahash_final(req));
		if (ret)
			goto out;

		end = get_cycles();

		cycles += end - start;
	}

out:
	if (ret)
		return ret;

	pr_cont("%6lu cycles/operation, %4lu cycles/byte\n",
		cycles / 8, cycles / (8 * blen));

	return 0;
}

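/*
 * Asynchronous hash speed test; uses the same hash_speed tables as the
 * synchronous variant above.
 */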
static void test_ahash_speed(const char *algo, unsigned int secs,
			     struct hash_speed *speed)
{
	struct scatterlist sg[TVMEMSIZE];
	struct tcrypt_result tresult;
	struct ahash_request *req;
	struct crypto_ahash *tfm;
	static char output[1024];
	int i, ret;

	tfm = crypto_alloc_ahash(algo, 0, 0);
	if (IS_ERR(tfm)) {
		pr_err("failed to load transform for %s: %ld\n",
		       algo, PTR_ERR(tfm));
		return;
	}

	printk(KERN_INFO "\ntesting speed of async %s (%s)\n", algo,
	       get_driver_name(crypto_ahash, tfm));

	if (crypto_ahash_digestsize(tfm) > sizeof(output)) {
		pr_err("digestsize(%u) > outputbuffer(%zu)\n",
		       crypto_ahash_digestsize(tfm), sizeof(output));
		goto out;
	}

	test_hash_sg_init(sg);
	req = ahash_request_alloc(tfm, GFP_KERNEL);
	if (!req) {
		pr_err("ahash request allocation failure\n");
		goto out;
	}

	init_completion(&tresult.completion);
	ahash_request_set_callback(req, CRYPTO_TFM_REQ_MAY_BACKLOG,
				   tcrypt_complete, &tresult);

	for (i = 0; speed[i].blen != 0; i++) {
		if (speed[i].blen > TVMEMSIZE * PAGE_SIZE) {
			pr_err("template (%u) too big for tvmem (%lu)\n",
			       speed[i].blen, TVMEMSIZE * PAGE_SIZE);
			break;
		}

		pr_info("test%3u "
			"(%5u byte blocks,%5u bytes per update,%4u updates): ",
			i, speed[i].blen, speed[i].plen, speed[i].blen / speed[i].plen);

		ahash_request_set_crypt(req, sg, output, speed[i].plen);

		if (secs)
			ret = test_ahash_jiffies(req, speed[i].blen,
						 speed[i].plen, output, secs);
		else
			ret = test_ahash_cycles(req, speed[i].blen,
						speed[i].plen, output);

		if (ret) {
			pr_err("hashing failed ret=%d\n", ret);
			break;
		}
	}

	ahash_request_free(req);

out:
	crypto_free_ahash(tfm);
}

static inline int do_one_acipher_op(struct ablkcipher_request *req, int ret)
{
	if (ret == -EINPROGRESS || ret == -EBUSY) {
		struct tcrypt_result *tr = req->base.data;

		wait_for_completion(&tr->completion);
		reinit_completion(&tr->completion);
		ret = tr->err;
	}

	return ret;
}

static int test_acipher_jiffies(struct ablkcipher_request *req, int enc,
				int blen, int secs)
{
	unsigned long start, end;
	int bcount;
	int ret;

	for (start = jiffies, end = start + secs * HZ, bcount = 0;
	     time_before(jiffies, end); bcount++) {
		if (enc)
			ret = do_one_acipher_op(req,
						crypto_ablkcipher_encrypt(req));
		else
			ret = do_one_acipher_op(req,
						crypto_ablkcipher_decrypt(req));

		if (ret)
			return ret;
	}

	pr_cont("%d operations in %d seconds (%ld bytes)\n",
		bcount, secs, (long)bcount * blen);
	return 0;
}

static int test_acipher_cycles(struct ablkcipher_request *req, int enc,
			       int blen)
{
	unsigned long cycles = 0;
	int ret = 0;
	int i;

	/* Warm-up run. */
	for (i = 0; i < 4; i++) {
		if (enc)
			ret = do_one_acipher_op(req,
						crypto_ablkcipher_encrypt(req));
		else
			ret = do_one_acipher_op(req,
						crypto_ablkcipher_decrypt(req));

		if (ret)
			goto out;
	}

	/* The real thing. */
	for (i = 0; i < 8; i++) {
		cycles_t start, end;

		start = get_cycles();
		if (enc)
			ret = do_one_acipher_op(req,
						crypto_ablkcipher_encrypt(req));
		else
			ret = do_one_acipher_op(req,
						crypto_ablkcipher_decrypt(req));
		end = get_cycles();

		if (ret)
			goto out;

		cycles += end - start;
	}

out:
	if (ret == 0)
		pr_cont("1 operation in %lu cycles (%d bytes)\n",
			(cycles + 4) / 8, blen);

	return ret;
}

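/*
 * Asynchronous block cipher speed test.  The scatterlist is sized to span
 * as many tvmem pages as *keysize + *b_size requires, so large blocks
 * exercise the driver's scatter-gather path.
 */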
static void test_acipher_speed(const char *algo, int enc, unsigned int secs,
			       struct cipher_speed_template *template,
			       unsigned int tcount, u8 *keysize)
{
	unsigned int ret, i, j, k, iv_len;
	struct tcrypt_result tresult;
	const char *key;
	char iv[128];
	struct ablkcipher_request *req;
	struct crypto_ablkcipher *tfm;
	const char *e;
	u32 *b_size;

	if (enc == ENCRYPT)
		e = "encryption";
	else
		e = "decryption";

	init_completion(&tresult.completion);

	tfm = crypto_alloc_ablkcipher(algo, 0, 0);

	if (IS_ERR(tfm)) {
		pr_err("failed to load transform for %s: %ld\n", algo,
		       PTR_ERR(tfm));
		return;
	}

	pr_info("\ntesting speed of async %s (%s) %s\n", algo,
		get_driver_name(crypto_ablkcipher, tfm), e);

	req = ablkcipher_request_alloc(tfm, GFP_KERNEL);
	if (!req) {
		pr_err("tcrypt: skcipher: Failed to allocate request for %s\n",
		       algo);
		goto out;
	}

	ablkcipher_request_set_callback(req, CRYPTO_TFM_REQ_MAY_BACKLOG,
					tcrypt_complete, &tresult);

	i = 0;
	do {
		b_size = block_sizes;

		do {
			struct scatterlist sg[TVMEMSIZE];

			if ((*keysize + *b_size) > TVMEMSIZE * PAGE_SIZE) {
				pr_err("template (%u) too big for "
				       "tvmem (%lu)\n", *keysize + *b_size,
				       TVMEMSIZE * PAGE_SIZE);
				goto out_free_req;
			}

			pr_info("test %u (%d bit key, %d byte blocks): ", i,
				*keysize * 8, *b_size);

			memset(tvmem[0], 0xff, PAGE_SIZE);

			/* set key, plain text and IV */
			key = tvmem[0];
			for (j = 0; j < tcount; j++) {
				if (template[j].klen == *keysize) {
					key = template[j].key;
					break;
				}
			}

			crypto_ablkcipher_clear_flags(tfm, ~0);

			ret = crypto_ablkcipher_setkey(tfm, key, *keysize);
			if (ret) {
				pr_err("setkey() failed flags=%x\n",
				       crypto_ablkcipher_get_flags(tfm));
				goto out_free_req;
			}

			k = *keysize + *b_size;
			sg_init_table(sg, DIV_ROUND_UP(k, PAGE_SIZE));

			if (k > PAGE_SIZE) {
				sg_set_buf(sg, tvmem[0] + *keysize,
					   PAGE_SIZE - *keysize);
				k -= PAGE_SIZE;
				j = 1;
				while (k > PAGE_SIZE) {
					sg_set_buf(sg + j, tvmem[j], PAGE_SIZE);
					memset(tvmem[j], 0xff, PAGE_SIZE);
					j++;
					k -= PAGE_SIZE;
				}
				sg_set_buf(sg + j, tvmem[j], k);
				memset(tvmem[j], 0xff, k);
			} else {
				sg_set_buf(sg, tvmem[0] + *keysize, *b_size);
			}

			iv_len = crypto_ablkcipher_ivsize(tfm);
			if (iv_len)
				memset(&iv, 0xff, iv_len);

			ablkcipher_request_set_crypt(req, sg, sg, *b_size, iv);

			if (secs)
				ret = test_acipher_jiffies(req, enc,
							   *b_size, secs);
			else
				ret = test_acipher_cycles(req, enc,
							  *b_size);

			if (ret) {
				pr_err("%s() failed flags=%x\n", e,
				       crypto_ablkcipher_get_flags(tfm));
				break;
			}
			b_size++;
			i++;
		} while (*b_size);
		keysize++;
	} while (*keysize);

out_free_req:
	ablkcipher_request_free(req);
out:
	crypto_free_ablkcipher(tfm);
}

static void test_available(void)
{
	char **name = check;

	while (*name) {
		printk("alg %s ", *name);
		printk(crypto_has_alg(*name, 0, 0) ?
		       "found\n" : "not found\n");
		name++;
	}
}

static inline int tcrypt_test(const char *alg)
{
	int ret;

	ret = alg_test(alg, alg, 0, 0);
	/* non-fips algs return -EINVAL in fips mode */
	if (fips_enabled && ret == -EINVAL)
		ret = 0;
	return ret;
}

Herbert Xu01b32322008-07-31 15:41:55 +08001252{
1253 int i;
Jarod Wilson4e033a62009-05-27 15:10:21 +10001254 int ret = 0;
Herbert Xu01b32322008-07-31 15:41:55 +08001255
1256 switch (m) {
Linus Torvalds1da177e2005-04-16 15:20:36 -07001257 case 0:
Herbert Xu86068132014-12-04 16:43:29 +08001258 if (alg) {
1259 if (!crypto_has_alg(alg, type,
1260 mask ?: CRYPTO_ALG_TYPE_MASK))
1261 ret = -ENOENT;
1262 break;
1263 }
1264
Herbert Xu01b32322008-07-31 15:41:55 +08001265 for (i = 1; i < 200; i++)
Herbert Xu86068132014-12-04 16:43:29 +08001266 ret += do_test(NULL, 0, 0, i);
Linus Torvalds1da177e2005-04-16 15:20:36 -07001267 break;
1268
1269 case 1:
Jarod Wilson4e033a62009-05-27 15:10:21 +10001270 ret += tcrypt_test("md5");
Linus Torvalds1da177e2005-04-16 15:20:36 -07001271 break;
1272
1273 case 2:
Jarod Wilson4e033a62009-05-27 15:10:21 +10001274 ret += tcrypt_test("sha1");
Linus Torvalds1da177e2005-04-16 15:20:36 -07001275 break;
1276
1277 case 3:
Jarod Wilson4e033a62009-05-27 15:10:21 +10001278 ret += tcrypt_test("ecb(des)");
1279 ret += tcrypt_test("cbc(des)");
Jussi Kivilinna8163fc32012-10-20 14:53:07 +03001280 ret += tcrypt_test("ctr(des)");
Linus Torvalds1da177e2005-04-16 15:20:36 -07001281 break;
1282
1283 case 4:
Jarod Wilson4e033a62009-05-27 15:10:21 +10001284 ret += tcrypt_test("ecb(des3_ede)");
1285 ret += tcrypt_test("cbc(des3_ede)");
Jussi Kivilinnae080b172012-10-20 14:53:12 +03001286 ret += tcrypt_test("ctr(des3_ede)");
Linus Torvalds1da177e2005-04-16 15:20:36 -07001287 break;
1288
1289 case 5:
Jarod Wilson4e033a62009-05-27 15:10:21 +10001290 ret += tcrypt_test("md4");
Linus Torvalds1da177e2005-04-16 15:20:36 -07001291 break;
Herbert Xuef2736f2005-06-22 13:26:03 -07001292
Linus Torvalds1da177e2005-04-16 15:20:36 -07001293 case 6:
Jarod Wilson4e033a62009-05-27 15:10:21 +10001294 ret += tcrypt_test("sha256");
Linus Torvalds1da177e2005-04-16 15:20:36 -07001295 break;
Herbert Xuef2736f2005-06-22 13:26:03 -07001296
Linus Torvalds1da177e2005-04-16 15:20:36 -07001297 case 7:
Jarod Wilson4e033a62009-05-27 15:10:21 +10001298 ret += tcrypt_test("ecb(blowfish)");
1299 ret += tcrypt_test("cbc(blowfish)");
Jussi Kivilinna85b63e32011-10-10 23:03:03 +03001300 ret += tcrypt_test("ctr(blowfish)");
Linus Torvalds1da177e2005-04-16 15:20:36 -07001301 break;
1302
1303 case 8:
Jarod Wilson4e033a62009-05-27 15:10:21 +10001304 ret += tcrypt_test("ecb(twofish)");
1305 ret += tcrypt_test("cbc(twofish)");
Jussi Kivilinna573da622011-10-10 23:03:12 +03001306 ret += tcrypt_test("ctr(twofish)");
Jussi Kivilinnabee3a902011-10-18 13:32:56 +03001307 ret += tcrypt_test("lrw(twofish)");
Jussi Kivilinna131f7542011-10-18 13:33:38 +03001308 ret += tcrypt_test("xts(twofish)");
Linus Torvalds1da177e2005-04-16 15:20:36 -07001309 break;
Herbert Xuef2736f2005-06-22 13:26:03 -07001310
Linus Torvalds1da177e2005-04-16 15:20:36 -07001311 case 9:
Jarod Wilson4e033a62009-05-27 15:10:21 +10001312 ret += tcrypt_test("ecb(serpent)");
Jussi Kivilinna9d259172011-10-18 00:02:53 +03001313 ret += tcrypt_test("cbc(serpent)");
1314 ret += tcrypt_test("ctr(serpent)");
Jussi Kivilinna87aae4b2011-10-18 13:32:39 +03001315 ret += tcrypt_test("lrw(serpent)");
Jussi Kivilinna5209c072011-10-18 13:33:22 +03001316 ret += tcrypt_test("xts(serpent)");
Linus Torvalds1da177e2005-04-16 15:20:36 -07001317 break;
1318
1319 case 10:
Jarod Wilson4e033a62009-05-27 15:10:21 +10001320 ret += tcrypt_test("ecb(aes)");
1321 ret += tcrypt_test("cbc(aes)");
1322 ret += tcrypt_test("lrw(aes)");
1323 ret += tcrypt_test("xts(aes)");
1324 ret += tcrypt_test("ctr(aes)");
1325 ret += tcrypt_test("rfc3686(ctr(aes))");
Linus Torvalds1da177e2005-04-16 15:20:36 -07001326 break;
1327
1328 case 11:
Jarod Wilson4e033a62009-05-27 15:10:21 +10001329 ret += tcrypt_test("sha384");
Linus Torvalds1da177e2005-04-16 15:20:36 -07001330 break;
Herbert Xuef2736f2005-06-22 13:26:03 -07001331
Linus Torvalds1da177e2005-04-16 15:20:36 -07001332 case 12:
Jarod Wilson4e033a62009-05-27 15:10:21 +10001333 ret += tcrypt_test("sha512");
Linus Torvalds1da177e2005-04-16 15:20:36 -07001334 break;
1335
1336 case 13:
Jarod Wilson4e033a62009-05-27 15:10:21 +10001337 ret += tcrypt_test("deflate");
Linus Torvalds1da177e2005-04-16 15:20:36 -07001338 break;
1339
1340 case 14:
Jarod Wilson4e033a62009-05-27 15:10:21 +10001341 ret += tcrypt_test("ecb(cast5)");
Johannes Goetzfrieda2c58262012-07-11 19:37:21 +02001342 ret += tcrypt_test("cbc(cast5)");
1343 ret += tcrypt_test("ctr(cast5)");
Linus Torvalds1da177e2005-04-16 15:20:36 -07001344 break;
1345
1346 case 15:
Jarod Wilson4e033a62009-05-27 15:10:21 +10001347 ret += tcrypt_test("ecb(cast6)");
Johannes Goetzfried9b8b0402012-07-11 19:38:29 +02001348 ret += tcrypt_test("cbc(cast6)");
1349 ret += tcrypt_test("ctr(cast6)");
1350 ret += tcrypt_test("lrw(cast6)");
1351 ret += tcrypt_test("xts(cast6)");
Linus Torvalds1da177e2005-04-16 15:20:36 -07001352 break;
1353
1354 case 16:
Jarod Wilson4e033a62009-05-27 15:10:21 +10001355 ret += tcrypt_test("ecb(arc4)");
Linus Torvalds1da177e2005-04-16 15:20:36 -07001356 break;
1357
1358 case 17:
Jarod Wilson4e033a62009-05-27 15:10:21 +10001359 ret += tcrypt_test("michael_mic");
Linus Torvalds1da177e2005-04-16 15:20:36 -07001360 break;
1361
1362 case 18:
Jarod Wilson4e033a62009-05-27 15:10:21 +10001363 ret += tcrypt_test("crc32c");
Linus Torvalds1da177e2005-04-16 15:20:36 -07001364 break;
1365
1366 case 19:
Jarod Wilson4e033a62009-05-27 15:10:21 +10001367 ret += tcrypt_test("ecb(tea)");
Linus Torvalds1da177e2005-04-16 15:20:36 -07001368 break;
1369
1370 case 20:
Jarod Wilson4e033a62009-05-27 15:10:21 +10001371 ret += tcrypt_test("ecb(xtea)");
Linus Torvalds1da177e2005-04-16 15:20:36 -07001372 break;
1373
1374 case 21:
Jarod Wilson4e033a62009-05-27 15:10:21 +10001375 ret += tcrypt_test("ecb(khazad)");
Linus Torvalds1da177e2005-04-16 15:20:36 -07001376 break;
1377
1378 case 22:
Jarod Wilson4e033a62009-05-27 15:10:21 +10001379 ret += tcrypt_test("wp512");
Linus Torvalds1da177e2005-04-16 15:20:36 -07001380 break;
1381
1382 case 23:
Jarod Wilson4e033a62009-05-27 15:10:21 +10001383 ret += tcrypt_test("wp384");
Linus Torvalds1da177e2005-04-16 15:20:36 -07001384 break;
1385
1386 case 24:
Jarod Wilson4e033a62009-05-27 15:10:21 +10001387 ret += tcrypt_test("wp256");
Linus Torvalds1da177e2005-04-16 15:20:36 -07001388 break;
1389
1390 case 25:
Jarod Wilson4e033a62009-05-27 15:10:21 +10001391 ret += tcrypt_test("ecb(tnepres)");
Linus Torvalds1da177e2005-04-16 15:20:36 -07001392 break;
1393
1394 case 26:
Jarod Wilson4e033a62009-05-27 15:10:21 +10001395 ret += tcrypt_test("ecb(anubis)");
1396 ret += tcrypt_test("cbc(anubis)");
Linus Torvalds1da177e2005-04-16 15:20:36 -07001397 break;
1398
1399 case 27:
Jarod Wilson4e033a62009-05-27 15:10:21 +10001400 ret += tcrypt_test("tgr192");
Linus Torvalds1da177e2005-04-16 15:20:36 -07001401 break;
1402
1403 case 28:
Jarod Wilson4e033a62009-05-27 15:10:21 +10001404 ret += tcrypt_test("tgr160");
Linus Torvalds1da177e2005-04-16 15:20:36 -07001405 break;
1406
1407 case 29:
Jarod Wilson4e033a62009-05-27 15:10:21 +10001408 ret += tcrypt_test("tgr128");
Linus Torvalds1da177e2005-04-16 15:20:36 -07001409 break;
Adrian-Ken Rueegsegger2998db32008-05-09 21:29:35 +08001410
Aaron Grothefb4f10e2005-09-01 17:42:46 -07001411 case 30:
Jarod Wilson4e033a62009-05-27 15:10:21 +10001412 ret += tcrypt_test("ecb(xeta)");
Aaron Grothefb4f10e2005-09-01 17:42:46 -07001413 break;
Linus Torvalds1da177e2005-04-16 15:20:36 -07001414
David Howells90831632006-12-16 12:13:14 +11001415 case 31:
Jarod Wilson4e033a62009-05-27 15:10:21 +10001416 ret += tcrypt_test("pcbc(fcrypt)");
David Howells90831632006-12-16 12:13:14 +11001417 break;
1418
Noriaki TAKAMIYA02ab5a72007-01-24 21:48:19 +11001419 case 32:
Jarod Wilson4e033a62009-05-27 15:10:21 +10001420 ret += tcrypt_test("ecb(camellia)");
1421 ret += tcrypt_test("cbc(camellia)");
Jussi Kivilinna54216bb2012-09-21 10:27:10 +03001422 ret += tcrypt_test("ctr(camellia)");
1423 ret += tcrypt_test("lrw(camellia)");
1424 ret += tcrypt_test("xts(camellia)");
Noriaki TAKAMIYA02ab5a72007-01-24 21:48:19 +11001425 break;
Jussi Kivilinna93b5e862013-04-08 10:48:44 +03001426
Jonathan Lynchcd12fb92007-11-10 20:08:25 +08001427 case 33:
Jarod Wilson4e033a62009-05-27 15:10:21 +10001428 ret += tcrypt_test("sha224");
Jonathan Lynchcd12fb92007-11-10 20:08:25 +08001429 break;
Noriaki TAKAMIYA02ab5a72007-01-24 21:48:19 +11001430
Tan Swee Heng2407d602007-11-23 19:45:00 +08001431 case 34:
Jarod Wilson4e033a62009-05-27 15:10:21 +10001432 ret += tcrypt_test("salsa20");
Tan Swee Heng2407d602007-11-23 19:45:00 +08001433 break;
1434
Herbert Xu8df213d2007-12-02 14:55:47 +11001435 case 35:
Jarod Wilson4e033a62009-05-27 15:10:21 +10001436 ret += tcrypt_test("gcm(aes)");
Herbert Xu8df213d2007-12-02 14:55:47 +11001437 break;
1438
Zoltan Sogor0b77abb2007-12-07 16:53:23 +08001439 case 36:
Jarod Wilson4e033a62009-05-27 15:10:21 +10001440 ret += tcrypt_test("lzo");
Zoltan Sogor0b77abb2007-12-07 16:53:23 +08001441 break;
1442
Joy Latten93cc74e2007-12-12 20:24:22 +08001443 case 37:
Jarod Wilson4e033a62009-05-27 15:10:21 +10001444 ret += tcrypt_test("ccm(aes)");
Joy Latten93cc74e2007-12-12 20:24:22 +08001445 break;
1446
Kevin Coffman76cb9522008-03-24 21:26:16 +08001447 case 38:
Jarod Wilson4e033a62009-05-27 15:10:21 +10001448 ret += tcrypt_test("cts(cbc(aes))");
Kevin Coffman76cb9522008-03-24 21:26:16 +08001449 break;
1450
Adrian-Ken Rueegseggerfd4adf12008-05-07 22:16:36 +08001451 case 39:
Jarod Wilson4e033a62009-05-27 15:10:21 +10001452 ret += tcrypt_test("rmd128");
Adrian-Ken Rueegseggerfd4adf12008-05-07 22:16:36 +08001453 break;
1454
1455 case 40:
Jarod Wilson4e033a62009-05-27 15:10:21 +10001456 ret += tcrypt_test("rmd160");
Adrian-Ken Rueegseggerfd4adf12008-05-07 22:16:36 +08001457 break;
1458
Adrian-Ken Rueegsegger2998db32008-05-09 21:29:35 +08001459 case 41:
Jarod Wilson4e033a62009-05-27 15:10:21 +10001460 ret += tcrypt_test("rmd256");
Adrian-Ken Rueegsegger2998db32008-05-09 21:29:35 +08001461 break;
1462
1463 case 42:
Jarod Wilson4e033a62009-05-27 15:10:21 +10001464 ret += tcrypt_test("rmd320");
Herbert Xu01b32322008-07-31 15:41:55 +08001465 break;
1466
1467 case 43:
Jarod Wilson4e033a62009-05-27 15:10:21 +10001468 ret += tcrypt_test("ecb(seed)");
Adrian-Ken Rueegsegger2998db32008-05-09 21:29:35 +08001469 break;
1470
Geert Uytterhoeven0c01aed2009-03-04 15:42:15 +08001471 case 44:
Jarod Wilson4e033a62009-05-27 15:10:21 +10001472 ret += tcrypt_test("zlib");
Geert Uytterhoeven0c01aed2009-03-04 15:42:15 +08001473 break;
1474
Jarod Wilson5d667322009-05-04 19:23:40 +08001475 case 45:
Jarod Wilson4e033a62009-05-27 15:10:21 +10001476 ret += tcrypt_test("rfc4309(ccm(aes))");
Jarod Wilson5d667322009-05-04 19:23:40 +08001477 break;
1478
Jussi Kivilinna54216bb2012-09-21 10:27:10 +03001479 case 46:
1480 ret += tcrypt_test("ghash");
1481 break;
1482
Herbert Xu684115212013-09-07 12:56:26 +10001483 case 47:
1484 ret += tcrypt_test("crct10dif");
1485 break;
1486
Linus Torvalds1da177e2005-04-16 15:20:36 -07001487 case 100:
Jarod Wilson4e033a62009-05-27 15:10:21 +10001488 ret += tcrypt_test("hmac(md5)");
Linus Torvalds1da177e2005-04-16 15:20:36 -07001489 break;
Herbert Xuef2736f2005-06-22 13:26:03 -07001490
Linus Torvalds1da177e2005-04-16 15:20:36 -07001491 case 101:
Jarod Wilson4e033a62009-05-27 15:10:21 +10001492 ret += tcrypt_test("hmac(sha1)");
Linus Torvalds1da177e2005-04-16 15:20:36 -07001493 break;
Herbert Xuef2736f2005-06-22 13:26:03 -07001494
Linus Torvalds1da177e2005-04-16 15:20:36 -07001495 case 102:
Jarod Wilson4e033a62009-05-27 15:10:21 +10001496 ret += tcrypt_test("hmac(sha256)");
Linus Torvalds1da177e2005-04-16 15:20:36 -07001497 break;
1498
Andrew Donofrioa28091a2006-12-10 12:10:20 +11001499 case 103:
Jarod Wilson4e033a62009-05-27 15:10:21 +10001500 ret += tcrypt_test("hmac(sha384)");
Andrew Donofrioa28091a2006-12-10 12:10:20 +11001501 break;
1502
1503 case 104:
Jarod Wilson4e033a62009-05-27 15:10:21 +10001504 ret += tcrypt_test("hmac(sha512)");
Andrew Donofrioa28091a2006-12-10 12:10:20 +11001505 break;
Herbert Xu38ed9ab2008-01-01 15:59:28 +11001506
Jonathan Lynchcd12fb92007-11-10 20:08:25 +08001507 case 105:
Jarod Wilson4e033a62009-05-27 15:10:21 +10001508 ret += tcrypt_test("hmac(sha224)");
Jonathan Lynchcd12fb92007-11-10 20:08:25 +08001509 break;
Linus Torvalds1da177e2005-04-16 15:20:36 -07001510
Herbert Xu38ed9ab2008-01-01 15:59:28 +11001511 case 106:
Jarod Wilson4e033a62009-05-27 15:10:21 +10001512 ret += tcrypt_test("xcbc(aes)");
Herbert Xu38ed9ab2008-01-01 15:59:28 +11001513 break;
1514
Adrian-Ken Rueegseggerfd4adf12008-05-07 22:16:36 +08001515 case 107:
Jarod Wilson4e033a62009-05-27 15:10:21 +10001516 ret += tcrypt_test("hmac(rmd128)");
Adrian-Ken Rueegseggerfd4adf12008-05-07 22:16:36 +08001517 break;
1518
1519 case 108:
Jarod Wilson4e033a62009-05-27 15:10:21 +10001520 ret += tcrypt_test("hmac(rmd160)");
Adrian-Ken Rueegseggerfd4adf12008-05-07 22:16:36 +08001521 break;
1522
Shane Wangf1939f72009-09-02 20:05:22 +10001523 case 109:
1524 ret += tcrypt_test("vmac(aes)");
1525 break;
Jussi Kivilinna93b5e862013-04-08 10:48:44 +03001526
Sonic Zhanga482b082012-05-25 17:54:13 +08001527 case 110:
1528 ret += tcrypt_test("hmac(crc32)");
1529 break;
Shane Wangf1939f72009-09-02 20:05:22 +10001530
Jarod Wilsone08ca2d2009-05-04 19:46:29 +08001531 case 150:
Jarod Wilson4e033a62009-05-27 15:10:21 +10001532 ret += tcrypt_test("ansi_cprng");
Jarod Wilsone08ca2d2009-05-04 19:46:29 +08001533 break;
1534
Adrian Hoban69435b92010-11-04 15:02:04 -04001535 case 151:
1536 ret += tcrypt_test("rfc4106(gcm(aes))");
1537 break;
1538
Jussi Kivilinnae9b74412013-04-07 16:43:51 +03001539 case 152:
1540 ret += tcrypt_test("rfc4543(gcm(aes))");
1541 break;
1542
Jussi Kivilinna93b5e862013-04-08 10:48:44 +03001543 case 153:
1544 ret += tcrypt_test("cmac(aes)");
1545 break;
1546
1547 case 154:
1548 ret += tcrypt_test("cmac(des3_ede)");
1549 break;
1550
Horia Geantabbf9c892013-11-28 15:11:16 +02001551 case 155:
1552 ret += tcrypt_test("authenc(hmac(sha1),cbc(aes))");
1553 break;
1554
Horia Geantabca4feb2014-03-14 17:46:51 +02001555 case 156:
1556 ret += tcrypt_test("authenc(hmac(md5),ecb(cipher_null))");
1557 break;
1558
1559 case 157:
1560 ret += tcrypt_test("authenc(hmac(sha1),ecb(cipher_null))");
1561 break;
Nitesh Lal5208ed22014-05-21 17:09:08 +05301562 case 181:
1563 ret += tcrypt_test("authenc(hmac(sha1),cbc(des))");
1564 break;
1565 case 182:
1566 ret += tcrypt_test("authenc(hmac(sha1),cbc(des3_ede))");
1567 break;
1568 case 183:
1569 ret += tcrypt_test("authenc(hmac(sha224),cbc(des))");
1570 break;
1571 case 184:
1572 ret += tcrypt_test("authenc(hmac(sha224),cbc(des3_ede))");
1573 break;
1574 case 185:
1575 ret += tcrypt_test("authenc(hmac(sha256),cbc(des))");
1576 break;
1577 case 186:
1578 ret += tcrypt_test("authenc(hmac(sha256),cbc(des3_ede))");
1579 break;
1580 case 187:
1581 ret += tcrypt_test("authenc(hmac(sha384),cbc(des))");
1582 break;
1583 case 188:
1584 ret += tcrypt_test("authenc(hmac(sha384),cbc(des3_ede))");
1585 break;
1586 case 189:
1587 ret += tcrypt_test("authenc(hmac(sha512),cbc(des))");
1588 break;
1589 case 190:
1590 ret += tcrypt_test("authenc(hmac(sha512),cbc(des3_ede))");
1591 break;
Harald Welteebfd9bc2005-06-22 13:27:23 -07001592 case 200:
Herbert Xucba83562006-08-13 08:26:09 +10001593 test_cipher_speed("ecb(aes)", ENCRYPT, sec, NULL, 0,
Sebastian Siewior477035c2008-03-11 21:24:26 +08001594 speed_template_16_24_32);
Herbert Xucba83562006-08-13 08:26:09 +10001595 test_cipher_speed("ecb(aes)", DECRYPT, sec, NULL, 0,
Sebastian Siewior477035c2008-03-11 21:24:26 +08001596 speed_template_16_24_32);
Herbert Xucba83562006-08-13 08:26:09 +10001597 test_cipher_speed("cbc(aes)", ENCRYPT, sec, NULL, 0,
Sebastian Siewior477035c2008-03-11 21:24:26 +08001598 speed_template_16_24_32);
Herbert Xucba83562006-08-13 08:26:09 +10001599 test_cipher_speed("cbc(aes)", DECRYPT, sec, NULL, 0,
Sebastian Siewior477035c2008-03-11 21:24:26 +08001600 speed_template_16_24_32);
Rik Snelf3d10442006-11-29 19:01:41 +11001601 test_cipher_speed("lrw(aes)", ENCRYPT, sec, NULL, 0,
Sebastian Siewior477035c2008-03-11 21:24:26 +08001602 speed_template_32_40_48);
Rik Snelf3d10442006-11-29 19:01:41 +11001603 test_cipher_speed("lrw(aes)", DECRYPT, sec, NULL, 0,
Sebastian Siewior477035c2008-03-11 21:24:26 +08001604 speed_template_32_40_48);
Rik Snelf19f5112007-09-19 20:23:13 +08001605 test_cipher_speed("xts(aes)", ENCRYPT, sec, NULL, 0,
Sebastian Siewior477035c2008-03-11 21:24:26 +08001606 speed_template_32_48_64);
Rik Snelf19f5112007-09-19 20:23:13 +08001607 test_cipher_speed("xts(aes)", DECRYPT, sec, NULL, 0,
Sebastian Siewior477035c2008-03-11 21:24:26 +08001608 speed_template_32_48_64);
Jan Glauber9996e342011-04-26 16:34:01 +10001609 test_cipher_speed("ctr(aes)", ENCRYPT, sec, NULL, 0,
1610 speed_template_16_24_32);
1611 test_cipher_speed("ctr(aes)", DECRYPT, sec, NULL, 0,
1612 speed_template_16_24_32);
Harald Welteebfd9bc2005-06-22 13:27:23 -07001613 break;
1614
1615 case 201:
Herbert Xucba83562006-08-13 08:26:09 +10001616 test_cipher_speed("ecb(des3_ede)", ENCRYPT, sec,
Herbert Xuda7f0332008-07-31 17:08:25 +08001617 des3_speed_template, DES3_SPEED_VECTORS,
Sebastian Siewior477035c2008-03-11 21:24:26 +08001618 speed_template_24);
Herbert Xucba83562006-08-13 08:26:09 +10001619 test_cipher_speed("ecb(des3_ede)", DECRYPT, sec,
Herbert Xuda7f0332008-07-31 17:08:25 +08001620 des3_speed_template, DES3_SPEED_VECTORS,
Sebastian Siewior477035c2008-03-11 21:24:26 +08001621 speed_template_24);
Herbert Xucba83562006-08-13 08:26:09 +10001622 test_cipher_speed("cbc(des3_ede)", ENCRYPT, sec,
Herbert Xuda7f0332008-07-31 17:08:25 +08001623 des3_speed_template, DES3_SPEED_VECTORS,
Sebastian Siewior477035c2008-03-11 21:24:26 +08001624 speed_template_24);
Herbert Xucba83562006-08-13 08:26:09 +10001625 test_cipher_speed("cbc(des3_ede)", DECRYPT, sec,
Herbert Xuda7f0332008-07-31 17:08:25 +08001626 des3_speed_template, DES3_SPEED_VECTORS,
Sebastian Siewior477035c2008-03-11 21:24:26 +08001627 speed_template_24);
Jussi Kivilinna87131502014-06-09 20:59:49 +03001628 test_cipher_speed("ctr(des3_ede)", ENCRYPT, sec,
1629 des3_speed_template, DES3_SPEED_VECTORS,
1630 speed_template_24);
1631 test_cipher_speed("ctr(des3_ede)", DECRYPT, sec,
1632 des3_speed_template, DES3_SPEED_VECTORS,
1633 speed_template_24);
Harald Welteebfd9bc2005-06-22 13:27:23 -07001634 break;
1635
1636 case 202:
Herbert Xucba83562006-08-13 08:26:09 +10001637 test_cipher_speed("ecb(twofish)", ENCRYPT, sec, NULL, 0,
Sebastian Siewior477035c2008-03-11 21:24:26 +08001638 speed_template_16_24_32);
Herbert Xucba83562006-08-13 08:26:09 +10001639 test_cipher_speed("ecb(twofish)", DECRYPT, sec, NULL, 0,
Sebastian Siewior477035c2008-03-11 21:24:26 +08001640 speed_template_16_24_32);
Herbert Xucba83562006-08-13 08:26:09 +10001641 test_cipher_speed("cbc(twofish)", ENCRYPT, sec, NULL, 0,
Sebastian Siewior477035c2008-03-11 21:24:26 +08001642 speed_template_16_24_32);
Herbert Xucba83562006-08-13 08:26:09 +10001643 test_cipher_speed("cbc(twofish)", DECRYPT, sec, NULL, 0,
Sebastian Siewior477035c2008-03-11 21:24:26 +08001644 speed_template_16_24_32);
Jussi Kivilinnaee5002a2011-09-26 16:47:15 +03001645 test_cipher_speed("ctr(twofish)", ENCRYPT, sec, NULL, 0,
1646 speed_template_16_24_32);
1647 test_cipher_speed("ctr(twofish)", DECRYPT, sec, NULL, 0,
1648 speed_template_16_24_32);
Jussi Kivilinnabee3a902011-10-18 13:32:56 +03001649 test_cipher_speed("lrw(twofish)", ENCRYPT, sec, NULL, 0,
1650 speed_template_32_40_48);
1651 test_cipher_speed("lrw(twofish)", DECRYPT, sec, NULL, 0,
1652 speed_template_32_40_48);
Jussi Kivilinna131f7542011-10-18 13:33:38 +03001653 test_cipher_speed("xts(twofish)", ENCRYPT, sec, NULL, 0,
1654 speed_template_32_48_64);
1655 test_cipher_speed("xts(twofish)", DECRYPT, sec, NULL, 0,
1656 speed_template_32_48_64);
Harald Welteebfd9bc2005-06-22 13:27:23 -07001657 break;
1658
1659 case 203:
Herbert Xucba83562006-08-13 08:26:09 +10001660 test_cipher_speed("ecb(blowfish)", ENCRYPT, sec, NULL, 0,
Sebastian Siewior477035c2008-03-11 21:24:26 +08001661 speed_template_8_32);
Herbert Xucba83562006-08-13 08:26:09 +10001662 test_cipher_speed("ecb(blowfish)", DECRYPT, sec, NULL, 0,
Sebastian Siewior477035c2008-03-11 21:24:26 +08001663 speed_template_8_32);
Herbert Xucba83562006-08-13 08:26:09 +10001664 test_cipher_speed("cbc(blowfish)", ENCRYPT, sec, NULL, 0,
Sebastian Siewior477035c2008-03-11 21:24:26 +08001665 speed_template_8_32);
Herbert Xucba83562006-08-13 08:26:09 +10001666 test_cipher_speed("cbc(blowfish)", DECRYPT, sec, NULL, 0,
Sebastian Siewior477035c2008-03-11 21:24:26 +08001667 speed_template_8_32);
Jussi Kivilinna7d47b862011-09-02 01:45:17 +03001668 test_cipher_speed("ctr(blowfish)", ENCRYPT, sec, NULL, 0,
1669 speed_template_8_32);
1670 test_cipher_speed("ctr(blowfish)", DECRYPT, sec, NULL, 0,
1671 speed_template_8_32);
Harald Welteebfd9bc2005-06-22 13:27:23 -07001672 break;
1673
1674 case 204:
Herbert Xucba83562006-08-13 08:26:09 +10001675 test_cipher_speed("ecb(des)", ENCRYPT, sec, NULL, 0,
Sebastian Siewior477035c2008-03-11 21:24:26 +08001676 speed_template_8);
Herbert Xucba83562006-08-13 08:26:09 +10001677 test_cipher_speed("ecb(des)", DECRYPT, sec, NULL, 0,
Sebastian Siewior477035c2008-03-11 21:24:26 +08001678 speed_template_8);
Herbert Xucba83562006-08-13 08:26:09 +10001679 test_cipher_speed("cbc(des)", ENCRYPT, sec, NULL, 0,
Sebastian Siewior477035c2008-03-11 21:24:26 +08001680 speed_template_8);
Herbert Xucba83562006-08-13 08:26:09 +10001681 test_cipher_speed("cbc(des)", DECRYPT, sec, NULL, 0,
Sebastian Siewior477035c2008-03-11 21:24:26 +08001682 speed_template_8);
Harald Welteebfd9bc2005-06-22 13:27:23 -07001683 break;
1684
Noriaki TAKAMIYA02ab5a72007-01-24 21:48:19 +11001685 case 205:
1686 test_cipher_speed("ecb(camellia)", ENCRYPT, sec, NULL, 0,
Sebastian Siewior477035c2008-03-11 21:24:26 +08001687 speed_template_16_24_32);
Noriaki TAKAMIYA02ab5a72007-01-24 21:48:19 +11001688 test_cipher_speed("ecb(camellia)", DECRYPT, sec, NULL, 0,
Sebastian Siewior477035c2008-03-11 21:24:26 +08001689 speed_template_16_24_32);
Noriaki TAKAMIYA02ab5a72007-01-24 21:48:19 +11001690 test_cipher_speed("cbc(camellia)", ENCRYPT, sec, NULL, 0,
Sebastian Siewior477035c2008-03-11 21:24:26 +08001691 speed_template_16_24_32);
Noriaki TAKAMIYA02ab5a72007-01-24 21:48:19 +11001692 test_cipher_speed("cbc(camellia)", DECRYPT, sec, NULL, 0,
Sebastian Siewior477035c2008-03-11 21:24:26 +08001693 speed_template_16_24_32);
Jussi Kivilinna4de59332012-03-05 20:26:26 +02001694 test_cipher_speed("ctr(camellia)", ENCRYPT, sec, NULL, 0,
1695 speed_template_16_24_32);
1696 test_cipher_speed("ctr(camellia)", DECRYPT, sec, NULL, 0,
1697 speed_template_16_24_32);
1698 test_cipher_speed("lrw(camellia)", ENCRYPT, sec, NULL, 0,
1699 speed_template_32_40_48);
1700 test_cipher_speed("lrw(camellia)", DECRYPT, sec, NULL, 0,
1701 speed_template_32_40_48);
1702 test_cipher_speed("xts(camellia)", ENCRYPT, sec, NULL, 0,
1703 speed_template_32_48_64);
1704 test_cipher_speed("xts(camellia)", DECRYPT, sec, NULL, 0,
1705 speed_template_32_48_64);
Noriaki TAKAMIYA02ab5a72007-01-24 21:48:19 +11001706 break;
1707
Tan Swee Heng5de8f1b2007-12-07 17:17:43 +08001708 case 206:
1709 test_cipher_speed("salsa20", ENCRYPT, sec, NULL, 0,
Sebastian Siewior477035c2008-03-11 21:24:26 +08001710 speed_template_16_32);
Tan Swee Heng5de8f1b2007-12-07 17:17:43 +08001711 break;
1712
Jussi Kivilinna7fb7fe42011-10-18 00:03:03 +03001713 case 207:
1714 test_cipher_speed("ecb(serpent)", ENCRYPT, sec, NULL, 0,
1715 speed_template_16_32);
1716 test_cipher_speed("ecb(serpent)", DECRYPT, sec, NULL, 0,
1717 speed_template_16_32);
1718 test_cipher_speed("cbc(serpent)", ENCRYPT, sec, NULL, 0,
1719 speed_template_16_32);
1720 test_cipher_speed("cbc(serpent)", DECRYPT, sec, NULL, 0,
1721 speed_template_16_32);
1722 test_cipher_speed("ctr(serpent)", ENCRYPT, sec, NULL, 0,
1723 speed_template_16_32);
1724 test_cipher_speed("ctr(serpent)", DECRYPT, sec, NULL, 0,
1725 speed_template_16_32);
Jussi Kivilinna87aae4b2011-10-18 13:32:39 +03001726 test_cipher_speed("lrw(serpent)", ENCRYPT, sec, NULL, 0,
1727 speed_template_32_48);
1728 test_cipher_speed("lrw(serpent)", DECRYPT, sec, NULL, 0,
1729 speed_template_32_48);
Jussi Kivilinna5209c072011-10-18 13:33:22 +03001730 test_cipher_speed("xts(serpent)", ENCRYPT, sec, NULL, 0,
1731 speed_template_32_64);
1732 test_cipher_speed("xts(serpent)", DECRYPT, sec, NULL, 0,
1733 speed_template_32_64);
Jussi Kivilinna7fb7fe42011-10-18 00:03:03 +03001734 break;
1735
Jussi Kivilinna31b4cd292012-06-12 16:52:04 +08001736 case 208:
1737 test_cipher_speed("ecb(arc4)", ENCRYPT, sec, NULL, 0,
1738 speed_template_8);
1739 break;
1740
Johannes Goetzfrieda2c58262012-07-11 19:37:21 +02001741 case 209:
1742 test_cipher_speed("ecb(cast5)", ENCRYPT, sec, NULL, 0,
1743 speed_template_8_16);
1744 test_cipher_speed("ecb(cast5)", DECRYPT, sec, NULL, 0,
1745 speed_template_8_16);
1746 test_cipher_speed("cbc(cast5)", ENCRYPT, sec, NULL, 0,
1747 speed_template_8_16);
1748 test_cipher_speed("cbc(cast5)", DECRYPT, sec, NULL, 0,
1749 speed_template_8_16);
1750 test_cipher_speed("ctr(cast5)", ENCRYPT, sec, NULL, 0,
1751 speed_template_8_16);
1752 test_cipher_speed("ctr(cast5)", DECRYPT, sec, NULL, 0,
1753 speed_template_8_16);
1754 break;
1755
Johannes Goetzfried9b8b0402012-07-11 19:38:29 +02001756 case 210:
1757 test_cipher_speed("ecb(cast6)", ENCRYPT, sec, NULL, 0,
1758 speed_template_16_32);
1759 test_cipher_speed("ecb(cast6)", DECRYPT, sec, NULL, 0,
1760 speed_template_16_32);
1761 test_cipher_speed("cbc(cast6)", ENCRYPT, sec, NULL, 0,
1762 speed_template_16_32);
1763 test_cipher_speed("cbc(cast6)", DECRYPT, sec, NULL, 0,
1764 speed_template_16_32);
1765 test_cipher_speed("ctr(cast6)", ENCRYPT, sec, NULL, 0,
1766 speed_template_16_32);
1767 test_cipher_speed("ctr(cast6)", DECRYPT, sec, NULL, 0,
1768 speed_template_16_32);
1769 test_cipher_speed("lrw(cast6)", ENCRYPT, sec, NULL, 0,
1770 speed_template_32_48);
1771 test_cipher_speed("lrw(cast6)", DECRYPT, sec, NULL, 0,
1772 speed_template_32_48);
1773 test_cipher_speed("xts(cast6)", ENCRYPT, sec, NULL, 0,
1774 speed_template_32_64);
1775 test_cipher_speed("xts(cast6)", DECRYPT, sec, NULL, 0,
1776 speed_template_32_64);
1777 break;
1778
Tim Chen53f52d72013-12-11 14:28:47 -08001779 case 211:
1780 test_aead_speed("rfc4106(gcm(aes))", ENCRYPT, sec,
1781 NULL, 0, 16, 8, aead_speed_template_20);
Vutla, Lokesh1425d2d2015-07-07 21:01:49 +05301782 test_aead_speed("gcm(aes)", ENCRYPT, sec,
1783 NULL, 0, 16, 8, aead_speed_template_20);
Tim Chen53f52d72013-12-11 14:28:47 -08001784 break;
1785
Herbert Xu4e4aab62015-06-17 14:04:21 +08001786 case 212:
1787 test_aead_speed("rfc4309(ccm(aes))", ENCRYPT, sec,
1788 NULL, 0, 16, 8, aead_speed_template_19);
1789 break;
1790
Michal Ludvige8057922006-05-30 22:04:19 +10001791 case 300:
Herbert Xu86068132014-12-04 16:43:29 +08001792 if (alg) {
1793 test_hash_speed(alg, sec, generic_hash_speed_template);
1794 break;
1795 }
1796
Michal Ludvige8057922006-05-30 22:04:19 +10001797 /* fall through */
1798
1799 case 301:
Herbert Xue9d41162006-08-19 21:38:49 +10001800 test_hash_speed("md4", sec, generic_hash_speed_template);
Michal Ludvige8057922006-05-30 22:04:19 +10001801 if (mode > 300 && mode < 400) break;
1802
1803 case 302:
Herbert Xue9d41162006-08-19 21:38:49 +10001804 test_hash_speed("md5", sec, generic_hash_speed_template);
Michal Ludvige8057922006-05-30 22:04:19 +10001805 if (mode > 300 && mode < 400) break;
1806
1807 case 303:
Herbert Xue9d41162006-08-19 21:38:49 +10001808 test_hash_speed("sha1", sec, generic_hash_speed_template);
Michal Ludvige8057922006-05-30 22:04:19 +10001809 if (mode > 300 && mode < 400) break;
1810
1811 case 304:
Herbert Xue9d41162006-08-19 21:38:49 +10001812 test_hash_speed("sha256", sec, generic_hash_speed_template);
Michal Ludvige8057922006-05-30 22:04:19 +10001813 if (mode > 300 && mode < 400) break;
1814
1815 case 305:
Herbert Xue9d41162006-08-19 21:38:49 +10001816 test_hash_speed("sha384", sec, generic_hash_speed_template);
Michal Ludvige8057922006-05-30 22:04:19 +10001817 if (mode > 300 && mode < 400) break;
1818
1819 case 306:
Herbert Xue9d41162006-08-19 21:38:49 +10001820 test_hash_speed("sha512", sec, generic_hash_speed_template);
Michal Ludvige8057922006-05-30 22:04:19 +10001821 if (mode > 300 && mode < 400) break;
1822
1823 case 307:
Herbert Xue9d41162006-08-19 21:38:49 +10001824 test_hash_speed("wp256", sec, generic_hash_speed_template);
Michal Ludvige8057922006-05-30 22:04:19 +10001825 if (mode > 300 && mode < 400) break;
1826
1827 case 308:
Herbert Xue9d41162006-08-19 21:38:49 +10001828 test_hash_speed("wp384", sec, generic_hash_speed_template);
Michal Ludvige8057922006-05-30 22:04:19 +10001829 if (mode > 300 && mode < 400) break;
1830
1831 case 309:
Herbert Xue9d41162006-08-19 21:38:49 +10001832 test_hash_speed("wp512", sec, generic_hash_speed_template);
Michal Ludvige8057922006-05-30 22:04:19 +10001833 if (mode > 300 && mode < 400) break;
1834
1835 case 310:
Herbert Xue9d41162006-08-19 21:38:49 +10001836 test_hash_speed("tgr128", sec, generic_hash_speed_template);
Michal Ludvige8057922006-05-30 22:04:19 +10001837 if (mode > 300 && mode < 400) break;
1838
1839 case 311:
Herbert Xue9d41162006-08-19 21:38:49 +10001840 test_hash_speed("tgr160", sec, generic_hash_speed_template);
Michal Ludvige8057922006-05-30 22:04:19 +10001841 if (mode > 300 && mode < 400) break;
1842
1843 case 312:
Herbert Xue9d41162006-08-19 21:38:49 +10001844 test_hash_speed("tgr192", sec, generic_hash_speed_template);
Michal Ludvige8057922006-05-30 22:04:19 +10001845 if (mode > 300 && mode < 400) break;
1846
Jonathan Lynchcd12fb92007-11-10 20:08:25 +08001847 case 313:
1848 test_hash_speed("sha224", sec, generic_hash_speed_template);
1849 if (mode > 300 && mode < 400) break;
1850
Adrian-Ken Rueegseggerfd4adf12008-05-07 22:16:36 +08001851 case 314:
1852 test_hash_speed("rmd128", sec, generic_hash_speed_template);
1853 if (mode > 300 && mode < 400) break;
1854
1855 case 315:
1856 test_hash_speed("rmd160", sec, generic_hash_speed_template);
1857 if (mode > 300 && mode < 400) break;
1858
Adrian-Ken Rueegsegger2998db32008-05-09 21:29:35 +08001859 case 316:
1860 test_hash_speed("rmd256", sec, generic_hash_speed_template);
1861 if (mode > 300 && mode < 400) break;
1862
1863 case 317:
1864 test_hash_speed("rmd320", sec, generic_hash_speed_template);
1865 if (mode > 300 && mode < 400) break;
1866
Huang Ying18bcc912010-03-10 18:30:32 +08001867 case 318:
1868 test_hash_speed("ghash-generic", sec, hash_speed_template_16);
1869 if (mode > 300 && mode < 400) break;
1870
Tim Chene3899e42012-09-27 15:44:24 -07001871 case 319:
1872 test_hash_speed("crc32c", sec, generic_hash_speed_template);
1873 if (mode > 300 && mode < 400) break;
1874
Herbert Xu684115212013-09-07 12:56:26 +10001875 case 320:
1876 test_hash_speed("crct10dif", sec, generic_hash_speed_template);
1877 if (mode > 300 && mode < 400) break;
1878
Michal Ludvige8057922006-05-30 22:04:19 +10001879 case 399:
1880 break;
1881
David S. Millerbeb63da2010-05-19 14:11:21 +10001882 case 400:
Herbert Xu86068132014-12-04 16:43:29 +08001883 if (alg) {
1884 test_ahash_speed(alg, sec, generic_hash_speed_template);
1885 break;
1886 }
1887
David S. Millerbeb63da2010-05-19 14:11:21 +10001888 /* fall through */
1889
1890 case 401:
1891 test_ahash_speed("md4", sec, generic_hash_speed_template);
1892 if (mode > 400 && mode < 500) break;
1893
1894 case 402:
1895 test_ahash_speed("md5", sec, generic_hash_speed_template);
1896 if (mode > 400 && mode < 500) break;
1897
1898 case 403:
1899 test_ahash_speed("sha1", sec, generic_hash_speed_template);
1900 if (mode > 400 && mode < 500) break;
1901
1902 case 404:
1903 test_ahash_speed("sha256", sec, generic_hash_speed_template);
1904 if (mode > 400 && mode < 500) break;
1905
1906 case 405:
1907 test_ahash_speed("sha384", sec, generic_hash_speed_template);
1908 if (mode > 400 && mode < 500) break;
1909
1910 case 406:
1911 test_ahash_speed("sha512", sec, generic_hash_speed_template);
1912 if (mode > 400 && mode < 500) break;
1913
1914 case 407:
1915 test_ahash_speed("wp256", sec, generic_hash_speed_template);
1916 if (mode > 400 && mode < 500) break;
1917
1918 case 408:
1919 test_ahash_speed("wp384", sec, generic_hash_speed_template);
1920 if (mode > 400 && mode < 500) break;
1921
1922 case 409:
1923 test_ahash_speed("wp512", sec, generic_hash_speed_template);
1924 if (mode > 400 && mode < 500) break;
1925
1926 case 410:
1927 test_ahash_speed("tgr128", sec, generic_hash_speed_template);
1928 if (mode > 400 && mode < 500) break;
1929
1930 case 411:
1931 test_ahash_speed("tgr160", sec, generic_hash_speed_template);
1932 if (mode > 400 && mode < 500) break;
1933
1934 case 412:
1935 test_ahash_speed("tgr192", sec, generic_hash_speed_template);
1936 if (mode > 400 && mode < 500) break;
1937
1938 case 413:
1939 test_ahash_speed("sha224", sec, generic_hash_speed_template);
1940 if (mode > 400 && mode < 500) break;
1941
1942 case 414:
1943 test_ahash_speed("rmd128", sec, generic_hash_speed_template);
1944 if (mode > 400 && mode < 500) break;
1945
1946 case 415:
1947 test_ahash_speed("rmd160", sec, generic_hash_speed_template);
1948 if (mode > 400 && mode < 500) break;
1949
1950 case 416:
1951 test_ahash_speed("rmd256", sec, generic_hash_speed_template);
1952 if (mode > 400 && mode < 500) break;
1953
1954 case 417:
1955 test_ahash_speed("rmd320", sec, generic_hash_speed_template);
1956 if (mode > 400 && mode < 500) break;
1957
1958 case 499:
1959 break;
1960
Jussi Kivilinna3f3baf32011-10-18 00:02:58 +03001961 case 500:
1962 test_acipher_speed("ecb(aes)", ENCRYPT, sec, NULL, 0,
1963 speed_template_16_24_32);
1964 test_acipher_speed("ecb(aes)", DECRYPT, sec, NULL, 0,
1965 speed_template_16_24_32);
1966 test_acipher_speed("cbc(aes)", ENCRYPT, sec, NULL, 0,
1967 speed_template_16_24_32);
1968 test_acipher_speed("cbc(aes)", DECRYPT, sec, NULL, 0,
1969 speed_template_16_24_32);
1970 test_acipher_speed("lrw(aes)", ENCRYPT, sec, NULL, 0,
1971 speed_template_32_40_48);
1972 test_acipher_speed("lrw(aes)", DECRYPT, sec, NULL, 0,
1973 speed_template_32_40_48);
1974 test_acipher_speed("xts(aes)", ENCRYPT, sec, NULL, 0,
1975 speed_template_32_48_64);
1976 test_acipher_speed("xts(aes)", DECRYPT, sec, NULL, 0,
1977 speed_template_32_48_64);
1978 test_acipher_speed("ctr(aes)", ENCRYPT, sec, NULL, 0,
1979 speed_template_16_24_32);
1980 test_acipher_speed("ctr(aes)", DECRYPT, sec, NULL, 0,
1981 speed_template_16_24_32);
Nicolas Royerde1975332012-07-01 19:19:47 +02001982 test_acipher_speed("cfb(aes)", ENCRYPT, sec, NULL, 0,
1983 speed_template_16_24_32);
1984 test_acipher_speed("cfb(aes)", DECRYPT, sec, NULL, 0,
1985 speed_template_16_24_32);
1986 test_acipher_speed("ofb(aes)", ENCRYPT, sec, NULL, 0,
1987 speed_template_16_24_32);
1988 test_acipher_speed("ofb(aes)", DECRYPT, sec, NULL, 0,
1989 speed_template_16_24_32);
Jussi Kivilinna69d31502012-12-28 12:04:58 +02001990 test_acipher_speed("rfc3686(ctr(aes))", ENCRYPT, sec, NULL, 0,
1991 speed_template_20_28_36);
1992 test_acipher_speed("rfc3686(ctr(aes))", DECRYPT, sec, NULL, 0,
1993 speed_template_20_28_36);
Jussi Kivilinna3f3baf32011-10-18 00:02:58 +03001994 break;
1995
1996 case 501:
1997 test_acipher_speed("ecb(des3_ede)", ENCRYPT, sec,
1998 des3_speed_template, DES3_SPEED_VECTORS,
1999 speed_template_24);
2000 test_acipher_speed("ecb(des3_ede)", DECRYPT, sec,
2001 des3_speed_template, DES3_SPEED_VECTORS,
2002 speed_template_24);
2003 test_acipher_speed("cbc(des3_ede)", ENCRYPT, sec,
2004 des3_speed_template, DES3_SPEED_VECTORS,
2005 speed_template_24);
2006 test_acipher_speed("cbc(des3_ede)", DECRYPT, sec,
2007 des3_speed_template, DES3_SPEED_VECTORS,
2008 speed_template_24);
Nicolas Royerde1975332012-07-01 19:19:47 +02002009 test_acipher_speed("cfb(des3_ede)", ENCRYPT, sec,
2010 des3_speed_template, DES3_SPEED_VECTORS,
2011 speed_template_24);
2012 test_acipher_speed("cfb(des3_ede)", DECRYPT, sec,
2013 des3_speed_template, DES3_SPEED_VECTORS,
2014 speed_template_24);
2015 test_acipher_speed("ofb(des3_ede)", ENCRYPT, sec,
2016 des3_speed_template, DES3_SPEED_VECTORS,
2017 speed_template_24);
2018 test_acipher_speed("ofb(des3_ede)", DECRYPT, sec,
2019 des3_speed_template, DES3_SPEED_VECTORS,
2020 speed_template_24);
Jussi Kivilinna3f3baf32011-10-18 00:02:58 +03002021 break;
2022
2023 case 502:
2024 test_acipher_speed("ecb(des)", ENCRYPT, sec, NULL, 0,
2025 speed_template_8);
2026 test_acipher_speed("ecb(des)", DECRYPT, sec, NULL, 0,
2027 speed_template_8);
2028 test_acipher_speed("cbc(des)", ENCRYPT, sec, NULL, 0,
2029 speed_template_8);
2030 test_acipher_speed("cbc(des)", DECRYPT, sec, NULL, 0,
2031 speed_template_8);
Nicolas Royerde1975332012-07-01 19:19:47 +02002032 test_acipher_speed("cfb(des)", ENCRYPT, sec, NULL, 0,
2033 speed_template_8);
2034 test_acipher_speed("cfb(des)", DECRYPT, sec, NULL, 0,
2035 speed_template_8);
2036 test_acipher_speed("ofb(des)", ENCRYPT, sec, NULL, 0,
2037 speed_template_8);
2038 test_acipher_speed("ofb(des)", DECRYPT, sec, NULL, 0,
2039 speed_template_8);
Jussi Kivilinna3f3baf32011-10-18 00:02:58 +03002040 break;
2041
Jussi Kivilinna7fb7fe42011-10-18 00:03:03 +03002042 case 503:
2043 test_acipher_speed("ecb(serpent)", ENCRYPT, sec, NULL, 0,
2044 speed_template_16_32);
2045 test_acipher_speed("ecb(serpent)", DECRYPT, sec, NULL, 0,
2046 speed_template_16_32);
2047 test_acipher_speed("cbc(serpent)", ENCRYPT, sec, NULL, 0,
2048 speed_template_16_32);
2049 test_acipher_speed("cbc(serpent)", DECRYPT, sec, NULL, 0,
2050 speed_template_16_32);
2051 test_acipher_speed("ctr(serpent)", ENCRYPT, sec, NULL, 0,
2052 speed_template_16_32);
2053 test_acipher_speed("ctr(serpent)", DECRYPT, sec, NULL, 0,
2054 speed_template_16_32);
Jussi Kivilinna87aae4b2011-10-18 13:32:39 +03002055 test_acipher_speed("lrw(serpent)", ENCRYPT, sec, NULL, 0,
2056 speed_template_32_48);
2057 test_acipher_speed("lrw(serpent)", DECRYPT, sec, NULL, 0,
2058 speed_template_32_48);
Jussi Kivilinna5209c072011-10-18 13:33:22 +03002059 test_acipher_speed("xts(serpent)", ENCRYPT, sec, NULL, 0,
2060 speed_template_32_64);
2061 test_acipher_speed("xts(serpent)", DECRYPT, sec, NULL, 0,
2062 speed_template_32_64);
Jussi Kivilinna7fb7fe42011-10-18 00:03:03 +03002063 break;
2064
Johannes Goetzfried107778b52012-05-28 15:54:24 +02002065 case 504:
2066 test_acipher_speed("ecb(twofish)", ENCRYPT, sec, NULL, 0,
2067 speed_template_16_24_32);
2068 test_acipher_speed("ecb(twofish)", DECRYPT, sec, NULL, 0,
2069 speed_template_16_24_32);
2070 test_acipher_speed("cbc(twofish)", ENCRYPT, sec, NULL, 0,
2071 speed_template_16_24_32);
2072 test_acipher_speed("cbc(twofish)", DECRYPT, sec, NULL, 0,
2073 speed_template_16_24_32);
2074 test_acipher_speed("ctr(twofish)", ENCRYPT, sec, NULL, 0,
2075 speed_template_16_24_32);
2076 test_acipher_speed("ctr(twofish)", DECRYPT, sec, NULL, 0,
2077 speed_template_16_24_32);
2078 test_acipher_speed("lrw(twofish)", ENCRYPT, sec, NULL, 0,
2079 speed_template_32_40_48);
2080 test_acipher_speed("lrw(twofish)", DECRYPT, sec, NULL, 0,
2081 speed_template_32_40_48);
2082 test_acipher_speed("xts(twofish)", ENCRYPT, sec, NULL, 0,
2083 speed_template_32_48_64);
2084 test_acipher_speed("xts(twofish)", DECRYPT, sec, NULL, 0,
2085 speed_template_32_48_64);
2086 break;
2087
Jussi Kivilinna31b4cd292012-06-12 16:52:04 +08002088 case 505:
2089 test_acipher_speed("ecb(arc4)", ENCRYPT, sec, NULL, 0,
2090 speed_template_8);
2091 break;
2092
Johannes Goetzfrieda2c58262012-07-11 19:37:21 +02002093 case 506:
2094 test_acipher_speed("ecb(cast5)", ENCRYPT, sec, NULL, 0,
2095 speed_template_8_16);
2096 test_acipher_speed("ecb(cast5)", DECRYPT, sec, NULL, 0,
2097 speed_template_8_16);
2098 test_acipher_speed("cbc(cast5)", ENCRYPT, sec, NULL, 0,
2099 speed_template_8_16);
2100 test_acipher_speed("cbc(cast5)", DECRYPT, sec, NULL, 0,
2101 speed_template_8_16);
2102 test_acipher_speed("ctr(cast5)", ENCRYPT, sec, NULL, 0,
2103 speed_template_8_16);
2104 test_acipher_speed("ctr(cast5)", DECRYPT, sec, NULL, 0,
2105 speed_template_8_16);
2106 break;
2107
Johannes Goetzfried9b8b0402012-07-11 19:38:29 +02002108 case 507:
2109 test_acipher_speed("ecb(cast6)", ENCRYPT, sec, NULL, 0,
2110 speed_template_16_32);
2111 test_acipher_speed("ecb(cast6)", DECRYPT, sec, NULL, 0,
2112 speed_template_16_32);
2113 test_acipher_speed("cbc(cast6)", ENCRYPT, sec, NULL, 0,
2114 speed_template_16_32);
2115 test_acipher_speed("cbc(cast6)", DECRYPT, sec, NULL, 0,
2116 speed_template_16_32);
2117 test_acipher_speed("ctr(cast6)", ENCRYPT, sec, NULL, 0,
2118 speed_template_16_32);
2119 test_acipher_speed("ctr(cast6)", DECRYPT, sec, NULL, 0,
2120 speed_template_16_32);
2121 test_acipher_speed("lrw(cast6)", ENCRYPT, sec, NULL, 0,
2122 speed_template_32_48);
2123 test_acipher_speed("lrw(cast6)", DECRYPT, sec, NULL, 0,
2124 speed_template_32_48);
2125 test_acipher_speed("xts(cast6)", ENCRYPT, sec, NULL, 0,
2126 speed_template_32_64);
2127 test_acipher_speed("xts(cast6)", DECRYPT, sec, NULL, 0,
2128 speed_template_32_64);
2129 break;
2130
Jussi Kivilinnabf9c5182012-10-26 14:48:51 +03002131 case 508:
2132 test_acipher_speed("ecb(camellia)", ENCRYPT, sec, NULL, 0,
2133 speed_template_16_32);
2134 test_acipher_speed("ecb(camellia)", DECRYPT, sec, NULL, 0,
2135 speed_template_16_32);
2136 test_acipher_speed("cbc(camellia)", ENCRYPT, sec, NULL, 0,
2137 speed_template_16_32);
2138 test_acipher_speed("cbc(camellia)", DECRYPT, sec, NULL, 0,
2139 speed_template_16_32);
2140 test_acipher_speed("ctr(camellia)", ENCRYPT, sec, NULL, 0,
2141 speed_template_16_32);
2142 test_acipher_speed("ctr(camellia)", DECRYPT, sec, NULL, 0,
2143 speed_template_16_32);
2144 test_acipher_speed("lrw(camellia)", ENCRYPT, sec, NULL, 0,
2145 speed_template_32_48);
2146 test_acipher_speed("lrw(camellia)", DECRYPT, sec, NULL, 0,
2147 speed_template_32_48);
2148 test_acipher_speed("xts(camellia)", ENCRYPT, sec, NULL, 0,
2149 speed_template_32_64);
2150 test_acipher_speed("xts(camellia)", DECRYPT, sec, NULL, 0,
2151 speed_template_32_64);
2152 break;
2153
Jussi Kivilinnaad8b7c32013-04-13 13:46:40 +03002154 case 509:
2155 test_acipher_speed("ecb(blowfish)", ENCRYPT, sec, NULL, 0,
2156 speed_template_8_32);
2157 test_acipher_speed("ecb(blowfish)", DECRYPT, sec, NULL, 0,
2158 speed_template_8_32);
2159 test_acipher_speed("cbc(blowfish)", ENCRYPT, sec, NULL, 0,
2160 speed_template_8_32);
2161 test_acipher_speed("cbc(blowfish)", DECRYPT, sec, NULL, 0,
2162 speed_template_8_32);
2163 test_acipher_speed("ctr(blowfish)", ENCRYPT, sec, NULL, 0,
2164 speed_template_8_32);
2165 test_acipher_speed("ctr(blowfish)", DECRYPT, sec, NULL, 0,
2166 speed_template_8_32);
2167 break;
2168
Linus Torvalds1da177e2005-04-16 15:20:36 -07002169 case 1000:
2170 test_available();
2171 break;
Linus Torvalds1da177e2005-04-16 15:20:36 -07002172 }
Jarod Wilson4e033a62009-05-27 15:10:21 +10002173
2174 return ret;
Linus Torvalds1da177e2005-04-16 15:20:36 -07002175}
2176
Kamalesh Babulal3af5b902008-04-05 21:00:57 +08002177static int __init tcrypt_mod_init(void)
Linus Torvalds1da177e2005-04-16 15:20:36 -07002178{
Mikko Herranene3a4ea42007-11-26 22:12:07 +08002179 int err = -ENOMEM;
Herbert Xuf139cfa2008-07-31 12:23:53 +08002180 int i;
Mikko Herranene3a4ea42007-11-26 22:12:07 +08002181
Herbert Xuf139cfa2008-07-31 12:23:53 +08002182 for (i = 0; i < TVMEMSIZE; i++) {
2183 tvmem[i] = (void *)__get_free_page(GFP_KERNEL);
2184 if (!tvmem[i])
2185 goto err_free_tv;
2186 }
Linus Torvalds1da177e2005-04-16 15:20:36 -07002187
Herbert Xu86068132014-12-04 16:43:29 +08002188 err = do_test(alg, type, mask, mode);
Steffen Klasserta873a5f2009-06-19 19:46:53 +08002189
Jarod Wilson4e033a62009-05-27 15:10:21 +10002190 if (err) {
2191 printk(KERN_ERR "tcrypt: one or more tests failed!\n");
2192 goto err_free_tv;
2193 }
Linus Torvalds1da177e2005-04-16 15:20:36 -07002194
	/* We intentionally return -EAGAIN to prevent keeping the module
	 * loaded, unless we're running in fips mode. The module does all its
	 * work from init() and doesn't offer any runtime functionality, but
	 * in the fips case checking for a successful load is helpful.
	 * => we don't need to keep it in memory otherwise, do we?
	 * -- mludvig
	 */
	if (!fips_enabled)
		err = -EAGAIN;

err_free_tv:
	for (i = 0; i < TVMEMSIZE && tvmem[i]; i++)
		free_page((unsigned long)tvmem[i]);

	return err;
}

/*
 * If an init function is provided, an exit function must also be provided
 * to allow module unload.
 */
static void __exit tcrypt_mod_fini(void) { }

module_init(tcrypt_mod_init);
module_exit(tcrypt_mod_fini);

module_param(alg, charp, 0);
module_param(type, uint, 0);
module_param(mask, uint, 0);
module_param(mode, int, 0);
module_param(sec, uint, 0);
MODULE_PARM_DESC(sec, "Length in seconds of speed tests "
		 "(defaults to zero which uses CPU cycles instead)");

MODULE_LICENSE("GPL");
MODULE_DESCRIPTION("Quick & dirty crypto testing module");
MODULE_AUTHOR("James Morris <jmorris@intercode.com.au>");