/*
 * Algorithm testing framework and tests.
 *
 * Copyright (c) 2002 James Morris <jmorris@intercode.com.au>
 * Copyright (c) 2002 Jean-Francois Dive <jef@linuxbe.org>
 * Copyright (c) 2007 Nokia Siemens Networks
 * Copyright (c) 2008 Herbert Xu <herbert@gondor.apana.org.au>
 *
 * Updated RFC4106 AES-GCM testing.
 *    Authors: Aidan O'Mahony (aidan.o.mahony@intel.com)
 *             Adrian Hoban <adrian.hoban@intel.com>
 *             Gabriele Paoloni <gabriele.paoloni@intel.com>
 *             Tadeusz Struk (tadeusz.struk@intel.com)
 *    Copyright (c) 2010, Intel Corporation.
 *
 * This program is free software; you can redistribute it and/or modify it
 * under the terms of the GNU General Public License as published by the Free
 * Software Foundation; either version 2 of the License, or (at your option)
 * any later version.
 *
 */

#include <crypto/aead.h>
#include <crypto/hash.h>
#include <crypto/skcipher.h>
#include <linux/err.h>
#include <linux/fips.h>
#include <linux/module.h>
#include <linux/scatterlist.h>
#include <linux/slab.h>
#include <linux/string.h>
#include <crypto/rng.h>
#include <crypto/drbg.h>
#include <crypto/akcipher.h>
#include <crypto/kpp.h>
#include <crypto/acompress.h>

#include "internal.h"

static bool notests;
module_param(notests, bool, 0644);
MODULE_PARM_DESC(notests, "disable crypto self-tests");

#ifdef CONFIG_CRYPTO_MANAGER_DISABLE_TESTS

/* a perfect nop */
int alg_test(const char *driver, const char *alg, u32 type, u32 mask)
{
	return 0;
}

#else

#include "testmgr.h"

/*
 * Need slab memory for testing (size in number of pages).
 */
#define XBUFSIZE	8

/*
 * Indexes into the xbuf to simulate cross-page access.
 */
#define IDX1		32
#define IDX2		32400
#define IDX3		1511
#define IDX4		8193
#define IDX5		22222
#define IDX6		17101
#define IDX7		27333
#define IDX8		3000

/*
 * Used by test_cipher()
 */
#define ENCRYPT 1
#define DECRYPT 0

struct tcrypt_result {
	struct completion completion;
	int err;
};

struct aead_test_suite {
	struct {
		struct aead_testvec *vecs;
		unsigned int count;
	} enc, dec;
};

struct cipher_test_suite {
	struct {
		struct cipher_testvec *vecs;
		unsigned int count;
	} enc, dec;
};

struct comp_test_suite {
	struct {
		struct comp_testvec *vecs;
		unsigned int count;
	} comp, decomp;
};

struct hash_test_suite {
	struct hash_testvec *vecs;
	unsigned int count;
};

struct cprng_test_suite {
	struct cprng_testvec *vecs;
	unsigned int count;
};

struct drbg_test_suite {
	struct drbg_testvec *vecs;
	unsigned int count;
};

struct akcipher_test_suite {
	struct akcipher_testvec *vecs;
	unsigned int count;
};

struct kpp_test_suite {
	struct kpp_testvec *vecs;
	unsigned int count;
};

struct alg_test_desc {
	const char *alg;
	int (*test)(const struct alg_test_desc *desc, const char *driver,
		    u32 type, u32 mask);
	int fips_allowed;	/* set if alg is allowed in fips mode */

	union {
		struct aead_test_suite aead;
		struct cipher_test_suite cipher;
		struct comp_test_suite comp;
		struct hash_test_suite hash;
		struct cprng_test_suite cprng;
		struct drbg_test_suite drbg;
		struct akcipher_test_suite akcipher;
		struct kpp_test_suite kpp;
	} suite;
};

static unsigned int IDX[8] = { IDX1, IDX2, IDX3, IDX4, IDX5, IDX6, IDX7, IDX8 };

static void hexdump(unsigned char *buf, unsigned int len)
{
	print_hex_dump(KERN_CONT, "", DUMP_PREFIX_OFFSET,
			16, 1,
			buf, len, false);
}

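/*
 * Completion callback for asynchronous crypto requests: record the final
 * status and wake up the test thread waiting on the completion.
 */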
static void tcrypt_complete(struct crypto_async_request *req, int err)
{
	struct tcrypt_result *res = req->data;

	if (err == -EINPROGRESS)
		return;

	res->err = err;
	complete(&res->completion);
}

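/*
 * Allocate XBUFSIZE single pages used as scratch buffers for the test
 * vectors; on failure, free whatever was already allocated.
 */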
static int testmgr_alloc_buf(char *buf[XBUFSIZE])
{
	int i;

	for (i = 0; i < XBUFSIZE; i++) {
		buf[i] = (void *)__get_free_page(GFP_KERNEL);
		if (!buf[i])
			goto err_free_buf;
	}

	return 0;

err_free_buf:
	while (i-- > 0)
		free_page((unsigned long)buf[i]);

	return -ENOMEM;
}

static void testmgr_free_buf(char *buf[XBUFSIZE])
{
	int i;

	for (i = 0; i < XBUFSIZE; i++)
		free_page((unsigned long)buf[i]);
}

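/*
 * If the operation was queued (-EINPROGRESS/-EBUSY), wait for the async
 * completion and return the final status reported by the callback.
 */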
static int wait_async_op(struct tcrypt_result *tr, int ret)
{
	if (ret == -EINPROGRESS || ret == -EBUSY) {
		wait_for_completion(&tr->completion);
		reinit_completion(&tr->completion);
		ret = tr->err;
	}
	return ret;
}

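/*
 * Exercise the export()/import() path of an ahash: export the current
 * hash state (with a guard pattern to catch overruns), allocate a fresh
 * request, import the state and feed it the next chunk of plaintext.
 */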
static int ahash_partial_update(struct ahash_request **preq,
	struct crypto_ahash *tfm, struct hash_testvec *template,
	void *hash_buff, int k, int temp, struct scatterlist *sg,
	const char *algo, char *result, struct tcrypt_result *tresult)
{
	char *state;
	struct ahash_request *req;
	int statesize, ret = -EINVAL;
	const char guard[] = { 0x00, 0xba, 0xad, 0x00 };

	req = *preq;
	statesize = crypto_ahash_statesize(
			crypto_ahash_reqtfm(req));
	state = kmalloc(statesize + sizeof(guard), GFP_KERNEL);
	if (!state) {
		pr_err("alt: hash: Failed to alloc state for %s\n", algo);
		goto out_nostate;
	}
	memcpy(state + statesize, guard, sizeof(guard));
	ret = crypto_ahash_export(req, state);
	WARN_ON(memcmp(state + statesize, guard, sizeof(guard)));
	if (ret) {
		pr_err("alt: hash: Failed to export() for %s\n", algo);
		goto out;
	}
	ahash_request_free(req);
	req = ahash_request_alloc(tfm, GFP_KERNEL);
	if (!req) {
		pr_err("alg: hash: Failed to alloc request for %s\n", algo);
		goto out_noreq;
	}
	ahash_request_set_callback(req,
		CRYPTO_TFM_REQ_MAY_BACKLOG,
		tcrypt_complete, tresult);

	memcpy(hash_buff, template->plaintext + temp,
		template->tap[k]);
	sg_init_one(&sg[0], hash_buff, template->tap[k]);
	ahash_request_set_crypt(req, sg, result, template->tap[k]);
	ret = crypto_ahash_import(req, state);
	if (ret) {
		pr_err("alg: hash: Failed to import() for %s\n", algo);
		goto out;
	}
	ret = wait_async_op(tresult, crypto_ahash_update(req));
	if (ret)
		goto out;
	*preq = req;
	ret = 0;
	goto out_noreq;
out:
	ahash_request_free(req);
out_noreq:
	kfree(state);
out_nostate:
	return ret;
}

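/*
 * Run the hash test vectors against one tfm: single-buffer digests,
 * scattered (chunked) buffers, and partial-update sequences, optionally
 * with the data shifted by align_offset bytes.
 */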
static int __test_hash(struct crypto_ahash *tfm, struct hash_testvec *template,
		       unsigned int tcount, bool use_digest,
		       const int align_offset)
{
	const char *algo = crypto_tfm_alg_driver_name(crypto_ahash_tfm(tfm));
	unsigned int i, j, k, temp;
	struct scatterlist sg[8];
	char *result;
	char *key;
	struct ahash_request *req;
	struct tcrypt_result tresult;
	void *hash_buff;
	char *xbuf[XBUFSIZE];
	int ret = -ENOMEM;

	result = kmalloc(MAX_DIGEST_SIZE, GFP_KERNEL);
	if (!result)
		return ret;
	key = kmalloc(MAX_KEYLEN, GFP_KERNEL);
	if (!key)
		goto out_nobuf;
	if (testmgr_alloc_buf(xbuf))
		goto out_nobuf;

	init_completion(&tresult.completion);

	req = ahash_request_alloc(tfm, GFP_KERNEL);
	if (!req) {
		printk(KERN_ERR "alg: hash: Failed to allocate request for "
		       "%s\n", algo);
		goto out_noreq;
	}
	ahash_request_set_callback(req, CRYPTO_TFM_REQ_MAY_BACKLOG,
				   tcrypt_complete, &tresult);

	j = 0;
	for (i = 0; i < tcount; i++) {
		if (template[i].np)
			continue;

		ret = -EINVAL;
		if (WARN_ON(align_offset + template[i].psize > PAGE_SIZE))
			goto out;

		j++;
		memset(result, 0, MAX_DIGEST_SIZE);

		hash_buff = xbuf[0];
		hash_buff += align_offset;

		memcpy(hash_buff, template[i].plaintext, template[i].psize);
		sg_init_one(&sg[0], hash_buff, template[i].psize);

		if (template[i].ksize) {
			crypto_ahash_clear_flags(tfm, ~0);
			if (template[i].ksize > MAX_KEYLEN) {
				pr_err("alg: hash: setkey failed on test %d for %s: key size %d > %d\n",
				       j, algo, template[i].ksize, MAX_KEYLEN);
				ret = -EINVAL;
				goto out;
			}
			memcpy(key, template[i].key, template[i].ksize);
			ret = crypto_ahash_setkey(tfm, key, template[i].ksize);
			if (ret) {
				printk(KERN_ERR "alg: hash: setkey failed on "
				       "test %d for %s: ret=%d\n", j, algo,
				       -ret);
				goto out;
			}
		}

		ahash_request_set_crypt(req, sg, result, template[i].psize);
		if (use_digest) {
			ret = wait_async_op(&tresult, crypto_ahash_digest(req));
			if (ret) {
				pr_err("alg: hash: digest failed on test %d "
				       "for %s: ret=%d\n", j, algo, -ret);
				goto out;
			}
		} else {
			ret = wait_async_op(&tresult, crypto_ahash_init(req));
			if (ret) {
				pr_err("alt: hash: init failed on test %d "
				       "for %s: ret=%d\n", j, algo, -ret);
				goto out;
			}
			ret = wait_async_op(&tresult, crypto_ahash_update(req));
			if (ret) {
				pr_err("alt: hash: update failed on test %d "
				       "for %s: ret=%d\n", j, algo, -ret);
				goto out;
			}
			ret = wait_async_op(&tresult, crypto_ahash_final(req));
			if (ret) {
				pr_err("alt: hash: final failed on test %d "
				       "for %s: ret=%d\n", j, algo, -ret);
				goto out;
			}
		}

		if (memcmp(result, template[i].digest,
			   crypto_ahash_digestsize(tfm))) {
			printk(KERN_ERR "alg: hash: Test %d failed for %s\n",
			       j, algo);
			hexdump(result, crypto_ahash_digestsize(tfm));
			ret = -EINVAL;
			goto out;
		}
	}

	j = 0;
	for (i = 0; i < tcount; i++) {
		/* alignment tests are only done with continuous buffers */
		if (align_offset != 0)
			break;

		if (!template[i].np)
			continue;

		j++;
		memset(result, 0, MAX_DIGEST_SIZE);

		temp = 0;
		sg_init_table(sg, template[i].np);
		ret = -EINVAL;
		for (k = 0; k < template[i].np; k++) {
			if (WARN_ON(offset_in_page(IDX[k]) +
				    template[i].tap[k] > PAGE_SIZE))
				goto out;
			sg_set_buf(&sg[k],
				   memcpy(xbuf[IDX[k] >> PAGE_SHIFT] +
					  offset_in_page(IDX[k]),
					  template[i].plaintext + temp,
					  template[i].tap[k]),
				   template[i].tap[k]);
			temp += template[i].tap[k];
		}

		if (template[i].ksize) {
			if (template[i].ksize > MAX_KEYLEN) {
				pr_err("alg: hash: setkey failed on test %d for %s: key size %d > %d\n",
				       j, algo, template[i].ksize, MAX_KEYLEN);
				ret = -EINVAL;
				goto out;
			}
			crypto_ahash_clear_flags(tfm, ~0);
			memcpy(key, template[i].key, template[i].ksize);
			ret = crypto_ahash_setkey(tfm, key, template[i].ksize);

			if (ret) {
				printk(KERN_ERR "alg: hash: setkey "
				       "failed on chunking test %d "
				       "for %s: ret=%d\n", j, algo, -ret);
				goto out;
			}
		}

		ahash_request_set_crypt(req, sg, result, template[i].psize);
		ret = crypto_ahash_digest(req);
		switch (ret) {
		case 0:
			break;
		case -EINPROGRESS:
		case -EBUSY:
			wait_for_completion(&tresult.completion);
			reinit_completion(&tresult.completion);
			ret = tresult.err;
			if (!ret)
				break;
			/* fall through */
		default:
			printk(KERN_ERR "alg: hash: digest failed "
			       "on chunking test %d for %s: "
			       "ret=%d\n", j, algo, -ret);
			goto out;
		}

		if (memcmp(result, template[i].digest,
			   crypto_ahash_digestsize(tfm))) {
			printk(KERN_ERR "alg: hash: Chunking test %d "
			       "failed for %s\n", j, algo);
			hexdump(result, crypto_ahash_digestsize(tfm));
			ret = -EINVAL;
			goto out;
		}
	}

	/* partial update exercise */
	j = 0;
	for (i = 0; i < tcount; i++) {
		/* alignment tests are only done with continuous buffers */
		if (align_offset != 0)
			break;

		if (template[i].np < 2)
			continue;

		j++;
		memset(result, 0, MAX_DIGEST_SIZE);

		ret = -EINVAL;
		hash_buff = xbuf[0];
		memcpy(hash_buff, template[i].plaintext,
			template[i].tap[0]);
		sg_init_one(&sg[0], hash_buff, template[i].tap[0]);

		if (template[i].ksize) {
			crypto_ahash_clear_flags(tfm, ~0);
			if (template[i].ksize > MAX_KEYLEN) {
				pr_err("alg: hash: setkey failed on test %d for %s: key size %d > %d\n",
					j, algo, template[i].ksize, MAX_KEYLEN);
				ret = -EINVAL;
				goto out;
			}
			memcpy(key, template[i].key, template[i].ksize);
			ret = crypto_ahash_setkey(tfm, key, template[i].ksize);
			if (ret) {
				pr_err("alg: hash: setkey failed on test %d for %s: ret=%d\n",
					j, algo, -ret);
				goto out;
			}
		}

		ahash_request_set_crypt(req, sg, result, template[i].tap[0]);
		ret = wait_async_op(&tresult, crypto_ahash_init(req));
		if (ret) {
			pr_err("alt: hash: init failed on test %d for %s: ret=%d\n",
				j, algo, -ret);
			goto out;
		}
		ret = wait_async_op(&tresult, crypto_ahash_update(req));
		if (ret) {
			pr_err("alt: hash: update failed on test %d for %s: ret=%d\n",
				j, algo, -ret);
			goto out;
		}

		temp = template[i].tap[0];
		for (k = 1; k < template[i].np; k++) {
			ret = ahash_partial_update(&req, tfm, &template[i],
				hash_buff, k, temp, &sg[0], algo, result,
				&tresult);
			if (ret) {
				pr_err("hash: partial update failed on test %d for %s: ret=%d\n",
					j, algo, -ret);
				goto out_noreq;
			}
			temp += template[i].tap[k];
		}
		ret = wait_async_op(&tresult, crypto_ahash_final(req));
		if (ret) {
			pr_err("alt: hash: final failed on test %d for %s: ret=%d\n",
				j, algo, -ret);
			goto out;
		}
		if (memcmp(result, template[i].digest,
			   crypto_ahash_digestsize(tfm))) {
			pr_err("alg: hash: Partial Test %d failed for %s\n",
			       j, algo);
			hexdump(result, crypto_ahash_digestsize(tfm));
			ret = -EINVAL;
			goto out;
		}
	}

	ret = 0;

out:
	ahash_request_free(req);
out_noreq:
	testmgr_free_buf(xbuf);
out_nobuf:
	kfree(key);
	kfree(result);
	return ret;
}

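/*
 * Wrapper around __test_hash(): repeat the vectors with aligned,
 * one-byte-misaligned and alignmask+1 offset buffers.
 */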
static int test_hash(struct crypto_ahash *tfm, struct hash_testvec *template,
		     unsigned int tcount, bool use_digest)
{
	unsigned int alignmask;
	int ret;

	ret = __test_hash(tfm, template, tcount, use_digest, 0);
	if (ret)
		return ret;

	/* test unaligned buffers, check with one byte offset */
	ret = __test_hash(tfm, template, tcount, use_digest, 1);
	if (ret)
		return ret;

	alignmask = crypto_tfm_alg_alignmask(&tfm->base);
	if (alignmask) {
		/* Check if alignment mask for tfm is correctly set. */
		ret = __test_hash(tfm, template, tcount, use_digest,
				  alignmask + 1);
		if (ret)
			return ret;
	}

	return 0;
}

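/*
 * Run AEAD test vectors for one direction (encrypt or decrypt), covering
 * both contiguous and scattered buffers, optional distinct destination
 * buffers (diff_dst) and misaligned data, and checking that expected
 * verification failures (novrfy) really return -EBADMSG.
 */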
static int __test_aead(struct crypto_aead *tfm, int enc,
		       struct aead_testvec *template, unsigned int tcount,
		       const bool diff_dst, const int align_offset)
{
	const char *algo = crypto_tfm_alg_driver_name(crypto_aead_tfm(tfm));
	unsigned int i, j, k, n, temp;
	int ret = -ENOMEM;
	char *q;
	char *key;
	struct aead_request *req;
	struct scatterlist *sg;
	struct scatterlist *sgout;
	const char *e, *d;
	struct tcrypt_result result;
	unsigned int authsize, iv_len;
	void *input;
	void *output;
	void *assoc;
	char *iv;
	char *xbuf[XBUFSIZE];
	char *xoutbuf[XBUFSIZE];
	char *axbuf[XBUFSIZE];

	iv = kzalloc(MAX_IVLEN, GFP_KERNEL);
	if (!iv)
		return ret;
	key = kmalloc(MAX_KEYLEN, GFP_KERNEL);
	if (!key)
		goto out_noxbuf;
	if (testmgr_alloc_buf(xbuf))
		goto out_noxbuf;
	if (testmgr_alloc_buf(axbuf))
		goto out_noaxbuf;
	if (diff_dst && testmgr_alloc_buf(xoutbuf))
		goto out_nooutbuf;

	/* avoid "the frame size is larger than 1024 bytes" compiler warning */
	sg = kmalloc(sizeof(*sg) * 8 * (diff_dst ? 4 : 2), GFP_KERNEL);
	if (!sg)
		goto out_nosg;
	sgout = &sg[16];

	if (diff_dst)
		d = "-ddst";
	else
		d = "";

	if (enc == ENCRYPT)
		e = "encryption";
	else
		e = "decryption";

	init_completion(&result.completion);

	req = aead_request_alloc(tfm, GFP_KERNEL);
	if (!req) {
		pr_err("alg: aead%s: Failed to allocate request for %s\n",
		       d, algo);
		goto out;
	}

	aead_request_set_callback(req, CRYPTO_TFM_REQ_MAY_BACKLOG,
				  tcrypt_complete, &result);

	iv_len = crypto_aead_ivsize(tfm);

	for (i = 0, j = 0; i < tcount; i++) {
		if (template[i].np)
			continue;

		j++;

		/* some templates have no input data but they will
		 * touch input
		 */
		input = xbuf[0];
		input += align_offset;
		assoc = axbuf[0];

		ret = -EINVAL;
		if (WARN_ON(align_offset + template[i].ilen >
			    PAGE_SIZE || template[i].alen > PAGE_SIZE))
			goto out;

		memcpy(input, template[i].input, template[i].ilen);
		memcpy(assoc, template[i].assoc, template[i].alen);
		if (template[i].iv)
			memcpy(iv, template[i].iv, iv_len);
		else
			memset(iv, 0, iv_len);

		crypto_aead_clear_flags(tfm, ~0);
		if (template[i].wk)
			crypto_aead_set_flags(tfm, CRYPTO_TFM_REQ_WEAK_KEY);

		if (template[i].klen > MAX_KEYLEN) {
			pr_err("alg: aead%s: setkey failed on test %d for %s: key size %d > %d\n",
			       d, j, algo, template[i].klen,
			       MAX_KEYLEN);
			ret = -EINVAL;
			goto out;
		}
		memcpy(key, template[i].key, template[i].klen);

		ret = crypto_aead_setkey(tfm, key, template[i].klen);
		if (template[i].fail == !ret) {
			pr_err("alg: aead%s: setkey failed on test %d for %s: flags=%x\n",
			       d, j, algo, crypto_aead_get_flags(tfm));
			goto out;
		} else if (ret)
			continue;

		authsize = abs(template[i].rlen - template[i].ilen);
		ret = crypto_aead_setauthsize(tfm, authsize);
		if (ret) {
			pr_err("alg: aead%s: Failed to set authsize to %u on test %d for %s\n",
			       d, authsize, j, algo);
			goto out;
		}

		k = !!template[i].alen;
		sg_init_table(sg, k + 1);
		sg_set_buf(&sg[0], assoc, template[i].alen);
		sg_set_buf(&sg[k], input,
			   template[i].ilen + (enc ? authsize : 0));
		output = input;

		if (diff_dst) {
			sg_init_table(sgout, k + 1);
			sg_set_buf(&sgout[0], assoc, template[i].alen);

			output = xoutbuf[0];
			output += align_offset;
			sg_set_buf(&sgout[k], output,
				   template[i].rlen + (enc ? 0 : authsize));
		}

		aead_request_set_crypt(req, sg, (diff_dst) ? sgout : sg,
				       template[i].ilen, iv);

		aead_request_set_ad(req, template[i].alen);

		ret = enc ? crypto_aead_encrypt(req) : crypto_aead_decrypt(req);

		switch (ret) {
		case 0:
			if (template[i].novrfy) {
				/* verification was supposed to fail */
				pr_err("alg: aead%s: %s failed on test %d for %s: ret was 0, expected -EBADMSG\n",
				       d, e, j, algo);
				/* so really, we got a bad message */
				ret = -EBADMSG;
				goto out;
			}
			break;
		case -EINPROGRESS:
		case -EBUSY:
			wait_for_completion(&result.completion);
			reinit_completion(&result.completion);
			ret = result.err;
			if (!ret)
				break;
		case -EBADMSG:
			if (template[i].novrfy)
				/* verification failure was expected */
				continue;
			/* fall through */
		default:
			pr_err("alg: aead%s: %s failed on test %d for %s: ret=%d\n",
			       d, e, j, algo, -ret);
			goto out;
		}

		q = output;
		if (memcmp(q, template[i].result, template[i].rlen)) {
			pr_err("alg: aead%s: Test %d failed on %s for %s\n",
			       d, j, e, algo);
			hexdump(q, template[i].rlen);
			ret = -EINVAL;
			goto out;
		}
	}

	for (i = 0, j = 0; i < tcount; i++) {
		/* alignment tests are only done with continuous buffers */
		if (align_offset != 0)
			break;

		if (!template[i].np)
			continue;

		j++;

		if (template[i].iv)
			memcpy(iv, template[i].iv, iv_len);
		else
			memset(iv, 0, MAX_IVLEN);

		crypto_aead_clear_flags(tfm, ~0);
		if (template[i].wk)
			crypto_aead_set_flags(tfm, CRYPTO_TFM_REQ_WEAK_KEY);
		if (template[i].klen > MAX_KEYLEN) {
			pr_err("alg: aead%s: setkey failed on test %d for %s: key size %d > %d\n",
			       d, j, algo, template[i].klen, MAX_KEYLEN);
			ret = -EINVAL;
			goto out;
		}
		memcpy(key, template[i].key, template[i].klen);

		ret = crypto_aead_setkey(tfm, key, template[i].klen);
		if (template[i].fail == !ret) {
			pr_err("alg: aead%s: setkey failed on chunk test %d for %s: flags=%x\n",
			       d, j, algo, crypto_aead_get_flags(tfm));
			goto out;
		} else if (ret)
			continue;

		authsize = abs(template[i].rlen - template[i].ilen);

		ret = -EINVAL;
		sg_init_table(sg, template[i].anp + template[i].np);
		if (diff_dst)
			sg_init_table(sgout, template[i].anp + template[i].np);

		ret = -EINVAL;
		for (k = 0, temp = 0; k < template[i].anp; k++) {
			if (WARN_ON(offset_in_page(IDX[k]) +
				    template[i].atap[k] > PAGE_SIZE))
				goto out;
			sg_set_buf(&sg[k],
				   memcpy(axbuf[IDX[k] >> PAGE_SHIFT] +
					  offset_in_page(IDX[k]),
					  template[i].assoc + temp,
					  template[i].atap[k]),
				   template[i].atap[k]);
			if (diff_dst)
				sg_set_buf(&sgout[k],
					   axbuf[IDX[k] >> PAGE_SHIFT] +
					   offset_in_page(IDX[k]),
					   template[i].atap[k]);
			temp += template[i].atap[k];
		}

		for (k = 0, temp = 0; k < template[i].np; k++) {
			if (WARN_ON(offset_in_page(IDX[k]) +
				    template[i].tap[k] > PAGE_SIZE))
				goto out;

			q = xbuf[IDX[k] >> PAGE_SHIFT] + offset_in_page(IDX[k]);
			memcpy(q, template[i].input + temp, template[i].tap[k]);
			sg_set_buf(&sg[template[i].anp + k],
				   q, template[i].tap[k]);

			if (diff_dst) {
				q = xoutbuf[IDX[k] >> PAGE_SHIFT] +
				    offset_in_page(IDX[k]);

				memset(q, 0, template[i].tap[k]);

				sg_set_buf(&sgout[template[i].anp + k],
					   q, template[i].tap[k]);
			}

			n = template[i].tap[k];
			if (k == template[i].np - 1 && enc)
				n += authsize;
			if (offset_in_page(q) + n < PAGE_SIZE)
				q[n] = 0;

			temp += template[i].tap[k];
		}

		ret = crypto_aead_setauthsize(tfm, authsize);
		if (ret) {
			pr_err("alg: aead%s: Failed to set authsize to %u on chunk test %d for %s\n",
			       d, authsize, j, algo);
			goto out;
		}

		if (enc) {
			if (WARN_ON(sg[template[i].anp + k - 1].offset +
				    sg[template[i].anp + k - 1].length +
				    authsize > PAGE_SIZE)) {
				ret = -EINVAL;
				goto out;
			}

			if (diff_dst)
				sgout[template[i].anp + k - 1].length +=
					authsize;
			sg[template[i].anp + k - 1].length += authsize;
		}

		aead_request_set_crypt(req, sg, (diff_dst) ? sgout : sg,
				       template[i].ilen,
				       iv);

		aead_request_set_ad(req, template[i].alen);

		ret = enc ? crypto_aead_encrypt(req) : crypto_aead_decrypt(req);

		switch (ret) {
		case 0:
			if (template[i].novrfy) {
				/* verification was supposed to fail */
				pr_err("alg: aead%s: %s failed on chunk test %d for %s: ret was 0, expected -EBADMSG\n",
				       d, e, j, algo);
				/* so really, we got a bad message */
				ret = -EBADMSG;
				goto out;
			}
			break;
		case -EINPROGRESS:
		case -EBUSY:
			wait_for_completion(&result.completion);
			reinit_completion(&result.completion);
			ret = result.err;
			if (!ret)
				break;
		case -EBADMSG:
			if (template[i].novrfy)
				/* verification failure was expected */
				continue;
			/* fall through */
		default:
			pr_err("alg: aead%s: %s failed on chunk test %d for %s: ret=%d\n",
			       d, e, j, algo, -ret);
			goto out;
		}

		ret = -EINVAL;
		for (k = 0, temp = 0; k < template[i].np; k++) {
			if (diff_dst)
				q = xoutbuf[IDX[k] >> PAGE_SHIFT] +
				    offset_in_page(IDX[k]);
			else
				q = xbuf[IDX[k] >> PAGE_SHIFT] +
				    offset_in_page(IDX[k]);

			n = template[i].tap[k];
			if (k == template[i].np - 1)
				n += enc ? authsize : -authsize;

			if (memcmp(q, template[i].result + temp, n)) {
				pr_err("alg: aead%s: Chunk test %d failed on %s at page %u for %s\n",
				       d, j, e, k, algo);
				hexdump(q, n);
				goto out;
			}

			q += n;
			if (k == template[i].np - 1 && !enc) {
				if (!diff_dst &&
					memcmp(q, template[i].input +
					      temp + n, authsize))
					n = authsize;
				else
					n = 0;
			} else {
				for (n = 0; offset_in_page(q + n) && q[n]; n++)
					;
			}
			if (n) {
				pr_err("alg: aead%s: Result buffer corruption in chunk test %d on %s at page %u for %s: %u bytes:\n",
				       d, j, e, k, algo, n);
				hexdump(q, n);
				goto out;
			}

			temp += template[i].tap[k];
		}
	}

	ret = 0;

out:
	aead_request_free(req);
	kfree(sg);
out_nosg:
	if (diff_dst)
		testmgr_free_buf(xoutbuf);
out_nooutbuf:
	testmgr_free_buf(axbuf);
out_noaxbuf:
	testmgr_free_buf(xbuf);
out_noxbuf:
	kfree(key);
	kfree(iv);
	return ret;
}

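/*
 * Wrapper around __test_aead(): cover dst == src, dst != src, and
 * misaligned destination buffers.
 */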
static int test_aead(struct crypto_aead *tfm, int enc,
		     struct aead_testvec *template, unsigned int tcount)
{
	unsigned int alignmask;
	int ret;

	/* test 'dst == src' case */
	ret = __test_aead(tfm, enc, template, tcount, false, 0);
	if (ret)
		return ret;

	/* test 'dst != src' case */
	ret = __test_aead(tfm, enc, template, tcount, true, 0);
	if (ret)
		return ret;

	/* test unaligned buffers, check with one byte offset */
	ret = __test_aead(tfm, enc, template, tcount, true, 1);
	if (ret)
		return ret;

	alignmask = crypto_tfm_alg_alignmask(&tfm->base);
	if (alignmask) {
		/* Check if alignment mask for tfm is correctly set. */
		ret = __test_aead(tfm, enc, template, tcount, true,
				  alignmask + 1);
		if (ret)
			return ret;
	}

	return 0;
}

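/*
 * Test a single-block cipher by encrypting/decrypting each vector one
 * block at a time and comparing against the expected result.
 */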
static int test_cipher(struct crypto_cipher *tfm, int enc,
		       struct cipher_testvec *template, unsigned int tcount)
{
	const char *algo = crypto_tfm_alg_driver_name(crypto_cipher_tfm(tfm));
	unsigned int i, j, k;
	char *q;
	const char *e;
	void *data;
	char *xbuf[XBUFSIZE];
	int ret = -ENOMEM;

	if (testmgr_alloc_buf(xbuf))
		goto out_nobuf;

	if (enc == ENCRYPT)
		e = "encryption";
	else
		e = "decryption";

	j = 0;
	for (i = 0; i < tcount; i++) {
		if (template[i].np)
			continue;

		if (fips_enabled && template[i].fips_skip)
			continue;

		j++;

		ret = -EINVAL;
		if (WARN_ON(template[i].ilen > PAGE_SIZE))
			goto out;

		data = xbuf[0];
		memcpy(data, template[i].input, template[i].ilen);

		crypto_cipher_clear_flags(tfm, ~0);
		if (template[i].wk)
			crypto_cipher_set_flags(tfm, CRYPTO_TFM_REQ_WEAK_KEY);

		ret = crypto_cipher_setkey(tfm, template[i].key,
					   template[i].klen);
		if (template[i].fail == !ret) {
			printk(KERN_ERR "alg: cipher: setkey failed "
			       "on test %d for %s: flags=%x\n", j,
			       algo, crypto_cipher_get_flags(tfm));
			goto out;
		} else if (ret)
			continue;

		for (k = 0; k < template[i].ilen;
		     k += crypto_cipher_blocksize(tfm)) {
			if (enc)
				crypto_cipher_encrypt_one(tfm, data + k,
							  data + k);
			else
				crypto_cipher_decrypt_one(tfm, data + k,
							  data + k);
		}

		q = data;
		if (memcmp(q, template[i].result, template[i].rlen)) {
			printk(KERN_ERR "alg: cipher: Test %d failed "
			       "on %s for %s\n", j, e, algo);
			hexdump(q, template[i].rlen);
			ret = -EINVAL;
			goto out;
		}
	}

	ret = 0;

out:
	testmgr_free_buf(xbuf);
out_nobuf:
	return ret;
}

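/*
 * Run skcipher test vectors for one direction, covering contiguous and
 * scattered buffers, optional distinct destination buffers and
 * misaligned data, and verifying the output IV where a vector
 * provides one.
 */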
static int __test_skcipher(struct crypto_skcipher *tfm, int enc,
			   struct cipher_testvec *template, unsigned int tcount,
			   const bool diff_dst, const int align_offset)
{
	const char *algo =
		crypto_tfm_alg_driver_name(crypto_skcipher_tfm(tfm));
	unsigned int i, j, k, n, temp;
	char *q;
	struct skcipher_request *req;
	struct scatterlist sg[8];
	struct scatterlist sgout[8];
	const char *e, *d;
	struct tcrypt_result result;
	void *data;
	char iv[MAX_IVLEN];
	char *xbuf[XBUFSIZE];
	char *xoutbuf[XBUFSIZE];
	int ret = -ENOMEM;
	unsigned int ivsize = crypto_skcipher_ivsize(tfm);

	if (testmgr_alloc_buf(xbuf))
		goto out_nobuf;

	if (diff_dst && testmgr_alloc_buf(xoutbuf))
		goto out_nooutbuf;

	if (diff_dst)
		d = "-ddst";
	else
		d = "";

	if (enc == ENCRYPT)
		e = "encryption";
	else
		e = "decryption";

	init_completion(&result.completion);

	req = skcipher_request_alloc(tfm, GFP_KERNEL);
	if (!req) {
		pr_err("alg: skcipher%s: Failed to allocate request for %s\n",
		       d, algo);
		goto out;
	}

	skcipher_request_set_callback(req, CRYPTO_TFM_REQ_MAY_BACKLOG,
				      tcrypt_complete, &result);

	j = 0;
	for (i = 0; i < tcount; i++) {
		if (template[i].np && !template[i].also_non_np)
			continue;

		if (fips_enabled && template[i].fips_skip)
			continue;

		if (template[i].iv)
			memcpy(iv, template[i].iv, ivsize);
		else
			memset(iv, 0, MAX_IVLEN);

		j++;
		ret = -EINVAL;
		if (WARN_ON(align_offset + template[i].ilen > PAGE_SIZE))
			goto out;

		data = xbuf[0];
		data += align_offset;
		memcpy(data, template[i].input, template[i].ilen);

		crypto_skcipher_clear_flags(tfm, ~0);
		if (template[i].wk)
			crypto_skcipher_set_flags(tfm,
						  CRYPTO_TFM_REQ_WEAK_KEY);

		ret = crypto_skcipher_setkey(tfm, template[i].key,
					     template[i].klen);
		if (template[i].fail == !ret) {
			pr_err("alg: skcipher%s: setkey failed on test %d for %s: flags=%x\n",
			       d, j, algo, crypto_skcipher_get_flags(tfm));
			goto out;
		} else if (ret)
			continue;

		sg_init_one(&sg[0], data, template[i].ilen);
		if (diff_dst) {
			data = xoutbuf[0];
			data += align_offset;
			sg_init_one(&sgout[0], data, template[i].ilen);
		}

		skcipher_request_set_crypt(req, sg, (diff_dst) ? sgout : sg,
					   template[i].ilen, iv);
		ret = enc ? crypto_skcipher_encrypt(req) :
			    crypto_skcipher_decrypt(req);

		switch (ret) {
		case 0:
			break;
		case -EINPROGRESS:
		case -EBUSY:
			wait_for_completion(&result.completion);
			reinit_completion(&result.completion);
			ret = result.err;
			if (!ret)
				break;
			/* fall through */
		default:
			pr_err("alg: skcipher%s: %s failed on test %d for %s: ret=%d\n",
			       d, e, j, algo, -ret);
			goto out;
		}

		q = data;
		if (memcmp(q, template[i].result, template[i].rlen)) {
			pr_err("alg: skcipher%s: Test %d failed (invalid result) on %s for %s\n",
			       d, j, e, algo);
			hexdump(q, template[i].rlen);
			ret = -EINVAL;
			goto out;
		}

		if (template[i].iv_out &&
		    memcmp(iv, template[i].iv_out,
			   crypto_skcipher_ivsize(tfm))) {
			pr_err("alg: skcipher%s: Test %d failed (invalid output IV) on %s for %s\n",
			       d, j, e, algo);
			hexdump(iv, crypto_skcipher_ivsize(tfm));
			ret = -EINVAL;
			goto out;
		}
	}

	j = 0;
	for (i = 0; i < tcount; i++) {
		/* alignment tests are only done with continuous buffers */
		if (align_offset != 0)
			break;

		if (!template[i].np)
			continue;

		if (fips_enabled && template[i].fips_skip)
			continue;

		if (template[i].iv)
			memcpy(iv, template[i].iv, ivsize);
		else
			memset(iv, 0, MAX_IVLEN);

		j++;
		crypto_skcipher_clear_flags(tfm, ~0);
		if (template[i].wk)
			crypto_skcipher_set_flags(tfm,
						  CRYPTO_TFM_REQ_WEAK_KEY);

		ret = crypto_skcipher_setkey(tfm, template[i].key,
					     template[i].klen);
		if (template[i].fail == !ret) {
			pr_err("alg: skcipher%s: setkey failed on chunk test %d for %s: flags=%x\n",
			       d, j, algo, crypto_skcipher_get_flags(tfm));
			goto out;
		} else if (ret)
			continue;

		temp = 0;
		ret = -EINVAL;
		sg_init_table(sg, template[i].np);
		if (diff_dst)
			sg_init_table(sgout, template[i].np);
		for (k = 0; k < template[i].np; k++) {
			if (WARN_ON(offset_in_page(IDX[k]) +
				    template[i].tap[k] > PAGE_SIZE))
				goto out;

			q = xbuf[IDX[k] >> PAGE_SHIFT] + offset_in_page(IDX[k]);

			memcpy(q, template[i].input + temp, template[i].tap[k]);

			if (offset_in_page(q) + template[i].tap[k] < PAGE_SIZE)
				q[template[i].tap[k]] = 0;

			sg_set_buf(&sg[k], q, template[i].tap[k]);
			if (diff_dst) {
				q = xoutbuf[IDX[k] >> PAGE_SHIFT] +
				    offset_in_page(IDX[k]);

				sg_set_buf(&sgout[k], q, template[i].tap[k]);

				memset(q, 0, template[i].tap[k]);
				if (offset_in_page(q) +
				    template[i].tap[k] < PAGE_SIZE)
					q[template[i].tap[k]] = 0;
			}

			temp += template[i].tap[k];
		}

		skcipher_request_set_crypt(req, sg, (diff_dst) ? sgout : sg,
					   template[i].ilen, iv);

		ret = enc ? crypto_skcipher_encrypt(req) :
			    crypto_skcipher_decrypt(req);

		switch (ret) {
		case 0:
			break;
		case -EINPROGRESS:
		case -EBUSY:
			wait_for_completion(&result.completion);
			reinit_completion(&result.completion);
			ret = result.err;
			if (!ret)
				break;
			/* fall through */
		default:
			pr_err("alg: skcipher%s: %s failed on chunk test %d for %s: ret=%d\n",
			       d, e, j, algo, -ret);
			goto out;
		}

		temp = 0;
		ret = -EINVAL;
		for (k = 0; k < template[i].np; k++) {
			if (diff_dst)
				q = xoutbuf[IDX[k] >> PAGE_SHIFT] +
				    offset_in_page(IDX[k]);
			else
				q = xbuf[IDX[k] >> PAGE_SHIFT] +
				    offset_in_page(IDX[k]);

			if (memcmp(q, template[i].result + temp,
				   template[i].tap[k])) {
				pr_err("alg: skcipher%s: Chunk test %d failed on %s at page %u for %s\n",
				       d, j, e, k, algo);
				hexdump(q, template[i].tap[k]);
				goto out;
			}

			q += template[i].tap[k];
			for (n = 0; offset_in_page(q + n) && q[n]; n++)
				;
			if (n) {
				pr_err("alg: skcipher%s: Result buffer corruption in chunk test %d on %s at page %u for %s: %u bytes:\n",
				       d, j, e, k, algo, n);
				hexdump(q, n);
				goto out;
			}
			temp += template[i].tap[k];
		}
	}

	ret = 0;

out:
	skcipher_request_free(req);
	if (diff_dst)
		testmgr_free_buf(xoutbuf);
out_nooutbuf:
	testmgr_free_buf(xbuf);
out_nobuf:
	return ret;
}

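/*
 * Wrapper around __test_skcipher(): cover dst == src, dst != src, and
 * misaligned destination buffers.
 */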
static int test_skcipher(struct crypto_skcipher *tfm, int enc,
			 struct cipher_testvec *template, unsigned int tcount)
{
	unsigned int alignmask;
	int ret;

	/* test 'dst == src' case */
	ret = __test_skcipher(tfm, enc, template, tcount, false, 0);
	if (ret)
		return ret;

	/* test 'dst != src' case */
	ret = __test_skcipher(tfm, enc, template, tcount, true, 0);
	if (ret)
		return ret;

	/* test unaligned buffers, check with one byte offset */
	ret = __test_skcipher(tfm, enc, template, tcount, true, 1);
	if (ret)
		return ret;

	alignmask = crypto_tfm_alg_alignmask(&tfm->base);
	if (alignmask) {
		/* Check if alignment mask for tfm is correctly set. */
		ret = __test_skcipher(tfm, enc, template, tcount, true,
				      alignmask + 1);
		if (ret)
			return ret;
	}

	return 0;
}

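/*
 * Test the synchronous (crypto_comp) compression interface with the
 * given compression and decompression vectors.
 */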
static int test_comp(struct crypto_comp *tfm, struct comp_testvec *ctemplate,
		     struct comp_testvec *dtemplate, int ctcount, int dtcount)
{
	const char *algo = crypto_tfm_alg_driver_name(crypto_comp_tfm(tfm));
	unsigned int i;
	char result[COMP_BUF_SIZE];
	int ret;

	for (i = 0; i < ctcount; i++) {
		int ilen;
		unsigned int dlen = COMP_BUF_SIZE;

		memset(result, 0, sizeof (result));

		ilen = ctemplate[i].inlen;
		ret = crypto_comp_compress(tfm, ctemplate[i].input,
					   ilen, result, &dlen);
		if (ret) {
			printk(KERN_ERR "alg: comp: compression failed "
			       "on test %d for %s: ret=%d\n", i + 1, algo,
			       -ret);
			goto out;
		}

		if (dlen != ctemplate[i].outlen) {
			printk(KERN_ERR "alg: comp: Compression test %d "
			       "failed for %s: output len = %d\n", i + 1, algo,
			       dlen);
			ret = -EINVAL;
			goto out;
		}

		if (memcmp(result, ctemplate[i].output, dlen)) {
			printk(KERN_ERR "alg: comp: Compression test %d "
			       "failed for %s\n", i + 1, algo);
			hexdump(result, dlen);
			ret = -EINVAL;
			goto out;
		}
	}

	for (i = 0; i < dtcount; i++) {
		int ilen;
		unsigned int dlen = COMP_BUF_SIZE;

		memset(result, 0, sizeof (result));

		ilen = dtemplate[i].inlen;
		ret = crypto_comp_decompress(tfm, dtemplate[i].input,
					     ilen, result, &dlen);
		if (ret) {
			printk(KERN_ERR "alg: comp: decompression failed "
			       "on test %d for %s: ret=%d\n", i + 1, algo,
			       -ret);
			goto out;
		}

		if (dlen != dtemplate[i].outlen) {
			printk(KERN_ERR "alg: comp: Decompression test %d "
			       "failed for %s: output len = %d\n", i + 1, algo,
			       dlen);
			ret = -EINVAL;
			goto out;
		}

		if (memcmp(result, dtemplate[i].output, dlen)) {
			printk(KERN_ERR "alg: comp: Decompression test %d "
			       "failed for %s\n", i + 1, algo);
			hexdump(result, dlen);
			ret = -EINVAL;
			goto out;
		}
	}

	ret = 0;

out:
	return ret;
}

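/*
 * Test the asynchronous (acomp) compression interface using scatterlists
 * and the tcrypt completion helpers.
 */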
Giovanni Cabiddud7db7a82016-10-21 13:19:54 +01001446static int test_acomp(struct crypto_acomp *tfm, struct comp_testvec *ctemplate,
1447 struct comp_testvec *dtemplate, int ctcount, int dtcount)
1448{
1449 const char *algo = crypto_tfm_alg_driver_name(crypto_acomp_tfm(tfm));
1450 unsigned int i;
Eric Biggerseb095592016-11-23 10:24:35 -08001451 char *output;
Giovanni Cabiddud7db7a82016-10-21 13:19:54 +01001452 int ret;
1453 struct scatterlist src, dst;
1454 struct acomp_req *req;
1455 struct tcrypt_result result;
1456
Eric Biggerseb095592016-11-23 10:24:35 -08001457 output = kmalloc(COMP_BUF_SIZE, GFP_KERNEL);
1458 if (!output)
1459 return -ENOMEM;
1460
Giovanni Cabiddud7db7a82016-10-21 13:19:54 +01001461 for (i = 0; i < ctcount; i++) {
1462 unsigned int dlen = COMP_BUF_SIZE;
1463 int ilen = ctemplate[i].inlen;
1464
Eric Biggerseb095592016-11-23 10:24:35 -08001465 memset(output, 0, dlen);
Giovanni Cabiddud7db7a82016-10-21 13:19:54 +01001466 init_completion(&result.completion);
1467 sg_init_one(&src, ctemplate[i].input, ilen);
1468 sg_init_one(&dst, output, dlen);
1469
1470 req = acomp_request_alloc(tfm);
1471 if (!req) {
1472 pr_err("alg: acomp: request alloc failed for %s\n",
1473 algo);
1474 ret = -ENOMEM;
1475 goto out;
1476 }
1477
1478 acomp_request_set_params(req, &src, &dst, ilen, dlen);
1479 acomp_request_set_callback(req, CRYPTO_TFM_REQ_MAY_BACKLOG,
1480 tcrypt_complete, &result);
1481
1482 ret = wait_async_op(&result, crypto_acomp_compress(req));
1483 if (ret) {
1484 pr_err("alg: acomp: compression failed on test %d for %s: ret=%d\n",
1485 i + 1, algo, -ret);
1486 acomp_request_free(req);
1487 goto out;
1488 }
1489
1490 if (req->dlen != ctemplate[i].outlen) {
1491 pr_err("alg: acomp: Compression test %d failed for %s: output len = %d\n",
1492 i + 1, algo, req->dlen);
1493 ret = -EINVAL;
1494 acomp_request_free(req);
1495 goto out;
1496 }
1497
1498 if (memcmp(output, ctemplate[i].output, req->dlen)) {
1499 pr_err("alg: acomp: Compression test %d failed for %s\n",
1500 i + 1, algo);
1501 hexdump(output, req->dlen);
1502 ret = -EINVAL;
1503 acomp_request_free(req);
1504 goto out;
1505 }
1506
1507 acomp_request_free(req);
1508 }
1509
1510 for (i = 0; i < dtcount; i++) {
1511 unsigned int dlen = COMP_BUF_SIZE;
1512 int ilen = dtemplate[i].inlen;
1513
Eric Biggerseb095592016-11-23 10:24:35 -08001514 memset(output, 0, dlen);
Giovanni Cabiddud7db7a82016-10-21 13:19:54 +01001515 init_completion(&result.completion);
1516 sg_init_one(&src, dtemplate[i].input, ilen);
1517 sg_init_one(&dst, output, dlen);
1518
1519 req = acomp_request_alloc(tfm);
1520 if (!req) {
1521 pr_err("alg: acomp: request alloc failed for %s\n",
1522 algo);
1523 ret = -ENOMEM;
1524 goto out;
1525 }
1526
1527 acomp_request_set_params(req, &src, &dst, ilen, dlen);
1528 acomp_request_set_callback(req, CRYPTO_TFM_REQ_MAY_BACKLOG,
1529 tcrypt_complete, &result);
1530
1531 ret = wait_async_op(&result, crypto_acomp_decompress(req));
1532 if (ret) {
1533 pr_err("alg: acomp: decompression failed on test %d for %s: ret=%d\n",
1534 i + 1, algo, -ret);
1535 acomp_request_free(req);
1536 goto out;
1537 }
1538
1539 if (req->dlen != dtemplate[i].outlen) {
1540 pr_err("alg: acomp: Decompression test %d failed for %s: output len = %d\n",
1541 i + 1, algo, req->dlen);
1542 ret = -EINVAL;
1543 acomp_request_free(req);
1544 goto out;
1545 }
1546
1547 if (memcmp(output, dtemplate[i].output, req->dlen)) {
1548 pr_err("alg: acomp: Decompression test %d failed for %s\n",
1549 i + 1, algo);
1550 hexdump(output, req->dlen);
1551 ret = -EINVAL;
1552 acomp_request_free(req);
1553 goto out;
1554 }
1555
1556 acomp_request_free(req);
1557 }
1558
1559 ret = 0;
1560
1561out:
Eric Biggerseb095592016-11-23 10:24:35 -08001562 kfree(output);
Giovanni Cabiddud7db7a82016-10-21 13:19:54 +01001563 return ret;
1564}
1565
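/*
 * CPRNG known-answer test (as used by the ansi_cprng vectors): for each
 * vector the seed buffer is built as V || key || DT and the RNG is reseeded
 * with it; the generator is then asked for rlen bytes 'loops' times and the
 * final block is compared against the expected output.
 */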
Jarod Wilson7647d6c2009-05-04 19:44:50 +08001566static int test_cprng(struct crypto_rng *tfm, struct cprng_testvec *template,
1567 unsigned int tcount)
1568{
1569 const char *algo = crypto_tfm_alg_driver_name(crypto_rng_tfm(tfm));
Felipe Contrerasfa4ef8a2009-10-27 19:04:42 +08001570 int err = 0, i, j, seedsize;
Jarod Wilson7647d6c2009-05-04 19:44:50 +08001571 u8 *seed;
1572 char result[32];
1573
1574 seedsize = crypto_rng_seedsize(tfm);
1575
1576 seed = kmalloc(seedsize, GFP_KERNEL);
1577 if (!seed) {
1578 printk(KERN_ERR "alg: cprng: Failed to allocate seed space "
1579 "for %s\n", algo);
1580 return -ENOMEM;
1581 }
1582
1583 for (i = 0; i < tcount; i++) {
1584 memset(result, 0, 32);
1585
1586 memcpy(seed, template[i].v, template[i].vlen);
1587 memcpy(seed + template[i].vlen, template[i].key,
1588 template[i].klen);
1589 memcpy(seed + template[i].vlen + template[i].klen,
1590 template[i].dt, template[i].dtlen);
1591
1592 err = crypto_rng_reset(tfm, seed, seedsize);
1593 if (err) {
1594 printk(KERN_ERR "alg: cprng: Failed to reset rng "
1595 "for %s\n", algo);
1596 goto out;
1597 }
1598
1599 for (j = 0; j < template[i].loops; j++) {
1600 err = crypto_rng_get_bytes(tfm, result,
1601 template[i].rlen);
Stephan Mueller19e60e12015-03-10 17:00:36 +01001602 if (err < 0) {
Jarod Wilson7647d6c2009-05-04 19:44:50 +08001603 printk(KERN_ERR "alg: cprng: Failed to obtain "
1604 "the correct amount of random data for "
Stephan Mueller19e60e12015-03-10 17:00:36 +01001605 "%s (requested %d)\n", algo,
1606 template[i].rlen);
Jarod Wilson7647d6c2009-05-04 19:44:50 +08001607 goto out;
1608 }
1609 }
1610
1611 err = memcmp(result, template[i].result,
1612 template[i].rlen);
1613 if (err) {
1614 printk(KERN_ERR "alg: cprng: Test %d failed for %s\n",
1615 i, algo);
1616 hexdump(result, template[i].rlen);
1617 err = -EINVAL;
1618 goto out;
1619 }
1620 }
1621
1622out:
1623 kfree(seed);
1624 return err;
1625}
1626
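/*
 * The alg_test_<type>() helpers that follow all share the same shape:
 * allocate a transform for the driver under test, run the relevant vectors
 * from the test suite against it, then free the transform again.
 */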
Herbert Xuda7f0332008-07-31 17:08:25 +08001627static int alg_test_aead(const struct alg_test_desc *desc, const char *driver,
1628 u32 type, u32 mask)
1629{
1630 struct crypto_aead *tfm;
1631 int err = 0;
1632
Herbert Xueed93e02016-11-22 20:08:31 +08001633 tfm = crypto_alloc_aead(driver, type, mask);
Herbert Xuda7f0332008-07-31 17:08:25 +08001634 if (IS_ERR(tfm)) {
1635 printk(KERN_ERR "alg: aead: Failed to load transform for %s: "
1636 "%ld\n", driver, PTR_ERR(tfm));
1637 return PTR_ERR(tfm);
1638 }
1639
1640 if (desc->suite.aead.enc.vecs) {
1641 err = test_aead(tfm, ENCRYPT, desc->suite.aead.enc.vecs,
1642 desc->suite.aead.enc.count);
1643 if (err)
1644 goto out;
1645 }
1646
1647 if (!err && desc->suite.aead.dec.vecs)
1648 err = test_aead(tfm, DECRYPT, desc->suite.aead.dec.vecs,
1649 desc->suite.aead.dec.count);
1650
1651out:
1652 crypto_free_aead(tfm);
1653 return err;
1654}
1655
1656static int alg_test_cipher(const struct alg_test_desc *desc,
1657 const char *driver, u32 type, u32 mask)
1658{
Herbert Xu1aa4ecd2008-08-17 17:01:56 +10001659 struct crypto_cipher *tfm;
Herbert Xuda7f0332008-07-31 17:08:25 +08001660 int err = 0;
1661
Herbert Xueed93e02016-11-22 20:08:31 +08001662 tfm = crypto_alloc_cipher(driver, type, mask);
Herbert Xuda7f0332008-07-31 17:08:25 +08001663 if (IS_ERR(tfm)) {
1664 printk(KERN_ERR "alg: cipher: Failed to load transform for "
1665 "%s: %ld\n", driver, PTR_ERR(tfm));
1666 return PTR_ERR(tfm);
1667 }
1668
1669 if (desc->suite.cipher.enc.vecs) {
1670 err = test_cipher(tfm, ENCRYPT, desc->suite.cipher.enc.vecs,
1671 desc->suite.cipher.enc.count);
1672 if (err)
1673 goto out;
1674 }
1675
1676 if (desc->suite.cipher.dec.vecs)
1677 err = test_cipher(tfm, DECRYPT, desc->suite.cipher.dec.vecs,
1678 desc->suite.cipher.dec.count);
1679
1680out:
Herbert Xu1aa4ecd2008-08-17 17:01:56 +10001681 crypto_free_cipher(tfm);
1682 return err;
1683}
1684
1685static int alg_test_skcipher(const struct alg_test_desc *desc,
1686 const char *driver, u32 type, u32 mask)
1687{
Herbert Xu12773d92015-08-20 15:21:46 +08001688 struct crypto_skcipher *tfm;
Herbert Xu1aa4ecd2008-08-17 17:01:56 +10001689 int err = 0;
1690
Herbert Xueed93e02016-11-22 20:08:31 +08001691 tfm = crypto_alloc_skcipher(driver, type, mask);
Herbert Xu1aa4ecd2008-08-17 17:01:56 +10001692 if (IS_ERR(tfm)) {
1693 printk(KERN_ERR "alg: skcipher: Failed to load transform for "
1694 "%s: %ld\n", driver, PTR_ERR(tfm));
1695 return PTR_ERR(tfm);
1696 }
1697
1698 if (desc->suite.cipher.enc.vecs) {
1699 err = test_skcipher(tfm, ENCRYPT, desc->suite.cipher.enc.vecs,
1700 desc->suite.cipher.enc.count);
1701 if (err)
1702 goto out;
1703 }
1704
1705 if (desc->suite.cipher.dec.vecs)
1706 err = test_skcipher(tfm, DECRYPT, desc->suite.cipher.dec.vecs,
1707 desc->suite.cipher.dec.count);
1708
1709out:
Herbert Xu12773d92015-08-20 15:21:46 +08001710 crypto_free_skcipher(tfm);
Herbert Xuda7f0332008-07-31 17:08:25 +08001711 return err;
1712}
1713
1714static int alg_test_comp(const struct alg_test_desc *desc, const char *driver,
1715 u32 type, u32 mask)
1716{
Giovanni Cabiddud7db7a82016-10-21 13:19:54 +01001717 struct crypto_comp *comp;
1718 struct crypto_acomp *acomp;
Herbert Xuda7f0332008-07-31 17:08:25 +08001719 int err;
Giovanni Cabiddud7db7a82016-10-21 13:19:54 +01001720 u32 algo_type = type & CRYPTO_ALG_TYPE_ACOMPRESS_MASK;
Herbert Xuda7f0332008-07-31 17:08:25 +08001721
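	/*
	 * Compression algorithms may be registered through either the acomp
	 * or the legacy comp interface; pick the matching test path based on
	 * the algorithm type bits.
	 */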
Giovanni Cabiddud7db7a82016-10-21 13:19:54 +01001722 if (algo_type == CRYPTO_ALG_TYPE_ACOMPRESS) {
1723 acomp = crypto_alloc_acomp(driver, type, mask);
1724 if (IS_ERR(acomp)) {
1725 pr_err("alg: acomp: Failed to load transform for %s: %ld\n",
1726 driver, PTR_ERR(acomp));
1727 return PTR_ERR(acomp);
1728 }
1729 err = test_acomp(acomp, desc->suite.comp.comp.vecs,
1730 desc->suite.comp.decomp.vecs,
1731 desc->suite.comp.comp.count,
1732 desc->suite.comp.decomp.count);
1733 crypto_free_acomp(acomp);
1734 } else {
1735 comp = crypto_alloc_comp(driver, type, mask);
1736 if (IS_ERR(comp)) {
1737 pr_err("alg: comp: Failed to load transform for %s: %ld\n",
1738 driver, PTR_ERR(comp));
1739 return PTR_ERR(comp);
1740 }
1741
1742 err = test_comp(comp, desc->suite.comp.comp.vecs,
1743 desc->suite.comp.decomp.vecs,
1744 desc->suite.comp.comp.count,
1745 desc->suite.comp.decomp.count);
1746
1747 crypto_free_comp(comp);
Herbert Xuda7f0332008-07-31 17:08:25 +08001748 }
Herbert Xuda7f0332008-07-31 17:08:25 +08001749 return err;
1750}
1751
1752static int alg_test_hash(const struct alg_test_desc *desc, const char *driver,
1753 u32 type, u32 mask)
1754{
1755 struct crypto_ahash *tfm;
1756 int err;
1757
Herbert Xueed93e02016-11-22 20:08:31 +08001758 tfm = crypto_alloc_ahash(driver, type, mask);
Herbert Xuda7f0332008-07-31 17:08:25 +08001759 if (IS_ERR(tfm)) {
1760 printk(KERN_ERR "alg: hash: Failed to load transform for %s: "
1761 "%ld\n", driver, PTR_ERR(tfm));
1762 return PTR_ERR(tfm);
1763 }
1764
David S. Millera8f1a052010-05-19 14:12:03 +10001765 err = test_hash(tfm, desc->suite.hash.vecs,
1766 desc->suite.hash.count, true);
1767 if (!err)
1768 err = test_hash(tfm, desc->suite.hash.vecs,
1769 desc->suite.hash.count, false);
Herbert Xuda7f0332008-07-31 17:08:25 +08001770
1771 crypto_free_ahash(tfm);
1772 return err;
1773}
1774
Herbert Xu8e3ee852008-11-07 14:58:52 +08001775static int alg_test_crc32c(const struct alg_test_desc *desc,
1776 const char *driver, u32 type, u32 mask)
1777{
1778 struct crypto_shash *tfm;
1779 u32 val;
1780 int err;
1781
1782 err = alg_test_hash(desc, driver, type, mask);
1783 if (err)
1784 goto out;
1785
Herbert Xueed93e02016-11-22 20:08:31 +08001786 tfm = crypto_alloc_shash(driver, type, mask);
Herbert Xu8e3ee852008-11-07 14:58:52 +08001787 if (IS_ERR(tfm)) {
1788 printk(KERN_ERR "alg: crc32c: Failed to load transform for %s: "
1789 "%ld\n", driver, PTR_ERR(tfm));
1790 err = PTR_ERR(tfm);
1791 goto out;
1792 }
1793
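	/*
	 * Extra sanity check beyond the hash vectors: seed the partial CRC
	 * state with an arbitrary constant and verify that ->final() returns
	 * its bitwise complement, which is crc32c's finalization step.
	 */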
1794 do {
Jan-Simon Möller4c5c3022012-07-02 13:48:30 +02001795 SHASH_DESC_ON_STACK(shash, tfm);
1796 u32 *ctx = (u32 *)shash_desc_ctx(shash);
Herbert Xu8e3ee852008-11-07 14:58:52 +08001797
Jan-Simon Möller4c5c3022012-07-02 13:48:30 +02001798 shash->tfm = tfm;
1799 shash->flags = 0;
Herbert Xu8e3ee852008-11-07 14:58:52 +08001800
Jan-Simon Möller4c5c3022012-07-02 13:48:30 +02001801 *ctx = le32_to_cpu(420553207);
1802 err = crypto_shash_final(shash, (u8 *)&val);
Herbert Xu8e3ee852008-11-07 14:58:52 +08001803 if (err) {
1804 printk(KERN_ERR "alg: crc32c: Operation failed for "
1805 "%s: %d\n", driver, err);
1806 break;
1807 }
1808
1809 if (val != ~420553207) {
1810 printk(KERN_ERR "alg: crc32c: Test failed for %s: "
1811 "%d\n", driver, val);
1812 err = -EINVAL;
1813 }
1814 } while (0);
1815
1816 crypto_free_shash(tfm);
1817
1818out:
1819 return err;
1820}
1821
Jarod Wilson7647d6c2009-05-04 19:44:50 +08001822static int alg_test_cprng(const struct alg_test_desc *desc, const char *driver,
1823 u32 type, u32 mask)
1824{
1825 struct crypto_rng *rng;
1826 int err;
1827
Herbert Xueed93e02016-11-22 20:08:31 +08001828 rng = crypto_alloc_rng(driver, type, mask);
Jarod Wilson7647d6c2009-05-04 19:44:50 +08001829 if (IS_ERR(rng)) {
1830 printk(KERN_ERR "alg: cprng: Failed to load transform for %s: "
1831 "%ld\n", driver, PTR_ERR(rng));
1832 return PTR_ERR(rng);
1833 }
1834
1835 err = test_cprng(rng, desc->suite.cprng.vecs, desc->suite.cprng.count);
1836
1837 crypto_free_rng(rng);
1838
1839 return err;
1840}
1841
Stephan Mueller64d1cdf2014-05-31 17:25:36 +02001842
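/*
 * Single CAVS-style DRBG known-answer test: instantiate the DRBG with the
 * test entropy and personalization string, run two generate calls with the
 * vector's additional input (feeding fresh test entropy when prediction
 * resistance is requested), and compare the second output block against the
 * expected result.
 */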
1843static int drbg_cavs_test(struct drbg_testvec *test, int pr,
1844 const char *driver, u32 type, u32 mask)
1845{
1846 int ret = -EAGAIN;
1847 struct crypto_rng *drng;
1848 struct drbg_test_data test_data;
1849 struct drbg_string addtl, pers, testentropy;
1850 unsigned char *buf = kzalloc(test->expectedlen, GFP_KERNEL);
1851
1852 if (!buf)
1853 return -ENOMEM;
1854
Herbert Xueed93e02016-11-22 20:08:31 +08001855 drng = crypto_alloc_rng(driver, type, mask);
Stephan Mueller64d1cdf2014-05-31 17:25:36 +02001856 if (IS_ERR(drng)) {
Jarod Wilson2fc0d252014-07-29 15:47:56 -04001857 printk(KERN_ERR "alg: drbg: could not allocate DRNG handle for "
Stephan Mueller64d1cdf2014-05-31 17:25:36 +02001858 "%s\n", driver);
1859 kzfree(buf);
1860 return -ENOMEM;
1861 }
1862
1863 test_data.testentropy = &testentropy;
1864 drbg_string_fill(&testentropy, test->entropy, test->entropylen);
1865 drbg_string_fill(&pers, test->pers, test->perslen);
1866 ret = crypto_drbg_reset_test(drng, &pers, &test_data);
1867 if (ret) {
1868 printk(KERN_ERR "alg: drbg: Failed to reset rng\n");
1869 goto outbuf;
1870 }
1871
1872 drbg_string_fill(&addtl, test->addtla, test->addtllen);
1873 if (pr) {
1874 drbg_string_fill(&testentropy, test->entpra, test->entprlen);
1875 ret = crypto_drbg_get_bytes_addtl_test(drng,
1876 buf, test->expectedlen, &addtl, &test_data);
1877 } else {
1878 ret = crypto_drbg_get_bytes_addtl(drng,
1879 buf, test->expectedlen, &addtl);
1880 }
Stephan Mueller19e60e12015-03-10 17:00:36 +01001881 if (ret < 0) {
Jarod Wilson2fc0d252014-07-29 15:47:56 -04001882 printk(KERN_ERR "alg: drbg: could not obtain random data for "
Stephan Mueller64d1cdf2014-05-31 17:25:36 +02001883 "driver %s\n", driver);
1884 goto outbuf;
1885 }
1886
1887 drbg_string_fill(&addtl, test->addtlb, test->addtllen);
1888 if (pr) {
1889 drbg_string_fill(&testentropy, test->entprb, test->entprlen);
1890 ret = crypto_drbg_get_bytes_addtl_test(drng,
1891 buf, test->expectedlen, &addtl, &test_data);
1892 } else {
1893 ret = crypto_drbg_get_bytes_addtl(drng,
1894 buf, test->expectedlen, &addtl);
1895 }
Stephan Mueller19e60e12015-03-10 17:00:36 +01001896 if (ret < 0) {
Jarod Wilson2fc0d252014-07-29 15:47:56 -04001897 printk(KERN_ERR "alg: drbg: could not obtain random data for "
Stephan Mueller64d1cdf2014-05-31 17:25:36 +02001898 "driver %s\n", driver);
1899 goto outbuf;
1900 }
1901
1902 ret = memcmp(test->expected, buf, test->expectedlen);
1903
1904outbuf:
1905 crypto_free_rng(drng);
1906 kzfree(buf);
1907 return ret;
1908}
1909
1910
1911static int alg_test_drbg(const struct alg_test_desc *desc, const char *driver,
1912 u32 type, u32 mask)
1913{
1914 int err = 0;
1915 int pr = 0;
1916 int i = 0;
1917 struct drbg_testvec *template = desc->suite.drbg.vecs;
1918 unsigned int tcount = desc->suite.drbg.count;
1919
1920 if (0 == memcmp(driver, "drbg_pr_", 8))
1921 pr = 1;
1922
1923 for (i = 0; i < tcount; i++) {
1924 err = drbg_cavs_test(&template[i], pr, driver, type, mask);
1925 if (err) {
1926 printk(KERN_ERR "alg: drbg: Test %d failed for %s\n",
1927 i, driver);
1928 err = -EINVAL;
1929 break;
1930 }
1931 }
1932 return err;
1933
1934}
1935
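/*
 * One DH/ECDH known-answer test: load the private key and parameters,
 * generate our public key and check it against the expected value, then
 * compute the shared secret from the peer's (b) public key and check that
 * as well.
 */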
Salvatore Benedetto802c7f12016-06-22 17:49:14 +01001936static int do_test_kpp(struct crypto_kpp *tfm, struct kpp_testvec *vec,
1937 const char *alg)
1938{
1939 struct kpp_request *req;
1940 void *input_buf = NULL;
1941 void *output_buf = NULL;
1942 struct tcrypt_result result;
1943 unsigned int out_len_max;
1944 int err = -ENOMEM;
1945 struct scatterlist src, dst;
1946
1947 req = kpp_request_alloc(tfm, GFP_KERNEL);
1948 if (!req)
1949 return err;
1950
1951 init_completion(&result.completion);
1952
1953 err = crypto_kpp_set_secret(tfm, vec->secret, vec->secret_size);
1954 if (err < 0)
1955 goto free_req;
1956
1957 out_len_max = crypto_kpp_maxsize(tfm);
1958 output_buf = kzalloc(out_len_max, GFP_KERNEL);
1959 if (!output_buf) {
1960 err = -ENOMEM;
1961 goto free_req;
1962 }
1963
 1964	/*
	 * Generate our public key first; the base (generator) comes from
	 * the parameters set together with the secret, so no input buffer
	 * is supplied here.
	 */
1965 kpp_request_set_input(req, NULL, 0);
1966 sg_init_one(&dst, output_buf, out_len_max);
1967 kpp_request_set_output(req, &dst, out_len_max);
1968 kpp_request_set_callback(req, CRYPTO_TFM_REQ_MAY_BACKLOG,
1969 tcrypt_complete, &result);
1970
1971 /* Compute public key */
1972 err = wait_async_op(&result, crypto_kpp_generate_public_key(req));
1973 if (err) {
1974 pr_err("alg: %s: generate public key test failed. err %d\n",
1975 alg, err);
1976 goto free_output;
1977 }
1978 /* Verify calculated public key */
1979 if (memcmp(vec->expected_a_public, sg_virt(req->dst),
1980 vec->expected_a_public_size)) {
1981 pr_err("alg: %s: generate public key test failed. Invalid output\n",
1982 alg);
1983 err = -EINVAL;
1984 goto free_output;
1985 }
1986
 1987	/* Calculate the shared secret using the counterpart's (b) public key. */
1988 input_buf = kzalloc(vec->b_public_size, GFP_KERNEL);
1989 if (!input_buf) {
1990 err = -ENOMEM;
1991 goto free_output;
1992 }
1993
1994 memcpy(input_buf, vec->b_public, vec->b_public_size);
1995 sg_init_one(&src, input_buf, vec->b_public_size);
1996 sg_init_one(&dst, output_buf, out_len_max);
1997 kpp_request_set_input(req, &src, vec->b_public_size);
1998 kpp_request_set_output(req, &dst, out_len_max);
1999 kpp_request_set_callback(req, CRYPTO_TFM_REQ_MAY_BACKLOG,
2000 tcrypt_complete, &result);
2001 err = wait_async_op(&result, crypto_kpp_compute_shared_secret(req));
2002 if (err) {
 2003		pr_err("alg: %s: compute shared secret test failed. err %d\n",
2004 alg, err);
2005 goto free_all;
2006 }
2007 /*
 2008	 * Verify the shared secret, from which the user will derive a
 2009	 * secret key by running it through whatever hash they have chosen.
2010 */
2011 if (memcmp(vec->expected_ss, sg_virt(req->dst),
2012 vec->expected_ss_size)) {
2013 pr_err("alg: %s: compute shared secret test failed. Invalid output\n",
2014 alg);
2015 err = -EINVAL;
2016 }
2017
2018free_all:
2019 kfree(input_buf);
2020free_output:
2021 kfree(output_buf);
2022free_req:
2023 kpp_request_free(req);
2024 return err;
2025}
2026
2027static int test_kpp(struct crypto_kpp *tfm, const char *alg,
2028 struct kpp_testvec *vecs, unsigned int tcount)
2029{
2030 int ret, i;
2031
2032 for (i = 0; i < tcount; i++) {
2033 ret = do_test_kpp(tfm, vecs++, alg);
2034 if (ret) {
2035 pr_err("alg: %s: test failed on vector %d, err=%d\n",
2036 alg, i + 1, ret);
2037 return ret;
2038 }
2039 }
2040 return 0;
2041}
2042
2043static int alg_test_kpp(const struct alg_test_desc *desc, const char *driver,
2044 u32 type, u32 mask)
2045{
2046 struct crypto_kpp *tfm;
2047 int err = 0;
2048
Herbert Xueed93e02016-11-22 20:08:31 +08002049 tfm = crypto_alloc_kpp(driver, type, mask);
Salvatore Benedetto802c7f12016-06-22 17:49:14 +01002050 if (IS_ERR(tfm)) {
2051 pr_err("alg: kpp: Failed to load tfm for %s: %ld\n",
2052 driver, PTR_ERR(tfm));
2053 return PTR_ERR(tfm);
2054 }
2055 if (desc->suite.kpp.vecs)
2056 err = test_kpp(tfm, desc->alg, desc->suite.kpp.vecs,
2057 desc->suite.kpp.count);
2058
2059 crypto_free_kpp(tfm);
2060 return err;
2061}
2062
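/*
 * One RSA known-answer test: load the key, encrypt the message and compare
 * the result with the expected ciphertext, then (for private-key vectors)
 * decrypt the ciphertext and check that the recovered plaintext matches.
 */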
Herbert Xu50d2b6432016-06-29 19:32:20 +08002063static int test_akcipher_one(struct crypto_akcipher *tfm,
2064 struct akcipher_testvec *vecs)
Tadeusz Struk946cc462015-06-16 10:31:06 -07002065{
Herbert Xudf27b262016-05-05 16:42:49 +08002066 char *xbuf[XBUFSIZE];
Tadeusz Struk946cc462015-06-16 10:31:06 -07002067 struct akcipher_request *req;
2068 void *outbuf_enc = NULL;
2069 void *outbuf_dec = NULL;
2070 struct tcrypt_result result;
2071 unsigned int out_len_max, out_len = 0;
2072 int err = -ENOMEM;
Tadeusz Struk22287b02015-10-08 09:26:55 -07002073 struct scatterlist src, dst, src_tab[2];
Tadeusz Struk946cc462015-06-16 10:31:06 -07002074
Herbert Xudf27b262016-05-05 16:42:49 +08002075 if (testmgr_alloc_buf(xbuf))
2076 return err;
2077
Tadeusz Struk946cc462015-06-16 10:31:06 -07002078 req = akcipher_request_alloc(tfm, GFP_KERNEL);
2079 if (!req)
Herbert Xudf27b262016-05-05 16:42:49 +08002080 goto free_xbuf;
Tadeusz Struk946cc462015-06-16 10:31:06 -07002081
2082 init_completion(&result.completion);
Tadeusz Struk22287b02015-10-08 09:26:55 -07002083
2084 if (vecs->public_key_vec)
2085 err = crypto_akcipher_set_pub_key(tfm, vecs->key,
2086 vecs->key_len);
2087 else
2088 err = crypto_akcipher_set_priv_key(tfm, vecs->key,
2089 vecs->key_len);
Tadeusz Struk946cc462015-06-16 10:31:06 -07002090 if (err)
2091 goto free_req;
2092
Salvatore Benedetto57763f52016-07-04 10:52:34 +01002093 err = -ENOMEM;
Tadeusz Struk22287b02015-10-08 09:26:55 -07002094 out_len_max = crypto_akcipher_maxsize(tfm);
Tadeusz Struk946cc462015-06-16 10:31:06 -07002095 outbuf_enc = kzalloc(out_len_max, GFP_KERNEL);
2096 if (!outbuf_enc)
2097 goto free_req;
2098
Herbert Xudf27b262016-05-05 16:42:49 +08002099 if (WARN_ON(vecs->m_size > PAGE_SIZE))
2100 goto free_all;
2101
2102 memcpy(xbuf[0], vecs->m, vecs->m_size);
2103
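	/*
	 * Split the message across two scatterlist entries (8 bytes plus the
	 * remainder) so that non-linear input is exercised as well.
	 */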
Tadeusz Struk22287b02015-10-08 09:26:55 -07002104 sg_init_table(src_tab, 2);
Herbert Xudf27b262016-05-05 16:42:49 +08002105 sg_set_buf(&src_tab[0], xbuf[0], 8);
2106 sg_set_buf(&src_tab[1], xbuf[0] + 8, vecs->m_size - 8);
Tadeusz Struk22287b02015-10-08 09:26:55 -07002107 sg_init_one(&dst, outbuf_enc, out_len_max);
2108 akcipher_request_set_crypt(req, src_tab, &dst, vecs->m_size,
2109 out_len_max);
Tadeusz Struk946cc462015-06-16 10:31:06 -07002110 akcipher_request_set_callback(req, CRYPTO_TFM_REQ_MAY_BACKLOG,
2111 tcrypt_complete, &result);
2112
2113 /* Run RSA encrypt - c = m^e mod n;*/
2114 err = wait_async_op(&result, crypto_akcipher_encrypt(req));
2115 if (err) {
Herbert Xu50d2b6432016-06-29 19:32:20 +08002116 pr_err("alg: akcipher: encrypt test failed. err %d\n", err);
Tadeusz Struk946cc462015-06-16 10:31:06 -07002117 goto free_all;
2118 }
Tadeusz Struk22287b02015-10-08 09:26:55 -07002119 if (req->dst_len != vecs->c_size) {
Herbert Xu50d2b6432016-06-29 19:32:20 +08002120 pr_err("alg: akcipher: encrypt test failed. Invalid output len\n");
Tadeusz Struk946cc462015-06-16 10:31:06 -07002121 err = -EINVAL;
2122 goto free_all;
2123 }
2124 /* verify that encrypted message is equal to expected */
Herbert Xudf27b262016-05-05 16:42:49 +08002125 if (memcmp(vecs->c, outbuf_enc, vecs->c_size)) {
Herbert Xu50d2b6432016-06-29 19:32:20 +08002126 pr_err("alg: akcipher: encrypt test failed. Invalid output\n");
2127 hexdump(outbuf_enc, vecs->c_size);
Tadeusz Struk946cc462015-06-16 10:31:06 -07002128 err = -EINVAL;
2129 goto free_all;
2130 }
2131 /* Don't invoke decrypt for vectors with public key */
2132 if (vecs->public_key_vec) {
2133 err = 0;
2134 goto free_all;
2135 }
2136 outbuf_dec = kzalloc(out_len_max, GFP_KERNEL);
2137 if (!outbuf_dec) {
2138 err = -ENOMEM;
2139 goto free_all;
2140 }
Herbert Xudf27b262016-05-05 16:42:49 +08002141
2142 if (WARN_ON(vecs->c_size > PAGE_SIZE))
2143 goto free_all;
2144
2145 memcpy(xbuf[0], vecs->c, vecs->c_size);
2146
2147 sg_init_one(&src, xbuf[0], vecs->c_size);
Tadeusz Struk22287b02015-10-08 09:26:55 -07002148 sg_init_one(&dst, outbuf_dec, out_len_max);
Tadeusz Struk946cc462015-06-16 10:31:06 -07002149 init_completion(&result.completion);
Tadeusz Struk22287b02015-10-08 09:26:55 -07002150 akcipher_request_set_crypt(req, &src, &dst, vecs->c_size, out_len_max);
Tadeusz Struk946cc462015-06-16 10:31:06 -07002151
2152 /* Run RSA decrypt - m = c^d mod n;*/
2153 err = wait_async_op(&result, crypto_akcipher_decrypt(req));
2154 if (err) {
Herbert Xu50d2b6432016-06-29 19:32:20 +08002155 pr_err("alg: akcipher: decrypt test failed. err %d\n", err);
Tadeusz Struk946cc462015-06-16 10:31:06 -07002156 goto free_all;
2157 }
2158 out_len = req->dst_len;
Herbert Xu50d2b6432016-06-29 19:32:20 +08002159 if (out_len < vecs->m_size) {
 2160		pr_err("alg: akcipher: decrypt test failed. Invalid output len %u\n",
 2161		       out_len);
Tadeusz Struk946cc462015-06-16 10:31:06 -07002162 err = -EINVAL;
2163 goto free_all;
2164 }
2165 /* verify that decrypted message is equal to the original msg */
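	/* Any leading bytes in front of the recovered plaintext must be zero. */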
Herbert Xu50d2b6432016-06-29 19:32:20 +08002166 if (memchr_inv(outbuf_dec, 0, out_len - vecs->m_size) ||
2167 memcmp(vecs->m, outbuf_dec + out_len - vecs->m_size,
2168 vecs->m_size)) {
2169 pr_err("alg: akcipher: decrypt test failed. Invalid output\n");
2170 hexdump(outbuf_dec, out_len);
Tadeusz Struk946cc462015-06-16 10:31:06 -07002171 err = -EINVAL;
2172 }
2173free_all:
2174 kfree(outbuf_dec);
2175 kfree(outbuf_enc);
2176free_req:
2177 akcipher_request_free(req);
Herbert Xudf27b262016-05-05 16:42:49 +08002178free_xbuf:
2179 testmgr_free_buf(xbuf);
Tadeusz Struk946cc462015-06-16 10:31:06 -07002180 return err;
2181}
2182
Herbert Xu50d2b6432016-06-29 19:32:20 +08002183static int test_akcipher(struct crypto_akcipher *tfm, const char *alg,
2184 struct akcipher_testvec *vecs, unsigned int tcount)
Tadeusz Struk946cc462015-06-16 10:31:06 -07002185{
Herbert Xu15226e42016-07-18 18:20:10 +08002186 const char *algo =
2187 crypto_tfm_alg_driver_name(crypto_akcipher_tfm(tfm));
Tadeusz Struk946cc462015-06-16 10:31:06 -07002188 int ret, i;
2189
2190 for (i = 0; i < tcount; i++) {
Herbert Xu50d2b6432016-06-29 19:32:20 +08002191 ret = test_akcipher_one(tfm, vecs++);
2192 if (!ret)
2193 continue;
2194
Herbert Xu15226e42016-07-18 18:20:10 +08002195 pr_err("alg: akcipher: test %d failed for %s, err=%d\n",
2196 i + 1, algo, ret);
Herbert Xu50d2b6432016-06-29 19:32:20 +08002197 return ret;
Tadeusz Struk946cc462015-06-16 10:31:06 -07002198 }
2199 return 0;
2200}
2201
Tadeusz Struk946cc462015-06-16 10:31:06 -07002202static int alg_test_akcipher(const struct alg_test_desc *desc,
2203 const char *driver, u32 type, u32 mask)
2204{
2205 struct crypto_akcipher *tfm;
2206 int err = 0;
2207
Herbert Xueed93e02016-11-22 20:08:31 +08002208 tfm = crypto_alloc_akcipher(driver, type, mask);
Tadeusz Struk946cc462015-06-16 10:31:06 -07002209 if (IS_ERR(tfm)) {
2210 pr_err("alg: akcipher: Failed to load tfm for %s: %ld\n",
2211 driver, PTR_ERR(tfm));
2212 return PTR_ERR(tfm);
2213 }
2214 if (desc->suite.akcipher.vecs)
2215 err = test_akcipher(tfm, desc->alg, desc->suite.akcipher.vecs,
2216 desc->suite.akcipher.count);
2217
2218 crypto_free_akcipher(tfm);
2219 return err;
2220}
2221
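/*
 * alg_test_null() is a deliberate no-op, used for algorithms the test
 * manager must know about (for example because they are covered by another
 * entry's vectors or only need a fips_allowed marking) but that have no
 * test vectors of their own.
 */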
Youquan, Song863b5572009-12-23 19:45:20 +08002222static int alg_test_null(const struct alg_test_desc *desc,
2223 const char *driver, u32 type, u32 mask)
2224{
2225 return 0;
2226}
2227
Herbert Xuda7f0332008-07-31 17:08:25 +08002228/* Please keep this list sorted by algorithm name. */
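/*
 * Each entry maps an algorithm name to the test routine that exercises it,
 * the vectors to feed it and, via .fips_allowed, whether the algorithm may
 * be used when the kernel runs in FIPS mode.
 */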
2229static const struct alg_test_desc alg_test_descs[] = {
2230 {
Jarod Wilsone08ca2d2009-05-04 19:46:29 +08002231 .alg = "ansi_cprng",
2232 .test = alg_test_cprng,
2233 .suite = {
2234 .cprng = {
2235 .vecs = ansi_cprng_aes_tv_template,
2236 .count = ANSI_CPRNG_AES_TEST_VECTORS
2237 }
2238 }
2239 }, {
Horia Geantabca4feb2014-03-14 17:46:51 +02002240 .alg = "authenc(hmac(md5),ecb(cipher_null))",
2241 .test = alg_test_aead,
Horia Geantabca4feb2014-03-14 17:46:51 +02002242 .suite = {
2243 .aead = {
2244 .enc = {
2245 .vecs = hmac_md5_ecb_cipher_null_enc_tv_template,
2246 .count = HMAC_MD5_ECB_CIPHER_NULL_ENC_TEST_VECTORS
2247 },
2248 .dec = {
2249 .vecs = hmac_md5_ecb_cipher_null_dec_tv_template,
2250 .count = HMAC_MD5_ECB_CIPHER_NULL_DEC_TEST_VECTORS
2251 }
2252 }
2253 }
2254 }, {
Herbert Xua4198fd2015-07-30 17:53:23 +08002255 .alg = "authenc(hmac(sha1),cbc(aes))",
Horia Geantae46e9a42012-07-03 19:16:54 +03002256 .test = alg_test_aead,
Horia Geantae46e9a42012-07-03 19:16:54 +03002257 .suite = {
2258 .aead = {
2259 .enc = {
Nitesh Lal5208ed22014-05-21 17:09:08 +05302260 .vecs =
2261 hmac_sha1_aes_cbc_enc_tv_temp,
2262 .count =
2263 HMAC_SHA1_AES_CBC_ENC_TEST_VEC
2264 }
2265 }
2266 }
2267 }, {
Herbert Xua4198fd2015-07-30 17:53:23 +08002268 .alg = "authenc(hmac(sha1),cbc(des))",
Nitesh Lal5208ed22014-05-21 17:09:08 +05302269 .test = alg_test_aead,
Nitesh Lal5208ed22014-05-21 17:09:08 +05302270 .suite = {
2271 .aead = {
2272 .enc = {
2273 .vecs =
2274 hmac_sha1_des_cbc_enc_tv_temp,
2275 .count =
2276 HMAC_SHA1_DES_CBC_ENC_TEST_VEC
2277 }
2278 }
2279 }
2280 }, {
Herbert Xua4198fd2015-07-30 17:53:23 +08002281 .alg = "authenc(hmac(sha1),cbc(des3_ede))",
Nitesh Lal5208ed22014-05-21 17:09:08 +05302282 .test = alg_test_aead,
Marcus Meissnered1afac2016-02-05 14:23:33 +01002283 .fips_allowed = 1,
Nitesh Lal5208ed22014-05-21 17:09:08 +05302284 .suite = {
2285 .aead = {
2286 .enc = {
2287 .vecs =
2288 hmac_sha1_des3_ede_cbc_enc_tv_temp,
2289 .count =
2290 HMAC_SHA1_DES3_EDE_CBC_ENC_TEST_VEC
Horia Geantae46e9a42012-07-03 19:16:54 +03002291 }
2292 }
2293 }
2294 }, {
Marcus Meissnerfb16abc2016-02-06 11:53:07 +01002295 .alg = "authenc(hmac(sha1),ctr(aes))",
2296 .test = alg_test_null,
2297 .fips_allowed = 1,
2298 }, {
Horia Geantabca4feb2014-03-14 17:46:51 +02002299 .alg = "authenc(hmac(sha1),ecb(cipher_null))",
2300 .test = alg_test_aead,
Horia Geantabca4feb2014-03-14 17:46:51 +02002301 .suite = {
2302 .aead = {
2303 .enc = {
Nitesh Lal5208ed22014-05-21 17:09:08 +05302304 .vecs =
2305 hmac_sha1_ecb_cipher_null_enc_tv_temp,
2306 .count =
2307 HMAC_SHA1_ECB_CIPHER_NULL_ENC_TEST_VEC
Horia Geantabca4feb2014-03-14 17:46:51 +02002308 },
2309 .dec = {
Nitesh Lal5208ed22014-05-21 17:09:08 +05302310 .vecs =
2311 hmac_sha1_ecb_cipher_null_dec_tv_temp,
2312 .count =
2313 HMAC_SHA1_ECB_CIPHER_NULL_DEC_TEST_VEC
2314 }
2315 }
2316 }
2317 }, {
Marcus Meissner88886902016-02-19 13:34:28 +01002318 .alg = "authenc(hmac(sha1),rfc3686(ctr(aes)))",
2319 .test = alg_test_null,
2320 .fips_allowed = 1,
2321 }, {
Herbert Xua4198fd2015-07-30 17:53:23 +08002322 .alg = "authenc(hmac(sha224),cbc(des))",
Nitesh Lal5208ed22014-05-21 17:09:08 +05302323 .test = alg_test_aead,
Nitesh Lal5208ed22014-05-21 17:09:08 +05302324 .suite = {
2325 .aead = {
2326 .enc = {
2327 .vecs =
2328 hmac_sha224_des_cbc_enc_tv_temp,
2329 .count =
2330 HMAC_SHA224_DES_CBC_ENC_TEST_VEC
2331 }
2332 }
2333 }
2334 }, {
Herbert Xua4198fd2015-07-30 17:53:23 +08002335 .alg = "authenc(hmac(sha224),cbc(des3_ede))",
Nitesh Lal5208ed22014-05-21 17:09:08 +05302336 .test = alg_test_aead,
Marcus Meissnered1afac2016-02-05 14:23:33 +01002337 .fips_allowed = 1,
Nitesh Lal5208ed22014-05-21 17:09:08 +05302338 .suite = {
2339 .aead = {
2340 .enc = {
2341 .vecs =
2342 hmac_sha224_des3_ede_cbc_enc_tv_temp,
2343 .count =
2344 HMAC_SHA224_DES3_EDE_CBC_ENC_TEST_VEC
Horia Geantabca4feb2014-03-14 17:46:51 +02002345 }
2346 }
2347 }
2348 }, {
Herbert Xua4198fd2015-07-30 17:53:23 +08002349 .alg = "authenc(hmac(sha256),cbc(aes))",
Horia Geantae46e9a42012-07-03 19:16:54 +03002350 .test = alg_test_aead,
Marcus Meissnered1afac2016-02-05 14:23:33 +01002351 .fips_allowed = 1,
Horia Geantae46e9a42012-07-03 19:16:54 +03002352 .suite = {
2353 .aead = {
2354 .enc = {
Nitesh Lal5208ed22014-05-21 17:09:08 +05302355 .vecs =
2356 hmac_sha256_aes_cbc_enc_tv_temp,
2357 .count =
2358 HMAC_SHA256_AES_CBC_ENC_TEST_VEC
2359 }
2360 }
2361 }
2362 }, {
Herbert Xua4198fd2015-07-30 17:53:23 +08002363 .alg = "authenc(hmac(sha256),cbc(des))",
Nitesh Lal5208ed22014-05-21 17:09:08 +05302364 .test = alg_test_aead,
Nitesh Lal5208ed22014-05-21 17:09:08 +05302365 .suite = {
2366 .aead = {
2367 .enc = {
2368 .vecs =
2369 hmac_sha256_des_cbc_enc_tv_temp,
2370 .count =
2371 HMAC_SHA256_DES_CBC_ENC_TEST_VEC
2372 }
2373 }
2374 }
2375 }, {
Herbert Xua4198fd2015-07-30 17:53:23 +08002376 .alg = "authenc(hmac(sha256),cbc(des3_ede))",
Nitesh Lal5208ed22014-05-21 17:09:08 +05302377 .test = alg_test_aead,
Marcus Meissnered1afac2016-02-05 14:23:33 +01002378 .fips_allowed = 1,
Nitesh Lal5208ed22014-05-21 17:09:08 +05302379 .suite = {
2380 .aead = {
2381 .enc = {
2382 .vecs =
2383 hmac_sha256_des3_ede_cbc_enc_tv_temp,
2384 .count =
2385 HMAC_SHA256_DES3_EDE_CBC_ENC_TEST_VEC
2386 }
2387 }
2388 }
2389 }, {
Marcus Meissnerfb16abc2016-02-06 11:53:07 +01002390 .alg = "authenc(hmac(sha256),ctr(aes))",
2391 .test = alg_test_null,
2392 .fips_allowed = 1,
2393 }, {
Marcus Meissner88886902016-02-19 13:34:28 +01002394 .alg = "authenc(hmac(sha256),rfc3686(ctr(aes)))",
2395 .test = alg_test_null,
2396 .fips_allowed = 1,
2397 }, {
Herbert Xua4198fd2015-07-30 17:53:23 +08002398 .alg = "authenc(hmac(sha384),cbc(des))",
Nitesh Lal5208ed22014-05-21 17:09:08 +05302399 .test = alg_test_aead,
Nitesh Lal5208ed22014-05-21 17:09:08 +05302400 .suite = {
2401 .aead = {
2402 .enc = {
2403 .vecs =
2404 hmac_sha384_des_cbc_enc_tv_temp,
2405 .count =
2406 HMAC_SHA384_DES_CBC_ENC_TEST_VEC
2407 }
2408 }
2409 }
2410 }, {
Herbert Xua4198fd2015-07-30 17:53:23 +08002411 .alg = "authenc(hmac(sha384),cbc(des3_ede))",
Nitesh Lal5208ed22014-05-21 17:09:08 +05302412 .test = alg_test_aead,
Marcus Meissnered1afac2016-02-05 14:23:33 +01002413 .fips_allowed = 1,
Nitesh Lal5208ed22014-05-21 17:09:08 +05302414 .suite = {
2415 .aead = {
2416 .enc = {
2417 .vecs =
2418 hmac_sha384_des3_ede_cbc_enc_tv_temp,
2419 .count =
2420 HMAC_SHA384_DES3_EDE_CBC_ENC_TEST_VEC
Horia Geantae46e9a42012-07-03 19:16:54 +03002421 }
2422 }
2423 }
2424 }, {
Marcus Meissnerfb16abc2016-02-06 11:53:07 +01002425 .alg = "authenc(hmac(sha384),ctr(aes))",
2426 .test = alg_test_null,
2427 .fips_allowed = 1,
2428 }, {
Marcus Meissner88886902016-02-19 13:34:28 +01002429 .alg = "authenc(hmac(sha384),rfc3686(ctr(aes)))",
2430 .test = alg_test_null,
2431 .fips_allowed = 1,
2432 }, {
Herbert Xua4198fd2015-07-30 17:53:23 +08002433 .alg = "authenc(hmac(sha512),cbc(aes))",
Marcus Meissnered1afac2016-02-05 14:23:33 +01002434 .fips_allowed = 1,
Horia Geantae46e9a42012-07-03 19:16:54 +03002435 .test = alg_test_aead,
Horia Geantae46e9a42012-07-03 19:16:54 +03002436 .suite = {
2437 .aead = {
2438 .enc = {
Nitesh Lal5208ed22014-05-21 17:09:08 +05302439 .vecs =
2440 hmac_sha512_aes_cbc_enc_tv_temp,
2441 .count =
2442 HMAC_SHA512_AES_CBC_ENC_TEST_VEC
2443 }
2444 }
2445 }
2446 }, {
Herbert Xua4198fd2015-07-30 17:53:23 +08002447 .alg = "authenc(hmac(sha512),cbc(des))",
Nitesh Lal5208ed22014-05-21 17:09:08 +05302448 .test = alg_test_aead,
Nitesh Lal5208ed22014-05-21 17:09:08 +05302449 .suite = {
2450 .aead = {
2451 .enc = {
2452 .vecs =
2453 hmac_sha512_des_cbc_enc_tv_temp,
2454 .count =
2455 HMAC_SHA512_DES_CBC_ENC_TEST_VEC
2456 }
2457 }
2458 }
2459 }, {
Herbert Xua4198fd2015-07-30 17:53:23 +08002460 .alg = "authenc(hmac(sha512),cbc(des3_ede))",
Nitesh Lal5208ed22014-05-21 17:09:08 +05302461 .test = alg_test_aead,
Marcus Meissnered1afac2016-02-05 14:23:33 +01002462 .fips_allowed = 1,
Nitesh Lal5208ed22014-05-21 17:09:08 +05302463 .suite = {
2464 .aead = {
2465 .enc = {
2466 .vecs =
2467 hmac_sha512_des3_ede_cbc_enc_tv_temp,
2468 .count =
2469 HMAC_SHA512_DES3_EDE_CBC_ENC_TEST_VEC
Horia Geantae46e9a42012-07-03 19:16:54 +03002470 }
2471 }
2472 }
2473 }, {
Marcus Meissnerfb16abc2016-02-06 11:53:07 +01002474 .alg = "authenc(hmac(sha512),ctr(aes))",
2475 .test = alg_test_null,
2476 .fips_allowed = 1,
2477 }, {
Marcus Meissner88886902016-02-19 13:34:28 +01002478 .alg = "authenc(hmac(sha512),rfc3686(ctr(aes)))",
2479 .test = alg_test_null,
2480 .fips_allowed = 1,
2481 }, {
Herbert Xuda7f0332008-07-31 17:08:25 +08002482 .alg = "cbc(aes)",
Herbert Xu1aa4ecd2008-08-17 17:01:56 +10002483 .test = alg_test_skcipher,
Jarod Wilsona1915d52009-05-15 15:16:03 +10002484 .fips_allowed = 1,
Herbert Xuda7f0332008-07-31 17:08:25 +08002485 .suite = {
2486 .cipher = {
2487 .enc = {
2488 .vecs = aes_cbc_enc_tv_template,
2489 .count = AES_CBC_ENC_TEST_VECTORS
2490 },
2491 .dec = {
2492 .vecs = aes_cbc_dec_tv_template,
2493 .count = AES_CBC_DEC_TEST_VECTORS
2494 }
2495 }
2496 }
2497 }, {
2498 .alg = "cbc(anubis)",
Herbert Xu1aa4ecd2008-08-17 17:01:56 +10002499 .test = alg_test_skcipher,
Herbert Xuda7f0332008-07-31 17:08:25 +08002500 .suite = {
2501 .cipher = {
2502 .enc = {
2503 .vecs = anubis_cbc_enc_tv_template,
2504 .count = ANUBIS_CBC_ENC_TEST_VECTORS
2505 },
2506 .dec = {
2507 .vecs = anubis_cbc_dec_tv_template,
2508 .count = ANUBIS_CBC_DEC_TEST_VECTORS
2509 }
2510 }
2511 }
2512 }, {
2513 .alg = "cbc(blowfish)",
Herbert Xu1aa4ecd2008-08-17 17:01:56 +10002514 .test = alg_test_skcipher,
Herbert Xuda7f0332008-07-31 17:08:25 +08002515 .suite = {
2516 .cipher = {
2517 .enc = {
2518 .vecs = bf_cbc_enc_tv_template,
2519 .count = BF_CBC_ENC_TEST_VECTORS
2520 },
2521 .dec = {
2522 .vecs = bf_cbc_dec_tv_template,
2523 .count = BF_CBC_DEC_TEST_VECTORS
2524 }
2525 }
2526 }
2527 }, {
2528 .alg = "cbc(camellia)",
Herbert Xu1aa4ecd2008-08-17 17:01:56 +10002529 .test = alg_test_skcipher,
Herbert Xuda7f0332008-07-31 17:08:25 +08002530 .suite = {
2531 .cipher = {
2532 .enc = {
2533 .vecs = camellia_cbc_enc_tv_template,
2534 .count = CAMELLIA_CBC_ENC_TEST_VECTORS
2535 },
2536 .dec = {
2537 .vecs = camellia_cbc_dec_tv_template,
2538 .count = CAMELLIA_CBC_DEC_TEST_VECTORS
2539 }
2540 }
2541 }
2542 }, {
Johannes Goetzfrieda2c58262012-07-11 19:37:21 +02002543 .alg = "cbc(cast5)",
2544 .test = alg_test_skcipher,
2545 .suite = {
2546 .cipher = {
2547 .enc = {
2548 .vecs = cast5_cbc_enc_tv_template,
2549 .count = CAST5_CBC_ENC_TEST_VECTORS
2550 },
2551 .dec = {
2552 .vecs = cast5_cbc_dec_tv_template,
2553 .count = CAST5_CBC_DEC_TEST_VECTORS
2554 }
2555 }
2556 }
2557 }, {
Johannes Goetzfried9b8b0402012-07-11 19:38:29 +02002558 .alg = "cbc(cast6)",
2559 .test = alg_test_skcipher,
2560 .suite = {
2561 .cipher = {
2562 .enc = {
2563 .vecs = cast6_cbc_enc_tv_template,
2564 .count = CAST6_CBC_ENC_TEST_VECTORS
2565 },
2566 .dec = {
2567 .vecs = cast6_cbc_dec_tv_template,
2568 .count = CAST6_CBC_DEC_TEST_VECTORS
2569 }
2570 }
2571 }
2572 }, {
Herbert Xuda7f0332008-07-31 17:08:25 +08002573 .alg = "cbc(des)",
Herbert Xu1aa4ecd2008-08-17 17:01:56 +10002574 .test = alg_test_skcipher,
Herbert Xuda7f0332008-07-31 17:08:25 +08002575 .suite = {
2576 .cipher = {
2577 .enc = {
2578 .vecs = des_cbc_enc_tv_template,
2579 .count = DES_CBC_ENC_TEST_VECTORS
2580 },
2581 .dec = {
2582 .vecs = des_cbc_dec_tv_template,
2583 .count = DES_CBC_DEC_TEST_VECTORS
2584 }
2585 }
2586 }
2587 }, {
2588 .alg = "cbc(des3_ede)",
Herbert Xu1aa4ecd2008-08-17 17:01:56 +10002589 .test = alg_test_skcipher,
Jarod Wilsona1915d52009-05-15 15:16:03 +10002590 .fips_allowed = 1,
Herbert Xuda7f0332008-07-31 17:08:25 +08002591 .suite = {
2592 .cipher = {
2593 .enc = {
2594 .vecs = des3_ede_cbc_enc_tv_template,
2595 .count = DES3_EDE_CBC_ENC_TEST_VECTORS
2596 },
2597 .dec = {
2598 .vecs = des3_ede_cbc_dec_tv_template,
2599 .count = DES3_EDE_CBC_DEC_TEST_VECTORS
2600 }
2601 }
2602 }
2603 }, {
Jussi Kivilinna9d259172011-10-18 00:02:53 +03002604 .alg = "cbc(serpent)",
2605 .test = alg_test_skcipher,
2606 .suite = {
2607 .cipher = {
2608 .enc = {
2609 .vecs = serpent_cbc_enc_tv_template,
2610 .count = SERPENT_CBC_ENC_TEST_VECTORS
2611 },
2612 .dec = {
2613 .vecs = serpent_cbc_dec_tv_template,
2614 .count = SERPENT_CBC_DEC_TEST_VECTORS
2615 }
2616 }
2617 }
2618 }, {
Herbert Xuda7f0332008-07-31 17:08:25 +08002619 .alg = "cbc(twofish)",
Herbert Xu1aa4ecd2008-08-17 17:01:56 +10002620 .test = alg_test_skcipher,
Herbert Xuda7f0332008-07-31 17:08:25 +08002621 .suite = {
2622 .cipher = {
2623 .enc = {
2624 .vecs = tf_cbc_enc_tv_template,
2625 .count = TF_CBC_ENC_TEST_VECTORS
2626 },
2627 .dec = {
2628 .vecs = tf_cbc_dec_tv_template,
2629 .count = TF_CBC_DEC_TEST_VECTORS
2630 }
2631 }
2632 }
2633 }, {
2634 .alg = "ccm(aes)",
2635 .test = alg_test_aead,
Jarod Wilsona1915d52009-05-15 15:16:03 +10002636 .fips_allowed = 1,
Herbert Xuda7f0332008-07-31 17:08:25 +08002637 .suite = {
2638 .aead = {
2639 .enc = {
2640 .vecs = aes_ccm_enc_tv_template,
2641 .count = AES_CCM_ENC_TEST_VECTORS
2642 },
2643 .dec = {
2644 .vecs = aes_ccm_dec_tv_template,
2645 .count = AES_CCM_DEC_TEST_VECTORS
2646 }
2647 }
2648 }
2649 }, {
Martin Willi3590ebf2015-06-01 13:43:57 +02002650 .alg = "chacha20",
2651 .test = alg_test_skcipher,
2652 .suite = {
2653 .cipher = {
2654 .enc = {
2655 .vecs = chacha20_enc_tv_template,
2656 .count = CHACHA20_ENC_TEST_VECTORS
2657 },
2658 .dec = {
2659 .vecs = chacha20_enc_tv_template,
2660 .count = CHACHA20_ENC_TEST_VECTORS
2661 },
2662 }
2663 }
2664 }, {
Jussi Kivilinna93b5e862013-04-08 10:48:44 +03002665 .alg = "cmac(aes)",
Stephan Mueller8f183752015-08-19 08:42:07 +02002666 .fips_allowed = 1,
Jussi Kivilinna93b5e862013-04-08 10:48:44 +03002667 .test = alg_test_hash,
2668 .suite = {
2669 .hash = {
2670 .vecs = aes_cmac128_tv_template,
2671 .count = CMAC_AES_TEST_VECTORS
2672 }
2673 }
2674 }, {
2675 .alg = "cmac(des3_ede)",
Stephan Mueller8f183752015-08-19 08:42:07 +02002676 .fips_allowed = 1,
Jussi Kivilinna93b5e862013-04-08 10:48:44 +03002677 .test = alg_test_hash,
2678 .suite = {
2679 .hash = {
2680 .vecs = des3_ede_cmac64_tv_template,
2681 .count = CMAC_DES3_EDE_TEST_VECTORS
2682 }
2683 }
2684 }, {
Jussi Kivilinnae4483702013-04-07 16:43:56 +03002685 .alg = "compress_null",
2686 .test = alg_test_null,
2687 }, {
Ard Biesheuvelebb34722015-05-04 11:00:17 +02002688 .alg = "crc32",
2689 .test = alg_test_hash,
2690 .suite = {
2691 .hash = {
2692 .vecs = crc32_tv_template,
2693 .count = CRC32_TEST_VECTORS
2694 }
2695 }
2696 }, {
Herbert Xuda7f0332008-07-31 17:08:25 +08002697 .alg = "crc32c",
Herbert Xu8e3ee852008-11-07 14:58:52 +08002698 .test = alg_test_crc32c,
Jarod Wilsona1915d52009-05-15 15:16:03 +10002699 .fips_allowed = 1,
Herbert Xuda7f0332008-07-31 17:08:25 +08002700 .suite = {
2701 .hash = {
2702 .vecs = crc32c_tv_template,
2703 .count = CRC32C_TEST_VECTORS
2704 }
2705 }
2706 }, {
Herbert Xu684115212013-09-07 12:56:26 +10002707 .alg = "crct10dif",
2708 .test = alg_test_hash,
2709 .fips_allowed = 1,
2710 .suite = {
2711 .hash = {
2712 .vecs = crct10dif_tv_template,
2713 .count = CRCT10DIF_TEST_VECTORS
2714 }
2715 }
2716 }, {
Jarod Wilsonf7cb80f2009-05-06 17:29:17 +08002717 .alg = "ctr(aes)",
2718 .test = alg_test_skcipher,
Jarod Wilsona1915d52009-05-15 15:16:03 +10002719 .fips_allowed = 1,
Jarod Wilsonf7cb80f2009-05-06 17:29:17 +08002720 .suite = {
2721 .cipher = {
2722 .enc = {
2723 .vecs = aes_ctr_enc_tv_template,
2724 .count = AES_CTR_ENC_TEST_VECTORS
2725 },
2726 .dec = {
2727 .vecs = aes_ctr_dec_tv_template,
2728 .count = AES_CTR_DEC_TEST_VECTORS
2729 }
2730 }
2731 }
2732 }, {
Jussi Kivilinna85b63e32011-10-10 23:03:03 +03002733 .alg = "ctr(blowfish)",
2734 .test = alg_test_skcipher,
2735 .suite = {
2736 .cipher = {
2737 .enc = {
2738 .vecs = bf_ctr_enc_tv_template,
2739 .count = BF_CTR_ENC_TEST_VECTORS
2740 },
2741 .dec = {
2742 .vecs = bf_ctr_dec_tv_template,
2743 .count = BF_CTR_DEC_TEST_VECTORS
2744 }
2745 }
2746 }
2747 }, {
Jussi Kivilinna08406052012-03-05 20:26:21 +02002748 .alg = "ctr(camellia)",
2749 .test = alg_test_skcipher,
2750 .suite = {
2751 .cipher = {
2752 .enc = {
2753 .vecs = camellia_ctr_enc_tv_template,
2754 .count = CAMELLIA_CTR_ENC_TEST_VECTORS
2755 },
2756 .dec = {
2757 .vecs = camellia_ctr_dec_tv_template,
2758 .count = CAMELLIA_CTR_DEC_TEST_VECTORS
2759 }
2760 }
2761 }
2762 }, {
Johannes Goetzfrieda2c58262012-07-11 19:37:21 +02002763 .alg = "ctr(cast5)",
2764 .test = alg_test_skcipher,
2765 .suite = {
2766 .cipher = {
2767 .enc = {
2768 .vecs = cast5_ctr_enc_tv_template,
2769 .count = CAST5_CTR_ENC_TEST_VECTORS
2770 },
2771 .dec = {
2772 .vecs = cast5_ctr_dec_tv_template,
2773 .count = CAST5_CTR_DEC_TEST_VECTORS
2774 }
2775 }
2776 }
2777 }, {
Johannes Goetzfried9b8b0402012-07-11 19:38:29 +02002778 .alg = "ctr(cast6)",
2779 .test = alg_test_skcipher,
2780 .suite = {
2781 .cipher = {
2782 .enc = {
2783 .vecs = cast6_ctr_enc_tv_template,
2784 .count = CAST6_CTR_ENC_TEST_VECTORS
2785 },
2786 .dec = {
2787 .vecs = cast6_ctr_dec_tv_template,
2788 .count = CAST6_CTR_DEC_TEST_VECTORS
2789 }
2790 }
2791 }
2792 }, {
Jussi Kivilinna8163fc32012-10-20 14:53:07 +03002793 .alg = "ctr(des)",
2794 .test = alg_test_skcipher,
2795 .suite = {
2796 .cipher = {
2797 .enc = {
2798 .vecs = des_ctr_enc_tv_template,
2799 .count = DES_CTR_ENC_TEST_VECTORS
2800 },
2801 .dec = {
2802 .vecs = des_ctr_dec_tv_template,
2803 .count = DES_CTR_DEC_TEST_VECTORS
2804 }
2805 }
2806 }
2807 }, {
Jussi Kivilinnae080b172012-10-20 14:53:12 +03002808 .alg = "ctr(des3_ede)",
2809 .test = alg_test_skcipher,
2810 .suite = {
2811 .cipher = {
2812 .enc = {
2813 .vecs = des3_ede_ctr_enc_tv_template,
2814 .count = DES3_EDE_CTR_ENC_TEST_VECTORS
2815 },
2816 .dec = {
2817 .vecs = des3_ede_ctr_dec_tv_template,
2818 .count = DES3_EDE_CTR_DEC_TEST_VECTORS
2819 }
2820 }
2821 }
2822 }, {
Jussi Kivilinna9d259172011-10-18 00:02:53 +03002823 .alg = "ctr(serpent)",
2824 .test = alg_test_skcipher,
2825 .suite = {
2826 .cipher = {
2827 .enc = {
2828 .vecs = serpent_ctr_enc_tv_template,
2829 .count = SERPENT_CTR_ENC_TEST_VECTORS
2830 },
2831 .dec = {
2832 .vecs = serpent_ctr_dec_tv_template,
2833 .count = SERPENT_CTR_DEC_TEST_VECTORS
2834 }
2835 }
2836 }
2837 }, {
Jussi Kivilinna573da622011-10-10 23:03:12 +03002838 .alg = "ctr(twofish)",
2839 .test = alg_test_skcipher,
2840 .suite = {
2841 .cipher = {
2842 .enc = {
2843 .vecs = tf_ctr_enc_tv_template,
2844 .count = TF_CTR_ENC_TEST_VECTORS
2845 },
2846 .dec = {
2847 .vecs = tf_ctr_dec_tv_template,
2848 .count = TF_CTR_DEC_TEST_VECTORS
2849 }
2850 }
2851 }
2852 }, {
Herbert Xuda7f0332008-07-31 17:08:25 +08002853 .alg = "cts(cbc(aes))",
Herbert Xu1aa4ecd2008-08-17 17:01:56 +10002854 .test = alg_test_skcipher,
Herbert Xuda7f0332008-07-31 17:08:25 +08002855 .suite = {
2856 .cipher = {
2857 .enc = {
2858 .vecs = cts_mode_enc_tv_template,
2859 .count = CTS_MODE_ENC_TEST_VECTORS
2860 },
2861 .dec = {
2862 .vecs = cts_mode_dec_tv_template,
2863 .count = CTS_MODE_DEC_TEST_VECTORS
2864 }
2865 }
2866 }
2867 }, {
2868 .alg = "deflate",
2869 .test = alg_test_comp,
Milan Broz08189042012-12-06 17:16:28 +08002870 .fips_allowed = 1,
Herbert Xuda7f0332008-07-31 17:08:25 +08002871 .suite = {
2872 .comp = {
2873 .comp = {
2874 .vecs = deflate_comp_tv_template,
2875 .count = DEFLATE_COMP_TEST_VECTORS
2876 },
2877 .decomp = {
2878 .vecs = deflate_decomp_tv_template,
2879 .count = DEFLATE_DECOMP_TEST_VECTORS
2880 }
2881 }
2882 }
2883 }, {
Salvatore Benedetto802c7f12016-06-22 17:49:14 +01002884 .alg = "dh",
2885 .test = alg_test_kpp,
2886 .fips_allowed = 1,
2887 .suite = {
2888 .kpp = {
2889 .vecs = dh_tv_template,
2890 .count = DH_TEST_VECTORS
2891 }
2892 }
2893 }, {
Jussi Kivilinnae4483702013-04-07 16:43:56 +03002894 .alg = "digest_null",
2895 .test = alg_test_null,
2896 }, {
Stephan Mueller64d1cdf2014-05-31 17:25:36 +02002897 .alg = "drbg_nopr_ctr_aes128",
2898 .test = alg_test_drbg,
2899 .fips_allowed = 1,
2900 .suite = {
2901 .drbg = {
2902 .vecs = drbg_nopr_ctr_aes128_tv_template,
2903 .count = ARRAY_SIZE(drbg_nopr_ctr_aes128_tv_template)
2904 }
2905 }
2906 }, {
2907 .alg = "drbg_nopr_ctr_aes192",
2908 .test = alg_test_drbg,
2909 .fips_allowed = 1,
2910 .suite = {
2911 .drbg = {
2912 .vecs = drbg_nopr_ctr_aes192_tv_template,
2913 .count = ARRAY_SIZE(drbg_nopr_ctr_aes192_tv_template)
2914 }
2915 }
2916 }, {
2917 .alg = "drbg_nopr_ctr_aes256",
2918 .test = alg_test_drbg,
2919 .fips_allowed = 1,
2920 .suite = {
2921 .drbg = {
2922 .vecs = drbg_nopr_ctr_aes256_tv_template,
2923 .count = ARRAY_SIZE(drbg_nopr_ctr_aes256_tv_template)
2924 }
2925 }
2926 }, {
2927 /*
2928 * There is no need to specifically test the DRBG with every
2929 * backend cipher -- covered by drbg_nopr_hmac_sha256 test
2930 */
2931 .alg = "drbg_nopr_hmac_sha1",
2932 .fips_allowed = 1,
2933 .test = alg_test_null,
2934 }, {
2935 .alg = "drbg_nopr_hmac_sha256",
2936 .test = alg_test_drbg,
2937 .fips_allowed = 1,
2938 .suite = {
2939 .drbg = {
2940 .vecs = drbg_nopr_hmac_sha256_tv_template,
2941 .count =
2942 ARRAY_SIZE(drbg_nopr_hmac_sha256_tv_template)
2943 }
2944 }
2945 }, {
2946 /* covered by drbg_nopr_hmac_sha256 test */
2947 .alg = "drbg_nopr_hmac_sha384",
2948 .fips_allowed = 1,
2949 .test = alg_test_null,
2950 }, {
2951 .alg = "drbg_nopr_hmac_sha512",
2952 .test = alg_test_null,
2953 .fips_allowed = 1,
2954 }, {
2955 .alg = "drbg_nopr_sha1",
2956 .fips_allowed = 1,
2957 .test = alg_test_null,
2958 }, {
2959 .alg = "drbg_nopr_sha256",
2960 .test = alg_test_drbg,
2961 .fips_allowed = 1,
2962 .suite = {
2963 .drbg = {
2964 .vecs = drbg_nopr_sha256_tv_template,
2965 .count = ARRAY_SIZE(drbg_nopr_sha256_tv_template)
2966 }
2967 }
2968 }, {
2969 /* covered by drbg_nopr_sha256 test */
2970 .alg = "drbg_nopr_sha384",
2971 .fips_allowed = 1,
2972 .test = alg_test_null,
2973 }, {
2974 .alg = "drbg_nopr_sha512",
2975 .fips_allowed = 1,
2976 .test = alg_test_null,
2977 }, {
2978 .alg = "drbg_pr_ctr_aes128",
2979 .test = alg_test_drbg,
2980 .fips_allowed = 1,
2981 .suite = {
2982 .drbg = {
2983 .vecs = drbg_pr_ctr_aes128_tv_template,
2984 .count = ARRAY_SIZE(drbg_pr_ctr_aes128_tv_template)
2985 }
2986 }
2987 }, {
2988 /* covered by drbg_pr_ctr_aes128 test */
2989 .alg = "drbg_pr_ctr_aes192",
2990 .fips_allowed = 1,
2991 .test = alg_test_null,
2992 }, {
2993 .alg = "drbg_pr_ctr_aes256",
2994 .fips_allowed = 1,
2995 .test = alg_test_null,
2996 }, {
2997 .alg = "drbg_pr_hmac_sha1",
2998 .fips_allowed = 1,
2999 .test = alg_test_null,
3000 }, {
3001 .alg = "drbg_pr_hmac_sha256",
3002 .test = alg_test_drbg,
3003 .fips_allowed = 1,
3004 .suite = {
3005 .drbg = {
3006 .vecs = drbg_pr_hmac_sha256_tv_template,
3007 .count = ARRAY_SIZE(drbg_pr_hmac_sha256_tv_template)
3008 }
3009 }
3010 }, {
3011 /* covered by drbg_pr_hmac_sha256 test */
3012 .alg = "drbg_pr_hmac_sha384",
3013 .fips_allowed = 1,
3014 .test = alg_test_null,
3015 }, {
3016 .alg = "drbg_pr_hmac_sha512",
3017 .test = alg_test_null,
3018 .fips_allowed = 1,
3019 }, {
3020 .alg = "drbg_pr_sha1",
3021 .fips_allowed = 1,
3022 .test = alg_test_null,
3023 }, {
3024 .alg = "drbg_pr_sha256",
3025 .test = alg_test_drbg,
3026 .fips_allowed = 1,
3027 .suite = {
3028 .drbg = {
3029 .vecs = drbg_pr_sha256_tv_template,
3030 .count = ARRAY_SIZE(drbg_pr_sha256_tv_template)
3031 }
3032 }
3033 }, {
3034 /* covered by drbg_pr_sha256 test */
3035 .alg = "drbg_pr_sha384",
3036 .fips_allowed = 1,
3037 .test = alg_test_null,
3038 }, {
3039 .alg = "drbg_pr_sha512",
3040 .fips_allowed = 1,
3041 .test = alg_test_null,
3042 }, {
Herbert Xuda7f0332008-07-31 17:08:25 +08003043 .alg = "ecb(aes)",
Herbert Xu1aa4ecd2008-08-17 17:01:56 +10003044 .test = alg_test_skcipher,
Jarod Wilsona1915d52009-05-15 15:16:03 +10003045 .fips_allowed = 1,
Herbert Xuda7f0332008-07-31 17:08:25 +08003046 .suite = {
3047 .cipher = {
3048 .enc = {
3049 .vecs = aes_enc_tv_template,
3050 .count = AES_ENC_TEST_VECTORS
3051 },
3052 .dec = {
3053 .vecs = aes_dec_tv_template,
3054 .count = AES_DEC_TEST_VECTORS
3055 }
3056 }
3057 }
3058 }, {
3059 .alg = "ecb(anubis)",
Herbert Xu1aa4ecd2008-08-17 17:01:56 +10003060 .test = alg_test_skcipher,
Herbert Xuda7f0332008-07-31 17:08:25 +08003061 .suite = {
3062 .cipher = {
3063 .enc = {
3064 .vecs = anubis_enc_tv_template,
3065 .count = ANUBIS_ENC_TEST_VECTORS
3066 },
3067 .dec = {
3068 .vecs = anubis_dec_tv_template,
3069 .count = ANUBIS_DEC_TEST_VECTORS
3070 }
3071 }
3072 }
3073 }, {
3074 .alg = "ecb(arc4)",
Herbert Xu1aa4ecd2008-08-17 17:01:56 +10003075 .test = alg_test_skcipher,
Herbert Xuda7f0332008-07-31 17:08:25 +08003076 .suite = {
3077 .cipher = {
3078 .enc = {
3079 .vecs = arc4_enc_tv_template,
3080 .count = ARC4_ENC_TEST_VECTORS
3081 },
3082 .dec = {
3083 .vecs = arc4_dec_tv_template,
3084 .count = ARC4_DEC_TEST_VECTORS
3085 }
3086 }
3087 }
3088 }, {
3089 .alg = "ecb(blowfish)",
Herbert Xu1aa4ecd2008-08-17 17:01:56 +10003090 .test = alg_test_skcipher,
Herbert Xuda7f0332008-07-31 17:08:25 +08003091 .suite = {
3092 .cipher = {
3093 .enc = {
3094 .vecs = bf_enc_tv_template,
3095 .count = BF_ENC_TEST_VECTORS
3096 },
3097 .dec = {
3098 .vecs = bf_dec_tv_template,
3099 .count = BF_DEC_TEST_VECTORS
3100 }
3101 }
3102 }
3103 }, {
3104 .alg = "ecb(camellia)",
Herbert Xu1aa4ecd2008-08-17 17:01:56 +10003105 .test = alg_test_skcipher,
Herbert Xuda7f0332008-07-31 17:08:25 +08003106 .suite = {
3107 .cipher = {
3108 .enc = {
3109 .vecs = camellia_enc_tv_template,
3110 .count = CAMELLIA_ENC_TEST_VECTORS
3111 },
3112 .dec = {
3113 .vecs = camellia_dec_tv_template,
3114 .count = CAMELLIA_DEC_TEST_VECTORS
3115 }
3116 }
3117 }
3118 }, {
3119 .alg = "ecb(cast5)",
Herbert Xu1aa4ecd2008-08-17 17:01:56 +10003120 .test = alg_test_skcipher,
Herbert Xuda7f0332008-07-31 17:08:25 +08003121 .suite = {
3122 .cipher = {
3123 .enc = {
3124 .vecs = cast5_enc_tv_template,
3125 .count = CAST5_ENC_TEST_VECTORS
3126 },
3127 .dec = {
3128 .vecs = cast5_dec_tv_template,
3129 .count = CAST5_DEC_TEST_VECTORS
3130 }
3131 }
3132 }
3133 }, {
3134 .alg = "ecb(cast6)",
Herbert Xu1aa4ecd2008-08-17 17:01:56 +10003135 .test = alg_test_skcipher,
Herbert Xuda7f0332008-07-31 17:08:25 +08003136 .suite = {
3137 .cipher = {
3138 .enc = {
3139 .vecs = cast6_enc_tv_template,
3140 .count = CAST6_ENC_TEST_VECTORS
3141 },
3142 .dec = {
3143 .vecs = cast6_dec_tv_template,
3144 .count = CAST6_DEC_TEST_VECTORS
3145 }
3146 }
3147 }
3148 }, {
Jussi Kivilinnae4483702013-04-07 16:43:56 +03003149 .alg = "ecb(cipher_null)",
3150 .test = alg_test_null,
3151 }, {
Herbert Xuda7f0332008-07-31 17:08:25 +08003152 .alg = "ecb(des)",
Herbert Xu1aa4ecd2008-08-17 17:01:56 +10003153 .test = alg_test_skcipher,
Herbert Xuda7f0332008-07-31 17:08:25 +08003154 .suite = {
3155 .cipher = {
3156 .enc = {
3157 .vecs = des_enc_tv_template,
3158 .count = DES_ENC_TEST_VECTORS
3159 },
3160 .dec = {
3161 .vecs = des_dec_tv_template,
3162 .count = DES_DEC_TEST_VECTORS
3163 }
3164 }
3165 }
3166 }, {
3167 .alg = "ecb(des3_ede)",
Herbert Xu1aa4ecd2008-08-17 17:01:56 +10003168 .test = alg_test_skcipher,
Jarod Wilsona1915d52009-05-15 15:16:03 +10003169 .fips_allowed = 1,
Herbert Xuda7f0332008-07-31 17:08:25 +08003170 .suite = {
3171 .cipher = {
3172 .enc = {
3173 .vecs = des3_ede_enc_tv_template,
3174 .count = DES3_EDE_ENC_TEST_VECTORS
3175 },
3176 .dec = {
3177 .vecs = des3_ede_dec_tv_template,
3178 .count = DES3_EDE_DEC_TEST_VECTORS
3179 }
3180 }
3181 }
3182 }, {
Jussi Kivilinna66e5bd02013-01-19 13:31:36 +02003183 .alg = "ecb(fcrypt)",
3184 .test = alg_test_skcipher,
3185 .suite = {
3186 .cipher = {
3187 .enc = {
3188 .vecs = fcrypt_pcbc_enc_tv_template,
3189 .count = 1
3190 },
3191 .dec = {
3192 .vecs = fcrypt_pcbc_dec_tv_template,
3193 .count = 1
3194 }
3195 }
3196 }
3197 }, {
Herbert Xuda7f0332008-07-31 17:08:25 +08003198 .alg = "ecb(khazad)",
Herbert Xu1aa4ecd2008-08-17 17:01:56 +10003199 .test = alg_test_skcipher,
Herbert Xuda7f0332008-07-31 17:08:25 +08003200 .suite = {
3201 .cipher = {
3202 .enc = {
3203 .vecs = khazad_enc_tv_template,
3204 .count = KHAZAD_ENC_TEST_VECTORS
3205 },
3206 .dec = {
3207 .vecs = khazad_dec_tv_template,
3208 .count = KHAZAD_DEC_TEST_VECTORS
3209 }
3210 }
3211 }
3212 }, {
3213 .alg = "ecb(seed)",
Herbert Xu1aa4ecd2008-08-17 17:01:56 +10003214 .test = alg_test_skcipher,
Herbert Xuda7f0332008-07-31 17:08:25 +08003215 .suite = {
3216 .cipher = {
3217 .enc = {
3218 .vecs = seed_enc_tv_template,
3219 .count = SEED_ENC_TEST_VECTORS
3220 },
3221 .dec = {
3222 .vecs = seed_dec_tv_template,
3223 .count = SEED_DEC_TEST_VECTORS
3224 }
3225 }
3226 }
3227 }, {
3228 .alg = "ecb(serpent)",
Herbert Xu1aa4ecd2008-08-17 17:01:56 +10003229 .test = alg_test_skcipher,
Herbert Xuda7f0332008-07-31 17:08:25 +08003230 .suite = {
3231 .cipher = {
3232 .enc = {
3233 .vecs = serpent_enc_tv_template,
3234 .count = SERPENT_ENC_TEST_VECTORS
3235 },
3236 .dec = {
3237 .vecs = serpent_dec_tv_template,
3238 .count = SERPENT_DEC_TEST_VECTORS
3239 }
3240 }
3241 }
3242 }, {
3243 .alg = "ecb(tea)",
Herbert Xu1aa4ecd2008-08-17 17:01:56 +10003244 .test = alg_test_skcipher,
Herbert Xuda7f0332008-07-31 17:08:25 +08003245 .suite = {
3246 .cipher = {
3247 .enc = {
3248 .vecs = tea_enc_tv_template,
3249 .count = TEA_ENC_TEST_VECTORS
3250 },
3251 .dec = {
3252 .vecs = tea_dec_tv_template,
3253 .count = TEA_DEC_TEST_VECTORS
3254 }
3255 }
3256 }
3257 }, {
3258 .alg = "ecb(tnepres)",
Herbert Xu1aa4ecd2008-08-17 17:01:56 +10003259 .test = alg_test_skcipher,
Herbert Xuda7f0332008-07-31 17:08:25 +08003260 .suite = {
3261 .cipher = {
3262 .enc = {
3263 .vecs = tnepres_enc_tv_template,
3264 .count = TNEPRES_ENC_TEST_VECTORS
3265 },
3266 .dec = {
3267 .vecs = tnepres_dec_tv_template,
3268 .count = TNEPRES_DEC_TEST_VECTORS
3269 }
3270 }
3271 }
3272 }, {
3273 .alg = "ecb(twofish)",
Herbert Xu1aa4ecd2008-08-17 17:01:56 +10003274 .test = alg_test_skcipher,
Herbert Xuda7f0332008-07-31 17:08:25 +08003275 .suite = {
3276 .cipher = {
3277 .enc = {
3278 .vecs = tf_enc_tv_template,
3279 .count = TF_ENC_TEST_VECTORS
3280 },
3281 .dec = {
3282 .vecs = tf_dec_tv_template,
3283 .count = TF_DEC_TEST_VECTORS
3284 }
3285 }
3286 }
3287 }, {
3288 .alg = "ecb(xeta)",
Herbert Xu1aa4ecd2008-08-17 17:01:56 +10003289 .test = alg_test_skcipher,
Herbert Xuda7f0332008-07-31 17:08:25 +08003290 .suite = {
3291 .cipher = {
3292 .enc = {
3293 .vecs = xeta_enc_tv_template,
3294 .count = XETA_ENC_TEST_VECTORS
3295 },
3296 .dec = {
3297 .vecs = xeta_dec_tv_template,
3298 .count = XETA_DEC_TEST_VECTORS
3299 }
3300 }
3301 }
3302 }, {
3303 .alg = "ecb(xtea)",
Herbert Xu1aa4ecd2008-08-17 17:01:56 +10003304 .test = alg_test_skcipher,
Herbert Xuda7f0332008-07-31 17:08:25 +08003305 .suite = {
3306 .cipher = {
3307 .enc = {
3308 .vecs = xtea_enc_tv_template,
3309 .count = XTEA_ENC_TEST_VECTORS
3310 },
3311 .dec = {
3312 .vecs = xtea_dec_tv_template,
3313 .count = XTEA_DEC_TEST_VECTORS
3314 }
3315 }
3316 }
3317 }, {
Salvatore Benedetto3c4b2392016-06-22 17:49:15 +01003318 .alg = "ecdh",
3319 .test = alg_test_kpp,
3320 .fips_allowed = 1,
3321 .suite = {
3322 .kpp = {
3323 .vecs = ecdh_tv_template,
3324 .count = ECDH_TEST_VECTORS
3325 }
3326 }
3327 }, {
		.alg = "gcm(aes)",
		.test = alg_test_aead,
		.fips_allowed = 1,
		.suite = {
			.aead = {
				.enc = {
					.vecs = aes_gcm_enc_tv_template,
					.count = AES_GCM_ENC_TEST_VECTORS
				},
				.dec = {
					.vecs = aes_gcm_dec_tv_template,
					.count = AES_GCM_DEC_TEST_VECTORS
				}
			}
		}
	}, {
		.alg = "ghash",
		.test = alg_test_hash,
		.fips_allowed = 1,
		.suite = {
			.hash = {
				.vecs = ghash_tv_template,
				.count = GHASH_TEST_VECTORS
			}
		}
	}, {
		.alg = "hmac(crc32)",
		.test = alg_test_hash,
		.suite = {
			.hash = {
				.vecs = bfin_crc_tv_template,
				.count = BFIN_CRC_TEST_VECTORS
			}
		}
	}, {
		.alg = "hmac(md5)",
		.test = alg_test_hash,
		.suite = {
			.hash = {
				.vecs = hmac_md5_tv_template,
				.count = HMAC_MD5_TEST_VECTORS
			}
		}
	}, {
		.alg = "hmac(rmd128)",
		.test = alg_test_hash,
		.suite = {
			.hash = {
				.vecs = hmac_rmd128_tv_template,
				.count = HMAC_RMD128_TEST_VECTORS
			}
		}
	}, {
		.alg = "hmac(rmd160)",
		.test = alg_test_hash,
		.suite = {
			.hash = {
				.vecs = hmac_rmd160_tv_template,
				.count = HMAC_RMD160_TEST_VECTORS
			}
		}
	}, {
		.alg = "hmac(sha1)",
		.test = alg_test_hash,
		.fips_allowed = 1,
		.suite = {
			.hash = {
				.vecs = hmac_sha1_tv_template,
				.count = HMAC_SHA1_TEST_VECTORS
			}
		}
	}, {
		.alg = "hmac(sha224)",
		.test = alg_test_hash,
		.fips_allowed = 1,
		.suite = {
			.hash = {
				.vecs = hmac_sha224_tv_template,
				.count = HMAC_SHA224_TEST_VECTORS
			}
		}
	}, {
		.alg = "hmac(sha256)",
		.test = alg_test_hash,
		.fips_allowed = 1,
		.suite = {
			.hash = {
				.vecs = hmac_sha256_tv_template,
				.count = HMAC_SHA256_TEST_VECTORS
			}
		}
	}, {
		.alg = "hmac(sha3-224)",
		.test = alg_test_hash,
		.fips_allowed = 1,
		.suite = {
			.hash = {
				.vecs = hmac_sha3_224_tv_template,
				.count = HMAC_SHA3_224_TEST_VECTORS
			}
		}
	}, {
		.alg = "hmac(sha3-256)",
		.test = alg_test_hash,
		.fips_allowed = 1,
		.suite = {
			.hash = {
				.vecs = hmac_sha3_256_tv_template,
				.count = HMAC_SHA3_256_TEST_VECTORS
			}
		}
	}, {
		.alg = "hmac(sha3-384)",
		.test = alg_test_hash,
		.fips_allowed = 1,
		.suite = {
			.hash = {
				.vecs = hmac_sha3_384_tv_template,
				.count = HMAC_SHA3_384_TEST_VECTORS
			}
		}
	}, {
		.alg = "hmac(sha3-512)",
		.test = alg_test_hash,
		.fips_allowed = 1,
		.suite = {
			.hash = {
				.vecs = hmac_sha3_512_tv_template,
				.count = HMAC_SHA3_512_TEST_VECTORS
			}
		}
	}, {
		.alg = "hmac(sha384)",
		.test = alg_test_hash,
		.fips_allowed = 1,
		.suite = {
			.hash = {
				.vecs = hmac_sha384_tv_template,
				.count = HMAC_SHA384_TEST_VECTORS
			}
		}
	}, {
		.alg = "hmac(sha512)",
		.test = alg_test_hash,
		.fips_allowed = 1,
		.suite = {
			.hash = {
				.vecs = hmac_sha512_tv_template,
				.count = HMAC_SHA512_TEST_VECTORS
			}
		}
	}, {
		.alg = "jitterentropy_rng",
		.fips_allowed = 1,
		.test = alg_test_null,
	}, {
		.alg = "kw(aes)",
		.test = alg_test_skcipher,
		.fips_allowed = 1,
		.suite = {
			.cipher = {
				.enc = {
					.vecs = aes_kw_enc_tv_template,
					.count = ARRAY_SIZE(aes_kw_enc_tv_template)
				},
				.dec = {
					.vecs = aes_kw_dec_tv_template,
					.count = ARRAY_SIZE(aes_kw_dec_tv_template)
				}
			}
		}
	}, {
		.alg = "lrw(aes)",
		.test = alg_test_skcipher,
		.suite = {
			.cipher = {
				.enc = {
					.vecs = aes_lrw_enc_tv_template,
					.count = AES_LRW_ENC_TEST_VECTORS
				},
				.dec = {
					.vecs = aes_lrw_dec_tv_template,
					.count = AES_LRW_DEC_TEST_VECTORS
				}
			}
		}
	}, {
		.alg = "lrw(camellia)",
		.test = alg_test_skcipher,
		.suite = {
			.cipher = {
				.enc = {
					.vecs = camellia_lrw_enc_tv_template,
					.count = CAMELLIA_LRW_ENC_TEST_VECTORS
				},
				.dec = {
					.vecs = camellia_lrw_dec_tv_template,
					.count = CAMELLIA_LRW_DEC_TEST_VECTORS
				}
			}
		}
	}, {
		.alg = "lrw(cast6)",
		.test = alg_test_skcipher,
		.suite = {
			.cipher = {
				.enc = {
					.vecs = cast6_lrw_enc_tv_template,
					.count = CAST6_LRW_ENC_TEST_VECTORS
				},
				.dec = {
					.vecs = cast6_lrw_dec_tv_template,
					.count = CAST6_LRW_DEC_TEST_VECTORS
				}
			}
		}
	}, {
		.alg = "lrw(serpent)",
		.test = alg_test_skcipher,
		.suite = {
			.cipher = {
				.enc = {
					.vecs = serpent_lrw_enc_tv_template,
					.count = SERPENT_LRW_ENC_TEST_VECTORS
				},
				.dec = {
					.vecs = serpent_lrw_dec_tv_template,
					.count = SERPENT_LRW_DEC_TEST_VECTORS
				}
			}
		}
	}, {
		.alg = "lrw(twofish)",
		.test = alg_test_skcipher,
		.suite = {
			.cipher = {
				.enc = {
					.vecs = tf_lrw_enc_tv_template,
					.count = TF_LRW_ENC_TEST_VECTORS
				},
				.dec = {
					.vecs = tf_lrw_dec_tv_template,
					.count = TF_LRW_DEC_TEST_VECTORS
				}
			}
		}
	}, {
		.alg = "lz4",
		.test = alg_test_comp,
		.fips_allowed = 1,
		.suite = {
			.comp = {
				.comp = {
					.vecs = lz4_comp_tv_template,
					.count = LZ4_COMP_TEST_VECTORS
				},
				.decomp = {
					.vecs = lz4_decomp_tv_template,
					.count = LZ4_DECOMP_TEST_VECTORS
				}
			}
		}
	}, {
		.alg = "lz4hc",
		.test = alg_test_comp,
		.fips_allowed = 1,
		.suite = {
			.comp = {
				.comp = {
					.vecs = lz4hc_comp_tv_template,
					.count = LZ4HC_COMP_TEST_VECTORS
				},
				.decomp = {
					.vecs = lz4hc_decomp_tv_template,
					.count = LZ4HC_DECOMP_TEST_VECTORS
				}
			}
		}
	}, {
		.alg = "lzo",
		.test = alg_test_comp,
		.fips_allowed = 1,
		.suite = {
			.comp = {
				.comp = {
					.vecs = lzo_comp_tv_template,
					.count = LZO_COMP_TEST_VECTORS
				},
				.decomp = {
					.vecs = lzo_decomp_tv_template,
					.count = LZO_DECOMP_TEST_VECTORS
				}
			}
		}
	}, {
		.alg = "md4",
		.test = alg_test_hash,
		.suite = {
			.hash = {
				.vecs = md4_tv_template,
				.count = MD4_TEST_VECTORS
			}
		}
	}, {
		.alg = "md5",
		.test = alg_test_hash,
		.suite = {
			.hash = {
				.vecs = md5_tv_template,
				.count = MD5_TEST_VECTORS
			}
		}
	}, {
		.alg = "michael_mic",
		.test = alg_test_hash,
		.suite = {
			.hash = {
				.vecs = michael_mic_tv_template,
				.count = MICHAEL_MIC_TEST_VECTORS
			}
		}
	}, {
		.alg = "ofb(aes)",
		.test = alg_test_skcipher,
		.fips_allowed = 1,
		.suite = {
			.cipher = {
				.enc = {
					.vecs = aes_ofb_enc_tv_template,
					.count = AES_OFB_ENC_TEST_VECTORS
				},
				.dec = {
					.vecs = aes_ofb_dec_tv_template,
					.count = AES_OFB_DEC_TEST_VECTORS
				}
			}
		}
	}, {
		.alg = "pcbc(fcrypt)",
		.test = alg_test_skcipher,
		.suite = {
			.cipher = {
				.enc = {
					.vecs = fcrypt_pcbc_enc_tv_template,
					.count = FCRYPT_ENC_TEST_VECTORS
				},
				.dec = {
					.vecs = fcrypt_pcbc_dec_tv_template,
					.count = FCRYPT_DEC_TEST_VECTORS
				}
			}
		}
	}, {
		.alg = "poly1305",
		.test = alg_test_hash,
		.suite = {
			.hash = {
				.vecs = poly1305_tv_template,
				.count = POLY1305_TEST_VECTORS
			}
		}
	}, {
		.alg = "rfc3686(ctr(aes))",
		.test = alg_test_skcipher,
		.fips_allowed = 1,
		.suite = {
			.cipher = {
				.enc = {
					.vecs = aes_ctr_rfc3686_enc_tv_template,
					.count = AES_CTR_3686_ENC_TEST_VECTORS
				},
				.dec = {
					.vecs = aes_ctr_rfc3686_dec_tv_template,
					.count = AES_CTR_3686_DEC_TEST_VECTORS
				}
			}
		}
	}, {
		.alg = "rfc4106(gcm(aes))",
		.test = alg_test_aead,
		.fips_allowed = 1,
		.suite = {
			.aead = {
				.enc = {
					.vecs = aes_gcm_rfc4106_enc_tv_template,
					.count = AES_GCM_4106_ENC_TEST_VECTORS
				},
				.dec = {
					.vecs = aes_gcm_rfc4106_dec_tv_template,
					.count = AES_GCM_4106_DEC_TEST_VECTORS
				}
			}
		}
	}, {
		.alg = "rfc4309(ccm(aes))",
		.test = alg_test_aead,
		.fips_allowed = 1,
		.suite = {
			.aead = {
				.enc = {
					.vecs = aes_ccm_rfc4309_enc_tv_template,
					.count = AES_CCM_4309_ENC_TEST_VECTORS
				},
				.dec = {
					.vecs = aes_ccm_rfc4309_dec_tv_template,
					.count = AES_CCM_4309_DEC_TEST_VECTORS
				}
			}
		}
	}, {
		.alg = "rfc4543(gcm(aes))",
		.test = alg_test_aead,
		.suite = {
			.aead = {
				.enc = {
					.vecs = aes_gcm_rfc4543_enc_tv_template,
					.count = AES_GCM_4543_ENC_TEST_VECTORS
				},
				.dec = {
					.vecs = aes_gcm_rfc4543_dec_tv_template,
					.count = AES_GCM_4543_DEC_TEST_VECTORS
				},
			}
		}
	}, {
		.alg = "rfc7539(chacha20,poly1305)",
		.test = alg_test_aead,
		.suite = {
			.aead = {
				.enc = {
					.vecs = rfc7539_enc_tv_template,
					.count = RFC7539_ENC_TEST_VECTORS
				},
				.dec = {
					.vecs = rfc7539_dec_tv_template,
					.count = RFC7539_DEC_TEST_VECTORS
				},
			}
		}
	}, {
		.alg = "rfc7539esp(chacha20,poly1305)",
		.test = alg_test_aead,
		.suite = {
			.aead = {
				.enc = {
					.vecs = rfc7539esp_enc_tv_template,
					.count = RFC7539ESP_ENC_TEST_VECTORS
				},
				.dec = {
					.vecs = rfc7539esp_dec_tv_template,
					.count = RFC7539ESP_DEC_TEST_VECTORS
				},
			}
		}
	}, {
		.alg = "rmd128",
		.test = alg_test_hash,
		.suite = {
			.hash = {
				.vecs = rmd128_tv_template,
				.count = RMD128_TEST_VECTORS
			}
		}
	}, {
		.alg = "rmd160",
		.test = alg_test_hash,
		.suite = {
			.hash = {
				.vecs = rmd160_tv_template,
				.count = RMD160_TEST_VECTORS
			}
		}
	}, {
		.alg = "rmd256",
		.test = alg_test_hash,
		.suite = {
			.hash = {
				.vecs = rmd256_tv_template,
				.count = RMD256_TEST_VECTORS
			}
		}
	}, {
		.alg = "rmd320",
		.test = alg_test_hash,
		.suite = {
			.hash = {
				.vecs = rmd320_tv_template,
				.count = RMD320_TEST_VECTORS
			}
		}
	}, {
		.alg = "rsa",
		.test = alg_test_akcipher,
		.fips_allowed = 1,
		.suite = {
			.akcipher = {
				.vecs = rsa_tv_template,
				.count = RSA_TEST_VECTORS
			}
		}
	}, {
		.alg = "salsa20",
		.test = alg_test_skcipher,
		.suite = {
			.cipher = {
				.enc = {
					.vecs = salsa20_stream_enc_tv_template,
					.count = SALSA20_STREAM_ENC_TEST_VECTORS
				}
			}
		}
	}, {
		.alg = "sha1",
		.test = alg_test_hash,
		.fips_allowed = 1,
		.suite = {
			.hash = {
				.vecs = sha1_tv_template,
				.count = SHA1_TEST_VECTORS
			}
		}
	}, {
		.alg = "sha224",
		.test = alg_test_hash,
		.fips_allowed = 1,
		.suite = {
			.hash = {
				.vecs = sha224_tv_template,
				.count = SHA224_TEST_VECTORS
			}
		}
	}, {
		.alg = "sha256",
		.test = alg_test_hash,
		.fips_allowed = 1,
		.suite = {
			.hash = {
				.vecs = sha256_tv_template,
				.count = SHA256_TEST_VECTORS
			}
		}
	}, {
		.alg = "sha3-224",
		.test = alg_test_hash,
		.fips_allowed = 1,
		.suite = {
			.hash = {
				.vecs = sha3_224_tv_template,
				.count = SHA3_224_TEST_VECTORS
			}
		}
	}, {
		.alg = "sha3-256",
		.test = alg_test_hash,
		.fips_allowed = 1,
		.suite = {
			.hash = {
				.vecs = sha3_256_tv_template,
				.count = SHA3_256_TEST_VECTORS
			}
		}
	}, {
		.alg = "sha3-384",
		.test = alg_test_hash,
		.fips_allowed = 1,
		.suite = {
			.hash = {
				.vecs = sha3_384_tv_template,
				.count = SHA3_384_TEST_VECTORS
			}
		}
	}, {
		.alg = "sha3-512",
		.test = alg_test_hash,
		.fips_allowed = 1,
		.suite = {
			.hash = {
				.vecs = sha3_512_tv_template,
				.count = SHA3_512_TEST_VECTORS
			}
		}
	}, {
		.alg = "sha384",
		.test = alg_test_hash,
		.fips_allowed = 1,
		.suite = {
			.hash = {
				.vecs = sha384_tv_template,
				.count = SHA384_TEST_VECTORS
			}
		}
	}, {
		.alg = "sha512",
		.test = alg_test_hash,
		.fips_allowed = 1,
		.suite = {
			.hash = {
				.vecs = sha512_tv_template,
				.count = SHA512_TEST_VECTORS
			}
		}
	}, {
		.alg = "tgr128",
		.test = alg_test_hash,
		.suite = {
			.hash = {
				.vecs = tgr128_tv_template,
				.count = TGR128_TEST_VECTORS
			}
		}
	}, {
		.alg = "tgr160",
		.test = alg_test_hash,
		.suite = {
			.hash = {
				.vecs = tgr160_tv_template,
				.count = TGR160_TEST_VECTORS
			}
		}
	}, {
		.alg = "tgr192",
		.test = alg_test_hash,
		.suite = {
			.hash = {
				.vecs = tgr192_tv_template,
				.count = TGR192_TEST_VECTORS
			}
		}
	}, {
		.alg = "vmac(aes)",
		.test = alg_test_hash,
		.suite = {
			.hash = {
				.vecs = aes_vmac128_tv_template,
				.count = VMAC_AES_TEST_VECTORS
			}
		}
	}, {
		.alg = "wp256",
		.test = alg_test_hash,
		.suite = {
			.hash = {
				.vecs = wp256_tv_template,
				.count = WP256_TEST_VECTORS
			}
		}
	}, {
		.alg = "wp384",
		.test = alg_test_hash,
		.suite = {
			.hash = {
				.vecs = wp384_tv_template,
				.count = WP384_TEST_VECTORS
			}
		}
	}, {
		.alg = "wp512",
		.test = alg_test_hash,
		.suite = {
			.hash = {
				.vecs = wp512_tv_template,
				.count = WP512_TEST_VECTORS
			}
		}
	}, {
		.alg = "xcbc(aes)",
		.test = alg_test_hash,
		.suite = {
			.hash = {
				.vecs = aes_xcbc128_tv_template,
				.count = XCBC_AES_TEST_VECTORS
			}
		}
	}, {
		.alg = "xts(aes)",
		.test = alg_test_skcipher,
		.fips_allowed = 1,
		.suite = {
			.cipher = {
				.enc = {
					.vecs = aes_xts_enc_tv_template,
					.count = AES_XTS_ENC_TEST_VECTORS
				},
				.dec = {
					.vecs = aes_xts_dec_tv_template,
					.count = AES_XTS_DEC_TEST_VECTORS
				}
			}
		}
	}, {
		.alg = "xts(camellia)",
		.test = alg_test_skcipher,
		.suite = {
			.cipher = {
				.enc = {
					.vecs = camellia_xts_enc_tv_template,
					.count = CAMELLIA_XTS_ENC_TEST_VECTORS
				},
				.dec = {
					.vecs = camellia_xts_dec_tv_template,
					.count = CAMELLIA_XTS_DEC_TEST_VECTORS
				}
			}
		}
	}, {
		.alg = "xts(cast6)",
		.test = alg_test_skcipher,
		.suite = {
			.cipher = {
				.enc = {
					.vecs = cast6_xts_enc_tv_template,
					.count = CAST6_XTS_ENC_TEST_VECTORS
				},
				.dec = {
					.vecs = cast6_xts_dec_tv_template,
					.count = CAST6_XTS_DEC_TEST_VECTORS
				}
			}
		}
	}, {
		.alg = "xts(serpent)",
		.test = alg_test_skcipher,
		.suite = {
			.cipher = {
				.enc = {
					.vecs = serpent_xts_enc_tv_template,
					.count = SERPENT_XTS_ENC_TEST_VECTORS
				},
				.dec = {
					.vecs = serpent_xts_dec_tv_template,
					.count = SERPENT_XTS_DEC_TEST_VECTORS
				}
			}
		}
	}, {
		.alg = "xts(twofish)",
		.test = alg_test_skcipher,
		.suite = {
			.cipher = {
				.enc = {
					.vecs = tf_xts_enc_tv_template,
					.count = TF_XTS_ENC_TEST_VECTORS
				},
				.dec = {
					.vecs = tf_xts_dec_tv_template,
					.count = TF_XTS_DEC_TEST_VECTORS
				}
			}
		}
	}
};
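
/*
 * Note: alg_test_descs[] must stay sorted by .alg in ASCII order, since
 * alg_find_test() below does a binary search over it; the ordering (and the
 * absence of duplicates) is checked at runtime by alg_test_descs_check_order().
 * A minimal sketch of a new entry (the "newcipher" name and its test-vector
 * arrays are hypothetical, shown only to illustrate the shape):
 *
 *	}, {
 *		.alg = "ecb(newcipher)",
 *		.test = alg_test_skcipher,
 *		.suite = {
 *			.cipher = {
 *				.enc = {
 *					.vecs = newcipher_enc_tv_template,
 *					.count = ARRAY_SIZE(newcipher_enc_tv_template)
 *				},
 *				.dec = {
 *					.vecs = newcipher_dec_tv_template,
 *					.count = ARRAY_SIZE(newcipher_dec_tv_template)
 *				}
 *			}
 *		}
 *	}
 */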

static bool alg_test_descs_checked;

static void alg_test_descs_check_order(void)
{
	int i;

	/* only check once */
	if (alg_test_descs_checked)
		return;

	alg_test_descs_checked = true;

	for (i = 1; i < ARRAY_SIZE(alg_test_descs); i++) {
		int diff = strcmp(alg_test_descs[i - 1].alg,
				  alg_test_descs[i].alg);

		if (WARN_ON(diff > 0)) {
			pr_warn("testmgr: alg_test_descs entries in wrong order: '%s' before '%s'\n",
				alg_test_descs[i - 1].alg,
				alg_test_descs[i].alg);
		}

		if (WARN_ON(diff == 0)) {
			pr_warn("testmgr: duplicate alg_test_descs entry: '%s'\n",
				alg_test_descs[i].alg);
		}
	}
}

/*
 * Binary search over the sorted alg_test_descs[] table; returns the index of
 * the matching entry, or -1 if there is no test description for @alg.
 */
static int alg_find_test(const char *alg)
{
	int start = 0;
	int end = ARRAY_SIZE(alg_test_descs);

	while (start < end) {
		int i = (start + end) / 2;
		int diff = strcmp(alg_test_descs[i].alg, alg);

		if (diff > 0) {
			end = i;
			continue;
		}

		if (diff < 0) {
			start = i + 1;
			continue;
		}

		return i;
	}

	return -1;
}

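/*
 * alg_test() - run the registered self-tests for an algorithm, if any.
 *
 * @driver is the implementation name, @alg the algorithm name, and
 * @type/@mask the crypto API flags of the instance under test.  Bare block
 * ciphers (CRYPTO_ALG_TYPE_CIPHER) are exercised through their "ecb(...)"
 * wrapping.  Otherwise, test descriptions are looked up for both @alg and
 * @driver and every match is run.  In FIPS mode an algorithm without a
 * fips_allowed description is rejected with -EINVAL and a failing self-test
 * panics the kernel; outside FIPS mode the tests can be skipped entirely via
 * the "notests" module parameter.  Returns 0 when no test is available.
 */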
int alg_test(const char *driver, const char *alg, u32 type, u32 mask)
{
	int i;
	int j;
	int rc;

	if (!fips_enabled && notests) {
		printk_once(KERN_INFO "alg: self-tests disabled\n");
		return 0;
	}

	alg_test_descs_check_order();

	if ((type & CRYPTO_ALG_TYPE_MASK) == CRYPTO_ALG_TYPE_CIPHER) {
		char nalg[CRYPTO_MAX_ALG_NAME];

		if (snprintf(nalg, sizeof(nalg), "ecb(%s)", alg) >=
		    sizeof(nalg))
			return -ENAMETOOLONG;

		i = alg_find_test(nalg);
		if (i < 0)
			goto notest;

		if (fips_enabled && !alg_test_descs[i].fips_allowed)
			goto non_fips_alg;

		rc = alg_test_cipher(alg_test_descs + i, driver, type, mask);
		goto test_done;
	}

	i = alg_find_test(alg);
	j = alg_find_test(driver);
	if (i < 0 && j < 0)
		goto notest;

	if (fips_enabled && ((i >= 0 && !alg_test_descs[i].fips_allowed) ||
			     (j >= 0 && !alg_test_descs[j].fips_allowed)))
		goto non_fips_alg;

	rc = 0;
	if (i >= 0)
		rc |= alg_test_descs[i].test(alg_test_descs + i, driver,
					     type, mask);
	if (j >= 0 && j != i)
		rc |= alg_test_descs[j].test(alg_test_descs + j, driver,
					     type, mask);

test_done:
	if (fips_enabled && rc)
		panic("%s: %s alg self test failed in fips mode!\n", driver, alg);

	if (fips_enabled && !rc)
		pr_info("alg: self-tests for %s (%s) passed\n", driver, alg);

	return rc;

notest:
	printk(KERN_INFO "alg: No test for %s (%s)\n", alg, driver);
	return 0;
non_fips_alg:
	return -EINVAL;
}

#endif /* CONFIG_CRYPTO_MANAGER_DISABLE_TESTS */

EXPORT_SYMBOL_GPL(alg_test);
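
/*
 * Usage sketch (illustrative only): the crypto manager is the usual caller;
 * when an algorithm instance is registered it ends up running roughly
 *
 *	err = alg_test(cra_driver_name, cra_name, type, mask);
 *
 * so that both the generic name (e.g. "cbc(aes)") and the implementation
 * name (e.g. something like "cbc-aes-aesni") can be matched against
 * alg_test_descs[] above.
 */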