/*
 * Algorithm testing framework and tests.
 *
 * Copyright (c) 2002 James Morris <jmorris@intercode.com.au>
 * Copyright (c) 2002 Jean-Francois Dive <jef@linuxbe.org>
 * Copyright (c) 2007 Nokia Siemens Networks
 * Copyright (c) 2008 Herbert Xu <herbert@gondor.apana.org.au>
 *
 * Updated RFC4106 AES-GCM testing.
 *    Authors: Aidan O'Mahony (aidan.o.mahony@intel.com)
 *             Adrian Hoban <adrian.hoban@intel.com>
 *             Gabriele Paoloni <gabriele.paoloni@intel.com>
 *             Tadeusz Struk (tadeusz.struk@intel.com)
 *    Copyright (c) 2010, Intel Corporation.
 *
 * This program is free software; you can redistribute it and/or modify it
 * under the terms of the GNU General Public License as published by the Free
 * Software Foundation; either version 2 of the License, or (at your option)
 * any later version.
 *
 */

#include <crypto/aead.h>
#include <crypto/hash.h>
#include <crypto/skcipher.h>
#include <linux/err.h>
#include <linux/fips.h>
#include <linux/module.h>
#include <linux/scatterlist.h>
#include <linux/slab.h>
#include <linux/string.h>
#include <crypto/rng.h>
#include <crypto/drbg.h>
#include <crypto/akcipher.h>
#include <crypto/kpp.h>
#include <crypto/acompress.h>

#include "internal.h"

static bool notests;
module_param(notests, bool, 0644);
MODULE_PARM_DESC(notests, "disable crypto self-tests");

#ifdef CONFIG_CRYPTO_MANAGER_DISABLE_TESTS

/* a perfect nop */
int alg_test(const char *driver, const char *alg, u32 type, u32 mask)
{
	return 0;
}

#else

#include "testmgr.h"

/*
 * Need slab memory for testing (size in number of pages).
 */
#define XBUFSIZE	8

/*
 * Indexes into the xbuf to simulate cross-page access.
 */
#define IDX1		32
#define IDX2		32400
#define IDX3		1511
#define IDX4		8193
#define IDX5		22222
#define IDX6		17101
#define IDX7		27333
#define IDX8		3000

/*
 * Used by test_cipher()
 */
#define ENCRYPT 1
#define DECRYPT 0

struct tcrypt_result {
	struct completion completion;
	int err;
};

struct aead_test_suite {
	struct {
		struct aead_testvec *vecs;
		unsigned int count;
	} enc, dec;
};

struct cipher_test_suite {
	struct {
		struct cipher_testvec *vecs;
		unsigned int count;
	} enc, dec;
};

struct comp_test_suite {
	struct {
		struct comp_testvec *vecs;
		unsigned int count;
	} comp, decomp;
};

struct hash_test_suite {
	struct hash_testvec *vecs;
	unsigned int count;
};

struct cprng_test_suite {
	struct cprng_testvec *vecs;
	unsigned int count;
};

struct drbg_test_suite {
	struct drbg_testvec *vecs;
	unsigned int count;
};

struct akcipher_test_suite {
	struct akcipher_testvec *vecs;
	unsigned int count;
};

struct kpp_test_suite {
	struct kpp_testvec *vecs;
	unsigned int count;
};

struct alg_test_desc {
	const char *alg;
	int (*test)(const struct alg_test_desc *desc, const char *driver,
		    u32 type, u32 mask);
	int fips_allowed;	/* set if alg is allowed in fips mode */

	union {
		struct aead_test_suite aead;
		struct cipher_test_suite cipher;
		struct comp_test_suite comp;
		struct hash_test_suite hash;
		struct cprng_test_suite cprng;
		struct drbg_test_suite drbg;
		struct akcipher_test_suite akcipher;
		struct kpp_test_suite kpp;
	} suite;
};

static unsigned int IDX[8] = { IDX1, IDX2, IDX3, IDX4, IDX5, IDX6, IDX7, IDX8 };

static void hexdump(unsigned char *buf, unsigned int len)
{
	print_hex_dump(KERN_CONT, "", DUMP_PREFIX_OFFSET,
			16, 1,
			buf, len, false);
}

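/*
 * Completion callback for asynchronous requests: record the final status
 * and wake the test thread waiting on the tcrypt_result completion.
 */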
static void tcrypt_complete(struct crypto_async_request *req, int err)
{
	struct tcrypt_result *res = req->data;

	if (err == -EINPROGRESS)
		return;

	res->err = err;
	complete(&res->completion);
}

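/*
 * Allocate the XBUFSIZE scratch pages used to lay test vectors out across
 * page boundaries; testmgr_free_buf() releases them again.
 */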
static int testmgr_alloc_buf(char *buf[XBUFSIZE])
{
	int i;

	for (i = 0; i < XBUFSIZE; i++) {
		buf[i] = (void *)__get_free_page(GFP_KERNEL);
		if (!buf[i])
			goto err_free_buf;
	}

	return 0;

err_free_buf:
	while (i-- > 0)
		free_page((unsigned long)buf[i]);

	return -ENOMEM;
}

static void testmgr_free_buf(char *buf[XBUFSIZE])
{
	int i;

	for (i = 0; i < XBUFSIZE; i++)
		free_page((unsigned long)buf[i]);
}

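/*
 * Wait for an asynchronous operation to complete and return its final
 * status; synchronous return codes are passed through unchanged.
 */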
static int wait_async_op(struct tcrypt_result *tr, int ret)
{
	if (ret == -EINPROGRESS || ret == -EBUSY) {
		wait_for_completion(&tr->completion);
		reinit_completion(&tr->completion);
		ret = tr->err;
	}
	return ret;
}

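/*
 * Exercise the hash export()/import() path: export the request state (with
 * a guard pattern appended to catch overruns), allocate a fresh request,
 * import the saved state and feed it the next chunk of plaintext.
 */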
static int ahash_partial_update(struct ahash_request **preq,
	struct crypto_ahash *tfm, struct hash_testvec *template,
	void *hash_buff, int k, int temp, struct scatterlist *sg,
	const char *algo, char *result, struct tcrypt_result *tresult)
{
	char *state;
	struct ahash_request *req;
	int statesize, ret = -EINVAL;
	const char guard[] = { 0x00, 0xba, 0xad, 0x00 };

	req = *preq;
	statesize = crypto_ahash_statesize(
			crypto_ahash_reqtfm(req));
	state = kmalloc(statesize + sizeof(guard), GFP_KERNEL);
	if (!state) {
		pr_err("alg: hash: Failed to alloc state for %s\n", algo);
		goto out_nostate;
	}
	memcpy(state + statesize, guard, sizeof(guard));
	ret = crypto_ahash_export(req, state);
	WARN_ON(memcmp(state + statesize, guard, sizeof(guard)));
	if (ret) {
		pr_err("alg: hash: Failed to export() for %s\n", algo);
		goto out;
	}
	ahash_request_free(req);
	req = ahash_request_alloc(tfm, GFP_KERNEL);
	if (!req) {
		pr_err("alg: hash: Failed to alloc request for %s\n", algo);
		goto out_noreq;
	}
	ahash_request_set_callback(req,
		CRYPTO_TFM_REQ_MAY_BACKLOG,
		tcrypt_complete, tresult);

	memcpy(hash_buff, template->plaintext + temp,
		template->tap[k]);
	sg_init_one(&sg[0], hash_buff, template->tap[k]);
	ahash_request_set_crypt(req, sg, result, template->tap[k]);
	ret = crypto_ahash_import(req, state);
	if (ret) {
		pr_err("alg: hash: Failed to import() for %s\n", algo);
		goto out;
	}
	ret = wait_async_op(tresult, crypto_ahash_update(req));
	if (ret)
		goto out;
	*preq = req;
	ret = 0;
	goto out_noreq;
out:
	ahash_request_free(req);
out_noreq:
	kfree(state);
out_nostate:
	return ret;
}

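/*
 * Core hash test: run each vector as a linear buffer (via digest() or the
 * init/update/final sequence), then as a scattered "chunking" buffer, and
 * finally through the partial-update (export/import) path.
 */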
static int __test_hash(struct crypto_ahash *tfm, struct hash_testvec *template,
		       unsigned int tcount, bool use_digest,
		       const int align_offset)
{
	const char *algo = crypto_tfm_alg_driver_name(crypto_ahash_tfm(tfm));
	unsigned int i, j, k, temp;
	struct scatterlist sg[8];
	char *result;
	char *key;
	struct ahash_request *req;
	struct tcrypt_result tresult;
	void *hash_buff;
	char *xbuf[XBUFSIZE];
	int ret = -ENOMEM;

	result = kmalloc(MAX_DIGEST_SIZE, GFP_KERNEL);
	if (!result)
		return ret;
	key = kmalloc(MAX_KEYLEN, GFP_KERNEL);
	if (!key)
		goto out_nobuf;
	if (testmgr_alloc_buf(xbuf))
		goto out_nobuf;

	init_completion(&tresult.completion);

	req = ahash_request_alloc(tfm, GFP_KERNEL);
	if (!req) {
		printk(KERN_ERR "alg: hash: Failed to allocate request for "
		       "%s\n", algo);
		goto out_noreq;
	}
	ahash_request_set_callback(req, CRYPTO_TFM_REQ_MAY_BACKLOG,
				   tcrypt_complete, &tresult);

	j = 0;
	for (i = 0; i < tcount; i++) {
		if (template[i].np)
			continue;

		ret = -EINVAL;
		if (WARN_ON(align_offset + template[i].psize > PAGE_SIZE))
			goto out;

		j++;
		memset(result, 0, MAX_DIGEST_SIZE);

		hash_buff = xbuf[0];
		hash_buff += align_offset;

		memcpy(hash_buff, template[i].plaintext, template[i].psize);
		sg_init_one(&sg[0], hash_buff, template[i].psize);

		if (template[i].ksize) {
			crypto_ahash_clear_flags(tfm, ~0);
			if (template[i].ksize > MAX_KEYLEN) {
				pr_err("alg: hash: setkey failed on test %d for %s: key size %d > %d\n",
				       j, algo, template[i].ksize, MAX_KEYLEN);
				ret = -EINVAL;
				goto out;
			}
			memcpy(key, template[i].key, template[i].ksize);
			ret = crypto_ahash_setkey(tfm, key, template[i].ksize);
			if (ret) {
				printk(KERN_ERR "alg: hash: setkey failed on "
				       "test %d for %s: ret=%d\n", j, algo,
				       -ret);
				goto out;
			}
		}

		ahash_request_set_crypt(req, sg, result, template[i].psize);
		if (use_digest) {
			ret = wait_async_op(&tresult, crypto_ahash_digest(req));
			if (ret) {
				pr_err("alg: hash: digest failed on test %d "
				       "for %s: ret=%d\n", j, algo, -ret);
				goto out;
			}
		} else {
			ret = wait_async_op(&tresult, crypto_ahash_init(req));
			if (ret) {
				pr_err("alg: hash: init failed on test %d "
				       "for %s: ret=%d\n", j, algo, -ret);
				goto out;
			}
			ret = wait_async_op(&tresult, crypto_ahash_update(req));
			if (ret) {
				pr_err("alg: hash: update failed on test %d "
				       "for %s: ret=%d\n", j, algo, -ret);
				goto out;
			}
			ret = wait_async_op(&tresult, crypto_ahash_final(req));
			if (ret) {
				pr_err("alg: hash: final failed on test %d "
				       "for %s: ret=%d\n", j, algo, -ret);
				goto out;
			}
		}

		if (memcmp(result, template[i].digest,
			   crypto_ahash_digestsize(tfm))) {
			printk(KERN_ERR "alg: hash: Test %d failed for %s\n",
			       j, algo);
			hexdump(result, crypto_ahash_digestsize(tfm));
			ret = -EINVAL;
			goto out;
		}
	}

	j = 0;
	for (i = 0; i < tcount; i++) {
		/* alignment tests are only done with continuous buffers */
		if (align_offset != 0)
			break;

		if (!template[i].np)
			continue;

		j++;
		memset(result, 0, MAX_DIGEST_SIZE);

		temp = 0;
		sg_init_table(sg, template[i].np);
		ret = -EINVAL;
		for (k = 0; k < template[i].np; k++) {
			if (WARN_ON(offset_in_page(IDX[k]) +
				    template[i].tap[k] > PAGE_SIZE))
				goto out;
			sg_set_buf(&sg[k],
				   memcpy(xbuf[IDX[k] >> PAGE_SHIFT] +
					  offset_in_page(IDX[k]),
					  template[i].plaintext + temp,
					  template[i].tap[k]),
				   template[i].tap[k]);
			temp += template[i].tap[k];
		}

		if (template[i].ksize) {
			if (template[i].ksize > MAX_KEYLEN) {
				pr_err("alg: hash: setkey failed on test %d for %s: key size %d > %d\n",
				       j, algo, template[i].ksize, MAX_KEYLEN);
				ret = -EINVAL;
				goto out;
			}
			crypto_ahash_clear_flags(tfm, ~0);
			memcpy(key, template[i].key, template[i].ksize);
			ret = crypto_ahash_setkey(tfm, key, template[i].ksize);

			if (ret) {
				printk(KERN_ERR "alg: hash: setkey "
				       "failed on chunking test %d "
				       "for %s: ret=%d\n", j, algo, -ret);
				goto out;
			}
		}

		ahash_request_set_crypt(req, sg, result, template[i].psize);
		ret = crypto_ahash_digest(req);
		switch (ret) {
		case 0:
			break;
		case -EINPROGRESS:
		case -EBUSY:
			wait_for_completion(&tresult.completion);
			reinit_completion(&tresult.completion);
			ret = tresult.err;
			if (!ret)
				break;
			/* fall through */
		default:
			printk(KERN_ERR "alg: hash: digest failed "
			       "on chunking test %d for %s: "
			       "ret=%d\n", j, algo, -ret);
			goto out;
		}

		if (memcmp(result, template[i].digest,
			   crypto_ahash_digestsize(tfm))) {
			printk(KERN_ERR "alg: hash: Chunking test %d "
			       "failed for %s\n", j, algo);
			hexdump(result, crypto_ahash_digestsize(tfm));
			ret = -EINVAL;
			goto out;
		}
	}

	/* partial update exercise */
	j = 0;
	for (i = 0; i < tcount; i++) {
		/* alignment tests are only done with continuous buffers */
		if (align_offset != 0)
			break;

		if (template[i].np < 2)
			continue;

		j++;
		memset(result, 0, MAX_DIGEST_SIZE);

		ret = -EINVAL;
		hash_buff = xbuf[0];
		memcpy(hash_buff, template[i].plaintext,
			template[i].tap[0]);
		sg_init_one(&sg[0], hash_buff, template[i].tap[0]);

		if (template[i].ksize) {
			crypto_ahash_clear_flags(tfm, ~0);
			if (template[i].ksize > MAX_KEYLEN) {
				pr_err("alg: hash: setkey failed on test %d for %s: key size %d > %d\n",
					j, algo, template[i].ksize, MAX_KEYLEN);
				ret = -EINVAL;
				goto out;
			}
			memcpy(key, template[i].key, template[i].ksize);
			ret = crypto_ahash_setkey(tfm, key, template[i].ksize);
			if (ret) {
				pr_err("alg: hash: setkey failed on test %d for %s: ret=%d\n",
					j, algo, -ret);
				goto out;
			}
		}

		ahash_request_set_crypt(req, sg, result, template[i].tap[0]);
		ret = wait_async_op(&tresult, crypto_ahash_init(req));
		if (ret) {
			pr_err("alg: hash: init failed on test %d for %s: ret=%d\n",
				j, algo, -ret);
			goto out;
		}
		ret = wait_async_op(&tresult, crypto_ahash_update(req));
		if (ret) {
			pr_err("alg: hash: update failed on test %d for %s: ret=%d\n",
				j, algo, -ret);
			goto out;
		}

		temp = template[i].tap[0];
		for (k = 1; k < template[i].np; k++) {
			ret = ahash_partial_update(&req, tfm, &template[i],
				hash_buff, k, temp, &sg[0], algo, result,
				&tresult);
			if (ret) {
				pr_err("alg: hash: partial update failed on test %d for %s: ret=%d\n",
					j, algo, -ret);
				goto out_noreq;
			}
			temp += template[i].tap[k];
		}
		ret = wait_async_op(&tresult, crypto_ahash_final(req));
		if (ret) {
			pr_err("alg: hash: final failed on test %d for %s: ret=%d\n",
				j, algo, -ret);
			goto out;
		}
		if (memcmp(result, template[i].digest,
			   crypto_ahash_digestsize(tfm))) {
			pr_err("alg: hash: Partial Test %d failed for %s\n",
			       j, algo);
			hexdump(result, crypto_ahash_digestsize(tfm));
			ret = -EINVAL;
			goto out;
		}
	}

	ret = 0;

out:
	ahash_request_free(req);
out_noreq:
	testmgr_free_buf(xbuf);
out_nobuf:
	kfree(key);
	kfree(result);
	return ret;
}

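/*
 * Run __test_hash() with aligned buffers, with a one-byte misalignment and,
 * if the transform declares an alignment mask, just past that mask.
 */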
static int test_hash(struct crypto_ahash *tfm, struct hash_testvec *template,
		     unsigned int tcount, bool use_digest)
{
	unsigned int alignmask;
	int ret;

	ret = __test_hash(tfm, template, tcount, use_digest, 0);
	if (ret)
		return ret;

	/* test unaligned buffers, check with one byte offset */
	ret = __test_hash(tfm, template, tcount, use_digest, 1);
	if (ret)
		return ret;

	alignmask = crypto_tfm_alg_alignmask(&tfm->base);
	if (alignmask) {
		/* Check if alignment mask for tfm is correctly set. */
		ret = __test_hash(tfm, template, tcount, use_digest,
				  alignmask + 1);
		if (ret)
			return ret;
	}

	return 0;
}

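/*
 * Core AEAD test: run each vector with contiguous buffers (optionally with
 * dst != src and misaligned data), then again with the associated data and
 * payload scattered across pages according to the template's tap lists.
 */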
static int __test_aead(struct crypto_aead *tfm, int enc,
		       struct aead_testvec *template, unsigned int tcount,
		       const bool diff_dst, const int align_offset)
{
	const char *algo = crypto_tfm_alg_driver_name(crypto_aead_tfm(tfm));
	unsigned int i, j, k, n, temp;
	int ret = -ENOMEM;
	char *q;
	char *key;
	struct aead_request *req;
	struct scatterlist *sg;
	struct scatterlist *sgout;
	const char *e, *d;
	struct tcrypt_result result;
	unsigned int authsize, iv_len;
	void *input;
	void *output;
	void *assoc;
	char *iv;
	char *xbuf[XBUFSIZE];
	char *xoutbuf[XBUFSIZE];
	char *axbuf[XBUFSIZE];

	iv = kzalloc(MAX_IVLEN, GFP_KERNEL);
	if (!iv)
		return ret;
	key = kmalloc(MAX_KEYLEN, GFP_KERNEL);
	if (!key)
		goto out_noxbuf;
	if (testmgr_alloc_buf(xbuf))
		goto out_noxbuf;
	if (testmgr_alloc_buf(axbuf))
		goto out_noaxbuf;
	if (diff_dst && testmgr_alloc_buf(xoutbuf))
		goto out_nooutbuf;

	/* avoid "the frame size is larger than 1024 bytes" compiler warning */
	sg = kmalloc(sizeof(*sg) * 8 * (diff_dst ? 4 : 2), GFP_KERNEL);
	if (!sg)
		goto out_nosg;
	sgout = &sg[16];

	if (diff_dst)
		d = "-ddst";
	else
		d = "";

	if (enc == ENCRYPT)
		e = "encryption";
	else
		e = "decryption";

	init_completion(&result.completion);

	req = aead_request_alloc(tfm, GFP_KERNEL);
	if (!req) {
		pr_err("alg: aead%s: Failed to allocate request for %s\n",
		       d, algo);
		goto out;
	}

	aead_request_set_callback(req, CRYPTO_TFM_REQ_MAY_BACKLOG,
				  tcrypt_complete, &result);

	iv_len = crypto_aead_ivsize(tfm);

	for (i = 0, j = 0; i < tcount; i++) {
		if (template[i].np)
			continue;

		j++;

		/* some templates have no input data but they will
		 * touch input
		 */
		input = xbuf[0];
		input += align_offset;
		assoc = axbuf[0];

		ret = -EINVAL;
		if (WARN_ON(align_offset + template[i].ilen >
			    PAGE_SIZE || template[i].alen > PAGE_SIZE))
			goto out;

		memcpy(input, template[i].input, template[i].ilen);
		memcpy(assoc, template[i].assoc, template[i].alen);
		if (template[i].iv)
			memcpy(iv, template[i].iv, iv_len);
		else
			memset(iv, 0, iv_len);

		crypto_aead_clear_flags(tfm, ~0);
		if (template[i].wk)
			crypto_aead_set_flags(tfm, CRYPTO_TFM_REQ_WEAK_KEY);

		if (template[i].klen > MAX_KEYLEN) {
			pr_err("alg: aead%s: setkey failed on test %d for %s: key size %d > %d\n",
			       d, j, algo, template[i].klen,
			       MAX_KEYLEN);
			ret = -EINVAL;
			goto out;
		}
		memcpy(key, template[i].key, template[i].klen);

		ret = crypto_aead_setkey(tfm, key, template[i].klen);
		if (template[i].fail == !ret) {
			pr_err("alg: aead%s: setkey failed on test %d for %s: flags=%x\n",
			       d, j, algo, crypto_aead_get_flags(tfm));
			goto out;
		} else if (ret)
			continue;

		authsize = abs(template[i].rlen - template[i].ilen);
		ret = crypto_aead_setauthsize(tfm, authsize);
		if (ret) {
			pr_err("alg: aead%s: Failed to set authsize to %u on test %d for %s\n",
			       d, authsize, j, algo);
			goto out;
		}

		k = !!template[i].alen;
		sg_init_table(sg, k + 1);
		sg_set_buf(&sg[0], assoc, template[i].alen);
		sg_set_buf(&sg[k], input,
			   template[i].ilen + (enc ? authsize : 0));
		output = input;

		if (diff_dst) {
			sg_init_table(sgout, k + 1);
			sg_set_buf(&sgout[0], assoc, template[i].alen);

			output = xoutbuf[0];
			output += align_offset;
			sg_set_buf(&sgout[k], output,
				   template[i].rlen + (enc ? 0 : authsize));
		}

		aead_request_set_crypt(req, sg, (diff_dst) ? sgout : sg,
				       template[i].ilen, iv);

		aead_request_set_ad(req, template[i].alen);

		ret = enc ? crypto_aead_encrypt(req) : crypto_aead_decrypt(req);

		switch (ret) {
		case 0:
			if (template[i].novrfy) {
				/* verification was supposed to fail */
				pr_err("alg: aead%s: %s failed on test %d for %s: ret was 0, expected -EBADMSG\n",
				       d, e, j, algo);
				/* so really, we got a bad message */
				ret = -EBADMSG;
				goto out;
			}
			break;
		case -EINPROGRESS:
		case -EBUSY:
			wait_for_completion(&result.completion);
			reinit_completion(&result.completion);
			ret = result.err;
			if (!ret)
				break;
		case -EBADMSG:
			if (template[i].novrfy)
				/* verification failure was expected */
				continue;
			/* fall through */
		default:
			pr_err("alg: aead%s: %s failed on test %d for %s: ret=%d\n",
			       d, e, j, algo, -ret);
			goto out;
		}

		q = output;
		if (memcmp(q, template[i].result, template[i].rlen)) {
			pr_err("alg: aead%s: Test %d failed on %s for %s\n",
			       d, j, e, algo);
			hexdump(q, template[i].rlen);
			ret = -EINVAL;
			goto out;
		}
	}

	for (i = 0, j = 0; i < tcount; i++) {
		/* alignment tests are only done with continuous buffers */
		if (align_offset != 0)
			break;

		if (!template[i].np)
			continue;

		j++;

		if (template[i].iv)
			memcpy(iv, template[i].iv, iv_len);
		else
			memset(iv, 0, MAX_IVLEN);

		crypto_aead_clear_flags(tfm, ~0);
		if (template[i].wk)
			crypto_aead_set_flags(tfm, CRYPTO_TFM_REQ_WEAK_KEY);
		if (template[i].klen > MAX_KEYLEN) {
			pr_err("alg: aead%s: setkey failed on test %d for %s: key size %d > %d\n",
			       d, j, algo, template[i].klen, MAX_KEYLEN);
			ret = -EINVAL;
			goto out;
		}
		memcpy(key, template[i].key, template[i].klen);

		ret = crypto_aead_setkey(tfm, key, template[i].klen);
		if (template[i].fail == !ret) {
			pr_err("alg: aead%s: setkey failed on chunk test %d for %s: flags=%x\n",
			       d, j, algo, crypto_aead_get_flags(tfm));
			goto out;
		} else if (ret)
			continue;

		authsize = abs(template[i].rlen - template[i].ilen);

		ret = -EINVAL;
		sg_init_table(sg, template[i].anp + template[i].np);
		if (diff_dst)
			sg_init_table(sgout, template[i].anp + template[i].np);

		ret = -EINVAL;
		for (k = 0, temp = 0; k < template[i].anp; k++) {
			if (WARN_ON(offset_in_page(IDX[k]) +
				    template[i].atap[k] > PAGE_SIZE))
				goto out;
			sg_set_buf(&sg[k],
				   memcpy(axbuf[IDX[k] >> PAGE_SHIFT] +
					  offset_in_page(IDX[k]),
					  template[i].assoc + temp,
					  template[i].atap[k]),
				   template[i].atap[k]);
			if (diff_dst)
				sg_set_buf(&sgout[k],
					   axbuf[IDX[k] >> PAGE_SHIFT] +
					   offset_in_page(IDX[k]),
					   template[i].atap[k]);
			temp += template[i].atap[k];
		}

		for (k = 0, temp = 0; k < template[i].np; k++) {
			if (WARN_ON(offset_in_page(IDX[k]) +
				    template[i].tap[k] > PAGE_SIZE))
				goto out;

			q = xbuf[IDX[k] >> PAGE_SHIFT] + offset_in_page(IDX[k]);
			memcpy(q, template[i].input + temp, template[i].tap[k]);
			sg_set_buf(&sg[template[i].anp + k],
				   q, template[i].tap[k]);

			if (diff_dst) {
				q = xoutbuf[IDX[k] >> PAGE_SHIFT] +
				    offset_in_page(IDX[k]);

				memset(q, 0, template[i].tap[k]);

				sg_set_buf(&sgout[template[i].anp + k],
					   q, template[i].tap[k]);
			}

			n = template[i].tap[k];
			if (k == template[i].np - 1 && enc)
				n += authsize;
			if (offset_in_page(q) + n < PAGE_SIZE)
				q[n] = 0;

			temp += template[i].tap[k];
		}

		ret = crypto_aead_setauthsize(tfm, authsize);
		if (ret) {
			pr_err("alg: aead%s: Failed to set authsize to %u on chunk test %d for %s\n",
			       d, authsize, j, algo);
			goto out;
		}

		if (enc) {
			if (WARN_ON(sg[template[i].anp + k - 1].offset +
				    sg[template[i].anp + k - 1].length +
				    authsize > PAGE_SIZE)) {
				ret = -EINVAL;
				goto out;
			}

			if (diff_dst)
				sgout[template[i].anp + k - 1].length +=
					authsize;
			sg[template[i].anp + k - 1].length += authsize;
		}

		aead_request_set_crypt(req, sg, (diff_dst) ? sgout : sg,
				       template[i].ilen,
				       iv);

		aead_request_set_ad(req, template[i].alen);

		ret = enc ? crypto_aead_encrypt(req) : crypto_aead_decrypt(req);

		switch (ret) {
		case 0:
			if (template[i].novrfy) {
				/* verification was supposed to fail */
				pr_err("alg: aead%s: %s failed on chunk test %d for %s: ret was 0, expected -EBADMSG\n",
				       d, e, j, algo);
				/* so really, we got a bad message */
				ret = -EBADMSG;
				goto out;
			}
			break;
		case -EINPROGRESS:
		case -EBUSY:
			wait_for_completion(&result.completion);
			reinit_completion(&result.completion);
			ret = result.err;
			if (!ret)
				break;
		case -EBADMSG:
			if (template[i].novrfy)
				/* verification failure was expected */
				continue;
			/* fall through */
		default:
			pr_err("alg: aead%s: %s failed on chunk test %d for %s: ret=%d\n",
			       d, e, j, algo, -ret);
			goto out;
		}

		ret = -EINVAL;
		for (k = 0, temp = 0; k < template[i].np; k++) {
			if (diff_dst)
				q = xoutbuf[IDX[k] >> PAGE_SHIFT] +
				    offset_in_page(IDX[k]);
			else
				q = xbuf[IDX[k] >> PAGE_SHIFT] +
				    offset_in_page(IDX[k]);

			n = template[i].tap[k];
			if (k == template[i].np - 1)
				n += enc ? authsize : -authsize;

			if (memcmp(q, template[i].result + temp, n)) {
				pr_err("alg: aead%s: Chunk test %d failed on %s at page %u for %s\n",
				       d, j, e, k, algo);
				hexdump(q, n);
				goto out;
			}

			q += n;
			if (k == template[i].np - 1 && !enc) {
				if (!diff_dst &&
					memcmp(q, template[i].input +
					      temp + n, authsize))
					n = authsize;
				else
					n = 0;
			} else {
				for (n = 0; offset_in_page(q + n) && q[n]; n++)
					;
			}
			if (n) {
				pr_err("alg: aead%s: Result buffer corruption in chunk test %d on %s at page %u for %s: %u bytes:\n",
				       d, j, e, k, algo, n);
				hexdump(q, n);
				goto out;
			}

			temp += template[i].tap[k];
		}
	}

	ret = 0;

out:
	aead_request_free(req);
	kfree(sg);
out_nosg:
	if (diff_dst)
		testmgr_free_buf(xoutbuf);
out_nooutbuf:
	testmgr_free_buf(axbuf);
out_noaxbuf:
	testmgr_free_buf(xbuf);
out_noxbuf:
	kfree(key);
	kfree(iv);
	return ret;
}

static int test_aead(struct crypto_aead *tfm, int enc,
		     struct aead_testvec *template, unsigned int tcount)
{
	unsigned int alignmask;
	int ret;

	/* test 'dst == src' case */
	ret = __test_aead(tfm, enc, template, tcount, false, 0);
	if (ret)
		return ret;

	/* test 'dst != src' case */
	ret = __test_aead(tfm, enc, template, tcount, true, 0);
	if (ret)
		return ret;

	/* test unaligned buffers, check with one byte offset */
	ret = __test_aead(tfm, enc, template, tcount, true, 1);
	if (ret)
		return ret;

	alignmask = crypto_tfm_alg_alignmask(&tfm->base);
	if (alignmask) {
		/* Check if alignment mask for tfm is correctly set. */
		ret = __test_aead(tfm, enc, template, tcount, true,
				  alignmask + 1);
		if (ret)
			return ret;
	}

	return 0;
}

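/*
 * Simple single-block cipher test: encrypt or decrypt each vector in place,
 * one block at a time, and compare against the expected result.
 */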
static int test_cipher(struct crypto_cipher *tfm, int enc,
		       struct cipher_testvec *template, unsigned int tcount)
{
	const char *algo = crypto_tfm_alg_driver_name(crypto_cipher_tfm(tfm));
	unsigned int i, j, k;
	char *q;
	const char *e;
	void *data;
	char *xbuf[XBUFSIZE];
	int ret = -ENOMEM;

	if (testmgr_alloc_buf(xbuf))
		goto out_nobuf;

	if (enc == ENCRYPT)
		e = "encryption";
	else
		e = "decryption";

	j = 0;
	for (i = 0; i < tcount; i++) {
		if (template[i].np)
			continue;

		if (fips_enabled && template[i].fips_skip)
			continue;

		j++;

		ret = -EINVAL;
		if (WARN_ON(template[i].ilen > PAGE_SIZE))
			goto out;

		data = xbuf[0];
		memcpy(data, template[i].input, template[i].ilen);

		crypto_cipher_clear_flags(tfm, ~0);
		if (template[i].wk)
			crypto_cipher_set_flags(tfm, CRYPTO_TFM_REQ_WEAK_KEY);

		ret = crypto_cipher_setkey(tfm, template[i].key,
					   template[i].klen);
		if (template[i].fail == !ret) {
			printk(KERN_ERR "alg: cipher: setkey failed "
			       "on test %d for %s: flags=%x\n", j,
			       algo, crypto_cipher_get_flags(tfm));
			goto out;
		} else if (ret)
			continue;

		for (k = 0; k < template[i].ilen;
		     k += crypto_cipher_blocksize(tfm)) {
			if (enc)
				crypto_cipher_encrypt_one(tfm, data + k,
							  data + k);
			else
				crypto_cipher_decrypt_one(tfm, data + k,
							  data + k);
		}

		q = data;
		if (memcmp(q, template[i].result, template[i].rlen)) {
			printk(KERN_ERR "alg: cipher: Test %d failed "
			       "on %s for %s\n", j, e, algo);
			hexdump(q, template[i].rlen);
			ret = -EINVAL;
			goto out;
		}
	}

	ret = 0;

out:
	testmgr_free_buf(xbuf);
out_nobuf:
	return ret;
}

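/*
 * Core skcipher test: run each vector as a contiguous buffer (checking the
 * result and, where provided, the output IV), then as a scatterlist split
 * across pages according to the template's tap list.
 */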
static int __test_skcipher(struct crypto_skcipher *tfm, int enc,
			   struct cipher_testvec *template, unsigned int tcount,
			   const bool diff_dst, const int align_offset)
{
	const char *algo =
		crypto_tfm_alg_driver_name(crypto_skcipher_tfm(tfm));
	unsigned int i, j, k, n, temp;
	char *q;
	struct skcipher_request *req;
	struct scatterlist sg[8];
	struct scatterlist sgout[8];
	const char *e, *d;
	struct tcrypt_result result;
	void *data;
	char iv[MAX_IVLEN];
	char *xbuf[XBUFSIZE];
	char *xoutbuf[XBUFSIZE];
	int ret = -ENOMEM;
	unsigned int ivsize = crypto_skcipher_ivsize(tfm);

	if (testmgr_alloc_buf(xbuf))
		goto out_nobuf;

	if (diff_dst && testmgr_alloc_buf(xoutbuf))
		goto out_nooutbuf;

	if (diff_dst)
		d = "-ddst";
	else
		d = "";

	if (enc == ENCRYPT)
		e = "encryption";
	else
		e = "decryption";

	init_completion(&result.completion);

	req = skcipher_request_alloc(tfm, GFP_KERNEL);
	if (!req) {
		pr_err("alg: skcipher%s: Failed to allocate request for %s\n",
		       d, algo);
		goto out;
	}

	skcipher_request_set_callback(req, CRYPTO_TFM_REQ_MAY_BACKLOG,
				      tcrypt_complete, &result);

	j = 0;
	for (i = 0; i < tcount; i++) {
		if (template[i].np && !template[i].also_non_np)
			continue;

		if (fips_enabled && template[i].fips_skip)
			continue;

		if (template[i].iv)
			memcpy(iv, template[i].iv, ivsize);
		else
			memset(iv, 0, MAX_IVLEN);

		j++;
		ret = -EINVAL;
		if (WARN_ON(align_offset + template[i].ilen > PAGE_SIZE))
			goto out;

		data = xbuf[0];
		data += align_offset;
		memcpy(data, template[i].input, template[i].ilen);

		crypto_skcipher_clear_flags(tfm, ~0);
		if (template[i].wk)
			crypto_skcipher_set_flags(tfm,
						  CRYPTO_TFM_REQ_WEAK_KEY);

		ret = crypto_skcipher_setkey(tfm, template[i].key,
					     template[i].klen);
		if (template[i].fail == !ret) {
			pr_err("alg: skcipher%s: setkey failed on test %d for %s: flags=%x\n",
			       d, j, algo, crypto_skcipher_get_flags(tfm));
			goto out;
		} else if (ret)
			continue;

		sg_init_one(&sg[0], data, template[i].ilen);
		if (diff_dst) {
			data = xoutbuf[0];
			data += align_offset;
			sg_init_one(&sgout[0], data, template[i].ilen);
		}

		skcipher_request_set_crypt(req, sg, (diff_dst) ? sgout : sg,
					   template[i].ilen, iv);
		ret = enc ? crypto_skcipher_encrypt(req) :
			    crypto_skcipher_decrypt(req);

		switch (ret) {
		case 0:
			break;
		case -EINPROGRESS:
		case -EBUSY:
			wait_for_completion(&result.completion);
			reinit_completion(&result.completion);
			ret = result.err;
			if (!ret)
				break;
			/* fall through */
		default:
			pr_err("alg: skcipher%s: %s failed on test %d for %s: ret=%d\n",
			       d, e, j, algo, -ret);
			goto out;
		}

		q = data;
		if (memcmp(q, template[i].result, template[i].rlen)) {
			pr_err("alg: skcipher%s: Test %d failed (invalid result) on %s for %s\n",
			       d, j, e, algo);
			hexdump(q, template[i].rlen);
			ret = -EINVAL;
			goto out;
		}

		if (template[i].iv_out &&
		    memcmp(iv, template[i].iv_out,
			   crypto_skcipher_ivsize(tfm))) {
			pr_err("alg: skcipher%s: Test %d failed (invalid output IV) on %s for %s\n",
			       d, j, e, algo);
			hexdump(iv, crypto_skcipher_ivsize(tfm));
			ret = -EINVAL;
			goto out;
		}
	}

	j = 0;
	for (i = 0; i < tcount; i++) {
		/* alignment tests are only done with continuous buffers */
		if (align_offset != 0)
			break;

		if (!template[i].np)
			continue;

		if (fips_enabled && template[i].fips_skip)
			continue;

		if (template[i].iv)
			memcpy(iv, template[i].iv, ivsize);
		else
			memset(iv, 0, MAX_IVLEN);

		j++;
		crypto_skcipher_clear_flags(tfm, ~0);
		if (template[i].wk)
			crypto_skcipher_set_flags(tfm,
						  CRYPTO_TFM_REQ_WEAK_KEY);

		ret = crypto_skcipher_setkey(tfm, template[i].key,
					     template[i].klen);
		if (template[i].fail == !ret) {
			pr_err("alg: skcipher%s: setkey failed on chunk test %d for %s: flags=%x\n",
			       d, j, algo, crypto_skcipher_get_flags(tfm));
			goto out;
		} else if (ret)
			continue;

		temp = 0;
		ret = -EINVAL;
		sg_init_table(sg, template[i].np);
		if (diff_dst)
			sg_init_table(sgout, template[i].np);
		for (k = 0; k < template[i].np; k++) {
			if (WARN_ON(offset_in_page(IDX[k]) +
				    template[i].tap[k] > PAGE_SIZE))
				goto out;

			q = xbuf[IDX[k] >> PAGE_SHIFT] + offset_in_page(IDX[k]);

			memcpy(q, template[i].input + temp, template[i].tap[k]);

			if (offset_in_page(q) + template[i].tap[k] < PAGE_SIZE)
				q[template[i].tap[k]] = 0;

			sg_set_buf(&sg[k], q, template[i].tap[k]);
			if (diff_dst) {
				q = xoutbuf[IDX[k] >> PAGE_SHIFT] +
				    offset_in_page(IDX[k]);

				sg_set_buf(&sgout[k], q, template[i].tap[k]);

				memset(q, 0, template[i].tap[k]);
				if (offset_in_page(q) +
				    template[i].tap[k] < PAGE_SIZE)
					q[template[i].tap[k]] = 0;
			}

			temp += template[i].tap[k];
		}

		skcipher_request_set_crypt(req, sg, (diff_dst) ? sgout : sg,
					   template[i].ilen, iv);

		ret = enc ? crypto_skcipher_encrypt(req) :
			    crypto_skcipher_decrypt(req);

		switch (ret) {
		case 0:
			break;
		case -EINPROGRESS:
		case -EBUSY:
			wait_for_completion(&result.completion);
			reinit_completion(&result.completion);
			ret = result.err;
			if (!ret)
				break;
			/* fall through */
		default:
			pr_err("alg: skcipher%s: %s failed on chunk test %d for %s: ret=%d\n",
			       d, e, j, algo, -ret);
			goto out;
		}

		temp = 0;
		ret = -EINVAL;
		for (k = 0; k < template[i].np; k++) {
			if (diff_dst)
				q = xoutbuf[IDX[k] >> PAGE_SHIFT] +
				    offset_in_page(IDX[k]);
			else
				q = xbuf[IDX[k] >> PAGE_SHIFT] +
				    offset_in_page(IDX[k]);

			if (memcmp(q, template[i].result + temp,
				   template[i].tap[k])) {
				pr_err("alg: skcipher%s: Chunk test %d failed on %s at page %u for %s\n",
				       d, j, e, k, algo);
				hexdump(q, template[i].tap[k]);
				goto out;
			}

			q += template[i].tap[k];
			for (n = 0; offset_in_page(q + n) && q[n]; n++)
				;
			if (n) {
				pr_err("alg: skcipher%s: Result buffer corruption in chunk test %d on %s at page %u for %s: %u bytes:\n",
				       d, j, e, k, algo, n);
				hexdump(q, n);
				goto out;
			}
			temp += template[i].tap[k];
		}
	}

	ret = 0;

out:
	skcipher_request_free(req);
	if (diff_dst)
		testmgr_free_buf(xoutbuf);
out_nooutbuf:
	testmgr_free_buf(xbuf);
out_nobuf:
	return ret;
}

static int test_skcipher(struct crypto_skcipher *tfm, int enc,
			 struct cipher_testvec *template, unsigned int tcount)
{
	unsigned int alignmask;
	int ret;

	/* test 'dst == src' case */
	ret = __test_skcipher(tfm, enc, template, tcount, false, 0);
	if (ret)
		return ret;

	/* test 'dst != src' case */
	ret = __test_skcipher(tfm, enc, template, tcount, true, 0);
	if (ret)
		return ret;

	/* test unaligned buffers, check with one byte offset */
	ret = __test_skcipher(tfm, enc, template, tcount, true, 1);
	if (ret)
		return ret;

	alignmask = crypto_tfm_alg_alignmask(&tfm->base);
	if (alignmask) {
		/* Check if alignment mask for tfm is correctly set. */
		ret = __test_skcipher(tfm, enc, template, tcount, true,
				      alignmask + 1);
		if (ret)
			return ret;
	}

	return 0;
}

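/*
 * Synchronous compression test: compress and decompress each vector and
 * check both the output length and the output data.
 */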
static int test_comp(struct crypto_comp *tfm, struct comp_testvec *ctemplate,
		     struct comp_testvec *dtemplate, int ctcount, int dtcount)
{
	const char *algo = crypto_tfm_alg_driver_name(crypto_comp_tfm(tfm));
	unsigned int i;
	char result[COMP_BUF_SIZE];
	int ret;

	for (i = 0; i < ctcount; i++) {
		int ilen;
		unsigned int dlen = COMP_BUF_SIZE;

		memset(result, 0, sizeof (result));

		ilen = ctemplate[i].inlen;
		ret = crypto_comp_compress(tfm, ctemplate[i].input,
					   ilen, result, &dlen);
		if (ret) {
			printk(KERN_ERR "alg: comp: compression failed "
			       "on test %d for %s: ret=%d\n", i + 1, algo,
			       -ret);
			goto out;
		}

		if (dlen != ctemplate[i].outlen) {
			printk(KERN_ERR "alg: comp: Compression test %d "
			       "failed for %s: output len = %d\n", i + 1, algo,
			       dlen);
			ret = -EINVAL;
			goto out;
		}

		if (memcmp(result, ctemplate[i].output, dlen)) {
			printk(KERN_ERR "alg: comp: Compression test %d "
			       "failed for %s\n", i + 1, algo);
			hexdump(result, dlen);
			ret = -EINVAL;
			goto out;
		}
	}

	for (i = 0; i < dtcount; i++) {
		int ilen;
		unsigned int dlen = COMP_BUF_SIZE;

		memset(result, 0, sizeof (result));

		ilen = dtemplate[i].inlen;
		ret = crypto_comp_decompress(tfm, dtemplate[i].input,
					     ilen, result, &dlen);
		if (ret) {
			printk(KERN_ERR "alg: comp: decompression failed "
			       "on test %d for %s: ret=%d\n", i + 1, algo,
			       -ret);
			goto out;
		}

		if (dlen != dtemplate[i].outlen) {
			printk(KERN_ERR "alg: comp: Decompression test %d "
			       "failed for %s: output len = %d\n", i + 1, algo,
			       dlen);
			ret = -EINVAL;
			goto out;
		}

		if (memcmp(result, dtemplate[i].output, dlen)) {
			printk(KERN_ERR "alg: comp: Decompression test %d "
			       "failed for %s\n", i + 1, algo);
			hexdump(result, dlen);
			ret = -EINVAL;
			goto out;
		}
	}

	ret = 0;

out:
	return ret;
}

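/*
 * Asynchronous (acomp) compression test: the same length and data checks as
 * test_comp(), but driven through scatterlists and acomp requests.
 */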
Giovanni Cabiddud7db7a82016-10-21 13:19:54 +01001446static int test_acomp(struct crypto_acomp *tfm, struct comp_testvec *ctemplate,
1447 struct comp_testvec *dtemplate, int ctcount, int dtcount)
1448{
1449 const char *algo = crypto_tfm_alg_driver_name(crypto_acomp_tfm(tfm));
1450 unsigned int i;
Eric Biggerseb095592016-11-23 10:24:35 -08001451 char *output;
Giovanni Cabiddud7db7a82016-10-21 13:19:54 +01001452 int ret;
1453 struct scatterlist src, dst;
1454 struct acomp_req *req;
1455 struct tcrypt_result result;
1456
Eric Biggerseb095592016-11-23 10:24:35 -08001457 output = kmalloc(COMP_BUF_SIZE, GFP_KERNEL);
1458 if (!output)
1459 return -ENOMEM;
1460
Giovanni Cabiddud7db7a82016-10-21 13:19:54 +01001461 for (i = 0; i < ctcount; i++) {
1462 unsigned int dlen = COMP_BUF_SIZE;
1463 int ilen = ctemplate[i].inlen;
Laura Abbott02608e02016-12-21 12:32:54 -08001464 void *input_vec;
Giovanni Cabiddud7db7a82016-10-21 13:19:54 +01001465
Eric Biggersd2110222016-12-30 14:12:00 -06001466 input_vec = kmemdup(ctemplate[i].input, ilen, GFP_KERNEL);
Laura Abbott02608e02016-12-21 12:32:54 -08001467 if (!input_vec) {
1468 ret = -ENOMEM;
1469 goto out;
1470 }
1471
Eric Biggerseb095592016-11-23 10:24:35 -08001472 memset(output, 0, dlen);
Giovanni Cabiddud7db7a82016-10-21 13:19:54 +01001473 init_completion(&result.completion);
Laura Abbott02608e02016-12-21 12:32:54 -08001474 sg_init_one(&src, input_vec, ilen);
Giovanni Cabiddud7db7a82016-10-21 13:19:54 +01001475 sg_init_one(&dst, output, dlen);
1476
1477 req = acomp_request_alloc(tfm);
1478 if (!req) {
1479 pr_err("alg: acomp: request alloc failed for %s\n",
1480 algo);
Laura Abbott02608e02016-12-21 12:32:54 -08001481 kfree(input_vec);
Giovanni Cabiddud7db7a82016-10-21 13:19:54 +01001482 ret = -ENOMEM;
1483 goto out;
1484 }
1485
1486 acomp_request_set_params(req, &src, &dst, ilen, dlen);
1487 acomp_request_set_callback(req, CRYPTO_TFM_REQ_MAY_BACKLOG,
1488 tcrypt_complete, &result);
1489
1490 ret = wait_async_op(&result, crypto_acomp_compress(req));
1491 if (ret) {
1492 pr_err("alg: acomp: compression failed on test %d for %s: ret=%d\n",
1493 i + 1, algo, -ret);
Laura Abbott02608e02016-12-21 12:32:54 -08001494 kfree(input_vec);
Giovanni Cabiddud7db7a82016-10-21 13:19:54 +01001495 acomp_request_free(req);
1496 goto out;
1497 }
1498
1499 if (req->dlen != ctemplate[i].outlen) {
1500 pr_err("alg: acomp: Compression test %d failed for %s: output len = %d\n",
1501 i + 1, algo, req->dlen);
1502 ret = -EINVAL;
Laura Abbott02608e02016-12-21 12:32:54 -08001503 kfree(input_vec);
Giovanni Cabiddud7db7a82016-10-21 13:19:54 +01001504 acomp_request_free(req);
1505 goto out;
1506 }
1507
1508 if (memcmp(output, ctemplate[i].output, req->dlen)) {
1509 pr_err("alg: acomp: Compression test %d failed for %s\n",
1510 i + 1, algo);
1511 hexdump(output, req->dlen);
1512 ret = -EINVAL;
Laura Abbott02608e02016-12-21 12:32:54 -08001513 kfree(input_vec);
Giovanni Cabiddud7db7a82016-10-21 13:19:54 +01001514 acomp_request_free(req);
1515 goto out;
1516 }
1517
Laura Abbott02608e02016-12-21 12:32:54 -08001518 kfree(input_vec);
Giovanni Cabiddud7db7a82016-10-21 13:19:54 +01001519 acomp_request_free(req);
1520 }
1521
1522 for (i = 0; i < dtcount; i++) {
1523 unsigned int dlen = COMP_BUF_SIZE;
1524 int ilen = dtemplate[i].inlen;
Laura Abbott02608e02016-12-21 12:32:54 -08001525 void *input_vec;
Giovanni Cabiddud7db7a82016-10-21 13:19:54 +01001526
Eric Biggersd2110222016-12-30 14:12:00 -06001527 input_vec = kmemdup(dtemplate[i].input, ilen, GFP_KERNEL);
Laura Abbott02608e02016-12-21 12:32:54 -08001528 if (!input_vec) {
1529 ret = -ENOMEM;
1530 goto out;
1531 }
1532
Eric Biggerseb095592016-11-23 10:24:35 -08001533 memset(output, 0, dlen);
Giovanni Cabiddud7db7a82016-10-21 13:19:54 +01001534 init_completion(&result.completion);
Laura Abbott02608e02016-12-21 12:32:54 -08001535 sg_init_one(&src, input_vec, ilen);
Giovanni Cabiddud7db7a82016-10-21 13:19:54 +01001536 sg_init_one(&dst, output, dlen);
1537
1538 req = acomp_request_alloc(tfm);
1539 if (!req) {
1540 pr_err("alg: acomp: request alloc failed for %s\n",
1541 algo);
Laura Abbott02608e02016-12-21 12:32:54 -08001542 kfree(input_vec);
Giovanni Cabiddud7db7a82016-10-21 13:19:54 +01001543 ret = -ENOMEM;
1544 goto out;
1545 }
1546
1547 acomp_request_set_params(req, &src, &dst, ilen, dlen);
1548 acomp_request_set_callback(req, CRYPTO_TFM_REQ_MAY_BACKLOG,
1549 tcrypt_complete, &result);
1550
1551 ret = wait_async_op(&result, crypto_acomp_decompress(req));
1552 if (ret) {
1553 pr_err("alg: acomp: decompression failed on test %d for %s: ret=%d\n",
1554 i + 1, algo, -ret);
Laura Abbott02608e02016-12-21 12:32:54 -08001555 kfree(input_vec);
Giovanni Cabiddud7db7a82016-10-21 13:19:54 +01001556 acomp_request_free(req);
1557 goto out;
1558 }
1559
1560 if (req->dlen != dtemplate[i].outlen) {
1561 pr_err("alg: acomp: Decompression test %d failed for %s: output len = %d\n",
1562 i + 1, algo, req->dlen);
1563 ret = -EINVAL;
Laura Abbott02608e02016-12-21 12:32:54 -08001564 kfree(input_vec);
Giovanni Cabiddud7db7a82016-10-21 13:19:54 +01001565 acomp_request_free(req);
1566 goto out;
1567 }
1568
1569 if (memcmp(output, dtemplate[i].output, req->dlen)) {
1570 pr_err("alg: acomp: Decompression test %d failed for %s\n",
1571 i + 1, algo);
1572 hexdump(output, req->dlen);
1573 ret = -EINVAL;
Laura Abbott02608e02016-12-21 12:32:54 -08001574 kfree(input_vec);
Giovanni Cabiddud7db7a82016-10-21 13:19:54 +01001575 acomp_request_free(req);
1576 goto out;
1577 }
1578
Laura Abbott02608e02016-12-21 12:32:54 -08001579 kfree(input_vec);
Giovanni Cabiddud7db7a82016-10-21 13:19:54 +01001580 acomp_request_free(req);
1581 }
1582
1583 ret = 0;
1584
1585out:
Eric Biggerseb095592016-11-23 10:24:35 -08001586 kfree(output);
Giovanni Cabiddud7db7a82016-10-21 13:19:54 +01001587 return ret;
1588}
1589
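/*
 * Known-answer test for the ANSI X9.31 CPRNG: each vector reseeds the RNG
 * with the concatenation V || key || DT from the template, requests rlen
 * bytes of output (template->loops times), and compares the last block
 * against the expected result.
 */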
Jarod Wilson7647d6c2009-05-04 19:44:50 +08001590static int test_cprng(struct crypto_rng *tfm, struct cprng_testvec *template,
1591 unsigned int tcount)
1592{
1593 const char *algo = crypto_tfm_alg_driver_name(crypto_rng_tfm(tfm));
Felipe Contrerasfa4ef8a2009-10-27 19:04:42 +08001594 int err = 0, i, j, seedsize;
Jarod Wilson7647d6c2009-05-04 19:44:50 +08001595 u8 *seed;
1596 char result[32];
1597
1598 seedsize = crypto_rng_seedsize(tfm);
1599
1600 seed = kmalloc(seedsize, GFP_KERNEL);
1601 if (!seed) {
1602 printk(KERN_ERR "alg: cprng: Failed to allocate seed space "
1603 "for %s\n", algo);
1604 return -ENOMEM;
1605 }
1606
1607 for (i = 0; i < tcount; i++) {
1608 memset(result, 0, 32);
1609
1610 memcpy(seed, template[i].v, template[i].vlen);
1611 memcpy(seed + template[i].vlen, template[i].key,
1612 template[i].klen);
1613 memcpy(seed + template[i].vlen + template[i].klen,
1614 template[i].dt, template[i].dtlen);
1615
1616 err = crypto_rng_reset(tfm, seed, seedsize);
1617 if (err) {
1618 printk(KERN_ERR "alg: cprng: Failed to reset rng "
1619 "for %s\n", algo);
1620 goto out;
1621 }
1622
1623 for (j = 0; j < template[i].loops; j++) {
1624 err = crypto_rng_get_bytes(tfm, result,
1625 template[i].rlen);
Stephan Mueller19e60e12015-03-10 17:00:36 +01001626 if (err < 0) {
Jarod Wilson7647d6c2009-05-04 19:44:50 +08001627 printk(KERN_ERR "alg: cprng: Failed to obtain "
1628 "the correct amount of random data for "
Stephan Mueller19e60e12015-03-10 17:00:36 +01001629 "%s (requested %d)\n", algo,
1630 template[i].rlen);
Jarod Wilson7647d6c2009-05-04 19:44:50 +08001631 goto out;
1632 }
1633 }
1634
1635 err = memcmp(result, template[i].result,
1636 template[i].rlen);
1637 if (err) {
1638 printk(KERN_ERR "alg: cprng: Test %d failed for %s\n",
1639 i, algo);
1640 hexdump(result, template[i].rlen);
1641 err = -EINVAL;
1642 goto out;
1643 }
1644 }
1645
1646out:
1647 kfree(seed);
1648 return err;
1649}
1650
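/*
 * The alg_test_*() wrappers below share a common pattern: allocate the
 * transform for the driver under test, run the encryption (and, where
 * provided, decryption) vectors from its test suite, then free the
 * transform again.
 */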
Herbert Xuda7f0332008-07-31 17:08:25 +08001651static int alg_test_aead(const struct alg_test_desc *desc, const char *driver,
1652 u32 type, u32 mask)
1653{
1654 struct crypto_aead *tfm;
1655 int err = 0;
1656
Herbert Xueed93e02016-11-22 20:08:31 +08001657 tfm = crypto_alloc_aead(driver, type, mask);
Herbert Xuda7f0332008-07-31 17:08:25 +08001658 if (IS_ERR(tfm)) {
1659 printk(KERN_ERR "alg: aead: Failed to load transform for %s: "
1660 "%ld\n", driver, PTR_ERR(tfm));
1661 return PTR_ERR(tfm);
1662 }
1663
1664 if (desc->suite.aead.enc.vecs) {
1665 err = test_aead(tfm, ENCRYPT, desc->suite.aead.enc.vecs,
1666 desc->suite.aead.enc.count);
1667 if (err)
1668 goto out;
1669 }
1670
1671 if (!err && desc->suite.aead.dec.vecs)
1672 err = test_aead(tfm, DECRYPT, desc->suite.aead.dec.vecs,
1673 desc->suite.aead.dec.count);
1674
1675out:
1676 crypto_free_aead(tfm);
1677 return err;
1678}
1679
1680static int alg_test_cipher(const struct alg_test_desc *desc,
1681 const char *driver, u32 type, u32 mask)
1682{
Herbert Xu1aa4ecd2008-08-17 17:01:56 +10001683 struct crypto_cipher *tfm;
Herbert Xuda7f0332008-07-31 17:08:25 +08001684 int err = 0;
1685
Herbert Xueed93e02016-11-22 20:08:31 +08001686 tfm = crypto_alloc_cipher(driver, type, mask);
Herbert Xuda7f0332008-07-31 17:08:25 +08001687 if (IS_ERR(tfm)) {
1688 printk(KERN_ERR "alg: cipher: Failed to load transform for "
1689 "%s: %ld\n", driver, PTR_ERR(tfm));
1690 return PTR_ERR(tfm);
1691 }
1692
1693 if (desc->suite.cipher.enc.vecs) {
1694 err = test_cipher(tfm, ENCRYPT, desc->suite.cipher.enc.vecs,
1695 desc->suite.cipher.enc.count);
1696 if (err)
1697 goto out;
1698 }
1699
1700 if (desc->suite.cipher.dec.vecs)
1701 err = test_cipher(tfm, DECRYPT, desc->suite.cipher.dec.vecs,
1702 desc->suite.cipher.dec.count);
1703
1704out:
Herbert Xu1aa4ecd2008-08-17 17:01:56 +10001705 crypto_free_cipher(tfm);
1706 return err;
1707}
1708
1709static int alg_test_skcipher(const struct alg_test_desc *desc,
1710 const char *driver, u32 type, u32 mask)
1711{
Herbert Xu12773d92015-08-20 15:21:46 +08001712 struct crypto_skcipher *tfm;
Herbert Xu1aa4ecd2008-08-17 17:01:56 +10001713 int err = 0;
1714
Herbert Xueed93e02016-11-22 20:08:31 +08001715 tfm = crypto_alloc_skcipher(driver, type, mask);
Herbert Xu1aa4ecd2008-08-17 17:01:56 +10001716 if (IS_ERR(tfm)) {
1717 printk(KERN_ERR "alg: skcipher: Failed to load transform for "
1718 "%s: %ld\n", driver, PTR_ERR(tfm));
1719 return PTR_ERR(tfm);
1720 }
1721
1722 if (desc->suite.cipher.enc.vecs) {
1723 err = test_skcipher(tfm, ENCRYPT, desc->suite.cipher.enc.vecs,
1724 desc->suite.cipher.enc.count);
1725 if (err)
1726 goto out;
1727 }
1728
1729 if (desc->suite.cipher.dec.vecs)
1730 err = test_skcipher(tfm, DECRYPT, desc->suite.cipher.dec.vecs,
1731 desc->suite.cipher.dec.count);
1732
1733out:
Herbert Xu12773d92015-08-20 15:21:46 +08001734 crypto_free_skcipher(tfm);
Herbert Xuda7f0332008-07-31 17:08:25 +08001735 return err;
1736}
1737
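/*
 * Compression drivers may register through the legacy crypto_comp interface
 * or as acomp transforms; dispatch on the algorithm type so the same vector
 * set exercises whichever interface the driver implements.
 */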
1738static int alg_test_comp(const struct alg_test_desc *desc, const char *driver,
1739 u32 type, u32 mask)
1740{
Giovanni Cabiddud7db7a82016-10-21 13:19:54 +01001741 struct crypto_comp *comp;
1742 struct crypto_acomp *acomp;
Herbert Xuda7f0332008-07-31 17:08:25 +08001743 int err;
Giovanni Cabiddud7db7a82016-10-21 13:19:54 +01001744 u32 algo_type = type & CRYPTO_ALG_TYPE_ACOMPRESS_MASK;
Herbert Xuda7f0332008-07-31 17:08:25 +08001745
Giovanni Cabiddud7db7a82016-10-21 13:19:54 +01001746 if (algo_type == CRYPTO_ALG_TYPE_ACOMPRESS) {
1747 acomp = crypto_alloc_acomp(driver, type, mask);
1748 if (IS_ERR(acomp)) {
1749 pr_err("alg: acomp: Failed to load transform for %s: %ld\n",
1750 driver, PTR_ERR(acomp));
1751 return PTR_ERR(acomp);
1752 }
1753 err = test_acomp(acomp, desc->suite.comp.comp.vecs,
1754 desc->suite.comp.decomp.vecs,
1755 desc->suite.comp.comp.count,
1756 desc->suite.comp.decomp.count);
1757 crypto_free_acomp(acomp);
1758 } else {
1759 comp = crypto_alloc_comp(driver, type, mask);
1760 if (IS_ERR(comp)) {
1761 pr_err("alg: comp: Failed to load transform for %s: %ld\n",
1762 driver, PTR_ERR(comp));
1763 return PTR_ERR(comp);
1764 }
1765
1766 err = test_comp(comp, desc->suite.comp.comp.vecs,
1767 desc->suite.comp.decomp.vecs,
1768 desc->suite.comp.comp.count,
1769 desc->suite.comp.decomp.count);
1770
1771 crypto_free_comp(comp);
Herbert Xuda7f0332008-07-31 17:08:25 +08001772 }
Herbert Xuda7f0332008-07-31 17:08:25 +08001773 return err;
1774}
1775
1776static int alg_test_hash(const struct alg_test_desc *desc, const char *driver,
1777 u32 type, u32 mask)
1778{
1779 struct crypto_ahash *tfm;
1780 int err;
1781
Herbert Xueed93e02016-11-22 20:08:31 +08001782 tfm = crypto_alloc_ahash(driver, type, mask);
Herbert Xuda7f0332008-07-31 17:08:25 +08001783 if (IS_ERR(tfm)) {
1784 printk(KERN_ERR "alg: hash: Failed to load transform for %s: "
1785 "%ld\n", driver, PTR_ERR(tfm));
1786 return PTR_ERR(tfm);
1787 }
1788
David S. Millera8f1a052010-05-19 14:12:03 +10001789 err = test_hash(tfm, desc->suite.hash.vecs,
1790 desc->suite.hash.count, true);
1791 if (!err)
1792 err = test_hash(tfm, desc->suite.hash.vecs,
1793 desc->suite.hash.count, false);
Herbert Xuda7f0332008-07-31 17:08:25 +08001794
1795 crypto_free_ahash(tfm);
1796 return err;
1797}
1798
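/*
 * In addition to the generic hash vectors, crc32c gets a context sanity
 * check: seed the shash state with an arbitrary value (420553207) and call
 * crypto_shash_final() alone; the result must be the bitwise complement of
 * the seed, confirming the expected state layout and final inversion.
 */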
Herbert Xu8e3ee852008-11-07 14:58:52 +08001799static int alg_test_crc32c(const struct alg_test_desc *desc,
1800 const char *driver, u32 type, u32 mask)
1801{
1802 struct crypto_shash *tfm;
1803 u32 val;
1804 int err;
1805
1806 err = alg_test_hash(desc, driver, type, mask);
1807 if (err)
1808 goto out;
1809
Herbert Xueed93e02016-11-22 20:08:31 +08001810 tfm = crypto_alloc_shash(driver, type, mask);
Herbert Xu8e3ee852008-11-07 14:58:52 +08001811 if (IS_ERR(tfm)) {
1812 printk(KERN_ERR "alg: crc32c: Failed to load transform for %s: "
1813 "%ld\n", driver, PTR_ERR(tfm));
1814 err = PTR_ERR(tfm);
1815 goto out;
1816 }
1817
1818 do {
Jan-Simon Möller4c5c3022012-07-02 13:48:30 +02001819 SHASH_DESC_ON_STACK(shash, tfm);
1820 u32 *ctx = (u32 *)shash_desc_ctx(shash);
Herbert Xu8e3ee852008-11-07 14:58:52 +08001821
Jan-Simon Möller4c5c3022012-07-02 13:48:30 +02001822 shash->tfm = tfm;
1823 shash->flags = 0;
Herbert Xu8e3ee852008-11-07 14:58:52 +08001824
Jan-Simon Möller4c5c3022012-07-02 13:48:30 +02001825 *ctx = le32_to_cpu(420553207);
1826 err = crypto_shash_final(shash, (u8 *)&val);
Herbert Xu8e3ee852008-11-07 14:58:52 +08001827 if (err) {
1828 printk(KERN_ERR "alg: crc32c: Operation failed for "
1829 "%s: %d\n", driver, err);
1830 break;
1831 }
1832
1833 if (val != ~420553207) {
1834 printk(KERN_ERR "alg: crc32c: Test failed for %s: "
1835 "%d\n", driver, val);
1836 err = -EINVAL;
1837 }
1838 } while (0);
1839
1840 crypto_free_shash(tfm);
1841
1842out:
1843 return err;
1844}
1845
Jarod Wilson7647d6c2009-05-04 19:44:50 +08001846static int alg_test_cprng(const struct alg_test_desc *desc, const char *driver,
1847 u32 type, u32 mask)
1848{
1849 struct crypto_rng *rng;
1850 int err;
1851
Herbert Xueed93e02016-11-22 20:08:31 +08001852 rng = crypto_alloc_rng(driver, type, mask);
Jarod Wilson7647d6c2009-05-04 19:44:50 +08001853 if (IS_ERR(rng)) {
1854 printk(KERN_ERR "alg: cprng: Failed to load transform for %s: "
1855 "%ld\n", driver, PTR_ERR(rng));
1856 return PTR_ERR(rng);
1857 }
1858
1859 err = test_cprng(rng, desc->suite.cprng.vecs, desc->suite.cprng.count);
1860
1861 crypto_free_rng(rng);
1862
1863 return err;
1864}
1865
Stephan Mueller64d1cdf2014-05-31 17:25:36 +02001866
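/*
 * Run a single CAVS-style DRBG vector: instantiate with the test entropy and
 * personalization string, generate two blocks of output (passing the
 * per-request additional input, and fresh test entropy when prediction
 * resistance is requested), then compare the second block with the expected
 * value.
 */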
1867static int drbg_cavs_test(struct drbg_testvec *test, int pr,
1868 const char *driver, u32 type, u32 mask)
1869{
1870 int ret = -EAGAIN;
1871 struct crypto_rng *drng;
1872 struct drbg_test_data test_data;
1873 struct drbg_string addtl, pers, testentropy;
1874 unsigned char *buf = kzalloc(test->expectedlen, GFP_KERNEL);
1875
1876 if (!buf)
1877 return -ENOMEM;
1878
Herbert Xueed93e02016-11-22 20:08:31 +08001879 drng = crypto_alloc_rng(driver, type, mask);
Stephan Mueller64d1cdf2014-05-31 17:25:36 +02001880 if (IS_ERR(drng)) {
Jarod Wilson2fc0d252014-07-29 15:47:56 -04001881 printk(KERN_ERR "alg: drbg: could not allocate DRNG handle for "
Stephan Mueller64d1cdf2014-05-31 17:25:36 +02001882 "%s\n", driver);
1883 kzfree(buf);
1884 return -ENOMEM;
1885 }
1886
1887 test_data.testentropy = &testentropy;
1888 drbg_string_fill(&testentropy, test->entropy, test->entropylen);
1889 drbg_string_fill(&pers, test->pers, test->perslen);
1890 ret = crypto_drbg_reset_test(drng, &pers, &test_data);
1891 if (ret) {
1892 printk(KERN_ERR "alg: drbg: Failed to reset rng\n");
1893 goto outbuf;
1894 }
1895
1896 drbg_string_fill(&addtl, test->addtla, test->addtllen);
1897 if (pr) {
1898 drbg_string_fill(&testentropy, test->entpra, test->entprlen);
1899 ret = crypto_drbg_get_bytes_addtl_test(drng,
1900 buf, test->expectedlen, &addtl, &test_data);
1901 } else {
1902 ret = crypto_drbg_get_bytes_addtl(drng,
1903 buf, test->expectedlen, &addtl);
1904 }
Stephan Mueller19e60e12015-03-10 17:00:36 +01001905 if (ret < 0) {
Jarod Wilson2fc0d252014-07-29 15:47:56 -04001906 printk(KERN_ERR "alg: drbg: could not obtain random data for "
Stephan Mueller64d1cdf2014-05-31 17:25:36 +02001907 "driver %s\n", driver);
1908 goto outbuf;
1909 }
1910
1911 drbg_string_fill(&addtl, test->addtlb, test->addtllen);
1912 if (pr) {
1913 drbg_string_fill(&testentropy, test->entprb, test->entprlen);
1914 ret = crypto_drbg_get_bytes_addtl_test(drng,
1915 buf, test->expectedlen, &addtl, &test_data);
1916 } else {
1917 ret = crypto_drbg_get_bytes_addtl(drng,
1918 buf, test->expectedlen, &addtl);
1919 }
Stephan Mueller19e60e12015-03-10 17:00:36 +01001920 if (ret < 0) {
Jarod Wilson2fc0d252014-07-29 15:47:56 -04001921 printk(KERN_ERR "alg: drbg: could not obtain random data for "
Stephan Mueller64d1cdf2014-05-31 17:25:36 +02001922 "driver %s\n", driver);
1923 goto outbuf;
1924 }
1925
1926 ret = memcmp(test->expected, buf, test->expectedlen);
1927
1928outbuf:
1929 crypto_free_rng(drng);
1930 kzfree(buf);
1931 return ret;
1932}
1933
1934
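/*
 * Prediction resistance is inferred from the driver name: any "drbg_pr_"
 * instance is exercised with fresh entropy for every generate request.
 */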
1935static int alg_test_drbg(const struct alg_test_desc *desc, const char *driver,
1936 u32 type, u32 mask)
1937{
1938 int err = 0;
1939 int pr = 0;
1940 int i = 0;
1941 struct drbg_testvec *template = desc->suite.drbg.vecs;
1942 unsigned int tcount = desc->suite.drbg.count;
1943
 1944	if (memcmp(driver, "drbg_pr_", 8) == 0)
1945 pr = 1;
1946
1947 for (i = 0; i < tcount; i++) {
1948 err = drbg_cavs_test(&template[i], pr, driver, type, mask);
1949 if (err) {
1950 printk(KERN_ERR "alg: drbg: Test %d failed for %s\n",
1951 i, driver);
1952 err = -EINVAL;
1953 break;
1954 }
1955 }
1956 return err;
1957
1958}
1959
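/*
 * One DH/ECDH known-answer round trip: set our private key/params, generate
 * the (a) public key and compare it with the expected value, then feed in
 * the counterpart's (b) public key and check the resulting shared secret.
 */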
Salvatore Benedetto802c7f12016-06-22 17:49:14 +01001960static int do_test_kpp(struct crypto_kpp *tfm, struct kpp_testvec *vec,
1961 const char *alg)
1962{
1963 struct kpp_request *req;
1964 void *input_buf = NULL;
1965 void *output_buf = NULL;
1966 struct tcrypt_result result;
1967 unsigned int out_len_max;
1968 int err = -ENOMEM;
1969 struct scatterlist src, dst;
1970
1971 req = kpp_request_alloc(tfm, GFP_KERNEL);
1972 if (!req)
1973 return err;
1974
1975 init_completion(&result.completion);
1976
1977 err = crypto_kpp_set_secret(tfm, vec->secret, vec->secret_size);
1978 if (err < 0)
1979 goto free_req;
1980
1981 out_len_max = crypto_kpp_maxsize(tfm);
1982 output_buf = kzalloc(out_len_max, GFP_KERNEL);
1983 if (!output_buf) {
1984 err = -ENOMEM;
1985 goto free_req;
1986 }
1987
 1988	/* No input buffer here: the base (generator) comes from the secret/params set above */
1989 kpp_request_set_input(req, NULL, 0);
1990 sg_init_one(&dst, output_buf, out_len_max);
1991 kpp_request_set_output(req, &dst, out_len_max);
1992 kpp_request_set_callback(req, CRYPTO_TFM_REQ_MAY_BACKLOG,
1993 tcrypt_complete, &result);
1994
1995 /* Compute public key */
1996 err = wait_async_op(&result, crypto_kpp_generate_public_key(req));
1997 if (err) {
1998 pr_err("alg: %s: generate public key test failed. err %d\n",
1999 alg, err);
2000 goto free_output;
2001 }
2002 /* Verify calculated public key */
2003 if (memcmp(vec->expected_a_public, sg_virt(req->dst),
2004 vec->expected_a_public_size)) {
2005 pr_err("alg: %s: generate public key test failed. Invalid output\n",
2006 alg);
2007 err = -EINVAL;
2008 goto free_output;
2009 }
2010
 2011	/* Calculate the shared secret using the counterpart's (b) public key. */
2012 input_buf = kzalloc(vec->b_public_size, GFP_KERNEL);
2013 if (!input_buf) {
2014 err = -ENOMEM;
2015 goto free_output;
2016 }
2017
2018 memcpy(input_buf, vec->b_public, vec->b_public_size);
2019 sg_init_one(&src, input_buf, vec->b_public_size);
2020 sg_init_one(&dst, output_buf, out_len_max);
2021 kpp_request_set_input(req, &src, vec->b_public_size);
2022 kpp_request_set_output(req, &dst, out_len_max);
2023 kpp_request_set_callback(req, CRYPTO_TFM_REQ_MAY_BACKLOG,
2024 tcrypt_complete, &result);
2025 err = wait_async_op(&result, crypto_kpp_compute_shared_secret(req));
2026 if (err) {
2027 pr_err("alg: %s: compute shard secret test failed. err %d\n",
2028 alg, err);
2029 goto free_all;
2030 }
2031 /*
 2032	 * Verify the shared secret, from which the user will derive
 2033	 * the session key with whatever hash or KDF they have chosen.
2034 */
2035 if (memcmp(vec->expected_ss, sg_virt(req->dst),
2036 vec->expected_ss_size)) {
2037 pr_err("alg: %s: compute shared secret test failed. Invalid output\n",
2038 alg);
2039 err = -EINVAL;
2040 }
2041
2042free_all:
2043 kfree(input_buf);
2044free_output:
2045 kfree(output_buf);
2046free_req:
2047 kpp_request_free(req);
2048 return err;
2049}
2050
2051static int test_kpp(struct crypto_kpp *tfm, const char *alg,
2052 struct kpp_testvec *vecs, unsigned int tcount)
2053{
2054 int ret, i;
2055
2056 for (i = 0; i < tcount; i++) {
2057 ret = do_test_kpp(tfm, vecs++, alg);
2058 if (ret) {
2059 pr_err("alg: %s: test failed on vector %d, err=%d\n",
2060 alg, i + 1, ret);
2061 return ret;
2062 }
2063 }
2064 return 0;
2065}
2066
2067static int alg_test_kpp(const struct alg_test_desc *desc, const char *driver,
2068 u32 type, u32 mask)
2069{
2070 struct crypto_kpp *tfm;
2071 int err = 0;
2072
Herbert Xueed93e02016-11-22 20:08:31 +08002073 tfm = crypto_alloc_kpp(driver, type, mask);
Salvatore Benedetto802c7f12016-06-22 17:49:14 +01002074 if (IS_ERR(tfm)) {
2075 pr_err("alg: kpp: Failed to load tfm for %s: %ld\n",
2076 driver, PTR_ERR(tfm));
2077 return PTR_ERR(tfm);
2078 }
2079 if (desc->suite.kpp.vecs)
2080 err = test_kpp(tfm, desc->alg, desc->suite.kpp.vecs,
2081 desc->suite.kpp.count);
2082
2083 crypto_free_kpp(tfm);
2084 return err;
2085}
2086
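/*
 * One RSA known-answer vector: load the key, encrypt the message m from a
 * deliberately split scatterlist and compare the result with the expected
 * ciphertext c; for private-key vectors, also decrypt c and check that the
 * plaintext (ignoring leading zero padding) matches m.
 */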
Herbert Xu50d2b6432016-06-29 19:32:20 +08002087static int test_akcipher_one(struct crypto_akcipher *tfm,
2088 struct akcipher_testvec *vecs)
Tadeusz Struk946cc462015-06-16 10:31:06 -07002089{
Herbert Xudf27b262016-05-05 16:42:49 +08002090 char *xbuf[XBUFSIZE];
Tadeusz Struk946cc462015-06-16 10:31:06 -07002091 struct akcipher_request *req;
2092 void *outbuf_enc = NULL;
2093 void *outbuf_dec = NULL;
2094 struct tcrypt_result result;
2095 unsigned int out_len_max, out_len = 0;
2096 int err = -ENOMEM;
Tadeusz Struk22287b02015-10-08 09:26:55 -07002097 struct scatterlist src, dst, src_tab[2];
Tadeusz Struk946cc462015-06-16 10:31:06 -07002098
Herbert Xudf27b262016-05-05 16:42:49 +08002099 if (testmgr_alloc_buf(xbuf))
2100 return err;
2101
Tadeusz Struk946cc462015-06-16 10:31:06 -07002102 req = akcipher_request_alloc(tfm, GFP_KERNEL);
2103 if (!req)
Herbert Xudf27b262016-05-05 16:42:49 +08002104 goto free_xbuf;
Tadeusz Struk946cc462015-06-16 10:31:06 -07002105
2106 init_completion(&result.completion);
Tadeusz Struk22287b02015-10-08 09:26:55 -07002107
2108 if (vecs->public_key_vec)
2109 err = crypto_akcipher_set_pub_key(tfm, vecs->key,
2110 vecs->key_len);
2111 else
2112 err = crypto_akcipher_set_priv_key(tfm, vecs->key,
2113 vecs->key_len);
Tadeusz Struk946cc462015-06-16 10:31:06 -07002114 if (err)
2115 goto free_req;
2116
Salvatore Benedetto57763f52016-07-04 10:52:34 +01002117 err = -ENOMEM;
Tadeusz Struk22287b02015-10-08 09:26:55 -07002118 out_len_max = crypto_akcipher_maxsize(tfm);
Tadeusz Struk946cc462015-06-16 10:31:06 -07002119 outbuf_enc = kzalloc(out_len_max, GFP_KERNEL);
2120 if (!outbuf_enc)
2121 goto free_req;
2122
Herbert Xudf27b262016-05-05 16:42:49 +08002123 if (WARN_ON(vecs->m_size > PAGE_SIZE))
2124 goto free_all;
2125
2126 memcpy(xbuf[0], vecs->m, vecs->m_size);
2127
Tadeusz Struk22287b02015-10-08 09:26:55 -07002128 sg_init_table(src_tab, 2);
Herbert Xudf27b262016-05-05 16:42:49 +08002129 sg_set_buf(&src_tab[0], xbuf[0], 8);
2130 sg_set_buf(&src_tab[1], xbuf[0] + 8, vecs->m_size - 8);
Tadeusz Struk22287b02015-10-08 09:26:55 -07002131 sg_init_one(&dst, outbuf_enc, out_len_max);
2132 akcipher_request_set_crypt(req, src_tab, &dst, vecs->m_size,
2133 out_len_max);
Tadeusz Struk946cc462015-06-16 10:31:06 -07002134 akcipher_request_set_callback(req, CRYPTO_TFM_REQ_MAY_BACKLOG,
2135 tcrypt_complete, &result);
2136
 2137	/* Run RSA encrypt - c = m^e mod n; */
2138 err = wait_async_op(&result, crypto_akcipher_encrypt(req));
2139 if (err) {
Herbert Xu50d2b6432016-06-29 19:32:20 +08002140 pr_err("alg: akcipher: encrypt test failed. err %d\n", err);
Tadeusz Struk946cc462015-06-16 10:31:06 -07002141 goto free_all;
2142 }
Tadeusz Struk22287b02015-10-08 09:26:55 -07002143 if (req->dst_len != vecs->c_size) {
Herbert Xu50d2b6432016-06-29 19:32:20 +08002144 pr_err("alg: akcipher: encrypt test failed. Invalid output len\n");
Tadeusz Struk946cc462015-06-16 10:31:06 -07002145 err = -EINVAL;
2146 goto free_all;
2147 }
2148 /* verify that encrypted message is equal to expected */
Herbert Xudf27b262016-05-05 16:42:49 +08002149 if (memcmp(vecs->c, outbuf_enc, vecs->c_size)) {
Herbert Xu50d2b6432016-06-29 19:32:20 +08002150 pr_err("alg: akcipher: encrypt test failed. Invalid output\n");
2151 hexdump(outbuf_enc, vecs->c_size);
Tadeusz Struk946cc462015-06-16 10:31:06 -07002152 err = -EINVAL;
2153 goto free_all;
2154 }
2155 /* Don't invoke decrypt for vectors with public key */
2156 if (vecs->public_key_vec) {
2157 err = 0;
2158 goto free_all;
2159 }
2160 outbuf_dec = kzalloc(out_len_max, GFP_KERNEL);
2161 if (!outbuf_dec) {
2162 err = -ENOMEM;
2163 goto free_all;
2164 }
Herbert Xudf27b262016-05-05 16:42:49 +08002165
2166 if (WARN_ON(vecs->c_size > PAGE_SIZE))
2167 goto free_all;
2168
2169 memcpy(xbuf[0], vecs->c, vecs->c_size);
2170
2171 sg_init_one(&src, xbuf[0], vecs->c_size);
Tadeusz Struk22287b02015-10-08 09:26:55 -07002172 sg_init_one(&dst, outbuf_dec, out_len_max);
Tadeusz Struk946cc462015-06-16 10:31:06 -07002173 init_completion(&result.completion);
Tadeusz Struk22287b02015-10-08 09:26:55 -07002174 akcipher_request_set_crypt(req, &src, &dst, vecs->c_size, out_len_max);
Tadeusz Struk946cc462015-06-16 10:31:06 -07002175
 2176	/* Run RSA decrypt - m = c^d mod n; */
2177 err = wait_async_op(&result, crypto_akcipher_decrypt(req));
2178 if (err) {
Herbert Xu50d2b6432016-06-29 19:32:20 +08002179 pr_err("alg: akcipher: decrypt test failed. err %d\n", err);
Tadeusz Struk946cc462015-06-16 10:31:06 -07002180 goto free_all;
2181 }
2182 out_len = req->dst_len;
Herbert Xu50d2b6432016-06-29 19:32:20 +08002183 if (out_len < vecs->m_size) {
2184 pr_err("alg: akcipher: decrypt test failed. "
2185 "Invalid output len %u\n", out_len);
Tadeusz Struk946cc462015-06-16 10:31:06 -07002186 err = -EINVAL;
2187 goto free_all;
2188 }
2189 /* verify that decrypted message is equal to the original msg */
Herbert Xu50d2b6432016-06-29 19:32:20 +08002190 if (memchr_inv(outbuf_dec, 0, out_len - vecs->m_size) ||
2191 memcmp(vecs->m, outbuf_dec + out_len - vecs->m_size,
2192 vecs->m_size)) {
2193 pr_err("alg: akcipher: decrypt test failed. Invalid output\n");
2194 hexdump(outbuf_dec, out_len);
Tadeusz Struk946cc462015-06-16 10:31:06 -07002195 err = -EINVAL;
2196 }
2197free_all:
2198 kfree(outbuf_dec);
2199 kfree(outbuf_enc);
2200free_req:
2201 akcipher_request_free(req);
Herbert Xudf27b262016-05-05 16:42:49 +08002202free_xbuf:
2203 testmgr_free_buf(xbuf);
Tadeusz Struk946cc462015-06-16 10:31:06 -07002204 return err;
2205}
2206
Herbert Xu50d2b6432016-06-29 19:32:20 +08002207static int test_akcipher(struct crypto_akcipher *tfm, const char *alg,
2208 struct akcipher_testvec *vecs, unsigned int tcount)
Tadeusz Struk946cc462015-06-16 10:31:06 -07002209{
Herbert Xu15226e42016-07-18 18:20:10 +08002210 const char *algo =
2211 crypto_tfm_alg_driver_name(crypto_akcipher_tfm(tfm));
Tadeusz Struk946cc462015-06-16 10:31:06 -07002212 int ret, i;
2213
2214 for (i = 0; i < tcount; i++) {
Herbert Xu50d2b6432016-06-29 19:32:20 +08002215 ret = test_akcipher_one(tfm, vecs++);
2216 if (!ret)
2217 continue;
2218
Herbert Xu15226e42016-07-18 18:20:10 +08002219 pr_err("alg: akcipher: test %d failed for %s, err=%d\n",
2220 i + 1, algo, ret);
Herbert Xu50d2b6432016-06-29 19:32:20 +08002221 return ret;
Tadeusz Struk946cc462015-06-16 10:31:06 -07002222 }
2223 return 0;
2224}
2225
Tadeusz Struk946cc462015-06-16 10:31:06 -07002226static int alg_test_akcipher(const struct alg_test_desc *desc,
2227 const char *driver, u32 type, u32 mask)
2228{
2229 struct crypto_akcipher *tfm;
2230 int err = 0;
2231
Herbert Xueed93e02016-11-22 20:08:31 +08002232 tfm = crypto_alloc_akcipher(driver, type, mask);
Tadeusz Struk946cc462015-06-16 10:31:06 -07002233 if (IS_ERR(tfm)) {
2234 pr_err("alg: akcipher: Failed to load tfm for %s: %ld\n",
2235 driver, PTR_ERR(tfm));
2236 return PTR_ERR(tfm);
2237 }
2238 if (desc->suite.akcipher.vecs)
2239 err = test_akcipher(tfm, desc->alg, desc->suite.akcipher.vecs,
2240 desc->suite.akcipher.count);
2241
2242 crypto_free_akcipher(tfm);
2243 return err;
2244}
2245
Youquan, Song863b5572009-12-23 19:45:20 +08002246static int alg_test_null(const struct alg_test_desc *desc,
2247 const char *driver, u32 type, u32 mask)
2248{
2249 return 0;
2250}
2251
Herbert Xuda7f0332008-07-31 17:08:25 +08002252/* Please keep this list sorted by algorithm name. */
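/*
 * Each entry binds an algorithm name to the routine that knows how to test
 * it and to the vectors it consumes. A minimal sketch of the shape
 * (illustrative only, see the real entries below):
 *
 *	{
 *		.alg = "cbc(aes)",
 *		.test = alg_test_skcipher,
 *		.fips_allowed = 1,
 *		.suite = { .cipher = { .enc = { ... }, .dec = { ... } } },
 *	}
 */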
2253static const struct alg_test_desc alg_test_descs[] = {
2254 {
Jarod Wilsone08ca2d2009-05-04 19:46:29 +08002255 .alg = "ansi_cprng",
2256 .test = alg_test_cprng,
2257 .suite = {
2258 .cprng = {
2259 .vecs = ansi_cprng_aes_tv_template,
2260 .count = ANSI_CPRNG_AES_TEST_VECTORS
2261 }
2262 }
2263 }, {
Horia Geantabca4feb2014-03-14 17:46:51 +02002264 .alg = "authenc(hmac(md5),ecb(cipher_null))",
2265 .test = alg_test_aead,
Horia Geantabca4feb2014-03-14 17:46:51 +02002266 .suite = {
2267 .aead = {
2268 .enc = {
2269 .vecs = hmac_md5_ecb_cipher_null_enc_tv_template,
2270 .count = HMAC_MD5_ECB_CIPHER_NULL_ENC_TEST_VECTORS
2271 },
2272 .dec = {
2273 .vecs = hmac_md5_ecb_cipher_null_dec_tv_template,
2274 .count = HMAC_MD5_ECB_CIPHER_NULL_DEC_TEST_VECTORS
2275 }
2276 }
2277 }
2278 }, {
Herbert Xua4198fd2015-07-30 17:53:23 +08002279 .alg = "authenc(hmac(sha1),cbc(aes))",
Horia Geantae46e9a42012-07-03 19:16:54 +03002280 .test = alg_test_aead,
Horia Geantae46e9a42012-07-03 19:16:54 +03002281 .suite = {
2282 .aead = {
2283 .enc = {
Nitesh Lal5208ed22014-05-21 17:09:08 +05302284 .vecs =
2285 hmac_sha1_aes_cbc_enc_tv_temp,
2286 .count =
2287 HMAC_SHA1_AES_CBC_ENC_TEST_VEC
2288 }
2289 }
2290 }
2291 }, {
Herbert Xua4198fd2015-07-30 17:53:23 +08002292 .alg = "authenc(hmac(sha1),cbc(des))",
Nitesh Lal5208ed22014-05-21 17:09:08 +05302293 .test = alg_test_aead,
Nitesh Lal5208ed22014-05-21 17:09:08 +05302294 .suite = {
2295 .aead = {
2296 .enc = {
2297 .vecs =
2298 hmac_sha1_des_cbc_enc_tv_temp,
2299 .count =
2300 HMAC_SHA1_DES_CBC_ENC_TEST_VEC
2301 }
2302 }
2303 }
2304 }, {
Herbert Xua4198fd2015-07-30 17:53:23 +08002305 .alg = "authenc(hmac(sha1),cbc(des3_ede))",
Nitesh Lal5208ed22014-05-21 17:09:08 +05302306 .test = alg_test_aead,
Marcus Meissnered1afac2016-02-05 14:23:33 +01002307 .fips_allowed = 1,
Nitesh Lal5208ed22014-05-21 17:09:08 +05302308 .suite = {
2309 .aead = {
2310 .enc = {
2311 .vecs =
2312 hmac_sha1_des3_ede_cbc_enc_tv_temp,
2313 .count =
2314 HMAC_SHA1_DES3_EDE_CBC_ENC_TEST_VEC
Horia Geantae46e9a42012-07-03 19:16:54 +03002315 }
2316 }
2317 }
2318 }, {
Marcus Meissnerfb16abc2016-02-06 11:53:07 +01002319 .alg = "authenc(hmac(sha1),ctr(aes))",
2320 .test = alg_test_null,
2321 .fips_allowed = 1,
2322 }, {
Horia Geantabca4feb2014-03-14 17:46:51 +02002323 .alg = "authenc(hmac(sha1),ecb(cipher_null))",
2324 .test = alg_test_aead,
Horia Geantabca4feb2014-03-14 17:46:51 +02002325 .suite = {
2326 .aead = {
2327 .enc = {
Nitesh Lal5208ed22014-05-21 17:09:08 +05302328 .vecs =
2329 hmac_sha1_ecb_cipher_null_enc_tv_temp,
2330 .count =
2331 HMAC_SHA1_ECB_CIPHER_NULL_ENC_TEST_VEC
Horia Geantabca4feb2014-03-14 17:46:51 +02002332 },
2333 .dec = {
Nitesh Lal5208ed22014-05-21 17:09:08 +05302334 .vecs =
2335 hmac_sha1_ecb_cipher_null_dec_tv_temp,
2336 .count =
2337 HMAC_SHA1_ECB_CIPHER_NULL_DEC_TEST_VEC
2338 }
2339 }
2340 }
2341 }, {
Marcus Meissner88886902016-02-19 13:34:28 +01002342 .alg = "authenc(hmac(sha1),rfc3686(ctr(aes)))",
2343 .test = alg_test_null,
2344 .fips_allowed = 1,
2345 }, {
Herbert Xua4198fd2015-07-30 17:53:23 +08002346 .alg = "authenc(hmac(sha224),cbc(des))",
Nitesh Lal5208ed22014-05-21 17:09:08 +05302347 .test = alg_test_aead,
Nitesh Lal5208ed22014-05-21 17:09:08 +05302348 .suite = {
2349 .aead = {
2350 .enc = {
2351 .vecs =
2352 hmac_sha224_des_cbc_enc_tv_temp,
2353 .count =
2354 HMAC_SHA224_DES_CBC_ENC_TEST_VEC
2355 }
2356 }
2357 }
2358 }, {
Herbert Xua4198fd2015-07-30 17:53:23 +08002359 .alg = "authenc(hmac(sha224),cbc(des3_ede))",
Nitesh Lal5208ed22014-05-21 17:09:08 +05302360 .test = alg_test_aead,
Marcus Meissnered1afac2016-02-05 14:23:33 +01002361 .fips_allowed = 1,
Nitesh Lal5208ed22014-05-21 17:09:08 +05302362 .suite = {
2363 .aead = {
2364 .enc = {
2365 .vecs =
2366 hmac_sha224_des3_ede_cbc_enc_tv_temp,
2367 .count =
2368 HMAC_SHA224_DES3_EDE_CBC_ENC_TEST_VEC
Horia Geantabca4feb2014-03-14 17:46:51 +02002369 }
2370 }
2371 }
2372 }, {
Herbert Xua4198fd2015-07-30 17:53:23 +08002373 .alg = "authenc(hmac(sha256),cbc(aes))",
Horia Geantae46e9a42012-07-03 19:16:54 +03002374 .test = alg_test_aead,
Marcus Meissnered1afac2016-02-05 14:23:33 +01002375 .fips_allowed = 1,
Horia Geantae46e9a42012-07-03 19:16:54 +03002376 .suite = {
2377 .aead = {
2378 .enc = {
Nitesh Lal5208ed22014-05-21 17:09:08 +05302379 .vecs =
2380 hmac_sha256_aes_cbc_enc_tv_temp,
2381 .count =
2382 HMAC_SHA256_AES_CBC_ENC_TEST_VEC
2383 }
2384 }
2385 }
2386 }, {
Herbert Xua4198fd2015-07-30 17:53:23 +08002387 .alg = "authenc(hmac(sha256),cbc(des))",
Nitesh Lal5208ed22014-05-21 17:09:08 +05302388 .test = alg_test_aead,
Nitesh Lal5208ed22014-05-21 17:09:08 +05302389 .suite = {
2390 .aead = {
2391 .enc = {
2392 .vecs =
2393 hmac_sha256_des_cbc_enc_tv_temp,
2394 .count =
2395 HMAC_SHA256_DES_CBC_ENC_TEST_VEC
2396 }
2397 }
2398 }
2399 }, {
Herbert Xua4198fd2015-07-30 17:53:23 +08002400 .alg = "authenc(hmac(sha256),cbc(des3_ede))",
Nitesh Lal5208ed22014-05-21 17:09:08 +05302401 .test = alg_test_aead,
Marcus Meissnered1afac2016-02-05 14:23:33 +01002402 .fips_allowed = 1,
Nitesh Lal5208ed22014-05-21 17:09:08 +05302403 .suite = {
2404 .aead = {
2405 .enc = {
2406 .vecs =
2407 hmac_sha256_des3_ede_cbc_enc_tv_temp,
2408 .count =
2409 HMAC_SHA256_DES3_EDE_CBC_ENC_TEST_VEC
2410 }
2411 }
2412 }
2413 }, {
Marcus Meissnerfb16abc2016-02-06 11:53:07 +01002414 .alg = "authenc(hmac(sha256),ctr(aes))",
2415 .test = alg_test_null,
2416 .fips_allowed = 1,
2417 }, {
Marcus Meissner88886902016-02-19 13:34:28 +01002418 .alg = "authenc(hmac(sha256),rfc3686(ctr(aes)))",
2419 .test = alg_test_null,
2420 .fips_allowed = 1,
2421 }, {
Herbert Xua4198fd2015-07-30 17:53:23 +08002422 .alg = "authenc(hmac(sha384),cbc(des))",
Nitesh Lal5208ed22014-05-21 17:09:08 +05302423 .test = alg_test_aead,
Nitesh Lal5208ed22014-05-21 17:09:08 +05302424 .suite = {
2425 .aead = {
2426 .enc = {
2427 .vecs =
2428 hmac_sha384_des_cbc_enc_tv_temp,
2429 .count =
2430 HMAC_SHA384_DES_CBC_ENC_TEST_VEC
2431 }
2432 }
2433 }
2434 }, {
Herbert Xua4198fd2015-07-30 17:53:23 +08002435 .alg = "authenc(hmac(sha384),cbc(des3_ede))",
Nitesh Lal5208ed22014-05-21 17:09:08 +05302436 .test = alg_test_aead,
Marcus Meissnered1afac2016-02-05 14:23:33 +01002437 .fips_allowed = 1,
Nitesh Lal5208ed22014-05-21 17:09:08 +05302438 .suite = {
2439 .aead = {
2440 .enc = {
2441 .vecs =
2442 hmac_sha384_des3_ede_cbc_enc_tv_temp,
2443 .count =
2444 HMAC_SHA384_DES3_EDE_CBC_ENC_TEST_VEC
Horia Geantae46e9a42012-07-03 19:16:54 +03002445 }
2446 }
2447 }
2448 }, {
Marcus Meissnerfb16abc2016-02-06 11:53:07 +01002449 .alg = "authenc(hmac(sha384),ctr(aes))",
2450 .test = alg_test_null,
2451 .fips_allowed = 1,
2452 }, {
Marcus Meissner88886902016-02-19 13:34:28 +01002453 .alg = "authenc(hmac(sha384),rfc3686(ctr(aes)))",
2454 .test = alg_test_null,
2455 .fips_allowed = 1,
2456 }, {
Herbert Xua4198fd2015-07-30 17:53:23 +08002457 .alg = "authenc(hmac(sha512),cbc(aes))",
Marcus Meissnered1afac2016-02-05 14:23:33 +01002458 .fips_allowed = 1,
Horia Geantae46e9a42012-07-03 19:16:54 +03002459 .test = alg_test_aead,
Horia Geantae46e9a42012-07-03 19:16:54 +03002460 .suite = {
2461 .aead = {
2462 .enc = {
Nitesh Lal5208ed22014-05-21 17:09:08 +05302463 .vecs =
2464 hmac_sha512_aes_cbc_enc_tv_temp,
2465 .count =
2466 HMAC_SHA512_AES_CBC_ENC_TEST_VEC
2467 }
2468 }
2469 }
2470 }, {
Herbert Xua4198fd2015-07-30 17:53:23 +08002471 .alg = "authenc(hmac(sha512),cbc(des))",
Nitesh Lal5208ed22014-05-21 17:09:08 +05302472 .test = alg_test_aead,
Nitesh Lal5208ed22014-05-21 17:09:08 +05302473 .suite = {
2474 .aead = {
2475 .enc = {
2476 .vecs =
2477 hmac_sha512_des_cbc_enc_tv_temp,
2478 .count =
2479 HMAC_SHA512_DES_CBC_ENC_TEST_VEC
2480 }
2481 }
2482 }
2483 }, {
Herbert Xua4198fd2015-07-30 17:53:23 +08002484 .alg = "authenc(hmac(sha512),cbc(des3_ede))",
Nitesh Lal5208ed22014-05-21 17:09:08 +05302485 .test = alg_test_aead,
Marcus Meissnered1afac2016-02-05 14:23:33 +01002486 .fips_allowed = 1,
Nitesh Lal5208ed22014-05-21 17:09:08 +05302487 .suite = {
2488 .aead = {
2489 .enc = {
2490 .vecs =
2491 hmac_sha512_des3_ede_cbc_enc_tv_temp,
2492 .count =
2493 HMAC_SHA512_DES3_EDE_CBC_ENC_TEST_VEC
Horia Geantae46e9a42012-07-03 19:16:54 +03002494 }
2495 }
2496 }
2497 }, {
Marcus Meissnerfb16abc2016-02-06 11:53:07 +01002498 .alg = "authenc(hmac(sha512),ctr(aes))",
2499 .test = alg_test_null,
2500 .fips_allowed = 1,
2501 }, {
Marcus Meissner88886902016-02-19 13:34:28 +01002502 .alg = "authenc(hmac(sha512),rfc3686(ctr(aes)))",
2503 .test = alg_test_null,
2504 .fips_allowed = 1,
2505 }, {
Herbert Xuda7f0332008-07-31 17:08:25 +08002506 .alg = "cbc(aes)",
Herbert Xu1aa4ecd2008-08-17 17:01:56 +10002507 .test = alg_test_skcipher,
Jarod Wilsona1915d52009-05-15 15:16:03 +10002508 .fips_allowed = 1,
Herbert Xuda7f0332008-07-31 17:08:25 +08002509 .suite = {
2510 .cipher = {
2511 .enc = {
2512 .vecs = aes_cbc_enc_tv_template,
2513 .count = AES_CBC_ENC_TEST_VECTORS
2514 },
2515 .dec = {
2516 .vecs = aes_cbc_dec_tv_template,
2517 .count = AES_CBC_DEC_TEST_VECTORS
2518 }
2519 }
2520 }
2521 }, {
2522 .alg = "cbc(anubis)",
Herbert Xu1aa4ecd2008-08-17 17:01:56 +10002523 .test = alg_test_skcipher,
Herbert Xuda7f0332008-07-31 17:08:25 +08002524 .suite = {
2525 .cipher = {
2526 .enc = {
2527 .vecs = anubis_cbc_enc_tv_template,
2528 .count = ANUBIS_CBC_ENC_TEST_VECTORS
2529 },
2530 .dec = {
2531 .vecs = anubis_cbc_dec_tv_template,
2532 .count = ANUBIS_CBC_DEC_TEST_VECTORS
2533 }
2534 }
2535 }
2536 }, {
2537 .alg = "cbc(blowfish)",
Herbert Xu1aa4ecd2008-08-17 17:01:56 +10002538 .test = alg_test_skcipher,
Herbert Xuda7f0332008-07-31 17:08:25 +08002539 .suite = {
2540 .cipher = {
2541 .enc = {
2542 .vecs = bf_cbc_enc_tv_template,
2543 .count = BF_CBC_ENC_TEST_VECTORS
2544 },
2545 .dec = {
2546 .vecs = bf_cbc_dec_tv_template,
2547 .count = BF_CBC_DEC_TEST_VECTORS
2548 }
2549 }
2550 }
2551 }, {
2552 .alg = "cbc(camellia)",
Herbert Xu1aa4ecd2008-08-17 17:01:56 +10002553 .test = alg_test_skcipher,
Herbert Xuda7f0332008-07-31 17:08:25 +08002554 .suite = {
2555 .cipher = {
2556 .enc = {
2557 .vecs = camellia_cbc_enc_tv_template,
2558 .count = CAMELLIA_CBC_ENC_TEST_VECTORS
2559 },
2560 .dec = {
2561 .vecs = camellia_cbc_dec_tv_template,
2562 .count = CAMELLIA_CBC_DEC_TEST_VECTORS
2563 }
2564 }
2565 }
2566 }, {
Johannes Goetzfrieda2c58262012-07-11 19:37:21 +02002567 .alg = "cbc(cast5)",
2568 .test = alg_test_skcipher,
2569 .suite = {
2570 .cipher = {
2571 .enc = {
2572 .vecs = cast5_cbc_enc_tv_template,
2573 .count = CAST5_CBC_ENC_TEST_VECTORS
2574 },
2575 .dec = {
2576 .vecs = cast5_cbc_dec_tv_template,
2577 .count = CAST5_CBC_DEC_TEST_VECTORS
2578 }
2579 }
2580 }
2581 }, {
Johannes Goetzfried9b8b0402012-07-11 19:38:29 +02002582 .alg = "cbc(cast6)",
2583 .test = alg_test_skcipher,
2584 .suite = {
2585 .cipher = {
2586 .enc = {
2587 .vecs = cast6_cbc_enc_tv_template,
2588 .count = CAST6_CBC_ENC_TEST_VECTORS
2589 },
2590 .dec = {
2591 .vecs = cast6_cbc_dec_tv_template,
2592 .count = CAST6_CBC_DEC_TEST_VECTORS
2593 }
2594 }
2595 }
2596 }, {
Herbert Xuda7f0332008-07-31 17:08:25 +08002597 .alg = "cbc(des)",
Herbert Xu1aa4ecd2008-08-17 17:01:56 +10002598 .test = alg_test_skcipher,
Herbert Xuda7f0332008-07-31 17:08:25 +08002599 .suite = {
2600 .cipher = {
2601 .enc = {
2602 .vecs = des_cbc_enc_tv_template,
2603 .count = DES_CBC_ENC_TEST_VECTORS
2604 },
2605 .dec = {
2606 .vecs = des_cbc_dec_tv_template,
2607 .count = DES_CBC_DEC_TEST_VECTORS
2608 }
2609 }
2610 }
2611 }, {
2612 .alg = "cbc(des3_ede)",
Herbert Xu1aa4ecd2008-08-17 17:01:56 +10002613 .test = alg_test_skcipher,
Jarod Wilsona1915d52009-05-15 15:16:03 +10002614 .fips_allowed = 1,
Herbert Xuda7f0332008-07-31 17:08:25 +08002615 .suite = {
2616 .cipher = {
2617 .enc = {
2618 .vecs = des3_ede_cbc_enc_tv_template,
2619 .count = DES3_EDE_CBC_ENC_TEST_VECTORS
2620 },
2621 .dec = {
2622 .vecs = des3_ede_cbc_dec_tv_template,
2623 .count = DES3_EDE_CBC_DEC_TEST_VECTORS
2624 }
2625 }
2626 }
2627 }, {
Jussi Kivilinna9d259172011-10-18 00:02:53 +03002628 .alg = "cbc(serpent)",
2629 .test = alg_test_skcipher,
2630 .suite = {
2631 .cipher = {
2632 .enc = {
2633 .vecs = serpent_cbc_enc_tv_template,
2634 .count = SERPENT_CBC_ENC_TEST_VECTORS
2635 },
2636 .dec = {
2637 .vecs = serpent_cbc_dec_tv_template,
2638 .count = SERPENT_CBC_DEC_TEST_VECTORS
2639 }
2640 }
2641 }
2642 }, {
Herbert Xuda7f0332008-07-31 17:08:25 +08002643 .alg = "cbc(twofish)",
Herbert Xu1aa4ecd2008-08-17 17:01:56 +10002644 .test = alg_test_skcipher,
Herbert Xuda7f0332008-07-31 17:08:25 +08002645 .suite = {
2646 .cipher = {
2647 .enc = {
2648 .vecs = tf_cbc_enc_tv_template,
2649 .count = TF_CBC_ENC_TEST_VECTORS
2650 },
2651 .dec = {
2652 .vecs = tf_cbc_dec_tv_template,
2653 .count = TF_CBC_DEC_TEST_VECTORS
2654 }
2655 }
2656 }
2657 }, {
2658 .alg = "ccm(aes)",
2659 .test = alg_test_aead,
Jarod Wilsona1915d52009-05-15 15:16:03 +10002660 .fips_allowed = 1,
Herbert Xuda7f0332008-07-31 17:08:25 +08002661 .suite = {
2662 .aead = {
2663 .enc = {
2664 .vecs = aes_ccm_enc_tv_template,
2665 .count = AES_CCM_ENC_TEST_VECTORS
2666 },
2667 .dec = {
2668 .vecs = aes_ccm_dec_tv_template,
2669 .count = AES_CCM_DEC_TEST_VECTORS
2670 }
2671 }
2672 }
2673 }, {
Martin Willi3590ebf2015-06-01 13:43:57 +02002674 .alg = "chacha20",
2675 .test = alg_test_skcipher,
2676 .suite = {
2677 .cipher = {
2678 .enc = {
2679 .vecs = chacha20_enc_tv_template,
2680 .count = CHACHA20_ENC_TEST_VECTORS
2681 },
2682 .dec = {
2683 .vecs = chacha20_enc_tv_template,
2684 .count = CHACHA20_ENC_TEST_VECTORS
2685 },
2686 }
2687 }
2688 }, {
Jussi Kivilinna93b5e862013-04-08 10:48:44 +03002689 .alg = "cmac(aes)",
Stephan Mueller8f183752015-08-19 08:42:07 +02002690 .fips_allowed = 1,
Jussi Kivilinna93b5e862013-04-08 10:48:44 +03002691 .test = alg_test_hash,
2692 .suite = {
2693 .hash = {
2694 .vecs = aes_cmac128_tv_template,
2695 .count = CMAC_AES_TEST_VECTORS
2696 }
2697 }
2698 }, {
2699 .alg = "cmac(des3_ede)",
Stephan Mueller8f183752015-08-19 08:42:07 +02002700 .fips_allowed = 1,
Jussi Kivilinna93b5e862013-04-08 10:48:44 +03002701 .test = alg_test_hash,
2702 .suite = {
2703 .hash = {
2704 .vecs = des3_ede_cmac64_tv_template,
2705 .count = CMAC_DES3_EDE_TEST_VECTORS
2706 }
2707 }
2708 }, {
Jussi Kivilinnae4483702013-04-07 16:43:56 +03002709 .alg = "compress_null",
2710 .test = alg_test_null,
2711 }, {
Ard Biesheuvelebb34722015-05-04 11:00:17 +02002712 .alg = "crc32",
2713 .test = alg_test_hash,
2714 .suite = {
2715 .hash = {
2716 .vecs = crc32_tv_template,
2717 .count = CRC32_TEST_VECTORS
2718 }
2719 }
2720 }, {
Herbert Xuda7f0332008-07-31 17:08:25 +08002721 .alg = "crc32c",
Herbert Xu8e3ee852008-11-07 14:58:52 +08002722 .test = alg_test_crc32c,
Jarod Wilsona1915d52009-05-15 15:16:03 +10002723 .fips_allowed = 1,
Herbert Xuda7f0332008-07-31 17:08:25 +08002724 .suite = {
2725 .hash = {
2726 .vecs = crc32c_tv_template,
2727 .count = CRC32C_TEST_VECTORS
2728 }
2729 }
2730 }, {
Herbert Xu684115212013-09-07 12:56:26 +10002731 .alg = "crct10dif",
2732 .test = alg_test_hash,
2733 .fips_allowed = 1,
2734 .suite = {
2735 .hash = {
2736 .vecs = crct10dif_tv_template,
2737 .count = CRCT10DIF_TEST_VECTORS
2738 }
2739 }
2740 }, {
Jarod Wilsonf7cb80f2009-05-06 17:29:17 +08002741 .alg = "ctr(aes)",
2742 .test = alg_test_skcipher,
Jarod Wilsona1915d52009-05-15 15:16:03 +10002743 .fips_allowed = 1,
Jarod Wilsonf7cb80f2009-05-06 17:29:17 +08002744 .suite = {
2745 .cipher = {
2746 .enc = {
2747 .vecs = aes_ctr_enc_tv_template,
2748 .count = AES_CTR_ENC_TEST_VECTORS
2749 },
2750 .dec = {
2751 .vecs = aes_ctr_dec_tv_template,
2752 .count = AES_CTR_DEC_TEST_VECTORS
2753 }
2754 }
2755 }
2756 }, {
Jussi Kivilinna85b63e32011-10-10 23:03:03 +03002757 .alg = "ctr(blowfish)",
2758 .test = alg_test_skcipher,
2759 .suite = {
2760 .cipher = {
2761 .enc = {
2762 .vecs = bf_ctr_enc_tv_template,
2763 .count = BF_CTR_ENC_TEST_VECTORS
2764 },
2765 .dec = {
2766 .vecs = bf_ctr_dec_tv_template,
2767 .count = BF_CTR_DEC_TEST_VECTORS
2768 }
2769 }
2770 }
2771 }, {
Jussi Kivilinna08406052012-03-05 20:26:21 +02002772 .alg = "ctr(camellia)",
2773 .test = alg_test_skcipher,
2774 .suite = {
2775 .cipher = {
2776 .enc = {
2777 .vecs = camellia_ctr_enc_tv_template,
2778 .count = CAMELLIA_CTR_ENC_TEST_VECTORS
2779 },
2780 .dec = {
2781 .vecs = camellia_ctr_dec_tv_template,
2782 .count = CAMELLIA_CTR_DEC_TEST_VECTORS
2783 }
2784 }
2785 }
2786 }, {
Johannes Goetzfrieda2c58262012-07-11 19:37:21 +02002787 .alg = "ctr(cast5)",
2788 .test = alg_test_skcipher,
2789 .suite = {
2790 .cipher = {
2791 .enc = {
2792 .vecs = cast5_ctr_enc_tv_template,
2793 .count = CAST5_CTR_ENC_TEST_VECTORS
2794 },
2795 .dec = {
2796 .vecs = cast5_ctr_dec_tv_template,
2797 .count = CAST5_CTR_DEC_TEST_VECTORS
2798 }
2799 }
2800 }
2801 }, {
Johannes Goetzfried9b8b0402012-07-11 19:38:29 +02002802 .alg = "ctr(cast6)",
2803 .test = alg_test_skcipher,
2804 .suite = {
2805 .cipher = {
2806 .enc = {
2807 .vecs = cast6_ctr_enc_tv_template,
2808 .count = CAST6_CTR_ENC_TEST_VECTORS
2809 },
2810 .dec = {
2811 .vecs = cast6_ctr_dec_tv_template,
2812 .count = CAST6_CTR_DEC_TEST_VECTORS
2813 }
2814 }
2815 }
2816 }, {
Jussi Kivilinna8163fc32012-10-20 14:53:07 +03002817 .alg = "ctr(des)",
2818 .test = alg_test_skcipher,
2819 .suite = {
2820 .cipher = {
2821 .enc = {
2822 .vecs = des_ctr_enc_tv_template,
2823 .count = DES_CTR_ENC_TEST_VECTORS
2824 },
2825 .dec = {
2826 .vecs = des_ctr_dec_tv_template,
2827 .count = DES_CTR_DEC_TEST_VECTORS
2828 }
2829 }
2830 }
2831 }, {
Jussi Kivilinnae080b172012-10-20 14:53:12 +03002832 .alg = "ctr(des3_ede)",
2833 .test = alg_test_skcipher,
2834 .suite = {
2835 .cipher = {
2836 .enc = {
2837 .vecs = des3_ede_ctr_enc_tv_template,
2838 .count = DES3_EDE_CTR_ENC_TEST_VECTORS
2839 },
2840 .dec = {
2841 .vecs = des3_ede_ctr_dec_tv_template,
2842 .count = DES3_EDE_CTR_DEC_TEST_VECTORS
2843 }
2844 }
2845 }
2846 }, {
Jussi Kivilinna9d259172011-10-18 00:02:53 +03002847 .alg = "ctr(serpent)",
2848 .test = alg_test_skcipher,
2849 .suite = {
2850 .cipher = {
2851 .enc = {
2852 .vecs = serpent_ctr_enc_tv_template,
2853 .count = SERPENT_CTR_ENC_TEST_VECTORS
2854 },
2855 .dec = {
2856 .vecs = serpent_ctr_dec_tv_template,
2857 .count = SERPENT_CTR_DEC_TEST_VECTORS
2858 }
2859 }
2860 }
2861 }, {
Jussi Kivilinna573da622011-10-10 23:03:12 +03002862 .alg = "ctr(twofish)",
2863 .test = alg_test_skcipher,
2864 .suite = {
2865 .cipher = {
2866 .enc = {
2867 .vecs = tf_ctr_enc_tv_template,
2868 .count = TF_CTR_ENC_TEST_VECTORS
2869 },
2870 .dec = {
2871 .vecs = tf_ctr_dec_tv_template,
2872 .count = TF_CTR_DEC_TEST_VECTORS
2873 }
2874 }
2875 }
2876 }, {
Herbert Xuda7f0332008-07-31 17:08:25 +08002877 .alg = "cts(cbc(aes))",
Herbert Xu1aa4ecd2008-08-17 17:01:56 +10002878 .test = alg_test_skcipher,
Herbert Xuda7f0332008-07-31 17:08:25 +08002879 .suite = {
2880 .cipher = {
2881 .enc = {
2882 .vecs = cts_mode_enc_tv_template,
2883 .count = CTS_MODE_ENC_TEST_VECTORS
2884 },
2885 .dec = {
2886 .vecs = cts_mode_dec_tv_template,
2887 .count = CTS_MODE_DEC_TEST_VECTORS
2888 }
2889 }
2890 }
2891 }, {
2892 .alg = "deflate",
2893 .test = alg_test_comp,
Milan Broz08189042012-12-06 17:16:28 +08002894 .fips_allowed = 1,
Herbert Xuda7f0332008-07-31 17:08:25 +08002895 .suite = {
2896 .comp = {
2897 .comp = {
2898 .vecs = deflate_comp_tv_template,
2899 .count = DEFLATE_COMP_TEST_VECTORS
2900 },
2901 .decomp = {
2902 .vecs = deflate_decomp_tv_template,
2903 .count = DEFLATE_DECOMP_TEST_VECTORS
2904 }
2905 }
2906 }
2907 }, {
Salvatore Benedetto802c7f12016-06-22 17:49:14 +01002908 .alg = "dh",
2909 .test = alg_test_kpp,
2910 .fips_allowed = 1,
2911 .suite = {
2912 .kpp = {
2913 .vecs = dh_tv_template,
2914 .count = DH_TEST_VECTORS
2915 }
2916 }
2917 }, {
Jussi Kivilinnae4483702013-04-07 16:43:56 +03002918 .alg = "digest_null",
2919 .test = alg_test_null,
2920 }, {
Stephan Mueller64d1cdf2014-05-31 17:25:36 +02002921 .alg = "drbg_nopr_ctr_aes128",
2922 .test = alg_test_drbg,
2923 .fips_allowed = 1,
2924 .suite = {
2925 .drbg = {
2926 .vecs = drbg_nopr_ctr_aes128_tv_template,
2927 .count = ARRAY_SIZE(drbg_nopr_ctr_aes128_tv_template)
2928 }
2929 }
2930 }, {
2931 .alg = "drbg_nopr_ctr_aes192",
2932 .test = alg_test_drbg,
2933 .fips_allowed = 1,
2934 .suite = {
2935 .drbg = {
2936 .vecs = drbg_nopr_ctr_aes192_tv_template,
2937 .count = ARRAY_SIZE(drbg_nopr_ctr_aes192_tv_template)
2938 }
2939 }
2940 }, {
2941 .alg = "drbg_nopr_ctr_aes256",
2942 .test = alg_test_drbg,
2943 .fips_allowed = 1,
2944 .suite = {
2945 .drbg = {
2946 .vecs = drbg_nopr_ctr_aes256_tv_template,
2947 .count = ARRAY_SIZE(drbg_nopr_ctr_aes256_tv_template)
2948 }
2949 }
2950 }, {
2951 /*
2952 * There is no need to specifically test the DRBG with every
2953 * backend cipher -- covered by drbg_nopr_hmac_sha256 test
2954 */
2955 .alg = "drbg_nopr_hmac_sha1",
2956 .fips_allowed = 1,
2957 .test = alg_test_null,
2958 }, {
2959 .alg = "drbg_nopr_hmac_sha256",
2960 .test = alg_test_drbg,
2961 .fips_allowed = 1,
2962 .suite = {
2963 .drbg = {
2964 .vecs = drbg_nopr_hmac_sha256_tv_template,
2965 .count =
2966 ARRAY_SIZE(drbg_nopr_hmac_sha256_tv_template)
2967 }
2968 }
2969 }, {
2970 /* covered by drbg_nopr_hmac_sha256 test */
2971 .alg = "drbg_nopr_hmac_sha384",
2972 .fips_allowed = 1,
2973 .test = alg_test_null,
2974 }, {
2975 .alg = "drbg_nopr_hmac_sha512",
2976 .test = alg_test_null,
2977 .fips_allowed = 1,
2978 }, {
2979 .alg = "drbg_nopr_sha1",
2980 .fips_allowed = 1,
2981 .test = alg_test_null,
2982 }, {
2983 .alg = "drbg_nopr_sha256",
2984 .test = alg_test_drbg,
2985 .fips_allowed = 1,
2986 .suite = {
2987 .drbg = {
2988 .vecs = drbg_nopr_sha256_tv_template,
2989 .count = ARRAY_SIZE(drbg_nopr_sha256_tv_template)
2990 }
2991 }
2992 }, {
2993 /* covered by drbg_nopr_sha256 test */
2994 .alg = "drbg_nopr_sha384",
2995 .fips_allowed = 1,
2996 .test = alg_test_null,
2997 }, {
2998 .alg = "drbg_nopr_sha512",
2999 .fips_allowed = 1,
3000 .test = alg_test_null,
3001 }, {
3002 .alg = "drbg_pr_ctr_aes128",
3003 .test = alg_test_drbg,
3004 .fips_allowed = 1,
3005 .suite = {
3006 .drbg = {
3007 .vecs = drbg_pr_ctr_aes128_tv_template,
3008 .count = ARRAY_SIZE(drbg_pr_ctr_aes128_tv_template)
3009 }
3010 }
3011 }, {
3012 /* covered by drbg_pr_ctr_aes128 test */
3013 .alg = "drbg_pr_ctr_aes192",
3014 .fips_allowed = 1,
3015 .test = alg_test_null,
3016 }, {
3017 .alg = "drbg_pr_ctr_aes256",
3018 .fips_allowed = 1,
3019 .test = alg_test_null,
3020 }, {
3021 .alg = "drbg_pr_hmac_sha1",
3022 .fips_allowed = 1,
3023 .test = alg_test_null,
3024 }, {
3025 .alg = "drbg_pr_hmac_sha256",
3026 .test = alg_test_drbg,
3027 .fips_allowed = 1,
3028 .suite = {
3029 .drbg = {
3030 .vecs = drbg_pr_hmac_sha256_tv_template,
3031 .count = ARRAY_SIZE(drbg_pr_hmac_sha256_tv_template)
3032 }
3033 }
3034 }, {
3035 /* covered by drbg_pr_hmac_sha256 test */
3036 .alg = "drbg_pr_hmac_sha384",
3037 .fips_allowed = 1,
3038 .test = alg_test_null,
3039 }, {
3040 .alg = "drbg_pr_hmac_sha512",
3041 .test = alg_test_null,
3042 .fips_allowed = 1,
3043 }, {
3044 .alg = "drbg_pr_sha1",
3045 .fips_allowed = 1,
3046 .test = alg_test_null,
3047 }, {
3048 .alg = "drbg_pr_sha256",
3049 .test = alg_test_drbg,
3050 .fips_allowed = 1,
3051 .suite = {
3052 .drbg = {
3053 .vecs = drbg_pr_sha256_tv_template,
3054 .count = ARRAY_SIZE(drbg_pr_sha256_tv_template)
3055 }
3056 }
3057 }, {
3058 /* covered by drbg_pr_sha256 test */
3059 .alg = "drbg_pr_sha384",
3060 .fips_allowed = 1,
3061 .test = alg_test_null,
3062 }, {
3063 .alg = "drbg_pr_sha512",
3064 .fips_allowed = 1,
3065 .test = alg_test_null,
3066 }, {
Herbert Xuda7f0332008-07-31 17:08:25 +08003067 .alg = "ecb(aes)",
Herbert Xu1aa4ecd2008-08-17 17:01:56 +10003068 .test = alg_test_skcipher,
Jarod Wilsona1915d52009-05-15 15:16:03 +10003069 .fips_allowed = 1,
Herbert Xuda7f0332008-07-31 17:08:25 +08003070 .suite = {
3071 .cipher = {
3072 .enc = {
3073 .vecs = aes_enc_tv_template,
3074 .count = AES_ENC_TEST_VECTORS
3075 },
3076 .dec = {
3077 .vecs = aes_dec_tv_template,
3078 .count = AES_DEC_TEST_VECTORS
3079 }
3080 }
3081 }
3082 }, {
3083 .alg = "ecb(anubis)",
Herbert Xu1aa4ecd2008-08-17 17:01:56 +10003084 .test = alg_test_skcipher,
Herbert Xuda7f0332008-07-31 17:08:25 +08003085 .suite = {
3086 .cipher = {
3087 .enc = {
3088 .vecs = anubis_enc_tv_template,
3089 .count = ANUBIS_ENC_TEST_VECTORS
3090 },
3091 .dec = {
3092 .vecs = anubis_dec_tv_template,
3093 .count = ANUBIS_DEC_TEST_VECTORS
3094 }
3095 }
3096 }
3097 }, {
3098 .alg = "ecb(arc4)",
Herbert Xu1aa4ecd2008-08-17 17:01:56 +10003099 .test = alg_test_skcipher,
Herbert Xuda7f0332008-07-31 17:08:25 +08003100 .suite = {
3101 .cipher = {
3102 .enc = {
3103 .vecs = arc4_enc_tv_template,
3104 .count = ARC4_ENC_TEST_VECTORS
3105 },
3106 .dec = {
3107 .vecs = arc4_dec_tv_template,
3108 .count = ARC4_DEC_TEST_VECTORS
3109 }
3110 }
3111 }
3112 }, {
3113 .alg = "ecb(blowfish)",
Herbert Xu1aa4ecd2008-08-17 17:01:56 +10003114 .test = alg_test_skcipher,
Herbert Xuda7f0332008-07-31 17:08:25 +08003115 .suite = {
3116 .cipher = {
3117 .enc = {
3118 .vecs = bf_enc_tv_template,
3119 .count = BF_ENC_TEST_VECTORS
3120 },
3121 .dec = {
3122 .vecs = bf_dec_tv_template,
3123 .count = BF_DEC_TEST_VECTORS
3124 }
3125 }
3126 }
3127 }, {
3128 .alg = "ecb(camellia)",
Herbert Xu1aa4ecd2008-08-17 17:01:56 +10003129 .test = alg_test_skcipher,
Herbert Xuda7f0332008-07-31 17:08:25 +08003130 .suite = {
3131 .cipher = {
3132 .enc = {
3133 .vecs = camellia_enc_tv_template,
3134 .count = CAMELLIA_ENC_TEST_VECTORS
3135 },
3136 .dec = {
3137 .vecs = camellia_dec_tv_template,
3138 .count = CAMELLIA_DEC_TEST_VECTORS
3139 }
3140 }
3141 }
3142 }, {
3143 .alg = "ecb(cast5)",
Herbert Xu1aa4ecd2008-08-17 17:01:56 +10003144 .test = alg_test_skcipher,
Herbert Xuda7f0332008-07-31 17:08:25 +08003145 .suite = {
3146 .cipher = {
3147 .enc = {
3148 .vecs = cast5_enc_tv_template,
3149 .count = CAST5_ENC_TEST_VECTORS
3150 },
3151 .dec = {
3152 .vecs = cast5_dec_tv_template,
3153 .count = CAST5_DEC_TEST_VECTORS
3154 }
3155 }
3156 }
3157 }, {
3158 .alg = "ecb(cast6)",
Herbert Xu1aa4ecd2008-08-17 17:01:56 +10003159 .test = alg_test_skcipher,
Herbert Xuda7f0332008-07-31 17:08:25 +08003160 .suite = {
3161 .cipher = {
3162 .enc = {
3163 .vecs = cast6_enc_tv_template,
3164 .count = CAST6_ENC_TEST_VECTORS
3165 },
3166 .dec = {
3167 .vecs = cast6_dec_tv_template,
3168 .count = CAST6_DEC_TEST_VECTORS
3169 }
3170 }
3171 }
3172 }, {
Jussi Kivilinnae4483702013-04-07 16:43:56 +03003173 .alg = "ecb(cipher_null)",
3174 .test = alg_test_null,
3175 }, {
Herbert Xuda7f0332008-07-31 17:08:25 +08003176 .alg = "ecb(des)",
Herbert Xu1aa4ecd2008-08-17 17:01:56 +10003177 .test = alg_test_skcipher,
Herbert Xuda7f0332008-07-31 17:08:25 +08003178 .suite = {
3179 .cipher = {
3180 .enc = {
3181 .vecs = des_enc_tv_template,
3182 .count = DES_ENC_TEST_VECTORS
3183 },
3184 .dec = {
3185 .vecs = des_dec_tv_template,
3186 .count = DES_DEC_TEST_VECTORS
3187 }
3188 }
3189 }
3190 }, {
3191 .alg = "ecb(des3_ede)",
Herbert Xu1aa4ecd2008-08-17 17:01:56 +10003192 .test = alg_test_skcipher,
Jarod Wilsona1915d52009-05-15 15:16:03 +10003193 .fips_allowed = 1,
Herbert Xuda7f0332008-07-31 17:08:25 +08003194 .suite = {
3195 .cipher = {
3196 .enc = {
3197 .vecs = des3_ede_enc_tv_template,
3198 .count = DES3_EDE_ENC_TEST_VECTORS
3199 },
3200 .dec = {
3201 .vecs = des3_ede_dec_tv_template,
3202 .count = DES3_EDE_DEC_TEST_VECTORS
3203 }
3204 }
3205 }
3206 }, {
Jussi Kivilinna66e5bd02013-01-19 13:31:36 +02003207 .alg = "ecb(fcrypt)",
3208 .test = alg_test_skcipher,
3209 .suite = {
3210 .cipher = {
3211 .enc = {
3212 .vecs = fcrypt_pcbc_enc_tv_template,
3213 .count = 1
3214 },
3215 .dec = {
3216 .vecs = fcrypt_pcbc_dec_tv_template,
3217 .count = 1
3218 }
3219 }
3220 }
3221 }, {
Herbert Xuda7f0332008-07-31 17:08:25 +08003222 .alg = "ecb(khazad)",
Herbert Xu1aa4ecd2008-08-17 17:01:56 +10003223 .test = alg_test_skcipher,
Herbert Xuda7f0332008-07-31 17:08:25 +08003224 .suite = {
3225 .cipher = {
3226 .enc = {
3227 .vecs = khazad_enc_tv_template,
3228 .count = KHAZAD_ENC_TEST_VECTORS
3229 },
3230 .dec = {
3231 .vecs = khazad_dec_tv_template,
3232 .count = KHAZAD_DEC_TEST_VECTORS
3233 }
3234 }
3235 }
3236 }, {
3237 .alg = "ecb(seed)",
Herbert Xu1aa4ecd2008-08-17 17:01:56 +10003238 .test = alg_test_skcipher,
Herbert Xuda7f0332008-07-31 17:08:25 +08003239 .suite = {
3240 .cipher = {
3241 .enc = {
3242 .vecs = seed_enc_tv_template,
3243 .count = SEED_ENC_TEST_VECTORS
3244 },
3245 .dec = {
3246 .vecs = seed_dec_tv_template,
3247 .count = SEED_DEC_TEST_VECTORS
3248 }
3249 }
3250 }
3251 }, {
3252 .alg = "ecb(serpent)",
Herbert Xu1aa4ecd2008-08-17 17:01:56 +10003253 .test = alg_test_skcipher,
Herbert Xuda7f0332008-07-31 17:08:25 +08003254 .suite = {
3255 .cipher = {
3256 .enc = {
3257 .vecs = serpent_enc_tv_template,
3258 .count = SERPENT_ENC_TEST_VECTORS
3259 },
3260 .dec = {
3261 .vecs = serpent_dec_tv_template,
3262 .count = SERPENT_DEC_TEST_VECTORS
3263 }
3264 }
3265 }
3266 }, {
3267 .alg = "ecb(tea)",
Herbert Xu1aa4ecd2008-08-17 17:01:56 +10003268 .test = alg_test_skcipher,
Herbert Xuda7f0332008-07-31 17:08:25 +08003269 .suite = {
3270 .cipher = {
3271 .enc = {
3272 .vecs = tea_enc_tv_template,
3273 .count = TEA_ENC_TEST_VECTORS
3274 },
3275 .dec = {
3276 .vecs = tea_dec_tv_template,
3277 .count = TEA_DEC_TEST_VECTORS
3278 }
3279 }
3280 }
3281 }, {
3282 .alg = "ecb(tnepres)",
Herbert Xu1aa4ecd2008-08-17 17:01:56 +10003283 .test = alg_test_skcipher,
Herbert Xuda7f0332008-07-31 17:08:25 +08003284 .suite = {
3285 .cipher = {
3286 .enc = {
3287 .vecs = tnepres_enc_tv_template,
3288 .count = TNEPRES_ENC_TEST_VECTORS
3289 },
3290 .dec = {
3291 .vecs = tnepres_dec_tv_template,
3292 .count = TNEPRES_DEC_TEST_VECTORS
3293 }
3294 }
3295 }
3296 }, {
3297 .alg = "ecb(twofish)",
Herbert Xu1aa4ecd2008-08-17 17:01:56 +10003298 .test = alg_test_skcipher,
Herbert Xuda7f0332008-07-31 17:08:25 +08003299 .suite = {
3300 .cipher = {
3301 .enc = {
3302 .vecs = tf_enc_tv_template,
3303 .count = TF_ENC_TEST_VECTORS
3304 },
3305 .dec = {
3306 .vecs = tf_dec_tv_template,
3307 .count = TF_DEC_TEST_VECTORS
3308 }
3309 }
3310 }
3311 }, {
3312 .alg = "ecb(xeta)",
Herbert Xu1aa4ecd2008-08-17 17:01:56 +10003313 .test = alg_test_skcipher,
Herbert Xuda7f0332008-07-31 17:08:25 +08003314 .suite = {
3315 .cipher = {
3316 .enc = {
3317 .vecs = xeta_enc_tv_template,
3318 .count = XETA_ENC_TEST_VECTORS
3319 },
3320 .dec = {
3321 .vecs = xeta_dec_tv_template,
3322 .count = XETA_DEC_TEST_VECTORS
3323 }
3324 }
3325 }
3326 }, {
3327 .alg = "ecb(xtea)",
Herbert Xu1aa4ecd2008-08-17 17:01:56 +10003328 .test = alg_test_skcipher,
Herbert Xuda7f0332008-07-31 17:08:25 +08003329 .suite = {
3330 .cipher = {
3331 .enc = {
3332 .vecs = xtea_enc_tv_template,
3333 .count = XTEA_ENC_TEST_VECTORS
3334 },
3335 .dec = {
3336 .vecs = xtea_dec_tv_template,
3337 .count = XTEA_DEC_TEST_VECTORS
3338 }
3339 }
3340 }
3341 }, {
		.alg = "ecdh",
		.test = alg_test_kpp,
		.fips_allowed = 1,
		.suite = {
			.kpp = {
				.vecs = ecdh_tv_template,
				.count = ECDH_TEST_VECTORS
			}
		}
	}, {
		.alg = "gcm(aes)",
		.test = alg_test_aead,
		.fips_allowed = 1,
		.suite = {
			.aead = {
				.enc = {
					.vecs = aes_gcm_enc_tv_template,
					.count = AES_GCM_ENC_TEST_VECTORS
				},
				.dec = {
					.vecs = aes_gcm_dec_tv_template,
					.count = AES_GCM_DEC_TEST_VECTORS
				}
			}
		}
	}, {
		.alg = "ghash",
		.test = alg_test_hash,
		.fips_allowed = 1,
		.suite = {
			.hash = {
				.vecs = ghash_tv_template,
				.count = GHASH_TEST_VECTORS
			}
		}
	}, {
		.alg = "hmac(crc32)",
		.test = alg_test_hash,
		.suite = {
			.hash = {
				.vecs = bfin_crc_tv_template,
				.count = BFIN_CRC_TEST_VECTORS
			}
		}
	}, {
		.alg = "hmac(md5)",
		.test = alg_test_hash,
		.suite = {
			.hash = {
				.vecs = hmac_md5_tv_template,
				.count = HMAC_MD5_TEST_VECTORS
			}
		}
	}, {
		.alg = "hmac(rmd128)",
		.test = alg_test_hash,
		.suite = {
			.hash = {
				.vecs = hmac_rmd128_tv_template,
				.count = HMAC_RMD128_TEST_VECTORS
			}
		}
	}, {
		.alg = "hmac(rmd160)",
		.test = alg_test_hash,
		.suite = {
			.hash = {
				.vecs = hmac_rmd160_tv_template,
				.count = HMAC_RMD160_TEST_VECTORS
			}
		}
	}, {
		.alg = "hmac(sha1)",
		.test = alg_test_hash,
		.fips_allowed = 1,
		.suite = {
			.hash = {
				.vecs = hmac_sha1_tv_template,
				.count = HMAC_SHA1_TEST_VECTORS
			}
		}
	}, {
		.alg = "hmac(sha224)",
		.test = alg_test_hash,
		.fips_allowed = 1,
		.suite = {
			.hash = {
				.vecs = hmac_sha224_tv_template,
				.count = HMAC_SHA224_TEST_VECTORS
			}
		}
	}, {
		.alg = "hmac(sha256)",
		.test = alg_test_hash,
		.fips_allowed = 1,
		.suite = {
			.hash = {
				.vecs = hmac_sha256_tv_template,
				.count = HMAC_SHA256_TEST_VECTORS
			}
		}
	}, {
		.alg = "hmac(sha3-224)",
		.test = alg_test_hash,
		.fips_allowed = 1,
		.suite = {
			.hash = {
				.vecs = hmac_sha3_224_tv_template,
				.count = HMAC_SHA3_224_TEST_VECTORS
			}
		}
	}, {
		.alg = "hmac(sha3-256)",
		.test = alg_test_hash,
		.fips_allowed = 1,
		.suite = {
			.hash = {
				.vecs = hmac_sha3_256_tv_template,
				.count = HMAC_SHA3_256_TEST_VECTORS
			}
		}
	}, {
		.alg = "hmac(sha3-384)",
		.test = alg_test_hash,
		.fips_allowed = 1,
		.suite = {
			.hash = {
				.vecs = hmac_sha3_384_tv_template,
				.count = HMAC_SHA3_384_TEST_VECTORS
			}
		}
	}, {
		.alg = "hmac(sha3-512)",
		.test = alg_test_hash,
		.fips_allowed = 1,
		.suite = {
			.hash = {
				.vecs = hmac_sha3_512_tv_template,
				.count = HMAC_SHA3_512_TEST_VECTORS
			}
		}
	}, {
		.alg = "hmac(sha384)",
		.test = alg_test_hash,
		.fips_allowed = 1,
		.suite = {
			.hash = {
				.vecs = hmac_sha384_tv_template,
				.count = HMAC_SHA384_TEST_VECTORS
			}
		}
	}, {
		.alg = "hmac(sha512)",
		.test = alg_test_hash,
		.fips_allowed = 1,
		.suite = {
			.hash = {
				.vecs = hmac_sha512_tv_template,
				.count = HMAC_SHA512_TEST_VECTORS
			}
		}
	}, {
		.alg = "jitterentropy_rng",
		.fips_allowed = 1,
		.test = alg_test_null,
	}, {
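		/* AES key wrap (RFC 3394); counts come straight from ARRAY_SIZE() */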
		.alg = "kw(aes)",
		.test = alg_test_skcipher,
		.fips_allowed = 1,
		.suite = {
			.cipher = {
				.enc = {
					.vecs = aes_kw_enc_tv_template,
					.count = ARRAY_SIZE(aes_kw_enc_tv_template)
				},
				.dec = {
					.vecs = aes_kw_dec_tv_template,
					.count = ARRAY_SIZE(aes_kw_dec_tv_template)
				}
			}
		}
	}, {
		.alg = "lrw(aes)",
		.test = alg_test_skcipher,
		.suite = {
			.cipher = {
				.enc = {
					.vecs = aes_lrw_enc_tv_template,
					.count = AES_LRW_ENC_TEST_VECTORS
				},
				.dec = {
					.vecs = aes_lrw_dec_tv_template,
					.count = AES_LRW_DEC_TEST_VECTORS
				}
			}
		}
	}, {
		.alg = "lrw(camellia)",
		.test = alg_test_skcipher,
		.suite = {
			.cipher = {
				.enc = {
					.vecs = camellia_lrw_enc_tv_template,
					.count = CAMELLIA_LRW_ENC_TEST_VECTORS
				},
				.dec = {
					.vecs = camellia_lrw_dec_tv_template,
					.count = CAMELLIA_LRW_DEC_TEST_VECTORS
				}
			}
		}
	}, {
		.alg = "lrw(cast6)",
		.test = alg_test_skcipher,
		.suite = {
			.cipher = {
				.enc = {
					.vecs = cast6_lrw_enc_tv_template,
					.count = CAST6_LRW_ENC_TEST_VECTORS
				},
				.dec = {
					.vecs = cast6_lrw_dec_tv_template,
					.count = CAST6_LRW_DEC_TEST_VECTORS
				}
			}
		}
	}, {
		.alg = "lrw(serpent)",
		.test = alg_test_skcipher,
		.suite = {
			.cipher = {
				.enc = {
					.vecs = serpent_lrw_enc_tv_template,
					.count = SERPENT_LRW_ENC_TEST_VECTORS
				},
				.dec = {
					.vecs = serpent_lrw_dec_tv_template,
					.count = SERPENT_LRW_DEC_TEST_VECTORS
				}
			}
		}
	}, {
		.alg = "lrw(twofish)",
		.test = alg_test_skcipher,
		.suite = {
			.cipher = {
				.enc = {
					.vecs = tf_lrw_enc_tv_template,
					.count = TF_LRW_ENC_TEST_VECTORS
				},
				.dec = {
					.vecs = tf_lrw_dec_tv_template,
					.count = TF_LRW_DEC_TEST_VECTORS
				}
			}
		}
	}, {
		.alg = "lz4",
		.test = alg_test_comp,
		.fips_allowed = 1,
		.suite = {
			.comp = {
				.comp = {
					.vecs = lz4_comp_tv_template,
					.count = LZ4_COMP_TEST_VECTORS
				},
				.decomp = {
					.vecs = lz4_decomp_tv_template,
					.count = LZ4_DECOMP_TEST_VECTORS
				}
			}
		}
	}, {
		.alg = "lz4hc",
		.test = alg_test_comp,
		.fips_allowed = 1,
		.suite = {
			.comp = {
				.comp = {
					.vecs = lz4hc_comp_tv_template,
					.count = LZ4HC_COMP_TEST_VECTORS
				},
				.decomp = {
					.vecs = lz4hc_decomp_tv_template,
					.count = LZ4HC_DECOMP_TEST_VECTORS
				}
			}
		}
	}, {
		.alg = "lzo",
		.test = alg_test_comp,
		.fips_allowed = 1,
		.suite = {
			.comp = {
				.comp = {
					.vecs = lzo_comp_tv_template,
					.count = LZO_COMP_TEST_VECTORS
				},
				.decomp = {
					.vecs = lzo_decomp_tv_template,
					.count = LZO_DECOMP_TEST_VECTORS
				}
			}
		}
	}, {
		.alg = "md4",
		.test = alg_test_hash,
		.suite = {
			.hash = {
				.vecs = md4_tv_template,
				.count = MD4_TEST_VECTORS
			}
		}
	}, {
		.alg = "md5",
		.test = alg_test_hash,
		.suite = {
			.hash = {
				.vecs = md5_tv_template,
				.count = MD5_TEST_VECTORS
			}
		}
	}, {
		.alg = "michael_mic",
		.test = alg_test_hash,
		.suite = {
			.hash = {
				.vecs = michael_mic_tv_template,
				.count = MICHAEL_MIC_TEST_VECTORS
			}
		}
	}, {
		.alg = "ofb(aes)",
		.test = alg_test_skcipher,
		.fips_allowed = 1,
		.suite = {
			.cipher = {
				.enc = {
					.vecs = aes_ofb_enc_tv_template,
					.count = AES_OFB_ENC_TEST_VECTORS
				},
				.dec = {
					.vecs = aes_ofb_dec_tv_template,
					.count = AES_OFB_DEC_TEST_VECTORS
				}
			}
		}
	}, {
		.alg = "pcbc(fcrypt)",
		.test = alg_test_skcipher,
		.suite = {
			.cipher = {
				.enc = {
					.vecs = fcrypt_pcbc_enc_tv_template,
					.count = FCRYPT_ENC_TEST_VECTORS
				},
				.dec = {
					.vecs = fcrypt_pcbc_dec_tv_template,
					.count = FCRYPT_DEC_TEST_VECTORS
				}
			}
		}
	}, {
		.alg = "poly1305",
		.test = alg_test_hash,
		.suite = {
			.hash = {
				.vecs = poly1305_tv_template,
				.count = POLY1305_TEST_VECTORS
			}
		}
	}, {
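		/* AES-CTR with the RFC 3686 nonce/IV construction (IPsec ESP) */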
		.alg = "rfc3686(ctr(aes))",
		.test = alg_test_skcipher,
		.fips_allowed = 1,
		.suite = {
			.cipher = {
				.enc = {
					.vecs = aes_ctr_rfc3686_enc_tv_template,
					.count = AES_CTR_3686_ENC_TEST_VECTORS
				},
				.dec = {
					.vecs = aes_ctr_rfc3686_dec_tv_template,
					.count = AES_CTR_3686_DEC_TEST_VECTORS
				}
			}
		}
	}, {
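		/* AES-GCM keyed per RFC 4106 for IPsec ESP */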
		.alg = "rfc4106(gcm(aes))",
		.test = alg_test_aead,
		.fips_allowed = 1,
		.suite = {
			.aead = {
				.enc = {
					.vecs = aes_gcm_rfc4106_enc_tv_template,
					.count = AES_GCM_4106_ENC_TEST_VECTORS
				},
				.dec = {
					.vecs = aes_gcm_rfc4106_dec_tv_template,
					.count = AES_GCM_4106_DEC_TEST_VECTORS
				}
			}
		}
	}, {
		.alg = "rfc4309(ccm(aes))",
		.test = alg_test_aead,
		.fips_allowed = 1,
		.suite = {
			.aead = {
				.enc = {
					.vecs = aes_ccm_rfc4309_enc_tv_template,
					.count = AES_CCM_4309_ENC_TEST_VECTORS
				},
				.dec = {
					.vecs = aes_ccm_rfc4309_dec_tv_template,
					.count = AES_CCM_4309_DEC_TEST_VECTORS
				}
			}
		}
	}, {
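		/* GMAC: authentication-only GCM per RFC 4543 */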
		.alg = "rfc4543(gcm(aes))",
		.test = alg_test_aead,
		.suite = {
			.aead = {
				.enc = {
					.vecs = aes_gcm_rfc4543_enc_tv_template,
					.count = AES_GCM_4543_ENC_TEST_VECTORS
				},
				.dec = {
					.vecs = aes_gcm_rfc4543_dec_tv_template,
					.count = AES_GCM_4543_DEC_TEST_VECTORS
				},
			}
		}
	}, {
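		/* ChaCha20-Poly1305 AEAD as specified in RFC 7539 */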
		.alg = "rfc7539(chacha20,poly1305)",
		.test = alg_test_aead,
		.suite = {
			.aead = {
				.enc = {
					.vecs = rfc7539_enc_tv_template,
					.count = RFC7539_ENC_TEST_VECTORS
				},
				.dec = {
					.vecs = rfc7539_dec_tv_template,
					.count = RFC7539_DEC_TEST_VECTORS
				},
			}
		}
	}, {
		.alg = "rfc7539esp(chacha20,poly1305)",
		.test = alg_test_aead,
		.suite = {
			.aead = {
				.enc = {
					.vecs = rfc7539esp_enc_tv_template,
					.count = RFC7539ESP_ENC_TEST_VECTORS
				},
				.dec = {
					.vecs = rfc7539esp_dec_tv_template,
					.count = RFC7539ESP_DEC_TEST_VECTORS
				},
			}
		}
	}, {
		.alg = "rmd128",
		.test = alg_test_hash,
		.suite = {
			.hash = {
				.vecs = rmd128_tv_template,
				.count = RMD128_TEST_VECTORS
			}
		}
	}, {
		.alg = "rmd160",
		.test = alg_test_hash,
		.suite = {
			.hash = {
				.vecs = rmd160_tv_template,
				.count = RMD160_TEST_VECTORS
			}
		}
	}, {
		.alg = "rmd256",
		.test = alg_test_hash,
		.suite = {
			.hash = {
				.vecs = rmd256_tv_template,
				.count = RMD256_TEST_VECTORS
			}
		}
	}, {
		.alg = "rmd320",
		.test = alg_test_hash,
		.suite = {
			.hash = {
				.vecs = rmd320_tv_template,
				.count = RMD320_TEST_VECTORS
			}
		}
	}, {
		.alg = "rsa",
		.test = alg_test_akcipher,
		.fips_allowed = 1,
		.suite = {
			.akcipher = {
				.vecs = rsa_tv_template,
				.count = RSA_TEST_VECTORS
			}
		}
	}, {
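		/* stream cipher: only .enc vectors; decryption is the same XOR */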
		.alg = "salsa20",
		.test = alg_test_skcipher,
		.suite = {
			.cipher = {
				.enc = {
					.vecs = salsa20_stream_enc_tv_template,
					.count = SALSA20_STREAM_ENC_TEST_VECTORS
				}
			}
		}
	}, {
		.alg = "sha1",
		.test = alg_test_hash,
		.fips_allowed = 1,
		.suite = {
			.hash = {
				.vecs = sha1_tv_template,
				.count = SHA1_TEST_VECTORS
			}
		}
	}, {
		.alg = "sha224",
		.test = alg_test_hash,
		.fips_allowed = 1,
		.suite = {
			.hash = {
				.vecs = sha224_tv_template,
				.count = SHA224_TEST_VECTORS
			}
		}
	}, {
		.alg = "sha256",
		.test = alg_test_hash,
		.fips_allowed = 1,
		.suite = {
			.hash = {
				.vecs = sha256_tv_template,
				.count = SHA256_TEST_VECTORS
			}
		}
	}, {
		.alg = "sha3-224",
		.test = alg_test_hash,
		.fips_allowed = 1,
		.suite = {
			.hash = {
				.vecs = sha3_224_tv_template,
				.count = SHA3_224_TEST_VECTORS
			}
		}
	}, {
		.alg = "sha3-256",
		.test = alg_test_hash,
		.fips_allowed = 1,
		.suite = {
			.hash = {
				.vecs = sha3_256_tv_template,
				.count = SHA3_256_TEST_VECTORS
			}
		}
	}, {
		.alg = "sha3-384",
		.test = alg_test_hash,
		.fips_allowed = 1,
		.suite = {
			.hash = {
				.vecs = sha3_384_tv_template,
				.count = SHA3_384_TEST_VECTORS
			}
		}
	}, {
		.alg = "sha3-512",
		.test = alg_test_hash,
		.fips_allowed = 1,
		.suite = {
			.hash = {
				.vecs = sha3_512_tv_template,
				.count = SHA3_512_TEST_VECTORS
			}
		}
	}, {
		.alg = "sha384",
		.test = alg_test_hash,
		.fips_allowed = 1,
		.suite = {
			.hash = {
				.vecs = sha384_tv_template,
				.count = SHA384_TEST_VECTORS
			}
		}
	}, {
		.alg = "sha512",
		.test = alg_test_hash,
		.fips_allowed = 1,
		.suite = {
			.hash = {
				.vecs = sha512_tv_template,
				.count = SHA512_TEST_VECTORS
			}
		}
	}, {
		.alg = "tgr128",
		.test = alg_test_hash,
		.suite = {
			.hash = {
				.vecs = tgr128_tv_template,
				.count = TGR128_TEST_VECTORS
			}
		}
	}, {
		.alg = "tgr160",
		.test = alg_test_hash,
		.suite = {
			.hash = {
				.vecs = tgr160_tv_template,
				.count = TGR160_TEST_VECTORS
			}
		}
	}, {
		.alg = "tgr192",
		.test = alg_test_hash,
		.suite = {
			.hash = {
				.vecs = tgr192_tv_template,
				.count = TGR192_TEST_VECTORS
			}
		}
	}, {
		.alg = "vmac(aes)",
		.test = alg_test_hash,
		.suite = {
			.hash = {
				.vecs = aes_vmac128_tv_template,
				.count = VMAC_AES_TEST_VECTORS
			}
		}
	}, {
		.alg = "wp256",
		.test = alg_test_hash,
		.suite = {
			.hash = {
				.vecs = wp256_tv_template,
				.count = WP256_TEST_VECTORS
			}
		}
	}, {
		.alg = "wp384",
		.test = alg_test_hash,
		.suite = {
			.hash = {
				.vecs = wp384_tv_template,
				.count = WP384_TEST_VECTORS
			}
		}
	}, {
		.alg = "wp512",
		.test = alg_test_hash,
		.suite = {
			.hash = {
				.vecs = wp512_tv_template,
				.count = WP512_TEST_VECTORS
			}
		}
	}, {
		.alg = "xcbc(aes)",
		.test = alg_test_hash,
		.suite = {
			.hash = {
				.vecs = aes_xcbc128_tv_template,
				.count = XCBC_AES_TEST_VECTORS
			}
		}
	}, {
		.alg = "xts(aes)",
		.test = alg_test_skcipher,
		.fips_allowed = 1,
		.suite = {
			.cipher = {
				.enc = {
					.vecs = aes_xts_enc_tv_template,
					.count = AES_XTS_ENC_TEST_VECTORS
				},
				.dec = {
					.vecs = aes_xts_dec_tv_template,
					.count = AES_XTS_DEC_TEST_VECTORS
				}
			}
		}
	}, {
		.alg = "xts(camellia)",
		.test = alg_test_skcipher,
		.suite = {
			.cipher = {
				.enc = {
					.vecs = camellia_xts_enc_tv_template,
					.count = CAMELLIA_XTS_ENC_TEST_VECTORS
				},
				.dec = {
					.vecs = camellia_xts_dec_tv_template,
					.count = CAMELLIA_XTS_DEC_TEST_VECTORS
				}
			}
		}
	}, {
		.alg = "xts(cast6)",
		.test = alg_test_skcipher,
		.suite = {
			.cipher = {
				.enc = {
					.vecs = cast6_xts_enc_tv_template,
					.count = CAST6_XTS_ENC_TEST_VECTORS
				},
				.dec = {
					.vecs = cast6_xts_dec_tv_template,
					.count = CAST6_XTS_DEC_TEST_VECTORS
				}
			}
		}
	}, {
		.alg = "xts(serpent)",
		.test = alg_test_skcipher,
		.suite = {
			.cipher = {
				.enc = {
					.vecs = serpent_xts_enc_tv_template,
					.count = SERPENT_XTS_ENC_TEST_VECTORS
				},
				.dec = {
					.vecs = serpent_xts_dec_tv_template,
					.count = SERPENT_XTS_DEC_TEST_VECTORS
				}
			}
		}
	}, {
		.alg = "xts(twofish)",
		.test = alg_test_skcipher,
		.suite = {
			.cipher = {
				.enc = {
					.vecs = tf_xts_enc_tv_template,
					.count = TF_XTS_ENC_TEST_VECTORS
				},
				.dec = {
					.vecs = tf_xts_dec_tv_template,
					.count = TF_XTS_DEC_TEST_VECTORS
				}
			}
		}
	}
};

static bool alg_test_descs_checked;
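
/*
 * alg_test_descs[] must stay sorted by .alg, since alg_find_test() binary
 * searches it.  Ordering and uniqueness are verified once, on first use.
 */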
static void alg_test_descs_check_order(void)
{
	int i;

	/* only check once */
	if (alg_test_descs_checked)
		return;

	alg_test_descs_checked = true;

	for (i = 1; i < ARRAY_SIZE(alg_test_descs); i++) {
		int diff = strcmp(alg_test_descs[i - 1].alg,
				  alg_test_descs[i].alg);

		if (WARN_ON(diff > 0)) {
			pr_warn("testmgr: alg_test_descs entries in wrong order: '%s' before '%s'\n",
				alg_test_descs[i - 1].alg,
				alg_test_descs[i].alg);
		}

		if (WARN_ON(diff == 0)) {
			pr_warn("testmgr: duplicate alg_test_descs entry: '%s'\n",
				alg_test_descs[i].alg);
		}
	}
}
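
/*
 * Binary search over the sorted alg_test_descs[] table; returns the index
 * of the entry whose .alg matches exactly, or -1 if there is none.
 */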
static int alg_find_test(const char *alg)
{
	int start = 0;
	int end = ARRAY_SIZE(alg_test_descs);

	while (start < end) {
		int i = (start + end) / 2;
		int diff = strcmp(alg_test_descs[i].alg, alg);

		if (diff > 0) {
			end = i;
			continue;
		}

		if (diff < 0) {
			start = i + 1;
			continue;
		}

		return i;
	}

	return -1;
}
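
/*
 * Called by cryptomgr when an algorithm instance is registered: @driver is
 * the implementation's cra_driver_name, @alg its cra_name.  Bare block
 * ciphers are tested through their "ecb(...)" wrapping; everything else is
 * looked up under both names.  In FIPS mode a failed self-test panics.
 */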
int alg_test(const char *driver, const char *alg, u32 type, u32 mask)
{
	int i;
	int j;
	int rc;

	if (!fips_enabled && notests) {
		printk_once(KERN_INFO "alg: self-tests disabled\n");
		return 0;
	}

	alg_test_descs_check_order();

	if ((type & CRYPTO_ALG_TYPE_MASK) == CRYPTO_ALG_TYPE_CIPHER) {
		char nalg[CRYPTO_MAX_ALG_NAME];

		if (snprintf(nalg, sizeof(nalg), "ecb(%s)", alg) >=
		    sizeof(nalg))
			return -ENAMETOOLONG;

		i = alg_find_test(nalg);
		if (i < 0)
			goto notest;

		if (fips_enabled && !alg_test_descs[i].fips_allowed)
			goto non_fips_alg;

		rc = alg_test_cipher(alg_test_descs + i, driver, type, mask);
		goto test_done;
	}
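	/*
	 * Look the algorithm up both under its generic name and under the
	 * driver name, so that any driver-specific test entry is exercised
	 * too; run whichever entries exist and merge their results.
	 */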
	i = alg_find_test(alg);
	j = alg_find_test(driver);
	if (i < 0 && j < 0)
		goto notest;

	if (fips_enabled && ((i >= 0 && !alg_test_descs[i].fips_allowed) ||
			     (j >= 0 && !alg_test_descs[j].fips_allowed)))
		goto non_fips_alg;

	rc = 0;
	if (i >= 0)
		rc |= alg_test_descs[i].test(alg_test_descs + i, driver,
					     type, mask);
	if (j >= 0 && j != i)
		rc |= alg_test_descs[j].test(alg_test_descs + j, driver,
					     type, mask);

test_done:
	if (fips_enabled && rc)
		panic("%s: %s alg self test failed in fips mode!\n", driver, alg);

	if (fips_enabled && !rc)
		pr_info("alg: self-tests for %s (%s) passed\n", driver, alg);

	return rc;

notest:
	printk(KERN_INFO "alg: No test for %s (%s)\n", alg, driver);
	return 0;
non_fips_alg:
	return -EINVAL;
}

#endif /* CONFIG_CRYPTO_MANAGER_DISABLE_TESTS */

EXPORT_SYMBOL_GPL(alg_test);