blob: a366cb3e8aa1840760f86c5767a906f9a88f6387 [file] [log] [blame]
Thomas Gleixner2874c5f2019-05-27 08:55:01 +02001// SPDX-License-Identifier: GPL-2.0-or-later
Herbert Xucce9e062006-08-21 21:08:13 +10002/*
3 * Cryptographic API for algorithms (i.e., low-level API).
4 *
5 * Copyright (c) 2006 Herbert Xu <herbert@gondor.apana.org.au>
Herbert Xucce9e062006-08-21 21:08:13 +10006 */
7
Salvatore Mesoraca13c935b2018-04-09 15:54:46 +02008#include <crypto/algapi.h>
Herbert Xu6bfd4802006-09-21 11:39:29 +10009#include <linux/err.h>
Herbert Xucce9e062006-08-21 21:08:13 +100010#include <linux/errno.h>
Herbert Xu3133d762015-04-22 13:25:53 +080011#include <linux/fips.h>
Herbert Xucce9e062006-08-21 21:08:13 +100012#include <linux/init.h>
13#include <linux/kernel.h>
Herbert Xu4cc77202006-08-06 21:16:34 +100014#include <linux/list.h>
Herbert Xucce9e062006-08-21 21:08:13 +100015#include <linux/module.h>
Herbert Xu7fed0bf2006-08-06 23:10:45 +100016#include <linux/rtnetlink.h>
Tejun Heo5a0e3ad2010-03-24 17:04:11 +090017#include <linux/slab.h>
Herbert Xucce9e062006-08-21 21:08:13 +100018#include <linux/string.h>
19
20#include "internal.h"
21
Herbert Xu4cc77202006-08-06 21:16:34 +100022static LIST_HEAD(crypto_template_list);
23
Jarod Wilson002c77a2014-07-02 15:37:30 -040024static inline void crypto_check_module_sig(struct module *mod)
25{
Herbert Xu59afdc72015-04-22 11:28:46 +080026 if (fips_enabled && mod && !module_sig_ok(mod))
Jarod Wilson002c77a2014-07-02 15:37:30 -040027 panic("Module %s signature verification failed in FIPS mode\n",
Herbert Xubd4a7c62015-04-23 14:48:05 +080028 module_name(mod));
Jarod Wilson002c77a2014-07-02 15:37:30 -040029}
30
/*
 * Validate the static properties of an algorithm before registration.
 * Returns 0 and initializes the refcount on success, -EINVAL otherwise.
 */
static int crypto_check_alg(struct crypto_alg *alg)
{
	crypto_check_module_sig(alg->cra_module);

	/* Both the generic and the driver-specific name must be set. */
	if (!alg->cra_name[0] || !alg->cra_driver_name[0])
		return -EINVAL;

	/* An alignmask must be a power of two minus one. */
	if (alg->cra_alignmask & (alg->cra_alignmask + 1))
		return -EINVAL;

	/* General maximums for all algs. */
	if (alg->cra_alignmask > MAX_ALGAPI_ALIGNMASK)
		return -EINVAL;

	if (alg->cra_blocksize > MAX_ALGAPI_BLOCKSIZE)
		return -EINVAL;

	/* Lower maximums for specific alg types. */
	if (!alg->cra_type && (alg->cra_flags & CRYPTO_ALG_TYPE_MASK) ==
			       CRYPTO_ALG_TYPE_CIPHER) {
		if (alg->cra_alignmask > MAX_CIPHER_ALIGNMASK)
			return -EINVAL;

		if (alg->cra_blocksize > MAX_CIPHER_BLOCKSIZE)
			return -EINVAL;
	}

	if (alg->cra_priority < 0)
		return -EINVAL;

	/* The registration itself holds the initial reference. */
	refcount_set(&alg->cra_refcnt, 1);

	return 0;
}
65
/* Free an instance via its type-specific destructor. */
static void crypto_free_instance(struct crypto_instance *inst)
{
	inst->alg.cra_type->free(inst);
}
70
/*
 * cra_destroy callback for template instances: free the instance and
 * drop the reference on the template that was taken when the instance
 * was moved off the template's list (see crypto_remove_instance()).
 */
static void crypto_destroy_instance(struct crypto_alg *alg)
{
	/* An instance embeds its alg as the first member, so cast back. */
	struct crypto_instance *inst = (void *)alg;
	struct crypto_template *tmpl = inst->tmpl;

	crypto_free_instance(inst);
	crypto_tmpl_put(tmpl);
}
79
/*
 * This function adds a spawn to the list secondary_spawns which
 * will be used at the end of crypto_remove_spawns to unregister
 * instances, unless the spawn happens to be one that is depended
 * on by the new algorithm (nalg in crypto_remove_spawns).
 *
 * This function is also responsible for resurrecting any algorithms
 * in the dependency chain of nalg by unsetting n->dead.
 */
static struct list_head *crypto_more_spawns(struct crypto_alg *alg,
					    struct list_head *stack,
					    struct list_head *top,
					    struct list_head *secondary_spawns)
{
	struct crypto_spawn *spawn, *n;

	/* Pop the most recently visited spawn off the DFS stack. */
	spawn = list_first_entry_or_null(stack, struct crypto_spawn, list);
	if (!spawn)
		return NULL;

	n = list_prev_entry(spawn, list);
	list_move(&spawn->list, secondary_spawns);

	/* Stack exhausted: resume the walk from the top-level list. */
	if (list_is_last(&n->list, stack))
		return top;

	n = list_next_entry(n, list);
	/* A live spawn keeps its parent in the chain alive too. */
	if (!spawn->dead)
		n->dead = false;

	return &n->inst->alg.cra_users;
}
112
/*
 * Mark a template instance dead and move it onto @list so the caller
 * can destroy it after dropping crypto_alg_sem.  The template reference
 * taken here is released later by crypto_destroy_instance().
 * Caller must hold crypto_alg_sem for writing.
 */
static void crypto_remove_instance(struct crypto_instance *inst,
				   struct list_head *list)
{
	struct crypto_template *tmpl = inst->tmpl;

	if (crypto_is_dead(&inst->alg))
		return;

	inst->alg.cra_flags |= CRYPTO_ALG_DEAD;

	/* If the template is already gone there is nothing to unlink. */
	if (!tmpl || !crypto_tmpl_get(tmpl))
		return;

	list_move(&inst->alg.cra_list, list);
	hlist_del(&inst->list);
	inst->alg.cra_destroy = crypto_destroy_instance;

	/* All users must have been removed by crypto_remove_spawns(). */
	BUG_ON(!list_empty(&inst->alg.cra_users));
}
132
/*
 * Given an algorithm alg, remove all algorithms that depend on it
 * through spawns.  If nalg is not null, then exempt any algorithms
 * that is depended on by nalg.  This is useful when nalg itself
 * depends on alg.
 *
 * Instances to be destroyed are collected on @list; the caller is
 * expected to pass that list to crypto_remove_final() after dropping
 * crypto_alg_sem.  Caller must hold crypto_alg_sem for writing.
 */
void crypto_remove_spawns(struct crypto_alg *alg, struct list_head *list,
			  struct crypto_alg *nalg)
{
	u32 new_type = (nalg ?: alg)->cra_flags;
	struct crypto_spawn *spawn, *n;
	LIST_HEAD(secondary_spawns);
	struct list_head *spawns;
	LIST_HEAD(stack);
	LIST_HEAD(top);

	/* Seed the walk with alg's direct users that match by type/mask. */
	spawns = &alg->cra_users;
	list_for_each_entry_safe(spawn, n, spawns, list) {
		if ((spawn->alg->cra_flags ^ new_type) & spawn->mask)
			continue;

		list_move(&spawn->list, &top);
	}

	/*
	 * Perform a depth-first walk starting from alg through
	 * the cra_users tree.  The list stack records the path
	 * from alg to the current spawn.
	 */
	spawns = &top;
	do {
		while (!list_empty(spawns)) {
			struct crypto_instance *inst;

			spawn = list_first_entry(spawns, struct crypto_spawn,
						 list);
			inst = spawn->inst;

			list_move(&spawn->list, &stack);
			/* Exempt spawns that belong to nalg itself. */
			spawn->dead = !spawn->registered || &inst->alg != nalg;

			if (!spawn->registered)
				break;

			BUG_ON(&inst->alg == alg);

			if (&inst->alg == nalg)
				break;

			spawns = &inst->alg.cra_users;

			/*
			 * Even if spawn->registered is true, the
			 * instance itself may still be unregistered.
			 * This is because it may have failed during
			 * registration.  Therefore we still need to
			 * make the following test.
			 *
			 * We may encounter an unregistered instance here, since
			 * an instance's spawns are set up prior to the instance
			 * being registered.  An unregistered instance will have
			 * NULL ->cra_users.next, since ->cra_users isn't
			 * properly initialized until registration.  But an
			 * unregistered instance cannot have any users, so treat
			 * it the same as ->cra_users being empty.
			 */
			if (spawns->next == NULL)
				break;
		}
	} while ((spawns = crypto_more_spawns(alg, &stack, &top,
					      &secondary_spawns)));

	/*
	 * Remove all instances that are marked as dead.  Also
	 * complete the resurrection of the others by moving them
	 * back to the cra_users list.
	 */
	list_for_each_entry_safe(spawn, n, &secondary_spawns, list) {
		if (!spawn->dead)
			list_move(&spawn->list, &spawn->alg->cra_users);
		else if (spawn->registered)
			crypto_remove_instance(spawn->inst, list);
	}
}
EXPORT_SYMBOL_GPL(crypto_remove_spawns);
Herbert Xu6bfd4802006-09-21 11:39:29 +1000218
/*
 * Allocate the larval that stands in for @alg while it undergoes
 * self-testing.  Returns NULL when no crypto manager is configured
 * (no testing will happen), or an ERR_PTR on failure.
 */
static struct crypto_larval *crypto_alloc_test_larval(struct crypto_alg *alg)
{
	struct crypto_larval *larval;

	if (!IS_ENABLED(CONFIG_CRYPTO_MANAGER))
		return NULL;

	larval = crypto_larval_alloc(alg->cra_name,
				     alg->cra_flags | CRYPTO_ALG_TESTED, 0);
	if (IS_ERR(larval))
		return larval;

	/* The larval pins the adult algorithm until testing completes. */
	larval->adult = crypto_mod_get(alg);
	if (!larval->adult) {
		kfree(larval);
		return ERR_PTR(-ENOENT);
	}

	refcount_set(&larval->alg.cra_refcnt, 1);
	/* Mirror the adult's driver name so the tester can find it. */
	memcpy(larval->alg.cra_driver_name, alg->cra_driver_name,
	       CRYPTO_MAX_ALG_NAME);
	larval->alg.cra_priority = alg->cra_priority;

	return larval;
}
244
/*
 * Core registration: check for name collisions, add @alg to the global
 * algorithm list and create its test larval.  Returns the larval (NULL
 * when testing is disabled) or an ERR_PTR.  Caller must hold
 * crypto_alg_sem for writing.
 */
static struct crypto_larval *__crypto_register_alg(struct crypto_alg *alg)
{
	struct crypto_alg *q;
	struct crypto_larval *larval;
	int ret = -EAGAIN;

	if (crypto_is_dead(alg))
		goto err;

	INIT_LIST_HEAD(&alg->cra_users);

	/* No cheating! */
	alg->cra_flags &= ~CRYPTO_ALG_TESTED;

	ret = -EEXIST;

	list_for_each_entry(q, &crypto_alg_list, cra_list) {
		if (q == alg)
			goto err;

		if (crypto_is_moribund(q))
			continue;

		if (crypto_is_larval(q)) {
			/* Duplicate driver name against a pending larval. */
			if (!strcmp(alg->cra_driver_name, q->cra_driver_name))
				goto err;
			continue;
		}

		/* Reject cross-collisions between names and driver names. */
		if (!strcmp(q->cra_driver_name, alg->cra_name) ||
		    !strcmp(q->cra_name, alg->cra_driver_name))
			goto err;
	}

	larval = crypto_alloc_test_larval(alg);
	if (IS_ERR(larval))
		goto out;

	list_add(&alg->cra_list, &crypto_alg_list);

	if (larval)
		list_add(&larval->alg.cra_list, &crypto_alg_list);
	else
		/* No manager: skip testing and mark tested immediately. */
		alg->cra_flags |= CRYPTO_ALG_TESTED;

	crypto_stats_init(alg);

out:
	return larval;

err:
	larval = ERR_PTR(ret);
	goto out;
}
299
/*
 * Called by the crypto manager when the self-test for @name finishes
 * with result @err.  Kills the test larval, marks the adult algorithm
 * as tested on success, satisfies any waiting larvals, and removes
 * lower-priority algorithms that this one supersedes.
 */
void crypto_alg_tested(const char *name, int err)
{
	struct crypto_larval *test;
	struct crypto_alg *alg;
	struct crypto_alg *q;
	LIST_HEAD(list);
	bool best;

	down_write(&crypto_alg_sem);
	/* Locate the test larval registered under this driver name. */
	list_for_each_entry(q, &crypto_alg_list, cra_list) {
		if (crypto_is_moribund(q) || !crypto_is_larval(q))
			continue;

		test = (struct crypto_larval *)q;

		if (!strcmp(q->cra_driver_name, name))
			goto found;
	}

	pr_err("alg: Unexpected test result for %s: %d\n", name, err);
	goto unlock;

found:
	/* The larval has served its purpose; let it die. */
	q->cra_flags |= CRYPTO_ALG_DEAD;
	alg = test->adult;
	if (err || list_empty(&alg->cra_list))
		goto complete;

	alg->cra_flags |= CRYPTO_ALG_TESTED;

	/* Only satisfy larval waiters if we are the best. */
	best = true;
	list_for_each_entry(q, &crypto_alg_list, cra_list) {
		if (crypto_is_moribund(q) || !crypto_is_larval(q))
			continue;

		if (strcmp(alg->cra_name, q->cra_name))
			continue;

		if (q->cra_priority > alg->cra_priority) {
			best = false;
			break;
		}
	}

	list_for_each_entry(q, &crypto_alg_list, cra_list) {
		if (q == alg)
			continue;

		if (crypto_is_moribund(q))
			continue;

		if (crypto_is_larval(q)) {
			struct crypto_larval *larval = (void *)q;

			/*
			 * Check to see if either our generic name or
			 * specific name can satisfy the name requested
			 * by the larval entry q.
			 */
			if (strcmp(alg->cra_name, q->cra_name) &&
			    strcmp(alg->cra_driver_name, q->cra_name))
				continue;

			if (larval->adult)
				continue;
			if ((q->cra_flags ^ alg->cra_flags) & larval->mask)
				continue;

			if (best && crypto_mod_get(alg))
				larval->adult = alg;
			else
				larval->adult = ERR_PTR(-EAGAIN);

			continue;
		}

		if (strcmp(alg->cra_name, q->cra_name))
			continue;

		/* Keep differently-named drivers of higher priority. */
		if (strcmp(alg->cra_driver_name, q->cra_driver_name) &&
		    q->cra_priority > alg->cra_priority)
			continue;

		crypto_remove_spawns(q, &list, alg);
	}

complete:
	complete_all(&test->completion);

unlock:
	up_write(&crypto_alg_sem);

	/* Destroy superseded algorithms outside the semaphore. */
	crypto_remove_final(&list);
}
EXPORT_SYMBOL_GPL(crypto_alg_tested);
Herbert Xu4cc77202006-08-06 21:16:34 +1000396
Steffen Klassert22e5b202011-09-27 07:23:07 +0200397void crypto_remove_final(struct list_head *list)
Herbert Xu6bfd4802006-09-21 11:39:29 +1000398{
399 struct crypto_alg *alg;
400 struct crypto_alg *n;
401
402 list_for_each_entry_safe(alg, n, list, cra_list) {
403 list_del_init(&alg->cra_list);
404 crypto_alg_put(alg);
405 }
406}
Steffen Klassert22e5b202011-09-27 07:23:07 +0200407EXPORT_SYMBOL_GPL(crypto_remove_final);
Herbert Xu6bfd4802006-09-21 11:39:29 +1000408
/*
 * Register a new algorithm and, when the boot-time self-tests have
 * already run, synchronously wait for its own self-test to complete.
 * Returns 0 on success or a negative errno.
 */
int crypto_register_alg(struct crypto_alg *alg)
{
	struct crypto_larval *larval;
	bool test_started;
	int err;

	alg->cra_flags &= ~CRYPTO_ALG_DEAD;
	err = crypto_check_alg(alg);
	if (err)
		return err;

	down_write(&crypto_alg_sem);
	larval = __crypto_register_alg(alg);
	test_started = static_key_enabled(&crypto_boot_test_finished);
	if (!IS_ERR_OR_NULL(larval))
		larval->test_started = test_started;
	up_write(&crypto_alg_sem);

	/*
	 * NULL larval means testing is disabled; PTR_ERR(NULL) == 0,
	 * so this returns success in that case.
	 */
	if (IS_ERR_OR_NULL(larval))
		return PTR_ERR(larval);

	if (test_started)
		crypto_wait_for_test(larval);
	return 0;
}
EXPORT_SYMBOL_GPL(crypto_register_alg);
435
/*
 * Mark @alg dead, unlink it and collect it plus all of its dependent
 * instances on @list for later destruction.  Returns -ENOENT if the
 * algorithm was never registered.  Caller must hold crypto_alg_sem
 * for writing.
 */
static int crypto_remove_alg(struct crypto_alg *alg, struct list_head *list)
{
	if (unlikely(list_empty(&alg->cra_list)))
		return -ENOENT;

	alg->cra_flags |= CRYPTO_ALG_DEAD;

	list_del_init(&alg->cra_list);
	crypto_remove_spawns(alg, list, NULL);

	return 0;
}
448
/*
 * Unregister an algorithm previously added with crypto_register_alg().
 * Warns (and returns) if the algorithm was not registered; BUGs if it
 * still has outstanding references.
 */
void crypto_unregister_alg(struct crypto_alg *alg)
{
	int ret;
	LIST_HEAD(list);

	down_write(&crypto_alg_sem);
	ret = crypto_remove_alg(alg, &list);
	up_write(&crypto_alg_sem);

	if (WARN(ret, "Algorithm %s is not registered", alg->cra_driver_name))
		return;

	/* Only the registration reference may remain at this point. */
	BUG_ON(refcount_read(&alg->cra_refcnt) != 1);
	if (alg->cra_destroy)
		alg->cra_destroy(alg);

	crypto_remove_final(&list);
}
EXPORT_SYMBOL_GPL(crypto_unregister_alg);
468
Mark Brown4b004342012-01-17 23:34:26 +0000469int crypto_register_algs(struct crypto_alg *algs, int count)
470{
471 int i, ret;
472
473 for (i = 0; i < count; i++) {
474 ret = crypto_register_alg(&algs[i]);
475 if (ret)
476 goto err;
477 }
478
479 return 0;
480
481err:
482 for (--i; i >= 0; --i)
483 crypto_unregister_alg(&algs[i]);
484
485 return ret;
486}
487EXPORT_SYMBOL_GPL(crypto_register_algs);
488
Eric Biggersc6d633a2019-12-15 15:51:19 -0800489void crypto_unregister_algs(struct crypto_alg *algs, int count)
Mark Brown4b004342012-01-17 23:34:26 +0000490{
Eric Biggersc6d633a2019-12-15 15:51:19 -0800491 int i;
Mark Brown4b004342012-01-17 23:34:26 +0000492
Eric Biggersc6d633a2019-12-15 15:51:19 -0800493 for (i = 0; i < count; i++)
494 crypto_unregister_alg(&algs[i]);
Mark Brown4b004342012-01-17 23:34:26 +0000495}
496EXPORT_SYMBOL_GPL(crypto_unregister_algs);
497
/*
 * Register a template.  Returns -EEXIST if it is already on the list,
 * 0 otherwise.  Signature of the providing module is checked in FIPS
 * mode.
 */
int crypto_register_template(struct crypto_template *tmpl)
{
	struct crypto_template *q;
	int err = -EEXIST;

	down_write(&crypto_alg_sem);

	crypto_check_module_sig(tmpl->module);

	/* Duplicate registration of the same object is an error. */
	list_for_each_entry(q, &crypto_template_list, list) {
		if (q == tmpl)
			goto out;
	}

	list_add(&tmpl->list, &crypto_template_list);
	err = 0;
out:
	up_write(&crypto_alg_sem);
	return err;
}
EXPORT_SYMBOL_GPL(crypto_register_template);
519
Xiongfeng Wang95724422019-01-18 13:58:11 +0800520int crypto_register_templates(struct crypto_template *tmpls, int count)
521{
522 int i, err;
523
524 for (i = 0; i < count; i++) {
525 err = crypto_register_template(&tmpls[i]);
526 if (err)
527 goto out;
528 }
529 return 0;
530
531out:
532 for (--i; i >= 0; --i)
533 crypto_unregister_template(&tmpls[i]);
534 return err;
535}
536EXPORT_SYMBOL_GPL(crypto_register_templates);
537
/*
 * Unregister a template and tear down all of its instances.  The
 * instances are unlinked under crypto_alg_sem and freed afterwards,
 * once no outstanding references remain.
 */
void crypto_unregister_template(struct crypto_template *tmpl)
{
	struct crypto_instance *inst;
	struct hlist_node *n;
	struct hlist_head *list;
	LIST_HEAD(users);

	down_write(&crypto_alg_sem);

	BUG_ON(list_empty(&tmpl->list));
	list_del_init(&tmpl->list);

	/* Phase 1: mark every instance dead while holding the lock. */
	list = &tmpl->instances;
	hlist_for_each_entry(inst, list, list) {
		int err = crypto_remove_alg(&inst->alg, &users);

		BUG_ON(err);
	}

	up_write(&crypto_alg_sem);

	/* Phase 2: free the instances outside the lock. */
	hlist_for_each_entry_safe(inst, n, list, list) {
		BUG_ON(refcount_read(&inst->alg.cra_refcnt) != 1);
		crypto_free_instance(inst);
	}
	crypto_remove_final(&users);
}
EXPORT_SYMBOL_GPL(crypto_unregister_template);
566
Xiongfeng Wang95724422019-01-18 13:58:11 +0800567void crypto_unregister_templates(struct crypto_template *tmpls, int count)
568{
569 int i;
570
571 for (i = count - 1; i >= 0; --i)
572 crypto_unregister_template(&tmpls[i]);
573}
574EXPORT_SYMBOL_GPL(crypto_unregister_templates);
575
Herbert Xu4cc77202006-08-06 21:16:34 +1000576static struct crypto_template *__crypto_lookup_template(const char *name)
577{
578 struct crypto_template *q, *tmpl = NULL;
579
580 down_read(&crypto_alg_sem);
581 list_for_each_entry(q, &crypto_template_list, list) {
582 if (strcmp(q->name, name))
583 continue;
584 if (unlikely(!crypto_tmpl_get(q)))
585 continue;
586
587 tmpl = q;
588 break;
589 }
590 up_read(&crypto_alg_sem);
591
592 return tmpl;
593}
594
/*
 * Look up a template by name, requesting the "crypto-%s" module and
 * retrying once if it is not yet loaded.
 */
struct crypto_template *crypto_lookup_template(const char *name)
{
	return try_then_request_module(__crypto_lookup_template(name),
				       "crypto-%s", name);
}
EXPORT_SYMBOL_GPL(crypto_lookup_template);
601
/*
 * Register a template instance: validate it, bind its spawns, add it
 * to the global algorithm list, link it into the template, and wait
 * for its self-test.  Returns 0 on success or a negative errno.
 */
int crypto_register_instance(struct crypto_template *tmpl,
			     struct crypto_instance *inst)
{
	struct crypto_larval *larval;
	struct crypto_spawn *spawn;
	int err;

	err = crypto_check_alg(&inst->alg);
	if (err)
		return err;

	inst->alg.cra_module = tmpl->module;
	inst->alg.cra_flags |= CRYPTO_ALG_INSTANCE;

	down_write(&crypto_alg_sem);

	larval = ERR_PTR(-EAGAIN);
	/* Bind each spawn to this instance; bail if any died meanwhile. */
	for (spawn = inst->spawns; spawn;) {
		struct crypto_spawn *next;

		if (spawn->dead)
			goto unlock;

		next = spawn->next;
		spawn->inst = inst;
		spawn->registered = true;

		/* The grab-time module ref is no longer needed once bound. */
		crypto_mod_put(spawn->alg);

		spawn = next;
	}

	larval = __crypto_register_alg(&inst->alg);
	if (IS_ERR(larval))
		goto unlock;
	else if (larval)
		larval->test_started = true;

	hlist_add_head(&inst->list, &tmpl->instances);
	inst->tmpl = tmpl;

unlock:
	up_write(&crypto_alg_sem);

	/* PTR_ERR(NULL) == 0: no test larval means success without waiting. */
	err = PTR_ERR(larval);
	if (IS_ERR_OR_NULL(larval))
		goto err;

	crypto_wait_for_test(larval);
	err = 0;

err:
	return err;
}
EXPORT_SYMBOL_GPL(crypto_register_instance);
657
/*
 * Unregister a template instance along with everything that depends
 * on it.  Actual destruction happens after crypto_alg_sem is dropped.
 */
void crypto_unregister_instance(struct crypto_instance *inst)
{
	LIST_HEAD(list);

	down_write(&crypto_alg_sem);

	crypto_remove_spawns(&inst->alg, &list, NULL);
	crypto_remove_instance(inst, &list);

	up_write(&crypto_alg_sem);

	crypto_remove_final(&list);
}
EXPORT_SYMBOL_GPL(crypto_unregister_instance);
672
/*
 * Resolve algorithm @name and attach @spawn to it on behalf of @inst.
 * On success the spawn is linked into the algorithm's user list and
 * prepended to the instance's spawn chain; the module reference taken
 * by crypto_find_alg() is kept until crypto_register_instance() or
 * crypto_drop_spawn() releases it.  Returns 0 or a negative errno.
 */
int crypto_grab_spawn(struct crypto_spawn *spawn, struct crypto_instance *inst,
		      const char *name, u32 type, u32 mask)
{
	struct crypto_alg *alg;
	int err = -EAGAIN;

	if (WARN_ON_ONCE(inst == NULL))
		return -EINVAL;

	/* Allow the result of crypto_attr_alg_name() to be passed directly */
	if (IS_ERR(name))
		return PTR_ERR(name);

	alg = crypto_find_alg(name, spawn->frontend, type, mask);
	if (IS_ERR(alg))
		return PTR_ERR(alg);

	down_write(&crypto_alg_sem);
	if (!crypto_is_moribund(alg)) {
		list_add(&spawn->list, &alg->cra_users);
		spawn->alg = alg;
		spawn->mask = mask;
		spawn->next = inst->spawns;
		inst->spawns = spawn;
		/* Propagate inheritable flags from the inner algorithm. */
		inst->alg.cra_flags |=
			(alg->cra_flags & CRYPTO_ALG_INHERITED_FLAGS);
		err = 0;
	}
	up_write(&crypto_alg_sem);
	if (err)
		crypto_mod_put(alg);
	return err;
}
EXPORT_SYMBOL_GPL(crypto_grab_spawn);
707
/*
 * Release a spawn obtained with crypto_grab_spawn().  Safe to call on
 * a spawn that was never initialized.  Drops the grab-time module
 * reference unless crypto_register_instance() already consumed it.
 */
void crypto_drop_spawn(struct crypto_spawn *spawn)
{
	if (!spawn->alg) /* not yet initialized? */
		return;

	down_write(&crypto_alg_sem);
	/* Dead spawns were already unlinked by crypto_remove_spawns(). */
	if (!spawn->dead)
		list_del(&spawn->list);
	up_write(&crypto_alg_sem);

	if (!spawn->registered)
		crypto_mod_put(spawn->alg);
}
EXPORT_SYMBOL_GPL(crypto_drop_spawn);
722
/*
 * Take a module reference on the algorithm behind @spawn.  Returns
 * ERR_PTR(-EAGAIN) if the spawn is dead or the module reference cannot
 * be obtained; in the latter case the algorithm is shot down (outside
 * the semaphore) so it gets re-larvalized.
 */
static struct crypto_alg *crypto_spawn_alg(struct crypto_spawn *spawn)
{
	struct crypto_alg *alg = ERR_PTR(-EAGAIN);
	struct crypto_alg *target;
	bool shoot = false;

	down_read(&crypto_alg_sem);
	if (!spawn->dead) {
		alg = spawn->alg;
		if (!crypto_mod_get(alg)) {
			/* Module is going away: remember alg for shootdown. */
			target = crypto_alg_get(alg);
			shoot = true;
			alg = ERR_PTR(-EAGAIN);
		}
	}
	up_read(&crypto_alg_sem);

	if (shoot) {
		crypto_shoot_alg(target);
		crypto_alg_put(target);
	}

	return alg;
}
747
/*
 * Allocate a transform from a spawn using the legacy (type/mask)
 * interface.  Returns the tfm or an ERR_PTR; the algorithm reference
 * is dropped on failure.
 */
struct crypto_tfm *crypto_spawn_tfm(struct crypto_spawn *spawn, u32 type,
				    u32 mask)
{
	struct crypto_alg *alg;
	struct crypto_tfm *tfm;

	alg = crypto_spawn_alg(spawn);
	if (IS_ERR(alg))
		return ERR_CAST(alg);

	/* The caller's requested type must be compatible with alg. */
	tfm = ERR_PTR(-EINVAL);
	if (unlikely((alg->cra_flags ^ type) & mask))
		goto out_put_alg;

	tfm = __crypto_alloc_tfm(alg, type, mask);
	if (IS_ERR(tfm))
		goto out_put_alg;

	return tfm;

out_put_alg:
	crypto_mod_put(alg);
	return tfm;
}
EXPORT_SYMBOL_GPL(crypto_spawn_tfm);
773
/*
 * Allocate a transform from a spawn using the frontend-based (new)
 * interface.  Returns the tfm or an ERR_PTR; the algorithm reference
 * is dropped on failure.
 */
void *crypto_spawn_tfm2(struct crypto_spawn *spawn)
{
	struct crypto_alg *alg;
	struct crypto_tfm *tfm;

	alg = crypto_spawn_alg(spawn);
	if (IS_ERR(alg))
		return ERR_CAST(alg);

	tfm = crypto_create_tfm(alg, spawn->frontend);
	if (IS_ERR(tfm))
		goto out_put_alg;

	return tfm;

out_put_alg:
	crypto_mod_put(alg);
	return tfm;
}
EXPORT_SYMBOL_GPL(crypto_spawn_tfm2);
794
/* Subscribe @nb to crypto registration events on crypto_chain. */
int crypto_register_notifier(struct notifier_block *nb)
{
	return blocking_notifier_chain_register(&crypto_chain, nb);
}
EXPORT_SYMBOL_GPL(crypto_register_notifier);
800
/* Unsubscribe @nb from crypto registration events on crypto_chain. */
int crypto_unregister_notifier(struct notifier_block *nb)
{
	return blocking_notifier_chain_unregister(&crypto_chain, nb);
}
EXPORT_SYMBOL_GPL(crypto_unregister_notifier);
806
/*
 * Extract the crypto_attr_type attribute from the template parameter
 * array @tb (expected in tb[0]).  Returns the attribute payload, or
 * ERR_PTR(-ENOENT) if absent / ERR_PTR(-EINVAL) if malformed.
 */
struct crypto_attr_type *crypto_get_attr_type(struct rtattr **tb)
{
	struct rtattr *rta = tb[0];
	struct crypto_attr_type *algt;

	if (!rta)
		return ERR_PTR(-ENOENT);
	/* Payload must be large enough to hold the whole struct. */
	if (RTA_PAYLOAD(rta) < sizeof(*algt))
		return ERR_PTR(-EINVAL);
	if (rta->rta_type != CRYPTOA_TYPE)
		return ERR_PTR(-EINVAL);

	algt = RTA_DATA(rta);

	return algt;
}
EXPORT_SYMBOL_GPL(crypto_get_attr_type);
824
Eric Biggers7bcb2c92020-07-09 23:20:38 -0700825/**
826 * crypto_check_attr_type() - check algorithm type and compute inherited mask
827 * @tb: the template parameters
828 * @type: the algorithm type the template would be instantiated as
829 * @mask_ret: (output) the mask that should be passed to crypto_grab_*()
830 * to restrict the flags of any inner algorithms
831 *
832 * Validate that the algorithm type the user requested is compatible with the
833 * one the template would actually be instantiated as. E.g., if the user is
834 * doing crypto_alloc_shash("cbc(aes)", ...), this would return an error because
835 * the "cbc" template creates an "skcipher" algorithm, not an "shash" algorithm.
836 *
837 * Also compute the mask to use to restrict the flags of any inner algorithms.
838 *
839 * Return: 0 on success; -errno on failure
840 */
841int crypto_check_attr_type(struct rtattr **tb, u32 type, u32 *mask_ret)
Herbert Xuebc610e2007-01-01 18:37:02 +1100842{
843 struct crypto_attr_type *algt;
844
845 algt = crypto_get_attr_type(tb);
846 if (IS_ERR(algt))
847 return PTR_ERR(algt);
848
849 if ((algt->type ^ type) & algt->mask)
850 return -EINVAL;
851
Eric Biggers7bcb2c92020-07-09 23:20:38 -0700852 *mask_ret = crypto_algt_inherited_mask(algt);
Herbert Xuebc610e2007-01-01 18:37:02 +1100853 return 0;
854}
855EXPORT_SYMBOL_GPL(crypto_check_attr_type);
856
Herbert Xu68b6c7d2007-12-07 20:18:17 +0800857const char *crypto_attr_alg_name(struct rtattr *rta)
Herbert Xuebc610e2007-01-01 18:37:02 +1100858{
Herbert Xu7fed0bf2006-08-06 23:10:45 +1000859 struct crypto_attr_alg *alga;
860
Herbert Xuebc610e2007-01-01 18:37:02 +1100861 if (!rta)
862 return ERR_PTR(-ENOENT);
863 if (RTA_PAYLOAD(rta) < sizeof(*alga))
Herbert Xu7fed0bf2006-08-06 23:10:45 +1000864 return ERR_PTR(-EINVAL);
Herbert Xu39e1ee012007-08-29 19:27:26 +0800865 if (rta->rta_type != CRYPTOA_ALG)
866 return ERR_PTR(-EINVAL);
Herbert Xu7fed0bf2006-08-06 23:10:45 +1000867
868 alga = RTA_DATA(rta);
869 alga->name[CRYPTO_MAX_ALG_NAME - 1] = 0;
870
Herbert Xu68b6c7d2007-12-07 20:18:17 +0800871 return alga->name;
872}
873EXPORT_SYMBOL_GPL(crypto_attr_alg_name);
874
Herbert Xu32f27c72016-06-29 18:04:13 +0800875int crypto_inst_setname(struct crypto_instance *inst, const char *name,
876 struct crypto_alg *alg)
877{
878 if (snprintf(inst->alg.cra_name, CRYPTO_MAX_ALG_NAME, "%s(%s)", name,
879 alg->cra_name) >= CRYPTO_MAX_ALG_NAME)
880 return -ENAMETOOLONG;
881
882 if (snprintf(inst->alg.cra_driver_name, CRYPTO_MAX_ALG_NAME, "%s(%s)",
883 name, alg->cra_driver_name) >= CRYPTO_MAX_ALG_NAME)
884 return -ENAMETOOLONG;
885
886 return 0;
887}
888EXPORT_SYMBOL_GPL(crypto_inst_setname);
889
Herbert Xub5b7f082007-04-16 20:48:54 +1000890void crypto_init_queue(struct crypto_queue *queue, unsigned int max_qlen)
891{
892 INIT_LIST_HEAD(&queue->list);
893 queue->backlog = &queue->list;
894 queue->qlen = 0;
895 queue->max_qlen = max_qlen;
896}
897EXPORT_SYMBOL_GPL(crypto_init_queue);
898
899int crypto_enqueue_request(struct crypto_queue *queue,
900 struct crypto_async_request *request)
901{
902 int err = -EINPROGRESS;
903
904 if (unlikely(queue->qlen >= queue->max_qlen)) {
Gilad Ben-Yossef6b80ea32017-10-18 08:00:33 +0100905 if (!(request->flags & CRYPTO_TFM_REQ_MAY_BACKLOG)) {
906 err = -ENOSPC;
Herbert Xub5b7f082007-04-16 20:48:54 +1000907 goto out;
Gilad Ben-Yossef6b80ea32017-10-18 08:00:33 +0100908 }
909 err = -EBUSY;
Herbert Xub5b7f082007-04-16 20:48:54 +1000910 if (queue->backlog == &queue->list)
911 queue->backlog = &request->list;
912 }
913
914 queue->qlen++;
915 list_add_tail(&request->list, &queue->list);
916
917out:
918 return err;
919}
920EXPORT_SYMBOL_GPL(crypto_enqueue_request);
921
/*
 * Put @request back at the head of @queue so it is dequeued next.
 * Unlike crypto_enqueue_request() this never checks max_qlen or the
 * backlog; callers use it to requeue an already-accepted request.
 */
void crypto_enqueue_request_head(struct crypto_queue *queue,
				 struct crypto_async_request *request)
{
	queue->qlen++;
	list_add(&request->list, &queue->list);
}
EXPORT_SYMBOL_GPL(crypto_enqueue_request_head);
929
/*
 * Remove and return the oldest request on @queue, or NULL if the queue
 * is empty.
 */
struct crypto_async_request *crypto_dequeue_request(struct crypto_queue *queue)
{
	struct list_head *request;

	if (unlikely(!queue->qlen))
		return NULL;

	queue->qlen--;

	/*
	 * If a backlog exists, the entry about to be removed from the
	 * front may be the current backlog marker, so advance the marker
	 * before unlinking.
	 */
	if (queue->backlog != &queue->list)
		queue->backlog = queue->backlog->next;

	request = queue->list.next;
	list_del(request);

	return list_entry(request, struct crypto_async_request, list);
}
EXPORT_SYMBOL_GPL(crypto_dequeue_request);
948
/*
 * Increment a big-endian counter of @size bytes at @a by one, one byte
 * at a time from the least significant (last) byte, propagating the
 * carry until a byte does not wrap to zero.
 */
static inline void crypto_inc_byte(u8 *a, unsigned int size)
{
	u8 *b = (a + size);	/* one past the least significant byte */
	u8 c;

	for (; size; size--) {
		c = *--b + 1;
		*b = c;
		if (c)		/* no wrap: carry stops here */
			break;
	}
}
961
/*
 * Increment the @size-byte big-endian counter at @a by one.
 *
 * Fast path: when unaligned access is cheap or the buffer end is
 * naturally aligned, work in 32-bit big-endian words from the least
 * significant end, stopping as soon as a word does not wrap to zero.
 * Any remaining (unaligned or carried-into) head bytes are handled by
 * the byte-wise fallback.
 */
void crypto_inc(u8 *a, unsigned int size)
{
	__be32 *b = (__be32 *)(a + size);
	u32 c;

	if (IS_ENABLED(CONFIG_HAVE_EFFICIENT_UNALIGNED_ACCESS) ||
	    IS_ALIGNED((unsigned long)b, __alignof__(*b)))
		for (; size >= 4; size -= 4) {
			c = be32_to_cpu(*--b) + 1;
			*b = cpu_to_be32(c);
			if (likely(c))	/* no carry out of this word */
				return;
		}

	crypto_inc_byte(a, size);
}
EXPORT_SYMBOL_GPL(crypto_inc);
979
/*
 * XOR @len bytes: dst[i] = src1[i] ^ src2[i].
 *
 * On architectures without efficient unaligned access, first compute the
 * relative alignment of the three pointers (the smallest power of two at
 * which they all agree modulo sizeof(unsigned long)) and consume head
 * bytes one at a time until @dst reaches that alignment; the bulk is
 * then XORed with the widest stride the relative alignment permits
 * (8, then 4, then 2 bytes), with a byte-wise tail.
 */
void __crypto_xor(u8 *dst, const u8 *src1, const u8 *src2, unsigned int len)
{
	int relalign = 0;

	if (!IS_ENABLED(CONFIG_HAVE_EFFICIENT_UNALIGNED_ACCESS)) {
		int size = sizeof(unsigned long);
		/* Bits where the pointers disagree, below word size. */
		int d = (((unsigned long)dst ^ (unsigned long)src1) |
			 ((unsigned long)dst ^ (unsigned long)src2)) &
			(size - 1);

		/* Lowest disagreeing bit bounds the usable stride. */
		relalign = d ? 1 << __ffs(d) : size;

		/*
		 * If we care about alignment, process as many bytes as
		 * needed to advance dst and src to values whose alignments
		 * equal their relative alignment. This will allow us to
		 * process the remainder of the input using optimal strides.
		 */
		while (((unsigned long)dst & (relalign - 1)) && len > 0) {
			*dst++ = *src1++ ^ *src2++;
			len--;
		}
	}

	while (IS_ENABLED(CONFIG_64BIT) && len >= 8 && !(relalign & 7)) {
		*(u64 *)dst = *(u64 *)src1 ^ *(u64 *)src2;
		dst += 8;
		src1 += 8;
		src2 += 8;
		len -= 8;
	}

	while (len >= 4 && !(relalign & 3)) {
		*(u32 *)dst = *(u32 *)src1 ^ *(u32 *)src2;
		dst += 4;
		src1 += 4;
		src2 += 4;
		len -= 4;
	}

	while (len >= 2 && !(relalign & 1)) {
		*(u16 *)dst = *(u16 *)src1 ^ *(u16 *)src2;
		dst += 2;
		src1 += 2;
		src2 += 2;
		len -= 2;
	}

	while (len--)
		*dst++ = *src1++ ^ *src2++;
}
EXPORT_SYMBOL_GPL(__crypto_xor);
Herbert Xu76136362007-11-20 17:26:06 +08001032
Herbert Xu38d21432015-04-20 13:39:00 +08001033unsigned int crypto_alg_extsize(struct crypto_alg *alg)
1034{
Herbert Xuc2110f22015-05-28 22:07:56 +08001035 return alg->cra_ctxsize +
1036 (alg->cra_alignmask & ~(crypto_tfm_ctx_alignment() - 1));
Herbert Xu38d21432015-04-20 13:39:00 +08001037}
1038EXPORT_SYMBOL_GPL(crypto_alg_extsize);
1039
Herbert Xuf2aefda2016-01-23 13:51:01 +08001040int crypto_type_has_alg(const char *name, const struct crypto_type *frontend,
1041 u32 type, u32 mask)
1042{
1043 int ret = 0;
1044 struct crypto_alg *alg = crypto_find_alg(name, frontend, type, mask);
1045
1046 if (!IS_ERR(alg)) {
1047 crypto_mod_put(alg);
1048 ret = 1;
1049 }
1050
1051 return ret;
1052}
1053EXPORT_SYMBOL_GPL(crypto_type_has_alg);
1054
Corentin Labbef7d76e02018-11-29 14:42:21 +00001055#ifdef CONFIG_CRYPTO_STATS
/* Reset all statistics counters of @alg to zero. */
void crypto_stats_init(struct crypto_alg *alg)
{
	memset(&alg->stats, 0, sizeof(alg->stats));
}
EXPORT_SYMBOL_GPL(crypto_stats_init);
1061
/*
 * Take a reference on @alg before an operation whose outcome will be
 * recorded; the matching crypto_stats_*() reporter drops it again via
 * crypto_alg_put().
 */
void crypto_stats_get(struct crypto_alg *alg)
{
	crypto_alg_get(alg);
}
EXPORT_SYMBOL_GPL(crypto_stats_get);
1067
/*
 * Account an AEAD encryption of @cryptlen bytes to @alg, or bump the
 * error counter if @ret is a failure other than -EINPROGRESS/-EBUSY.
 * Drops the reference taken by crypto_stats_get().
 */
void crypto_stats_aead_encrypt(unsigned int cryptlen, struct crypto_alg *alg,
			       int ret)
{
	if (ret && ret != -EINPROGRESS && ret != -EBUSY) {
		atomic64_inc(&alg->stats.aead.err_cnt);
	} else {
		atomic64_inc(&alg->stats.aead.encrypt_cnt);
		atomic64_add(cryptlen, &alg->stats.aead.encrypt_tlen);
	}
	crypto_alg_put(alg);
}
EXPORT_SYMBOL_GPL(crypto_stats_aead_encrypt);
1080
/*
 * Account an AEAD decryption of @cryptlen bytes to @alg, or bump the
 * error counter on real failure.  Drops the crypto_stats_get() reference.
 */
void crypto_stats_aead_decrypt(unsigned int cryptlen, struct crypto_alg *alg,
			       int ret)
{
	if (ret && ret != -EINPROGRESS && ret != -EBUSY) {
		atomic64_inc(&alg->stats.aead.err_cnt);
	} else {
		atomic64_inc(&alg->stats.aead.decrypt_cnt);
		atomic64_add(cryptlen, &alg->stats.aead.decrypt_tlen);
	}
	crypto_alg_put(alg);
}
EXPORT_SYMBOL_GPL(crypto_stats_aead_decrypt);
1093
/*
 * Account an akcipher encryption of @src_len bytes to @alg, or bump the
 * error counter on real failure.  Drops the crypto_stats_get() reference.
 */
void crypto_stats_akcipher_encrypt(unsigned int src_len, int ret,
				   struct crypto_alg *alg)
{
	if (ret && ret != -EINPROGRESS && ret != -EBUSY) {
		atomic64_inc(&alg->stats.akcipher.err_cnt);
	} else {
		atomic64_inc(&alg->stats.akcipher.encrypt_cnt);
		atomic64_add(src_len, &alg->stats.akcipher.encrypt_tlen);
	}
	crypto_alg_put(alg);
}
EXPORT_SYMBOL_GPL(crypto_stats_akcipher_encrypt);
1106
/*
 * Account an akcipher decryption of @src_len bytes to @alg, or bump the
 * error counter on real failure.  Drops the crypto_stats_get() reference.
 */
void crypto_stats_akcipher_decrypt(unsigned int src_len, int ret,
				   struct crypto_alg *alg)
{
	if (ret && ret != -EINPROGRESS && ret != -EBUSY) {
		atomic64_inc(&alg->stats.akcipher.err_cnt);
	} else {
		atomic64_inc(&alg->stats.akcipher.decrypt_cnt);
		atomic64_add(src_len, &alg->stats.akcipher.decrypt_tlen);
	}
	crypto_alg_put(alg);
}
EXPORT_SYMBOL_GPL(crypto_stats_akcipher_decrypt);
1119
/* Count one akcipher sign (or error) for @alg; drops the stats reference. */
void crypto_stats_akcipher_sign(int ret, struct crypto_alg *alg)
{
	if (ret && ret != -EINPROGRESS && ret != -EBUSY)
		atomic64_inc(&alg->stats.akcipher.err_cnt);
	else
		atomic64_inc(&alg->stats.akcipher.sign_cnt);
	crypto_alg_put(alg);
}
EXPORT_SYMBOL_GPL(crypto_stats_akcipher_sign);
1129
/* Count one akcipher verify (or error) for @alg; drops the stats reference. */
void crypto_stats_akcipher_verify(int ret, struct crypto_alg *alg)
{
	if (ret && ret != -EINPROGRESS && ret != -EBUSY)
		atomic64_inc(&alg->stats.akcipher.err_cnt);
	else
		atomic64_inc(&alg->stats.akcipher.verify_cnt);
	crypto_alg_put(alg);
}
EXPORT_SYMBOL_GPL(crypto_stats_akcipher_verify);
1139
/*
 * Account a compression of @slen source bytes to @alg, or bump the error
 * counter on real failure.  Drops the crypto_stats_get() reference.
 */
void crypto_stats_compress(unsigned int slen, int ret, struct crypto_alg *alg)
{
	if (ret && ret != -EINPROGRESS && ret != -EBUSY) {
		atomic64_inc(&alg->stats.compress.err_cnt);
	} else {
		atomic64_inc(&alg->stats.compress.compress_cnt);
		atomic64_add(slen, &alg->stats.compress.compress_tlen);
	}
	crypto_alg_put(alg);
}
EXPORT_SYMBOL_GPL(crypto_stats_compress);
1151
/*
 * Account a decompression of @slen source bytes to @alg, or bump the
 * error counter on real failure.  Drops the crypto_stats_get() reference.
 */
void crypto_stats_decompress(unsigned int slen, int ret, struct crypto_alg *alg)
{
	if (ret && ret != -EINPROGRESS && ret != -EBUSY) {
		atomic64_inc(&alg->stats.compress.err_cnt);
	} else {
		atomic64_inc(&alg->stats.compress.decompress_cnt);
		atomic64_add(slen, &alg->stats.compress.decompress_tlen);
	}
	crypto_alg_put(alg);
}
EXPORT_SYMBOL_GPL(crypto_stats_decompress);
1163
/*
 * Account @nbytes hashed in an ahash update to @alg (bytes only; the
 * operation count is bumped in crypto_stats_ahash_final()).  Drops the
 * crypto_stats_get() reference.
 */
void crypto_stats_ahash_update(unsigned int nbytes, int ret,
			       struct crypto_alg *alg)
{
	if (ret && ret != -EINPROGRESS && ret != -EBUSY)
		atomic64_inc(&alg->stats.hash.err_cnt);
	else
		atomic64_add(nbytes, &alg->stats.hash.hash_tlen);
	crypto_alg_put(alg);
}
EXPORT_SYMBOL_GPL(crypto_stats_ahash_update);
1174
/*
 * Account a completed ahash operation of @nbytes to @alg, or bump the
 * error counter on real failure.  Drops the crypto_stats_get() reference.
 */
void crypto_stats_ahash_final(unsigned int nbytes, int ret,
			      struct crypto_alg *alg)
{
	if (ret && ret != -EINPROGRESS && ret != -EBUSY) {
		atomic64_inc(&alg->stats.hash.err_cnt);
	} else {
		atomic64_inc(&alg->stats.hash.hash_cnt);
		atomic64_add(nbytes, &alg->stats.hash.hash_tlen);
	}
	crypto_alg_put(alg);
}
EXPORT_SYMBOL_GPL(crypto_stats_ahash_final);
1187
/*
 * Count one kpp set_secret for @alg; note any nonzero @ret counts as an
 * error here (no -EINPROGRESS/-EBUSY exemption).  Drops the stats
 * reference.
 */
void crypto_stats_kpp_set_secret(struct crypto_alg *alg, int ret)
{
	if (ret)
		atomic64_inc(&alg->stats.kpp.err_cnt);
	else
		atomic64_inc(&alg->stats.kpp.setsecret_cnt);
	crypto_alg_put(alg);
}
EXPORT_SYMBOL_GPL(crypto_stats_kpp_set_secret);
1197
/*
 * Count one kpp generate_public_key for @alg (any nonzero @ret is an
 * error).  Drops the stats reference.
 */
void crypto_stats_kpp_generate_public_key(struct crypto_alg *alg, int ret)
{
	if (ret)
		atomic64_inc(&alg->stats.kpp.err_cnt);
	else
		atomic64_inc(&alg->stats.kpp.generate_public_key_cnt);
	crypto_alg_put(alg);
}
EXPORT_SYMBOL_GPL(crypto_stats_kpp_generate_public_key);
1207
/*
 * Count one kpp compute_shared_secret for @alg (any nonzero @ret is an
 * error).  Drops the stats reference.
 */
void crypto_stats_kpp_compute_shared_secret(struct crypto_alg *alg, int ret)
{
	if (ret)
		atomic64_inc(&alg->stats.kpp.err_cnt);
	else
		atomic64_inc(&alg->stats.kpp.compute_shared_secret_cnt);
	crypto_alg_put(alg);
}
EXPORT_SYMBOL_GPL(crypto_stats_kpp_compute_shared_secret);
1217
/* Count one RNG seed (or error) for @alg; drops the stats reference. */
void crypto_stats_rng_seed(struct crypto_alg *alg, int ret)
{
	if (ret && ret != -EINPROGRESS && ret != -EBUSY)
		atomic64_inc(&alg->stats.rng.err_cnt);
	else
		atomic64_inc(&alg->stats.rng.seed_cnt);
	crypto_alg_put(alg);
}
EXPORT_SYMBOL_GPL(crypto_stats_rng_seed);
1227
/*
 * Account an RNG generation of @dlen bytes to @alg, or bump the error
 * counter on real failure.  Drops the crypto_stats_get() reference.
 */
void crypto_stats_rng_generate(struct crypto_alg *alg, unsigned int dlen,
			       int ret)
{
	if (ret && ret != -EINPROGRESS && ret != -EBUSY) {
		atomic64_inc(&alg->stats.rng.err_cnt);
	} else {
		atomic64_inc(&alg->stats.rng.generate_cnt);
		atomic64_add(dlen, &alg->stats.rng.generate_tlen);
	}
	crypto_alg_put(alg);
}
EXPORT_SYMBOL_GPL(crypto_stats_rng_generate);
1240
/*
 * Account an skcipher encryption of @cryptlen bytes to @alg, or bump the
 * error counter on real failure.  Drops the crypto_stats_get() reference.
 */
void crypto_stats_skcipher_encrypt(unsigned int cryptlen, int ret,
				   struct crypto_alg *alg)
{
	if (ret && ret != -EINPROGRESS && ret != -EBUSY) {
		atomic64_inc(&alg->stats.cipher.err_cnt);
	} else {
		atomic64_inc(&alg->stats.cipher.encrypt_cnt);
		atomic64_add(cryptlen, &alg->stats.cipher.encrypt_tlen);
	}
	crypto_alg_put(alg);
}
EXPORT_SYMBOL_GPL(crypto_stats_skcipher_encrypt);
1253
/*
 * Account an skcipher decryption of @cryptlen bytes to @alg, or bump the
 * error counter on real failure.  Drops the crypto_stats_get() reference.
 */
void crypto_stats_skcipher_decrypt(unsigned int cryptlen, int ret,
				   struct crypto_alg *alg)
{
	if (ret && ret != -EINPROGRESS && ret != -EBUSY) {
		atomic64_inc(&alg->stats.cipher.err_cnt);
	} else {
		atomic64_inc(&alg->stats.cipher.decrypt_cnt);
		atomic64_add(cryptlen, &alg->stats.cipher.decrypt_tlen);
	}
	crypto_alg_put(alg);
}
EXPORT_SYMBOL_GPL(crypto_stats_skcipher_decrypt);
1266#endif
1267
/*
 * Kick off the boot-time self-tests for all algorithms registered so far.
 *
 * Repeatedly scans crypto_alg_list (under crypto_alg_sem held for write)
 * for a test larval whose test has not yet been started, marks it
 * started, then waits for that test outside the lock; the scan restarts
 * from the head each iteration so larvals added meanwhile are picked up.
 * When no unstarted test larval remains, the boot-test-finished static
 * branch is enabled.
 */
static void __init crypto_start_tests(void)
{
	for (;;) {
		struct crypto_larval *larval = NULL;
		struct crypto_alg *q;

		down_write(&crypto_alg_sem);

		list_for_each_entry(q, &crypto_alg_list, cra_list) {
			struct crypto_larval *l;

			if (!crypto_is_larval(q))
				continue;

			l = (void *)q;

			if (!crypto_is_test_larval(l))
				continue;

			if (l->test_started)
				continue;

			/* Claim this larval while the lock is held. */
			l->test_started = true;
			larval = l;
			break;
		}

		up_write(&crypto_alg_sem);

		if (!larval)
			break;

		/* Must not hold crypto_alg_sem while waiting. */
		crypto_wait_for_test(larval);
	}

	static_branch_enable(&crypto_boot_test_finished);
}
1305
/* Late-init entry point: set up /proc support and run the boot self-tests. */
static int __init crypto_algapi_init(void)
{
	crypto_init_proc();
	crypto_start_tests();
	return 0;
}
1312
/* Module teardown: remove the /proc entries created at init. */
static void __exit crypto_algapi_exit(void)
{
	crypto_exit_proc();
}
1317
Herbert Xuadad5562021-09-17 08:26:19 +08001318/*
1319 * We run this at late_initcall so that all the built-in algorithms
1320 * have had a chance to register themselves first.
1321 */
1322late_initcall(crypto_algapi_init);
Herbert Xucce9e062006-08-21 21:08:13 +10001323module_exit(crypto_algapi_exit);
1324
1325MODULE_LICENSE("GPL");
1326MODULE_DESCRIPTION("Cryptographic algorithms API");