// SPDX-License-Identifier: GPL-2.0-or-later
/*
 * Cryptographic API for algorithms (i.e., low-level API).
 *
 * Copyright (c) 2006 Herbert Xu <herbert@gondor.apana.org.au>
 */

#include <crypto/algapi.h>
#include <linux/err.h>
#include <linux/errno.h>
#include <linux/fips.h>
#include <linux/init.h>
#include <linux/kernel.h>
#include <linux/list.h>
#include <linux/module.h>
#include <linux/rtnetlink.h>
#include <linux/slab.h>
#include <linux/string.h>

#include "internal.h"

static LIST_HEAD(crypto_template_list);

static inline void crypto_check_module_sig(struct module *mod)
{
	if (fips_enabled && mod && !module_sig_ok(mod))
		panic("Module %s signature verification failed in FIPS mode\n",
		      module_name(mod));
}

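/*
 * Basic sanity checks applied to every algorithm before it enters
 * crypto_alg_list: both names must be set, the alignmask must be a
 * power of two minus one and within the API-wide limits, and plain
 * single-block ciphers get the stricter MAX_CIPHER_* limits.  The
 * initial reference on the algorithm is also taken here.
 */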
static int crypto_check_alg(struct crypto_alg *alg)
{
	crypto_check_module_sig(alg->cra_module);

	if (!alg->cra_name[0] || !alg->cra_driver_name[0])
		return -EINVAL;

	if (alg->cra_alignmask & (alg->cra_alignmask + 1))
		return -EINVAL;

	/* General maximums for all algs. */
	if (alg->cra_alignmask > MAX_ALGAPI_ALIGNMASK)
		return -EINVAL;

	if (alg->cra_blocksize > MAX_ALGAPI_BLOCKSIZE)
		return -EINVAL;

	/* Lower maximums for specific alg types. */
	if (!alg->cra_type && (alg->cra_flags & CRYPTO_ALG_TYPE_MASK) ==
			       CRYPTO_ALG_TYPE_CIPHER) {
		if (alg->cra_alignmask > MAX_CIPHER_ALIGNMASK)
			return -EINVAL;

		if (alg->cra_blocksize > MAX_CIPHER_BLOCKSIZE)
			return -EINVAL;
	}

	if (alg->cra_priority < 0)
		return -EINVAL;

	refcount_set(&alg->cra_refcnt, 1);

	return 0;
}

static void crypto_free_instance(struct crypto_instance *inst)
{
	if (!inst->alg.cra_type->free) {
		inst->tmpl->free(inst);
		return;
	}

	inst->alg.cra_type->free(inst);
}

static void crypto_destroy_instance(struct crypto_alg *alg)
{
	struct crypto_instance *inst = (void *)alg;
	struct crypto_template *tmpl = inst->tmpl;

	crypto_free_instance(inst);
	crypto_tmpl_put(tmpl);
}

/*
 * This function adds a spawn to the list secondary_spawns which
 * will be used at the end of crypto_remove_spawns to unregister
 * instances, unless the spawn happens to be one that is depended
 * on by the new algorithm (nalg in crypto_remove_spawns).
 *
 * This function is also responsible for resurrecting any algorithms
 * in the dependency chain of nalg by unsetting n->dead.
 */
static struct list_head *crypto_more_spawns(struct crypto_alg *alg,
					    struct list_head *stack,
					    struct list_head *top,
					    struct list_head *secondary_spawns)
{
	struct crypto_spawn *spawn, *n;

	spawn = list_first_entry_or_null(stack, struct crypto_spawn, list);
	if (!spawn)
		return NULL;

	n = list_prev_entry(spawn, list);
	list_move(&spawn->list, secondary_spawns);

	if (list_is_last(&n->list, stack))
		return top;

	n = list_next_entry(n, list);
	if (!spawn->dead)
		n->dead = false;

	return &n->inst->alg.cra_users;
}

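/*
 * Mark a template instance as dead, unhash it from its template and
 * move it onto @list so that crypto_remove_final() can drop the final
 * reference once crypto_alg_sem has been released.
 */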
static void crypto_remove_instance(struct crypto_instance *inst,
				   struct list_head *list)
{
	struct crypto_template *tmpl = inst->tmpl;

	if (crypto_is_dead(&inst->alg))
		return;

	inst->alg.cra_flags |= CRYPTO_ALG_DEAD;
	if (hlist_unhashed(&inst->list))
		return;

	if (!tmpl || !crypto_tmpl_get(tmpl))
		return;

	list_move(&inst->alg.cra_list, list);
	hlist_del(&inst->list);
	inst->alg.cra_destroy = crypto_destroy_instance;

	BUG_ON(!list_empty(&inst->alg.cra_users));
}

/*
 * Given an algorithm alg, remove all algorithms that depend on it
 * through spawns.  If nalg is not null, then exempt any algorithms
 * that are depended on by nalg.  This is useful when nalg itself
 * depends on alg.
 */
void crypto_remove_spawns(struct crypto_alg *alg, struct list_head *list,
			  struct crypto_alg *nalg)
{
	u32 new_type = (nalg ?: alg)->cra_flags;
	struct crypto_spawn *spawn, *n;
	LIST_HEAD(secondary_spawns);
	struct list_head *spawns;
	LIST_HEAD(stack);
	LIST_HEAD(top);

	spawns = &alg->cra_users;
	list_for_each_entry_safe(spawn, n, spawns, list) {
		if ((spawn->alg->cra_flags ^ new_type) & spawn->mask)
			continue;

		list_move(&spawn->list, &top);
	}

	/*
	 * Perform a depth-first walk starting from alg through
	 * the cra_users tree.  The list stack records the path
	 * from alg to the current spawn.
	 */
	spawns = &top;
	do {
		while (!list_empty(spawns)) {
			struct crypto_instance *inst;

			spawn = list_first_entry(spawns, struct crypto_spawn,
						 list);
			inst = spawn->inst;

			BUG_ON(&inst->alg == alg);

			list_move(&spawn->list, &stack);

			if (&inst->alg == nalg)
				break;

			spawn->dead = true;
			spawns = &inst->alg.cra_users;

			/*
			 * We may encounter an unregistered instance here, since
			 * an instance's spawns are set up prior to the instance
			 * being registered.  An unregistered instance will have
			 * NULL ->cra_users.next, since ->cra_users isn't
			 * properly initialized until registration.  But an
			 * unregistered instance cannot have any users, so treat
			 * it the same as ->cra_users being empty.
			 */
			if (spawns->next == NULL)
				break;
		}
	} while ((spawns = crypto_more_spawns(alg, &stack, &top,
					      &secondary_spawns)));

	/*
	 * Remove all instances that are marked as dead.  Also
	 * complete the resurrection of the others by moving them
	 * back to the cra_users list.
	 */
	list_for_each_entry_safe(spawn, n, &secondary_spawns, list) {
		if (!spawn->dead)
			list_move(&spawn->list, &spawn->alg->cra_users);
		else
			crypto_remove_instance(spawn->inst, list);
	}
}
EXPORT_SYMBOL_GPL(crypto_remove_spawns);

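/*
 * Add @alg to crypto_alg_list together with a larval that stands in
 * for it until its self-tests have run.  Called with crypto_alg_sem
 * held for writing; returns the larval so the caller can wait on its
 * completion.
 */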
static struct crypto_larval *__crypto_register_alg(struct crypto_alg *alg)
{
	struct crypto_alg *q;
	struct crypto_larval *larval;
	int ret = -EAGAIN;

	if (crypto_is_dead(alg))
		goto err;

	INIT_LIST_HEAD(&alg->cra_users);

	/* No cheating! */
	alg->cra_flags &= ~CRYPTO_ALG_TESTED;

	ret = -EEXIST;

	list_for_each_entry(q, &crypto_alg_list, cra_list) {
		if (q == alg)
			goto err;

		if (crypto_is_moribund(q))
			continue;

		if (crypto_is_larval(q)) {
			if (!strcmp(alg->cra_driver_name, q->cra_driver_name))
				goto err;
			continue;
		}

		if (!strcmp(q->cra_driver_name, alg->cra_name) ||
		    !strcmp(q->cra_name, alg->cra_driver_name))
			goto err;
	}

	larval = crypto_larval_alloc(alg->cra_name,
				     alg->cra_flags | CRYPTO_ALG_TESTED, 0);
	if (IS_ERR(larval))
		goto out;

	ret = -ENOENT;
	larval->adult = crypto_mod_get(alg);
	if (!larval->adult)
		goto free_larval;

	refcount_set(&larval->alg.cra_refcnt, 1);
	memcpy(larval->alg.cra_driver_name, alg->cra_driver_name,
	       CRYPTO_MAX_ALG_NAME);
	larval->alg.cra_priority = alg->cra_priority;

	list_add(&alg->cra_list, &crypto_alg_list);
	list_add(&larval->alg.cra_list, &crypto_alg_list);

	crypto_stats_init(alg);

out:
	return larval;

free_larval:
	kfree(larval);
err:
	larval = ERR_PTR(ret);
	goto out;
}

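/*
 * Invoked once the self-tests for the driver @name have finished (or
 * were skipped).  On success the adult algorithm is marked
 * CRYPTO_ALG_TESTED, larvals waiting for a matching name are bound to
 * it, and same-named algorithms that do not outrank it lose their
 * dependants via crypto_remove_spawns().
 */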
void crypto_alg_tested(const char *name, int err)
{
	struct crypto_larval *test;
	struct crypto_alg *alg;
	struct crypto_alg *q;
	LIST_HEAD(list);

	down_write(&crypto_alg_sem);
	list_for_each_entry(q, &crypto_alg_list, cra_list) {
		if (crypto_is_moribund(q) || !crypto_is_larval(q))
			continue;

		test = (struct crypto_larval *)q;

		if (!strcmp(q->cra_driver_name, name))
			goto found;
	}

	pr_err("alg: Unexpected test result for %s: %d\n", name, err);
	goto unlock;

found:
	q->cra_flags |= CRYPTO_ALG_DEAD;
	alg = test->adult;
	if (err || list_empty(&alg->cra_list))
		goto complete;

	alg->cra_flags |= CRYPTO_ALG_TESTED;

	list_for_each_entry(q, &crypto_alg_list, cra_list) {
		if (q == alg)
			continue;

		if (crypto_is_moribund(q))
			continue;

		if (crypto_is_larval(q)) {
			struct crypto_larval *larval = (void *)q;

			/*
			 * Check to see if either our generic name or
			 * specific name can satisfy the name requested
			 * by the larval entry q.
			 */
			if (strcmp(alg->cra_name, q->cra_name) &&
			    strcmp(alg->cra_driver_name, q->cra_name))
				continue;

			if (larval->adult)
				continue;
			if ((q->cra_flags ^ alg->cra_flags) & larval->mask)
				continue;
			if (!crypto_mod_get(alg))
				continue;

			larval->adult = alg;
			continue;
		}

		if (strcmp(alg->cra_name, q->cra_name))
			continue;

		if (strcmp(alg->cra_driver_name, q->cra_driver_name) &&
		    q->cra_priority > alg->cra_priority)
			continue;

		crypto_remove_spawns(q, &list, alg);
	}

complete:
	complete_all(&test->completion);

unlock:
	up_write(&crypto_alg_sem);

	crypto_remove_final(&list);
}
EXPORT_SYMBOL_GPL(crypto_alg_tested);

void crypto_remove_final(struct list_head *list)
{
	struct crypto_alg *alg;
	struct crypto_alg *n;

	list_for_each_entry_safe(alg, n, list, cra_list) {
		list_del_init(&alg->cra_list);
		crypto_alg_put(alg);
	}
}
EXPORT_SYMBOL_GPL(crypto_remove_final);

static void crypto_wait_for_test(struct crypto_larval *larval)
{
	int err;

	err = crypto_probing_notify(CRYPTO_MSG_ALG_REGISTER, larval->adult);
	if (err != NOTIFY_STOP) {
		if (WARN_ON(err != NOTIFY_DONE))
			goto out;
		crypto_alg_tested(larval->alg.cra_driver_name, 0);
	}

	err = wait_for_completion_killable(&larval->completion);
	WARN_ON(err);
	if (!err)
		crypto_probing_notify(CRYPTO_MSG_ALG_LOADED, larval);

out:
	crypto_larval_kill(&larval->alg);
}

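/*
 * Register a new algorithm with the crypto core and wait for its
 * self-tests to complete.  A minimal usage sketch (the alg and its
 * field values below are illustrative only):
 *
 *	static struct crypto_alg my_alg = {
 *		.cra_name		= "example",
 *		.cra_driver_name	= "example-generic",
 *		.cra_priority		= 100,
 *		.cra_module		= THIS_MODULE,
 *		...
 *	};
 *
 *	err = crypto_register_alg(&my_alg);
 *
 * Returns zero on success or a negative errno on failure.
 */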
int crypto_register_alg(struct crypto_alg *alg)
{
	struct crypto_larval *larval;
	int err;

	alg->cra_flags &= ~CRYPTO_ALG_DEAD;
	err = crypto_check_alg(alg);
	if (err)
		return err;

	down_write(&crypto_alg_sem);
	larval = __crypto_register_alg(alg);
	up_write(&crypto_alg_sem);

	if (IS_ERR(larval))
		return PTR_ERR(larval);

	crypto_wait_for_test(larval);
	return 0;
}
EXPORT_SYMBOL_GPL(crypto_register_alg);

static int crypto_remove_alg(struct crypto_alg *alg, struct list_head *list)
{
	if (unlikely(list_empty(&alg->cra_list)))
		return -ENOENT;

	alg->cra_flags |= CRYPTO_ALG_DEAD;

	list_del_init(&alg->cra_list);
	crypto_remove_spawns(alg, list, NULL);

	return 0;
}

int crypto_unregister_alg(struct crypto_alg *alg)
{
	int ret;
	LIST_HEAD(list);

	down_write(&crypto_alg_sem);
	ret = crypto_remove_alg(alg, &list);
	up_write(&crypto_alg_sem);

	if (ret)
		return ret;

	BUG_ON(refcount_read(&alg->cra_refcnt) != 1);
	if (alg->cra_destroy)
		alg->cra_destroy(alg);

	crypto_remove_final(&list);
	return 0;
}
EXPORT_SYMBOL_GPL(crypto_unregister_alg);

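/*
 * Register an array of algorithms.  If any registration fails, the
 * algorithms registered so far are unregistered again and the error
 * is returned, so a driver can treat the whole array as one unit.
 */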
int crypto_register_algs(struct crypto_alg *algs, int count)
{
	int i, ret;

	for (i = 0; i < count; i++) {
		ret = crypto_register_alg(&algs[i]);
		if (ret)
			goto err;
	}

	return 0;

err:
	for (--i; i >= 0; --i)
		crypto_unregister_alg(&algs[i]);

	return ret;
}
EXPORT_SYMBOL_GPL(crypto_register_algs);

int crypto_unregister_algs(struct crypto_alg *algs, int count)
{
	int i, ret;

	for (i = 0; i < count; i++) {
		ret = crypto_unregister_alg(&algs[i]);
		if (ret)
			pr_err("Failed to unregister %s %s: %d\n",
			       algs[i].cra_driver_name, algs[i].cra_name, ret);
	}

	return 0;
}
EXPORT_SYMBOL_GPL(crypto_unregister_algs);

int crypto_register_template(struct crypto_template *tmpl)
{
	struct crypto_template *q;
	int err = -EEXIST;

	down_write(&crypto_alg_sem);

	crypto_check_module_sig(tmpl->module);

	list_for_each_entry(q, &crypto_template_list, list) {
		if (q == tmpl)
			goto out;
	}

	list_add(&tmpl->list, &crypto_template_list);
	err = 0;
out:
	up_write(&crypto_alg_sem);
	return err;
}
EXPORT_SYMBOL_GPL(crypto_register_template);

int crypto_register_templates(struct crypto_template *tmpls, int count)
{
	int i, err;

	for (i = 0; i < count; i++) {
		err = crypto_register_template(&tmpls[i]);
		if (err)
			goto out;
	}
	return 0;

out:
	for (--i; i >= 0; --i)
		crypto_unregister_template(&tmpls[i]);
	return err;
}
EXPORT_SYMBOL_GPL(crypto_register_templates);

void crypto_unregister_template(struct crypto_template *tmpl)
{
	struct crypto_instance *inst;
	struct hlist_node *n;
	struct hlist_head *list;
	LIST_HEAD(users);

	down_write(&crypto_alg_sem);

	BUG_ON(list_empty(&tmpl->list));
	list_del_init(&tmpl->list);

	list = &tmpl->instances;
	hlist_for_each_entry(inst, list, list) {
		int err = crypto_remove_alg(&inst->alg, &users);

		BUG_ON(err);
	}

	up_write(&crypto_alg_sem);

	hlist_for_each_entry_safe(inst, n, list, list) {
		BUG_ON(refcount_read(&inst->alg.cra_refcnt) != 1);
		crypto_free_instance(inst);
	}
	crypto_remove_final(&users);
}
EXPORT_SYMBOL_GPL(crypto_unregister_template);

void crypto_unregister_templates(struct crypto_template *tmpls, int count)
{
	int i;

	for (i = count - 1; i >= 0; --i)
		crypto_unregister_template(&tmpls[i]);
}
EXPORT_SYMBOL_GPL(crypto_unregister_templates);

static struct crypto_template *__crypto_lookup_template(const char *name)
{
	struct crypto_template *q, *tmpl = NULL;

	down_read(&crypto_alg_sem);
	list_for_each_entry(q, &crypto_template_list, list) {
		if (strcmp(q->name, name))
			continue;
		if (unlikely(!crypto_tmpl_get(q)))
			continue;

		tmpl = q;
		break;
	}
	up_read(&crypto_alg_sem);

	return tmpl;
}

struct crypto_template *crypto_lookup_template(const char *name)
{
	return try_then_request_module(__crypto_lookup_template(name),
				       "crypto-%s", name);
}
EXPORT_SYMBOL_GPL(crypto_lookup_template);

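/*
 * Register an algorithm instance built from a template, such as
 * "hmac(sha256)".  The instance inherits the template's module, is
 * linked into the template's instance list and then goes through the
 * same larval/testing cycle as a plain algorithm.
 */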
int crypto_register_instance(struct crypto_template *tmpl,
			     struct crypto_instance *inst)
{
	struct crypto_larval *larval;
	int err;

	err = crypto_check_alg(&inst->alg);
	if (err)
		return err;

	inst->alg.cra_module = tmpl->module;
	inst->alg.cra_flags |= CRYPTO_ALG_INSTANCE;

	down_write(&crypto_alg_sem);

	larval = __crypto_register_alg(&inst->alg);
	if (IS_ERR(larval))
		goto unlock;

	hlist_add_head(&inst->list, &tmpl->instances);
	inst->tmpl = tmpl;

unlock:
	up_write(&crypto_alg_sem);

	err = PTR_ERR(larval);
	if (IS_ERR(larval))
		goto err;

	crypto_wait_for_test(larval);
	err = 0;

err:
	return err;
}
EXPORT_SYMBOL_GPL(crypto_register_instance);

int crypto_unregister_instance(struct crypto_instance *inst)
{
	LIST_HEAD(list);

	down_write(&crypto_alg_sem);

	crypto_remove_spawns(&inst->alg, &list, NULL);
	crypto_remove_instance(inst, &list);

	up_write(&crypto_alg_sem);

	crypto_remove_final(&list);

	return 0;
}
EXPORT_SYMBOL_GPL(crypto_unregister_instance);

int crypto_init_spawn(struct crypto_spawn *spawn, struct crypto_alg *alg,
		      struct crypto_instance *inst, u32 mask)
{
	int err = -EAGAIN;

	if (WARN_ON_ONCE(inst == NULL))
		return -EINVAL;

	spawn->inst = inst;
	spawn->mask = mask;

	down_write(&crypto_alg_sem);
	if (!crypto_is_moribund(alg)) {
		list_add(&spawn->list, &alg->cra_users);
		spawn->alg = alg;
		err = 0;
	}
	up_write(&crypto_alg_sem);

	return err;
}
EXPORT_SYMBOL_GPL(crypto_init_spawn);

int crypto_init_spawn2(struct crypto_spawn *spawn, struct crypto_alg *alg,
		       struct crypto_instance *inst,
		       const struct crypto_type *frontend)
{
	int err = -EINVAL;

	if ((alg->cra_flags ^ frontend->type) & frontend->maskset)
		goto out;

	spawn->frontend = frontend;
	err = crypto_init_spawn(spawn, alg, inst, frontend->maskset);

out:
	return err;
}
EXPORT_SYMBOL_GPL(crypto_init_spawn2);

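/*
 * Look up the algorithm @name and record the caller's instance
 * (spawn->inst, which must already be set) as one of its users.
 * The resulting spawn list is the dependency graph that
 * crypto_remove_spawns() walks when an algorithm is unregistered.
 */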
int crypto_grab_spawn(struct crypto_spawn *spawn, const char *name,
		      u32 type, u32 mask)
{
	struct crypto_alg *alg;
	int err;

	alg = crypto_find_alg(name, spawn->frontend, type, mask);
	if (IS_ERR(alg))
		return PTR_ERR(alg);

	err = crypto_init_spawn(spawn, alg, spawn->inst, mask);
	crypto_mod_put(alg);
	return err;
}
EXPORT_SYMBOL_GPL(crypto_grab_spawn);

void crypto_drop_spawn(struct crypto_spawn *spawn)
{
	down_write(&crypto_alg_sem);
	if (!spawn->dead)
		list_del(&spawn->list);
	up_write(&crypto_alg_sem);
}
EXPORT_SYMBOL_GPL(crypto_drop_spawn);

static struct crypto_alg *crypto_spawn_alg(struct crypto_spawn *spawn)
{
	struct crypto_alg *alg;

	down_read(&crypto_alg_sem);
	alg = spawn->alg;
	if (!spawn->dead && !crypto_mod_get(alg)) {
		alg->cra_flags |= CRYPTO_ALG_DYING;
		alg = NULL;
	}
	up_read(&crypto_alg_sem);

	return alg ?: ERR_PTR(-EAGAIN);
}

struct crypto_tfm *crypto_spawn_tfm(struct crypto_spawn *spawn, u32 type,
				    u32 mask)
{
	struct crypto_alg *alg;
	struct crypto_tfm *tfm;

	alg = crypto_spawn_alg(spawn);
	if (IS_ERR(alg))
		return ERR_CAST(alg);

	tfm = ERR_PTR(-EINVAL);
	if (unlikely((alg->cra_flags ^ type) & mask))
		goto out_put_alg;

	tfm = __crypto_alloc_tfm(alg, type, mask);
	if (IS_ERR(tfm))
		goto out_put_alg;

	return tfm;

out_put_alg:
	crypto_mod_put(alg);
	return tfm;
}
EXPORT_SYMBOL_GPL(crypto_spawn_tfm);

void *crypto_spawn_tfm2(struct crypto_spawn *spawn)
{
	struct crypto_alg *alg;
	struct crypto_tfm *tfm;

	alg = crypto_spawn_alg(spawn);
	if (IS_ERR(alg))
		return ERR_CAST(alg);

	tfm = crypto_create_tfm(alg, spawn->frontend);
	if (IS_ERR(tfm))
		goto out_put_alg;

	return tfm;

out_put_alg:
	crypto_mod_put(alg);
	return tfm;
}
EXPORT_SYMBOL_GPL(crypto_spawn_tfm2);

int crypto_register_notifier(struct notifier_block *nb)
{
	return blocking_notifier_chain_register(&crypto_chain, nb);
}
EXPORT_SYMBOL_GPL(crypto_register_notifier);

int crypto_unregister_notifier(struct notifier_block *nb)
{
	return blocking_notifier_chain_unregister(&crypto_chain, nb);
}
EXPORT_SYMBOL_GPL(crypto_unregister_notifier);

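/*
 * Helpers for parsing the rtattr-encoded parameters that templates
 * receive when an instance such as "cbc(aes)" is being created.
 */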
struct crypto_attr_type *crypto_get_attr_type(struct rtattr **tb)
{
	struct rtattr *rta = tb[0];
	struct crypto_attr_type *algt;

	if (!rta)
		return ERR_PTR(-ENOENT);
	if (RTA_PAYLOAD(rta) < sizeof(*algt))
		return ERR_PTR(-EINVAL);
	if (rta->rta_type != CRYPTOA_TYPE)
		return ERR_PTR(-EINVAL);

	algt = RTA_DATA(rta);

	return algt;
}
EXPORT_SYMBOL_GPL(crypto_get_attr_type);

int crypto_check_attr_type(struct rtattr **tb, u32 type)
{
	struct crypto_attr_type *algt;

	algt = crypto_get_attr_type(tb);
	if (IS_ERR(algt))
		return PTR_ERR(algt);

	if ((algt->type ^ type) & algt->mask)
		return -EINVAL;

	return 0;
}
EXPORT_SYMBOL_GPL(crypto_check_attr_type);

const char *crypto_attr_alg_name(struct rtattr *rta)
{
	struct crypto_attr_alg *alga;

	if (!rta)
		return ERR_PTR(-ENOENT);
	if (RTA_PAYLOAD(rta) < sizeof(*alga))
		return ERR_PTR(-EINVAL);
	if (rta->rta_type != CRYPTOA_ALG)
		return ERR_PTR(-EINVAL);

	alga = RTA_DATA(rta);
	alga->name[CRYPTO_MAX_ALG_NAME - 1] = 0;

	return alga->name;
}
EXPORT_SYMBOL_GPL(crypto_attr_alg_name);

struct crypto_alg *crypto_attr_alg2(struct rtattr *rta,
				    const struct crypto_type *frontend,
				    u32 type, u32 mask)
{
	const char *name;

	name = crypto_attr_alg_name(rta);
	if (IS_ERR(name))
		return ERR_CAST(name);

	return crypto_find_alg(name, frontend, type, mask);
}
EXPORT_SYMBOL_GPL(crypto_attr_alg2);

int crypto_attr_u32(struct rtattr *rta, u32 *num)
{
	struct crypto_attr_u32 *nu32;

	if (!rta)
		return -ENOENT;
	if (RTA_PAYLOAD(rta) < sizeof(*nu32))
		return -EINVAL;
	if (rta->rta_type != CRYPTOA_U32)
		return -EINVAL;

	nu32 = RTA_DATA(rta);
	*num = nu32->num;

	return 0;
}
EXPORT_SYMBOL_GPL(crypto_attr_u32);

int crypto_inst_setname(struct crypto_instance *inst, const char *name,
			struct crypto_alg *alg)
{
	if (snprintf(inst->alg.cra_name, CRYPTO_MAX_ALG_NAME, "%s(%s)", name,
		     alg->cra_name) >= CRYPTO_MAX_ALG_NAME)
		return -ENAMETOOLONG;

	if (snprintf(inst->alg.cra_driver_name, CRYPTO_MAX_ALG_NAME, "%s(%s)",
		     name, alg->cra_driver_name) >= CRYPTO_MAX_ALG_NAME)
		return -ENAMETOOLONG;

	return 0;
}
EXPORT_SYMBOL_GPL(crypto_inst_setname);

void *crypto_alloc_instance(const char *name, struct crypto_alg *alg,
			    unsigned int head)
{
	struct crypto_instance *inst;
	char *p;
	int err;

	p = kzalloc(head + sizeof(*inst) + sizeof(struct crypto_spawn),
		    GFP_KERNEL);
	if (!p)
		return ERR_PTR(-ENOMEM);

	inst = (void *)(p + head);

	err = crypto_inst_setname(inst, name, alg);
	if (err)
		goto err_free_inst;

	return p;

err_free_inst:
	kfree(p);
	return ERR_PTR(err);
}
EXPORT_SYMBOL_GPL(crypto_alloc_instance);

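/*
 * Simple request queue used by drivers that process asynchronous
 * requests from a workqueue or interrupt handler.  Once the queue is
 * full, requests flagged CRYPTO_TFM_REQ_MAY_BACKLOG are still queued
 * and reported with -EBUSY; everything else is refused with -ENOSPC.
 */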
void crypto_init_queue(struct crypto_queue *queue, unsigned int max_qlen)
{
	INIT_LIST_HEAD(&queue->list);
	queue->backlog = &queue->list;
	queue->qlen = 0;
	queue->max_qlen = max_qlen;
}
EXPORT_SYMBOL_GPL(crypto_init_queue);

int crypto_enqueue_request(struct crypto_queue *queue,
			   struct crypto_async_request *request)
{
	int err = -EINPROGRESS;

	if (unlikely(queue->qlen >= queue->max_qlen)) {
		if (!(request->flags & CRYPTO_TFM_REQ_MAY_BACKLOG)) {
			err = -ENOSPC;
			goto out;
		}
		err = -EBUSY;
		if (queue->backlog == &queue->list)
			queue->backlog = &request->list;
	}

	queue->qlen++;
	list_add_tail(&request->list, &queue->list);

out:
	return err;
}
EXPORT_SYMBOL_GPL(crypto_enqueue_request);

struct crypto_async_request *crypto_dequeue_request(struct crypto_queue *queue)
{
	struct list_head *request;

	if (unlikely(!queue->qlen))
		return NULL;

	queue->qlen--;

	if (queue->backlog != &queue->list)
		queue->backlog = queue->backlog->next;

	request = queue->list.next;
	list_del(request);

	return list_entry(request, struct crypto_async_request, list);
}
EXPORT_SYMBOL_GPL(crypto_dequeue_request);

static inline void crypto_inc_byte(u8 *a, unsigned int size)
{
	u8 *b = (a + size);
	u8 c;

	for (; size; size--) {
		c = *--b + 1;
		*b = c;
		if (c)
			break;
	}
}

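/*
 * Increment a big-endian counter of @size bytes, as used for IVs in
 * counter-like modes.  Whole 32-bit words are handled at a time when
 * the buffer is suitably aligned; crypto_inc_byte() handles the rest.
 * A typical call would look like crypto_inc(ctrblk, AES_BLOCK_SIZE).
 */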
void crypto_inc(u8 *a, unsigned int size)
{
	__be32 *b = (__be32 *)(a + size);
	u32 c;

	if (IS_ENABLED(CONFIG_HAVE_EFFICIENT_UNALIGNED_ACCESS) ||
	    IS_ALIGNED((unsigned long)b, __alignof__(*b)))
		for (; size >= 4; size -= 4) {
			c = be32_to_cpu(*--b) + 1;
			*b = cpu_to_be32(c);
			if (likely(c))
				return;
		}

	crypto_inc_byte(a, size);
}
EXPORT_SYMBOL_GPL(crypto_inc);

void __crypto_xor(u8 *dst, const u8 *src1, const u8 *src2, unsigned int len)
{
	int relalign = 0;

	if (!IS_ENABLED(CONFIG_HAVE_EFFICIENT_UNALIGNED_ACCESS)) {
		int size = sizeof(unsigned long);
		int d = (((unsigned long)dst ^ (unsigned long)src1) |
			 ((unsigned long)dst ^ (unsigned long)src2)) &
			(size - 1);

		relalign = d ? 1 << __ffs(d) : size;

		/*
		 * If we care about alignment, process as many bytes as
		 * needed to advance dst and src to values whose alignments
		 * equal their relative alignment. This will allow us to
		 * process the remainder of the input using optimal strides.
		 */
		while (((unsigned long)dst & (relalign - 1)) && len > 0) {
			*dst++ = *src1++ ^ *src2++;
			len--;
		}
	}

	while (IS_ENABLED(CONFIG_64BIT) && len >= 8 && !(relalign & 7)) {
		*(u64 *)dst = *(u64 *)src1 ^ *(u64 *)src2;
		dst += 8;
		src1 += 8;
		src2 += 8;
		len -= 8;
	}

	while (len >= 4 && !(relalign & 3)) {
		*(u32 *)dst = *(u32 *)src1 ^ *(u32 *)src2;
		dst += 4;
		src1 += 4;
		src2 += 4;
		len -= 4;
	}

	while (len >= 2 && !(relalign & 1)) {
		*(u16 *)dst = *(u16 *)src1 ^ *(u16 *)src2;
		dst += 2;
		src1 += 2;
		src2 += 2;
		len -= 2;
	}

	while (len--)
		*dst++ = *src1++ ^ *src2++;
}
EXPORT_SYMBOL_GPL(__crypto_xor);

unsigned int crypto_alg_extsize(struct crypto_alg *alg)
{
	return alg->cra_ctxsize +
	       (alg->cra_alignmask & ~(crypto_tfm_ctx_alignment() - 1));
}
EXPORT_SYMBOL_GPL(crypto_alg_extsize);

int crypto_type_has_alg(const char *name, const struct crypto_type *frontend,
			u32 type, u32 mask)
{
	int ret = 0;
	struct crypto_alg *alg = crypto_find_alg(name, frontend, type, mask);

	if (!IS_ERR(alg)) {
		crypto_mod_put(alg);
		ret = 1;
	}

	return ret;
}
EXPORT_SYMBOL_GPL(crypto_type_has_alg);

#ifdef CONFIG_CRYPTO_STATS
void crypto_stats_init(struct crypto_alg *alg)
{
	memset(&alg->stats, 0, sizeof(alg->stats));
}
EXPORT_SYMBOL_GPL(crypto_stats_init);

void crypto_stats_get(struct crypto_alg *alg)
{
	crypto_alg_get(alg);
}
EXPORT_SYMBOL_GPL(crypto_stats_get);

void crypto_stats_aead_encrypt(unsigned int cryptlen, struct crypto_alg *alg,
			       int ret)
{
	if (ret && ret != -EINPROGRESS && ret != -EBUSY) {
		atomic64_inc(&alg->stats.aead.err_cnt);
	} else {
		atomic64_inc(&alg->stats.aead.encrypt_cnt);
		atomic64_add(cryptlen, &alg->stats.aead.encrypt_tlen);
	}
	crypto_alg_put(alg);
}
EXPORT_SYMBOL_GPL(crypto_stats_aead_encrypt);

void crypto_stats_aead_decrypt(unsigned int cryptlen, struct crypto_alg *alg,
			       int ret)
{
	if (ret && ret != -EINPROGRESS && ret != -EBUSY) {
		atomic64_inc(&alg->stats.aead.err_cnt);
	} else {
		atomic64_inc(&alg->stats.aead.decrypt_cnt);
		atomic64_add(cryptlen, &alg->stats.aead.decrypt_tlen);
	}
	crypto_alg_put(alg);
}
EXPORT_SYMBOL_GPL(crypto_stats_aead_decrypt);

void crypto_stats_akcipher_encrypt(unsigned int src_len, int ret,
				   struct crypto_alg *alg)
{
	if (ret && ret != -EINPROGRESS && ret != -EBUSY) {
		atomic64_inc(&alg->stats.akcipher.err_cnt);
	} else {
		atomic64_inc(&alg->stats.akcipher.encrypt_cnt);
		atomic64_add(src_len, &alg->stats.akcipher.encrypt_tlen);
	}
	crypto_alg_put(alg);
}
EXPORT_SYMBOL_GPL(crypto_stats_akcipher_encrypt);

void crypto_stats_akcipher_decrypt(unsigned int src_len, int ret,
				   struct crypto_alg *alg)
{
	if (ret && ret != -EINPROGRESS && ret != -EBUSY) {
		atomic64_inc(&alg->stats.akcipher.err_cnt);
	} else {
		atomic64_inc(&alg->stats.akcipher.decrypt_cnt);
		atomic64_add(src_len, &alg->stats.akcipher.decrypt_tlen);
	}
	crypto_alg_put(alg);
}
EXPORT_SYMBOL_GPL(crypto_stats_akcipher_decrypt);

void crypto_stats_akcipher_sign(int ret, struct crypto_alg *alg)
{
	if (ret && ret != -EINPROGRESS && ret != -EBUSY)
		atomic64_inc(&alg->stats.akcipher.err_cnt);
	else
		atomic64_inc(&alg->stats.akcipher.sign_cnt);
	crypto_alg_put(alg);
}
EXPORT_SYMBOL_GPL(crypto_stats_akcipher_sign);

void crypto_stats_akcipher_verify(int ret, struct crypto_alg *alg)
{
	if (ret && ret != -EINPROGRESS && ret != -EBUSY)
		atomic64_inc(&alg->stats.akcipher.err_cnt);
	else
		atomic64_inc(&alg->stats.akcipher.verify_cnt);
	crypto_alg_put(alg);
}
EXPORT_SYMBOL_GPL(crypto_stats_akcipher_verify);

void crypto_stats_compress(unsigned int slen, int ret, struct crypto_alg *alg)
{
	if (ret && ret != -EINPROGRESS && ret != -EBUSY) {
		atomic64_inc(&alg->stats.compress.err_cnt);
	} else {
		atomic64_inc(&alg->stats.compress.compress_cnt);
		atomic64_add(slen, &alg->stats.compress.compress_tlen);
	}
	crypto_alg_put(alg);
}
EXPORT_SYMBOL_GPL(crypto_stats_compress);

void crypto_stats_decompress(unsigned int slen, int ret, struct crypto_alg *alg)
{
	if (ret && ret != -EINPROGRESS && ret != -EBUSY) {
		atomic64_inc(&alg->stats.compress.err_cnt);
	} else {
		atomic64_inc(&alg->stats.compress.decompress_cnt);
		atomic64_add(slen, &alg->stats.compress.decompress_tlen);
	}
	crypto_alg_put(alg);
}
EXPORT_SYMBOL_GPL(crypto_stats_decompress);

void crypto_stats_ahash_update(unsigned int nbytes, int ret,
			       struct crypto_alg *alg)
{
	if (ret && ret != -EINPROGRESS && ret != -EBUSY)
		atomic64_inc(&alg->stats.hash.err_cnt);
	else
		atomic64_add(nbytes, &alg->stats.hash.hash_tlen);
	crypto_alg_put(alg);
}
EXPORT_SYMBOL_GPL(crypto_stats_ahash_update);

void crypto_stats_ahash_final(unsigned int nbytes, int ret,
			      struct crypto_alg *alg)
{
	if (ret && ret != -EINPROGRESS && ret != -EBUSY) {
		atomic64_inc(&alg->stats.hash.err_cnt);
	} else {
		atomic64_inc(&alg->stats.hash.hash_cnt);
		atomic64_add(nbytes, &alg->stats.hash.hash_tlen);
	}
	crypto_alg_put(alg);
}
EXPORT_SYMBOL_GPL(crypto_stats_ahash_final);

void crypto_stats_kpp_set_secret(struct crypto_alg *alg, int ret)
{
	if (ret)
		atomic64_inc(&alg->stats.kpp.err_cnt);
	else
		atomic64_inc(&alg->stats.kpp.setsecret_cnt);
	crypto_alg_put(alg);
}
EXPORT_SYMBOL_GPL(crypto_stats_kpp_set_secret);

void crypto_stats_kpp_generate_public_key(struct crypto_alg *alg, int ret)
{
	if (ret)
		atomic64_inc(&alg->stats.kpp.err_cnt);
	else
		atomic64_inc(&alg->stats.kpp.generate_public_key_cnt);
	crypto_alg_put(alg);
}
EXPORT_SYMBOL_GPL(crypto_stats_kpp_generate_public_key);

void crypto_stats_kpp_compute_shared_secret(struct crypto_alg *alg, int ret)
{
	if (ret)
		atomic64_inc(&alg->stats.kpp.err_cnt);
	else
		atomic64_inc(&alg->stats.kpp.compute_shared_secret_cnt);
	crypto_alg_put(alg);
}
EXPORT_SYMBOL_GPL(crypto_stats_kpp_compute_shared_secret);

void crypto_stats_rng_seed(struct crypto_alg *alg, int ret)
{
	if (ret && ret != -EINPROGRESS && ret != -EBUSY)
		atomic64_inc(&alg->stats.rng.err_cnt);
	else
		atomic64_inc(&alg->stats.rng.seed_cnt);
	crypto_alg_put(alg);
}
EXPORT_SYMBOL_GPL(crypto_stats_rng_seed);

void crypto_stats_rng_generate(struct crypto_alg *alg, unsigned int dlen,
			       int ret)
{
	if (ret && ret != -EINPROGRESS && ret != -EBUSY) {
		atomic64_inc(&alg->stats.rng.err_cnt);
	} else {
		atomic64_inc(&alg->stats.rng.generate_cnt);
		atomic64_add(dlen, &alg->stats.rng.generate_tlen);
	}
	crypto_alg_put(alg);
}
EXPORT_SYMBOL_GPL(crypto_stats_rng_generate);

void crypto_stats_skcipher_encrypt(unsigned int cryptlen, int ret,
				   struct crypto_alg *alg)
{
	if (ret && ret != -EINPROGRESS && ret != -EBUSY) {
		atomic64_inc(&alg->stats.cipher.err_cnt);
	} else {
		atomic64_inc(&alg->stats.cipher.encrypt_cnt);
		atomic64_add(cryptlen, &alg->stats.cipher.encrypt_tlen);
	}
	crypto_alg_put(alg);
}
EXPORT_SYMBOL_GPL(crypto_stats_skcipher_encrypt);

void crypto_stats_skcipher_decrypt(unsigned int cryptlen, int ret,
				   struct crypto_alg *alg)
{
	if (ret && ret != -EINPROGRESS && ret != -EBUSY) {
		atomic64_inc(&alg->stats.cipher.err_cnt);
	} else {
		atomic64_inc(&alg->stats.cipher.decrypt_cnt);
		atomic64_add(cryptlen, &alg->stats.cipher.decrypt_tlen);
	}
	crypto_alg_put(alg);
}
EXPORT_SYMBOL_GPL(crypto_stats_skcipher_decrypt);
#endif

static int __init crypto_algapi_init(void)
{
	crypto_init_proc();
	return 0;
}

static void __exit crypto_algapi_exit(void)
{
	crypto_exit_proc();
}

module_init(crypto_algapi_init);
module_exit(crypto_algapi_exit);

MODULE_LICENSE("GPL");
MODULE_DESCRIPTION("Cryptographic algorithms API");