Thomas Gleixner | 2874c5f | 2019-05-27 08:55:01 +0200 | [diff] [blame] | 1 | /* SPDX-License-Identifier: GPL-2.0-or-later */ |
Herbert Xu | cce9e06 | 2006-08-21 21:08:13 +1000 | [diff] [blame] | 2 | /* |
| 3 | * Cryptographic API for algorithms (i.e., low-level API). |
| 4 | * |
| 5 | * Copyright (c) 2006 Herbert Xu <herbert@gondor.apana.org.au> |
Herbert Xu | cce9e06 | 2006-08-21 21:08:13 +1000 | [diff] [blame] | 6 | */ |
| 7 | #ifndef _CRYPTO_ALGAPI_H |
| 8 | #define _CRYPTO_ALGAPI_H |
| 9 | |
| 10 | #include <linux/crypto.h> |
Herbert Xu | b5b7f08 | 2007-04-16 20:48:54 +1000 | [diff] [blame] | 11 | #include <linux/list.h> |
| 12 | #include <linux/kernel.h> |
Herbert Xu | cce9e06 | 2006-08-21 21:08:13 +1000 | [diff] [blame] | 13 | |
Salvatore Mesoraca | 13c935b | 2018-04-09 15:54:46 +0200 | [diff] [blame] | 14 | /* |
| 15 | * Maximum values for blocksize and alignmask, used to allocate |
| 16 | * static buffers that are big enough for any combination of |
Kees Cook | a9f7f88 | 2018-08-07 14:18:40 -0700 | [diff] [blame] | 17 | * algs and architectures. Ciphers have a lower maximum size. |
Salvatore Mesoraca | 13c935b | 2018-04-09 15:54:46 +0200 | [diff] [blame] | 18 | */ |
Kees Cook | a9f7f88 | 2018-08-07 14:18:40 -0700 | [diff] [blame] | 19 | #define MAX_ALGAPI_BLOCKSIZE 160 |
| 20 | #define MAX_ALGAPI_ALIGNMASK 63 |
Salvatore Mesoraca | 13c935b | 2018-04-09 15:54:46 +0200 | [diff] [blame] | 21 | #define MAX_CIPHER_BLOCKSIZE 16 |
| 22 | #define MAX_CIPHER_ALIGNMASK 15 |
| 23 | |
Herbert Xu | 5d1d65f | 2015-05-11 17:48:12 +0800 | [diff] [blame] | 24 | struct crypto_aead; |
Herbert Xu | 319382a | 2015-07-09 07:17:15 +0800 | [diff] [blame] | 25 | struct crypto_instance; |
Herbert Xu | 4cc7720 | 2006-08-06 21:16:34 +1000 | [diff] [blame] | 26 | struct module; |
Herbert Xu | ebc610e | 2007-01-01 18:37:02 +1100 | [diff] [blame] | 27 | struct rtattr; |
Herbert Xu | e853c3c | 2006-08-22 00:06:54 +1000 | [diff] [blame] | 28 | struct seq_file; |
Herbert Xu | 0c3dc78 | 2020-08-19 21:58:20 +1000 | [diff] [blame] | 29 | struct sk_buff; |
Herbert Xu | e853c3c | 2006-08-22 00:06:54 +1000 | [diff] [blame] | 30 | |
/*
 * Operations and constants describing a frontend type of crypto
 * transform: how its tfm objects are sized, initialized, shown in
 * /proc and reported to user space.
 */
struct crypto_type {
	/* Size of the context required for @alg under @type/@mask. */
	unsigned int (*ctxsize)(struct crypto_alg *alg, u32 type, u32 mask);
	/* Extended size hook — presumably used by newer frontends in
	 * place of ctxsize(); confirm against crypto/api.c. */
	unsigned int (*extsize)(struct crypto_alg *alg);
	/* Type-specific tfm initialization (legacy, takes type/mask). */
	int (*init)(struct crypto_tfm *tfm, u32 type, u32 mask);
	/* Type-specific tfm initialization (newer variant). */
	int (*init_tfm)(struct crypto_tfm *tfm);
	/* Print a description of @alg to the seq_file @m. */
	void (*show)(struct seq_file *m, struct crypto_alg *alg);
	/* Report @alg over netlink into @skb; returns 0 or -errno. */
	int (*report)(struct sk_buff *skb, struct crypto_alg *alg);
	/* Release an instance of this type. */
	void (*free)(struct crypto_instance *inst);

	unsigned int type;	/* type bits identifying this frontend */
	unsigned int maskclear;	/* bits to clear from a lookup mask — TODO confirm */
	unsigned int maskset;	/* bits to set in a lookup mask — TODO confirm */
	unsigned int tfmsize;	/* size of the frontend tfm wrapper */
};
Herbert Xu | 4cc7720 | 2006-08-06 21:16:34 +1000 | [diff] [blame] | 45 | |
/*
 * An algorithm instance constructed from a template.  The embedded
 * crypto_alg comes first so an instance can be used anywhere a plain
 * algorithm is expected.
 */
struct crypto_instance {
	struct crypto_alg alg;		/* the instance's algorithm proper */

	struct crypto_template *tmpl;	/* template this was created from */

	union {
		/* Node in list of instances after registration. */
		struct hlist_node list;
		/* List of attached spawns before registration. */
		struct crypto_spawn *spawns;
	};

	/* Instance-private context, aligned to CRYPTO_MINALIGN. */
	void *__ctx[] CRYPTO_MINALIGN_ATTR;
};
| 60 | |
/*
 * A template from which algorithm instances are created (e.g.
 * "cbc", "hmac").  Looked up by name via crypto_lookup_template().
 */
struct crypto_template {
	struct list_head list;		/* node in the global template list — TODO confirm */
	struct hlist_head instances;	/* instances created from this template */
	struct module *module;		/* owning module */

	/* Parse attributes @tb and create/register a new instance.
	 * Returns 0 or -errno — presumably; confirm with callers of
	 * crypto_register_instance(). */
	int (*create)(struct crypto_template *tmpl, struct rtattr **tb);

	char name[CRYPTO_MAX_ALG_NAME];	/* template name, NUL-terminated */
};
| 70 | |
/*
 * Tracks one algorithm used by an instance.  Bookkeeping differs
 * before and after the owning instance is registered, hence the
 * union below.
 */
struct crypto_spawn {
	struct list_head list;
	struct crypto_alg *alg;		/* the algorithm this spawn refers to */
	union {
		/* Back pointer to instance after registration.*/
		struct crypto_instance *inst;
		/* Spawn list pointer prior to registration. */
		struct crypto_spawn *next;
	};
	const struct crypto_type *frontend;	/* frontend type used via crypto_spawn_tfm2() — TODO confirm */
	u32 mask;		/* mask used when the algorithm was grabbed */
	bool dead;		/* presumably set when @alg is going away; confirm in crypto/algapi.c */
	bool registered;	/* owning instance has been registered */
};
| 85 | |
/*
 * FIFO of asynchronous crypto requests with backlog support
 * (see crypto_enqueue_request() / crypto_dequeue_request()).
 */
struct crypto_queue {
	struct list_head list;		/* queued requests */
	struct list_head *backlog;	/* first backlogged entry, or &list when empty
					 * (see crypto_get_backlog() below) */

	unsigned int qlen;	/* current number of queued requests */
	unsigned int max_qlen;	/* presumably the threshold beyond which requests
				 * are backlogged — confirm in crypto/algapi.c */
};
| 93 | |
/* Cursor for walking a scatterlist: current entry plus byte offset. */
struct scatter_walk {
	struct scatterlist *sg;
	unsigned int offset;
};
| 98 | |
Herbert Xu | db131ef | 2006-09-21 11:44:08 +1000 | [diff] [blame] | 99 | void crypto_mod_put(struct crypto_alg *alg); |
| 100 | |
Herbert Xu | 4cc7720 | 2006-08-06 21:16:34 +1000 | [diff] [blame] | 101 | int crypto_register_template(struct crypto_template *tmpl); |
Xiongfeng Wang | 9572442 | 2019-01-18 13:58:11 +0800 | [diff] [blame] | 102 | int crypto_register_templates(struct crypto_template *tmpls, int count); |
Herbert Xu | 4cc7720 | 2006-08-06 21:16:34 +1000 | [diff] [blame] | 103 | void crypto_unregister_template(struct crypto_template *tmpl); |
Xiongfeng Wang | 9572442 | 2019-01-18 13:58:11 +0800 | [diff] [blame] | 104 | void crypto_unregister_templates(struct crypto_template *tmpls, int count); |
Herbert Xu | 4cc7720 | 2006-08-06 21:16:34 +1000 | [diff] [blame] | 105 | struct crypto_template *crypto_lookup_template(const char *name); |
| 106 | |
Herbert Xu | 9cd899a | 2009-07-14 18:45:45 +0800 | [diff] [blame] | 107 | int crypto_register_instance(struct crypto_template *tmpl, |
| 108 | struct crypto_instance *inst); |
Eric Biggers | c6d633a | 2019-12-15 15:51:19 -0800 | [diff] [blame] | 109 | void crypto_unregister_instance(struct crypto_instance *inst); |
Herbert Xu | 9cd899a | 2009-07-14 18:45:45 +0800 | [diff] [blame] | 110 | |
Eric Biggers | de95c95 | 2020-01-02 19:58:48 -0800 | [diff] [blame] | 111 | int crypto_grab_spawn(struct crypto_spawn *spawn, struct crypto_instance *inst, |
| 112 | const char *name, u32 type, u32 mask); |
Herbert Xu | 6bfd480 | 2006-09-21 11:39:29 +1000 | [diff] [blame] | 113 | void crypto_drop_spawn(struct crypto_spawn *spawn); |
Herbert Xu | 2e306ee | 2006-12-17 10:05:58 +1100 | [diff] [blame] | 114 | struct crypto_tfm *crypto_spawn_tfm(struct crypto_spawn *spawn, u32 type, |
| 115 | u32 mask); |
Herbert Xu | 97eedce | 2009-07-08 15:55:52 +0800 | [diff] [blame] | 116 | void *crypto_spawn_tfm2(struct crypto_spawn *spawn); |
Herbert Xu | 6bfd480 | 2006-09-21 11:39:29 +1000 | [diff] [blame] | 117 | |
Herbert Xu | ebc610e | 2007-01-01 18:37:02 +1100 | [diff] [blame] | 118 | struct crypto_attr_type *crypto_get_attr_type(struct rtattr **tb); |
Eric Biggers | 7bcb2c9 | 2020-07-09 23:20:38 -0700 | [diff] [blame] | 119 | int crypto_check_attr_type(struct rtattr **tb, u32 type, u32 *mask_ret); |
Herbert Xu | 68b6c7d | 2007-12-07 20:18:17 +0800 | [diff] [blame] | 120 | const char *crypto_attr_alg_name(struct rtattr *rta); |
Herbert Xu | 3c09f17 | 2007-08-30 16:24:15 +0800 | [diff] [blame] | 121 | int crypto_attr_u32(struct rtattr *rta, u32 *num); |
Herbert Xu | 32f27c7 | 2016-06-29 18:04:13 +0800 | [diff] [blame] | 122 | int crypto_inst_setname(struct crypto_instance *inst, const char *name, |
| 123 | struct crypto_alg *alg); |
Herbert Xu | 7fed0bf | 2006-08-06 23:10:45 +1000 | [diff] [blame] | 124 | |
Herbert Xu | b5b7f08 | 2007-04-16 20:48:54 +1000 | [diff] [blame] | 125 | void crypto_init_queue(struct crypto_queue *queue, unsigned int max_qlen); |
| 126 | int crypto_enqueue_request(struct crypto_queue *queue, |
| 127 | struct crypto_async_request *request); |
Iuliana Prodan | ec6e2bf3 | 2020-04-28 18:49:03 +0300 | [diff] [blame] | 128 | void crypto_enqueue_request_head(struct crypto_queue *queue, |
| 129 | struct crypto_async_request *request); |
Herbert Xu | b5b7f08 | 2007-04-16 20:48:54 +1000 | [diff] [blame] | 130 | struct crypto_async_request *crypto_dequeue_request(struct crypto_queue *queue); |
/* Return the number of requests currently on @queue. */
static inline unsigned int crypto_queue_len(struct crypto_queue *queue)
{
	return queue->qlen;
}
Herbert Xu | b5b7f08 | 2007-04-16 20:48:54 +1000 | [diff] [blame] | 135 | |
Herbert Xu | 7613636 | 2007-11-20 17:26:06 +0800 | [diff] [blame] | 136 | void crypto_inc(u8 *a, unsigned int size); |
Ard Biesheuvel | a7c391f | 2017-07-24 11:28:03 +0100 | [diff] [blame] | 137 | void __crypto_xor(u8 *dst, const u8 *src1, const u8 *src2, unsigned int size); |
Ard Biesheuvel | db91af0 | 2017-02-05 10:06:12 +0000 | [diff] [blame] | 138 | |
| 139 | static inline void crypto_xor(u8 *dst, const u8 *src, unsigned int size) |
| 140 | { |
| 141 | if (IS_ENABLED(CONFIG_HAVE_EFFICIENT_UNALIGNED_ACCESS) && |
| 142 | __builtin_constant_p(size) && |
| 143 | (size % sizeof(unsigned long)) == 0) { |
| 144 | unsigned long *d = (unsigned long *)dst; |
| 145 | unsigned long *s = (unsigned long *)src; |
| 146 | |
| 147 | while (size > 0) { |
| 148 | *d++ ^= *s++; |
| 149 | size -= sizeof(unsigned long); |
| 150 | } |
| 151 | } else { |
Ard Biesheuvel | a7c391f | 2017-07-24 11:28:03 +0100 | [diff] [blame] | 152 | __crypto_xor(dst, dst, src, size); |
Ard Biesheuvel | db91af0 | 2017-02-05 10:06:12 +0000 | [diff] [blame] | 153 | } |
| 154 | } |
Herbert Xu | 7613636 | 2007-11-20 17:26:06 +0800 | [diff] [blame] | 155 | |
Ard Biesheuvel | 45fe93d | 2017-07-24 11:28:04 +0100 | [diff] [blame] | 156 | static inline void crypto_xor_cpy(u8 *dst, const u8 *src1, const u8 *src2, |
| 157 | unsigned int size) |
| 158 | { |
| 159 | if (IS_ENABLED(CONFIG_HAVE_EFFICIENT_UNALIGNED_ACCESS) && |
| 160 | __builtin_constant_p(size) && |
| 161 | (size % sizeof(unsigned long)) == 0) { |
| 162 | unsigned long *d = (unsigned long *)dst; |
| 163 | unsigned long *s1 = (unsigned long *)src1; |
| 164 | unsigned long *s2 = (unsigned long *)src2; |
| 165 | |
| 166 | while (size > 0) { |
| 167 | *d++ = *s1++ ^ *s2++; |
| 168 | size -= sizeof(unsigned long); |
| 169 | } |
| 170 | } else { |
| 171 | __crypto_xor(dst, src1, src2, size); |
| 172 | } |
| 173 | } |
| 174 | |
/*
 * Return the tfm's context pointer rounded up to the algorithm's
 * required alignment (alignmask + 1).
 */
static inline void *crypto_tfm_ctx_aligned(struct crypto_tfm *tfm)
{
	return PTR_ALIGN(crypto_tfm_ctx(tfm),
			 crypto_tfm_alg_alignmask(tfm) + 1);
}
| 180 | |
/*
 * Map a tfm's algorithm back to the crypto_instance embedding it.
 * Only valid when the algorithm really is part of an instance —
 * container_of() does no checking.
 */
static inline struct crypto_instance *crypto_tfm_alg_instance(
	struct crypto_tfm *tfm)
{
	return container_of(tfm->__crt_alg, struct crypto_instance, alg);
}
| 186 | |
/* Return the instance-private context area of @inst. */
static inline void *crypto_instance_ctx(struct crypto_instance *inst)
{
	return inst->__ctx;
}
| 191 | |
/* A crypto_spawn whose target is a CRYPTO_ALG_TYPE_CIPHER algorithm. */
struct crypto_cipher_spawn {
	struct crypto_spawn base;
};
| 195 | |
| 196 | static inline int crypto_grab_cipher(struct crypto_cipher_spawn *spawn, |
| 197 | struct crypto_instance *inst, |
| 198 | const char *name, u32 type, u32 mask) |
| 199 | { |
| 200 | type &= ~CRYPTO_ALG_TYPE_MASK; |
| 201 | type |= CRYPTO_ALG_TYPE_CIPHER; |
| 202 | mask |= CRYPTO_ALG_TYPE_MASK; |
| 203 | return crypto_grab_spawn(&spawn->base, inst, name, type, mask); |
| 204 | } |
| 205 | |
/* Release the reference held by a cipher spawn. */
static inline void crypto_drop_cipher(struct crypto_cipher_spawn *spawn)
{
	crypto_drop_spawn(&spawn->base);
}
| 210 | |
/* Return the algorithm a cipher spawn refers to. */
static inline struct crypto_alg *crypto_spawn_cipher_alg(
	struct crypto_cipher_spawn *spawn)
{
	return spawn->base.alg;
}
| 216 | |
Herbert Xu | 2e306ee | 2006-12-17 10:05:58 +1100 | [diff] [blame] | 217 | static inline struct crypto_cipher *crypto_spawn_cipher( |
Eric Biggers | d5ed3b6 | 2020-01-02 19:59:05 -0800 | [diff] [blame] | 218 | struct crypto_cipher_spawn *spawn) |
Herbert Xu | 2e306ee | 2006-12-17 10:05:58 +1100 | [diff] [blame] | 219 | { |
| 220 | u32 type = CRYPTO_ALG_TYPE_CIPHER; |
| 221 | u32 mask = CRYPTO_ALG_TYPE_MASK; |
| 222 | |
Eric Biggers | d5ed3b6 | 2020-01-02 19:59:05 -0800 | [diff] [blame] | 223 | return __crypto_cipher_cast(crypto_spawn_tfm(&spawn->base, type, mask)); |
Herbert Xu | 2e306ee | 2006-12-17 10:05:58 +1100 | [diff] [blame] | 224 | } |
| 225 | |
/* Return the cipher_alg part of @tfm's underlying algorithm. */
static inline struct cipher_alg *crypto_cipher_alg(struct crypto_cipher *tfm)
{
	return &crypto_cipher_tfm(tfm)->__crt_alg->cra_cipher;
}
| 230 | |
Herbert Xu | b5b7f08 | 2007-04-16 20:48:54 +1000 | [diff] [blame] | 231 | static inline struct crypto_async_request *crypto_get_backlog( |
| 232 | struct crypto_queue *queue) |
| 233 | { |
| 234 | return queue->backlog == &queue->list ? NULL : |
| 235 | container_of(queue->backlog, struct crypto_async_request, list); |
| 236 | } |
| 237 | |
/*
 * Of the flag bits in @off, return those that algt->mask covers but
 * algt->type has clear — i.e. the flags the user explicitly requested
 * to be off.
 */
static inline u32 crypto_requires_off(struct crypto_attr_type *algt, u32 off)
{
	return (algt->type ^ off) & algt->mask & off;
}
| 242 | |
Herbert Xu | 378f4f5 | 2007-12-17 20:07:31 +0800 | [diff] [blame] | 243 | /* |
Eric Biggers | 7bcb2c9 | 2020-07-09 23:20:38 -0700 | [diff] [blame] | 244 | * When an algorithm uses another algorithm (e.g., if it's an instance of a |
| 245 | * template), these are the flags that should always be set on the "outer" |
| 246 | * algorithm if any "inner" algorithm has them set. |
Herbert Xu | 378f4f5 | 2007-12-17 20:07:31 +0800 | [diff] [blame] | 247 | */ |
Eric Biggers | 2eb27c1 | 2020-07-09 23:20:39 -0700 | [diff] [blame] | 248 | #define CRYPTO_ALG_INHERITED_FLAGS \ |
Eric Biggers | fbb6cda | 2020-07-09 23:20:40 -0700 | [diff] [blame] | 249 | (CRYPTO_ALG_ASYNC | CRYPTO_ALG_NEED_FALLBACK | \ |
| 250 | CRYPTO_ALG_ALLOCATES_MEMORY) |
Eric Biggers | 7bcb2c9 | 2020-07-09 23:20:38 -0700 | [diff] [blame] | 251 | |
/*
 * Given the type and mask that specify the flags restrictions on a template
 * instance being created, return the mask that should be passed to
 * crypto_grab_*() (along with type=0) to honor any request the user made to
 * have any of the CRYPTO_ALG_INHERITED_FLAGS clear.
 */
static inline u32 crypto_algt_inherited_mask(struct crypto_attr_type *algt)
{
	return crypto_requires_off(algt, CRYPTO_ALG_INHERITED_FLAGS);
}
| 262 | |
James Yonan | 6bf37e5 | 2013-09-26 02:20:39 -0600 | [diff] [blame] | 263 | noinline unsigned long __crypto_memneq(const void *a, const void *b, size_t size); |
Herbert Xu | cce9e06 | 2006-08-21 21:08:13 +1000 | [diff] [blame] | 264 | |
James Yonan | 6bf37e5 | 2013-09-26 02:20:39 -0600 | [diff] [blame] | 265 | /** |
| 266 | * crypto_memneq - Compare two areas of memory without leaking |
| 267 | * timing information. |
| 268 | * |
| 269 | * @a: One area of memory |
| 270 | * @b: Another area of memory |
| 271 | * @size: The size of the area. |
| 272 | * |
| 273 | * Returns 0 when data is equal, 1 otherwise. |
| 274 | */ |
| 275 | static inline int crypto_memneq(const void *a, const void *b, size_t size) |
| 276 | { |
| 277 | return __crypto_memneq(a, b, size) != 0UL ? 1 : 0; |
| 278 | } |
| 279 | |
Martin K. Petersen | dd8b083 | 2018-08-30 11:00:14 -0400 | [diff] [blame] | 280 | int crypto_register_notifier(struct notifier_block *nb); |
| 281 | int crypto_unregister_notifier(struct notifier_block *nb); |
| 282 | |
/* Crypto notification events (delivered via the notifier chain
 * registered with crypto_register_notifier() above). */
enum {
	CRYPTO_MSG_ALG_REQUEST,		/* presumably: an algorithm was requested — confirm in crypto/api.c */
	CRYPTO_MSG_ALG_REGISTER,	/* presumably: an algorithm was registered */
	CRYPTO_MSG_ALG_LOADED,		/* presumably: an algorithm finished loading/testing */
};
| 289 | |
James Yonan | 6bf37e5 | 2013-09-26 02:20:39 -0600 | [diff] [blame] | 290 | #endif /* _CRYPTO_ALGAPI_H */ |