blob: 44e263e25599cac8d1cc3f5286211d83ab997ed1 [file] [log] [blame]
Thomas Gleixner2874c5f2019-05-27 08:55:01 +02001// SPDX-License-Identifier: GPL-2.0-or-later
Jeff Garzik53964b92016-06-17 10:30:35 +05302/*
3 * Cryptographic API.
4 *
5 * SHA-3, as specified in
6 * http://nvlpubs.nist.gov/nistpubs/FIPS/NIST.FIPS.202.pdf
7 *
8 * SHA-3 code by Jeff Garzik <jeff@garzik.org>
Ard Biesheuvel83dee2c2018-01-19 12:04:34 +00009 * Ard Biesheuvel <ard.biesheuvel@linaro.org>
Jeff Garzik53964b92016-06-17 10:30:35 +053010 */
11#include <crypto/internal/hash.h>
12#include <linux/init.h>
13#include <linux/module.h>
14#include <linux/types.h>
15#include <crypto/sha3.h>
Ard Biesheuvelc013cee2018-01-19 12:04:33 +000016#include <asm/unaligned.h>
Jeff Garzik53964b92016-06-17 10:30:35 +053017
/*
 * On some 32-bit architectures (h8300), GCC ends up using
 * over 1 KB of stack if we inline the round calculation into the loop
 * in keccakf(). On the other hand, on 64-bit architectures with plenty
 * of [64-bit wide] general purpose registers, not inlining it severely
 * hurts performance. So let's use 64-bitness as a heuristic to decide
 * whether to inline or not.
 */
#ifdef CONFIG_64BIT
#define SHA3_INLINE	inline
#else
#define SHA3_INLINE	noinline
#endif

/* Keccak-f[1600] consists of 24 rounds (FIPS 202, section 3.4) */
#define KECCAK_ROUNDS 24
33
/*
 * Round constants RC[0..23] XORed into lane (0, 0) by the Iota step,
 * one per round of Keccak-f[1600] (FIPS 202, section 3.2.5).
 */
static const u64 keccakf_rndc[24] = {
	0x0000000000000001ULL, 0x0000000000008082ULL, 0x800000000000808aULL,
	0x8000000080008000ULL, 0x000000000000808bULL, 0x0000000080000001ULL,
	0x8000000080008081ULL, 0x8000000000008009ULL, 0x000000000000008aULL,
	0x0000000000000088ULL, 0x0000000080008009ULL, 0x000000008000000aULL,
	0x000000008000808bULL, 0x800000000000008bULL, 0x8000000000008089ULL,
	0x8000000000008003ULL, 0x8000000000008002ULL, 0x8000000000000080ULL,
	0x000000000000800aULL, 0x800000008000000aULL, 0x8000000080008081ULL,
	0x8000000000008080ULL, 0x0000000080000001ULL, 0x8000000080008008ULL
};
44
Jeff Garzik53964b92016-06-17 10:30:35 +053045/* update the state with given number of rounds */
46
Ard Biesheuvel4767b9a2018-01-27 09:18:32 +000047static SHA3_INLINE void keccakf_round(u64 st[25])
Jeff Garzik53964b92016-06-17 10:30:35 +053048{
Ard Biesheuvel83dee2c2018-01-19 12:04:34 +000049 u64 t[5], tt, bc[5];
Ard Biesheuvel4767b9a2018-01-27 09:18:32 +000050
51 /* Theta */
52 bc[0] = st[0] ^ st[5] ^ st[10] ^ st[15] ^ st[20];
53 bc[1] = st[1] ^ st[6] ^ st[11] ^ st[16] ^ st[21];
54 bc[2] = st[2] ^ st[7] ^ st[12] ^ st[17] ^ st[22];
55 bc[3] = st[3] ^ st[8] ^ st[13] ^ st[18] ^ st[23];
56 bc[4] = st[4] ^ st[9] ^ st[14] ^ st[19] ^ st[24];
57
58 t[0] = bc[4] ^ rol64(bc[1], 1);
59 t[1] = bc[0] ^ rol64(bc[2], 1);
60 t[2] = bc[1] ^ rol64(bc[3], 1);
61 t[3] = bc[2] ^ rol64(bc[4], 1);
62 t[4] = bc[3] ^ rol64(bc[0], 1);
63
64 st[0] ^= t[0];
65
66 /* Rho Pi */
67 tt = st[1];
68 st[ 1] = rol64(st[ 6] ^ t[1], 44);
69 st[ 6] = rol64(st[ 9] ^ t[4], 20);
70 st[ 9] = rol64(st[22] ^ t[2], 61);
71 st[22] = rol64(st[14] ^ t[4], 39);
72 st[14] = rol64(st[20] ^ t[0], 18);
73 st[20] = rol64(st[ 2] ^ t[2], 62);
74 st[ 2] = rol64(st[12] ^ t[2], 43);
75 st[12] = rol64(st[13] ^ t[3], 25);
76 st[13] = rol64(st[19] ^ t[4], 8);
77 st[19] = rol64(st[23] ^ t[3], 56);
78 st[23] = rol64(st[15] ^ t[0], 41);
79 st[15] = rol64(st[ 4] ^ t[4], 27);
80 st[ 4] = rol64(st[24] ^ t[4], 14);
81 st[24] = rol64(st[21] ^ t[1], 2);
82 st[21] = rol64(st[ 8] ^ t[3], 55);
83 st[ 8] = rol64(st[16] ^ t[1], 45);
84 st[16] = rol64(st[ 5] ^ t[0], 36);
85 st[ 5] = rol64(st[ 3] ^ t[3], 28);
86 st[ 3] = rol64(st[18] ^ t[3], 21);
87 st[18] = rol64(st[17] ^ t[2], 15);
88 st[17] = rol64(st[11] ^ t[1], 10);
89 st[11] = rol64(st[ 7] ^ t[2], 6);
90 st[ 7] = rol64(st[10] ^ t[0], 3);
91 st[10] = rol64( tt ^ t[1], 1);
92
93 /* Chi */
94 bc[ 0] = ~st[ 1] & st[ 2];
95 bc[ 1] = ~st[ 2] & st[ 3];
96 bc[ 2] = ~st[ 3] & st[ 4];
97 bc[ 3] = ~st[ 4] & st[ 0];
98 bc[ 4] = ~st[ 0] & st[ 1];
99 st[ 0] ^= bc[ 0];
100 st[ 1] ^= bc[ 1];
101 st[ 2] ^= bc[ 2];
102 st[ 3] ^= bc[ 3];
103 st[ 4] ^= bc[ 4];
104
105 bc[ 0] = ~st[ 6] & st[ 7];
106 bc[ 1] = ~st[ 7] & st[ 8];
107 bc[ 2] = ~st[ 8] & st[ 9];
108 bc[ 3] = ~st[ 9] & st[ 5];
109 bc[ 4] = ~st[ 5] & st[ 6];
110 st[ 5] ^= bc[ 0];
111 st[ 6] ^= bc[ 1];
112 st[ 7] ^= bc[ 2];
113 st[ 8] ^= bc[ 3];
114 st[ 9] ^= bc[ 4];
115
116 bc[ 0] = ~st[11] & st[12];
117 bc[ 1] = ~st[12] & st[13];
118 bc[ 2] = ~st[13] & st[14];
119 bc[ 3] = ~st[14] & st[10];
120 bc[ 4] = ~st[10] & st[11];
121 st[10] ^= bc[ 0];
122 st[11] ^= bc[ 1];
123 st[12] ^= bc[ 2];
124 st[13] ^= bc[ 3];
125 st[14] ^= bc[ 4];
126
127 bc[ 0] = ~st[16] & st[17];
128 bc[ 1] = ~st[17] & st[18];
129 bc[ 2] = ~st[18] & st[19];
130 bc[ 3] = ~st[19] & st[15];
131 bc[ 4] = ~st[15] & st[16];
132 st[15] ^= bc[ 0];
133 st[16] ^= bc[ 1];
134 st[17] ^= bc[ 2];
135 st[18] ^= bc[ 3];
136 st[19] ^= bc[ 4];
137
138 bc[ 0] = ~st[21] & st[22];
139 bc[ 1] = ~st[22] & st[23];
140 bc[ 2] = ~st[23] & st[24];
141 bc[ 3] = ~st[24] & st[20];
142 bc[ 4] = ~st[20] & st[21];
143 st[20] ^= bc[ 0];
144 st[21] ^= bc[ 1];
145 st[22] ^= bc[ 2];
146 st[23] ^= bc[ 3];
147 st[24] ^= bc[ 4];
148}
149
Dmitry Vyukovf044a842018-06-08 11:53:41 +0200150static void keccakf(u64 st[25])
Ard Biesheuvel4767b9a2018-01-27 09:18:32 +0000151{
Ard Biesheuvel83dee2c2018-01-19 12:04:34 +0000152 int round;
Jeff Garzik53964b92016-06-17 10:30:35 +0530153
154 for (round = 0; round < KECCAK_ROUNDS; round++) {
Ard Biesheuvel4767b9a2018-01-27 09:18:32 +0000155 keccakf_round(st);
Jeff Garzik53964b92016-06-17 10:30:35 +0530156 /* Iota */
157 st[0] ^= keccakf_rndc[round];
158 }
159}
160
Ard Biesheuvel66576742018-01-19 12:04:36 +0000161int crypto_sha3_init(struct shash_desc *desc)
Jeff Garzik53964b92016-06-17 10:30:35 +0530162{
Ard Biesheuvelbeeb5042018-01-19 12:04:35 +0000163 struct sha3_state *sctx = shash_desc_ctx(desc);
164 unsigned int digest_size = crypto_shash_digestsize(desc->tfm);
165
166 sctx->rsiz = 200 - 2 * digest_size;
Jeff Garzik53964b92016-06-17 10:30:35 +0530167 sctx->rsizw = sctx->rsiz / 8;
Ard Biesheuvelbeeb5042018-01-19 12:04:35 +0000168 sctx->partial = 0;
Jeff Garzik53964b92016-06-17 10:30:35 +0530169
Ard Biesheuvelbeeb5042018-01-19 12:04:35 +0000170 memset(sctx->st, 0, sizeof(sctx->st));
Jeff Garzik53964b92016-06-17 10:30:35 +0530171 return 0;
172}
Ard Biesheuvel66576742018-01-19 12:04:36 +0000173EXPORT_SYMBOL(crypto_sha3_init);
Jeff Garzik53964b92016-06-17 10:30:35 +0530174
Ard Biesheuvel66576742018-01-19 12:04:36 +0000175int crypto_sha3_update(struct shash_desc *desc, const u8 *data,
Jeff Garzik53964b92016-06-17 10:30:35 +0530176 unsigned int len)
177{
178 struct sha3_state *sctx = shash_desc_ctx(desc);
179 unsigned int done;
180 const u8 *src;
181
182 done = 0;
183 src = data;
184
185 if ((sctx->partial + len) > (sctx->rsiz - 1)) {
186 if (sctx->partial) {
187 done = -sctx->partial;
188 memcpy(sctx->buf + sctx->partial, data,
189 done + sctx->rsiz);
190 src = sctx->buf;
191 }
192
193 do {
194 unsigned int i;
195
196 for (i = 0; i < sctx->rsizw; i++)
Ard Biesheuvelc013cee2018-01-19 12:04:33 +0000197 sctx->st[i] ^= get_unaligned_le64(src + 8 * i);
Jeff Garzik53964b92016-06-17 10:30:35 +0530198 keccakf(sctx->st);
199
200 done += sctx->rsiz;
201 src = data + done;
202 } while (done + (sctx->rsiz - 1) < len);
203
204 sctx->partial = 0;
205 }
206 memcpy(sctx->buf + sctx->partial, src, len - done);
207 sctx->partial += (len - done);
208
209 return 0;
210}
Ard Biesheuvel66576742018-01-19 12:04:36 +0000211EXPORT_SYMBOL(crypto_sha3_update);
Jeff Garzik53964b92016-06-17 10:30:35 +0530212
Ard Biesheuvel66576742018-01-19 12:04:36 +0000213int crypto_sha3_final(struct shash_desc *desc, u8 *out)
Jeff Garzik53964b92016-06-17 10:30:35 +0530214{
215 struct sha3_state *sctx = shash_desc_ctx(desc);
216 unsigned int i, inlen = sctx->partial;
Ard Biesheuvelbeeb5042018-01-19 12:04:35 +0000217 unsigned int digest_size = crypto_shash_digestsize(desc->tfm);
218 __le64 *digest = (__le64 *)out;
Jeff Garzik53964b92016-06-17 10:30:35 +0530219
220 sctx->buf[inlen++] = 0x06;
221 memset(sctx->buf + inlen, 0, sctx->rsiz - inlen);
222 sctx->buf[sctx->rsiz - 1] |= 0x80;
223
224 for (i = 0; i < sctx->rsizw; i++)
Ard Biesheuvelc013cee2018-01-19 12:04:33 +0000225 sctx->st[i] ^= get_unaligned_le64(sctx->buf + 8 * i);
Jeff Garzik53964b92016-06-17 10:30:35 +0530226
227 keccakf(sctx->st);
228
Ard Biesheuvelbeeb5042018-01-19 12:04:35 +0000229 for (i = 0; i < digest_size / 8; i++)
230 put_unaligned_le64(sctx->st[i], digest++);
Jeff Garzik53964b92016-06-17 10:30:35 +0530231
Ard Biesheuvelbeeb5042018-01-19 12:04:35 +0000232 if (digest_size & 4)
233 put_unaligned_le32(sctx->st[i], (__le32 *)digest);
Jeff Garzik53964b92016-06-17 10:30:35 +0530234
235 memset(sctx, 0, sizeof(*sctx));
236 return 0;
237}
Ard Biesheuvel66576742018-01-19 12:04:36 +0000238EXPORT_SYMBOL(crypto_sha3_final);
Jeff Garzik53964b92016-06-17 10:30:35 +0530239
Ard Biesheuvelbeeb5042018-01-19 12:04:35 +0000240static struct shash_alg algs[] = { {
241 .digestsize = SHA3_224_DIGEST_SIZE,
Ard Biesheuvel66576742018-01-19 12:04:36 +0000242 .init = crypto_sha3_init,
243 .update = crypto_sha3_update,
244 .final = crypto_sha3_final,
Ard Biesheuvelbeeb5042018-01-19 12:04:35 +0000245 .descsize = sizeof(struct sha3_state),
246 .base.cra_name = "sha3-224",
247 .base.cra_driver_name = "sha3-224-generic",
Ard Biesheuvelbeeb5042018-01-19 12:04:35 +0000248 .base.cra_blocksize = SHA3_224_BLOCK_SIZE,
249 .base.cra_module = THIS_MODULE,
250}, {
251 .digestsize = SHA3_256_DIGEST_SIZE,
Ard Biesheuvel66576742018-01-19 12:04:36 +0000252 .init = crypto_sha3_init,
253 .update = crypto_sha3_update,
254 .final = crypto_sha3_final,
Ard Biesheuvelbeeb5042018-01-19 12:04:35 +0000255 .descsize = sizeof(struct sha3_state),
256 .base.cra_name = "sha3-256",
257 .base.cra_driver_name = "sha3-256-generic",
Ard Biesheuvelbeeb5042018-01-19 12:04:35 +0000258 .base.cra_blocksize = SHA3_256_BLOCK_SIZE,
259 .base.cra_module = THIS_MODULE,
260}, {
261 .digestsize = SHA3_384_DIGEST_SIZE,
Ard Biesheuvel66576742018-01-19 12:04:36 +0000262 .init = crypto_sha3_init,
263 .update = crypto_sha3_update,
264 .final = crypto_sha3_final,
Ard Biesheuvelbeeb5042018-01-19 12:04:35 +0000265 .descsize = sizeof(struct sha3_state),
266 .base.cra_name = "sha3-384",
267 .base.cra_driver_name = "sha3-384-generic",
Ard Biesheuvelbeeb5042018-01-19 12:04:35 +0000268 .base.cra_blocksize = SHA3_384_BLOCK_SIZE,
269 .base.cra_module = THIS_MODULE,
270}, {
271 .digestsize = SHA3_512_DIGEST_SIZE,
Ard Biesheuvel66576742018-01-19 12:04:36 +0000272 .init = crypto_sha3_init,
273 .update = crypto_sha3_update,
274 .final = crypto_sha3_final,
Ard Biesheuvelbeeb5042018-01-19 12:04:35 +0000275 .descsize = sizeof(struct sha3_state),
276 .base.cra_name = "sha3-512",
277 .base.cra_driver_name = "sha3-512-generic",
Ard Biesheuvelbeeb5042018-01-19 12:04:35 +0000278 .base.cra_blocksize = SHA3_512_BLOCK_SIZE,
279 .base.cra_module = THIS_MODULE,
280} };
Jeff Garzik53964b92016-06-17 10:30:35 +0530281
282static int __init sha3_generic_mod_init(void)
283{
Ard Biesheuvelbeeb5042018-01-19 12:04:35 +0000284 return crypto_register_shashes(algs, ARRAY_SIZE(algs));
Jeff Garzik53964b92016-06-17 10:30:35 +0530285}
286
287static void __exit sha3_generic_mod_fini(void)
288{
Ard Biesheuvelbeeb5042018-01-19 12:04:35 +0000289 crypto_unregister_shashes(algs, ARRAY_SIZE(algs));
Jeff Garzik53964b92016-06-17 10:30:35 +0530290}
291
Eric Biggersc4741b22019-04-11 21:57:42 -0700292subsys_initcall(sha3_generic_mod_init);
Jeff Garzik53964b92016-06-17 10:30:35 +0530293module_exit(sha3_generic_mod_fini);
294
295MODULE_LICENSE("GPL");
296MODULE_DESCRIPTION("SHA-3 Secure Hash Algorithm");
297
298MODULE_ALIAS_CRYPTO("sha3-224");
299MODULE_ALIAS_CRYPTO("sha3-224-generic");
300MODULE_ALIAS_CRYPTO("sha3-256");
301MODULE_ALIAS_CRYPTO("sha3-256-generic");
302MODULE_ALIAS_CRYPTO("sha3-384");
303MODULE_ALIAS_CRYPTO("sha3-384-generic");
304MODULE_ALIAS_CRYPTO("sha3-512");
305MODULE_ALIAS_CRYPTO("sha3-512-generic");