blob: 4a37ba7cdbe5216cd68e0e346e882092f9b4e7c4 [file] [log] [blame]
// SPDX-License-Identifier: GPL-2.0 OR MIT
/*
 * Copyright (C) 2015-2019 Jason A. Donenfeld <Jason@zx2c4.com>. All Rights Reserved.
 */
5
6#include <crypto/internal/blake2s.h>
7#include <crypto/internal/simd.h>
8#include <crypto/internal/hash.h>
9
10#include <linux/types.h>
11#include <linux/jump_label.h>
12#include <linux/kernel.h>
13#include <linux/module.h>
14
15#include <asm/cpufeature.h>
16#include <asm/fpu/api.h>
17#include <asm/processor.h>
18#include <asm/simd.h>
19
/*
 * Assembly implementations of the BLAKE2s compression function.
 * Each consumes 'nblocks' BLAKE2S_BLOCK_SIZE-byte blocks from 'block';
 * 'inc' is presumably the per-block message-counter increment (callers
 * below pass BLAKE2S_BLOCK_SIZE, or the residual length for the final
 * partial block) -- semantics live in the .S implementation.
 */
asmlinkage void blake2s_compress_ssse3(struct blake2s_state *state,
				       const u8 *block, const size_t nblocks,
				       const u32 inc);
asmlinkage void blake2s_compress_avx512(struct blake2s_state *state,
					const u8 *block, const size_t nblocks,
					const u32 inc);

/*
 * Static keys start false (generic code path) and are enabled once at
 * module init based on CPU feature detection; __ro_after_init because
 * they are never changed again.
 */
static __ro_after_init DEFINE_STATIC_KEY_FALSE(blake2s_use_ssse3);
static __ro_after_init DEFINE_STATIC_KEY_FALSE(blake2s_use_avx512);
29
30void blake2s_compress_arch(struct blake2s_state *state,
31 const u8 *block, size_t nblocks,
32 const u32 inc)
33{
34 /* SIMD disables preemption, so relax after processing each page. */
35 BUILD_BUG_ON(PAGE_SIZE / BLAKE2S_BLOCK_SIZE < 8);
36
37 if (!static_branch_likely(&blake2s_use_ssse3) || !crypto_simd_usable()) {
38 blake2s_compress_generic(state, block, nblocks, inc);
39 return;
40 }
41
42 for (;;) {
43 const size_t blocks = min_t(size_t, nblocks,
44 PAGE_SIZE / BLAKE2S_BLOCK_SIZE);
45
46 kernel_fpu_begin();
47 if (IS_ENABLED(CONFIG_AS_AVX512) &&
48 static_branch_likely(&blake2s_use_avx512))
49 blake2s_compress_avx512(state, block, blocks, inc);
50 else
51 blake2s_compress_ssse3(state, block, blocks, inc);
52 kernel_fpu_end();
53
54 nblocks -= blocks;
55 if (!nblocks)
56 break;
57 block += blocks * BLAKE2S_BLOCK_SIZE;
58 }
59}
60EXPORT_SYMBOL(blake2s_compress_arch);
61
62static int crypto_blake2s_setkey(struct crypto_shash *tfm, const u8 *key,
63 unsigned int keylen)
64{
65 struct blake2s_tfm_ctx *tctx = crypto_shash_ctx(tfm);
66
67 if (keylen == 0 || keylen > BLAKE2S_KEY_SIZE) {
68 crypto_shash_set_flags(tfm, CRYPTO_TFM_RES_BAD_KEY_LEN);
69 return -EINVAL;
70 }
71
72 memcpy(tctx->key, key, keylen);
73 tctx->keylen = keylen;
74
75 return 0;
76}
77
78static int crypto_blake2s_init(struct shash_desc *desc)
79{
80 struct blake2s_tfm_ctx *tctx = crypto_shash_ctx(desc->tfm);
81 struct blake2s_state *state = shash_desc_ctx(desc);
82 const int outlen = crypto_shash_digestsize(desc->tfm);
83
84 if (tctx->keylen)
85 blake2s_init_key(state, outlen, tctx->key, tctx->keylen);
86 else
87 blake2s_init(state, outlen);
88
89 return 0;
90}
91
92static int crypto_blake2s_update(struct shash_desc *desc, const u8 *in,
93 unsigned int inlen)
94{
95 struct blake2s_state *state = shash_desc_ctx(desc);
96 const size_t fill = BLAKE2S_BLOCK_SIZE - state->buflen;
97
98 if (unlikely(!inlen))
99 return 0;
100 if (inlen > fill) {
101 memcpy(state->buf + state->buflen, in, fill);
102 blake2s_compress_arch(state, state->buf, 1, BLAKE2S_BLOCK_SIZE);
103 state->buflen = 0;
104 in += fill;
105 inlen -= fill;
106 }
107 if (inlen > BLAKE2S_BLOCK_SIZE) {
108 const size_t nblocks = DIV_ROUND_UP(inlen, BLAKE2S_BLOCK_SIZE);
109 /* Hash one less (full) block than strictly possible */
110 blake2s_compress_arch(state, in, nblocks - 1, BLAKE2S_BLOCK_SIZE);
111 in += BLAKE2S_BLOCK_SIZE * (nblocks - 1);
112 inlen -= BLAKE2S_BLOCK_SIZE * (nblocks - 1);
113 }
114 memcpy(state->buf + state->buflen, in, inlen);
115 state->buflen += inlen;
116
117 return 0;
118}
119
120static int crypto_blake2s_final(struct shash_desc *desc, u8 *out)
121{
122 struct blake2s_state *state = shash_desc_ctx(desc);
123
124 blake2s_set_lastblock(state);
125 memset(state->buf + state->buflen, 0,
126 BLAKE2S_BLOCK_SIZE - state->buflen); /* Padding */
127 blake2s_compress_arch(state, state->buf, 1, state->buflen);
128 cpu_to_le32_array(state->h, ARRAY_SIZE(state->h));
129 memcpy(out, state->h, state->outlen);
130 memzero_explicit(state, sizeof(*state));
131
132 return 0;
133}
134
135static struct shash_alg blake2s_algs[] = {{
136 .base.cra_name = "blake2s-128",
137 .base.cra_driver_name = "blake2s-128-x86",
138 .base.cra_flags = CRYPTO_ALG_OPTIONAL_KEY,
139 .base.cra_ctxsize = sizeof(struct blake2s_tfm_ctx),
140 .base.cra_priority = 200,
141 .base.cra_blocksize = BLAKE2S_BLOCK_SIZE,
142 .base.cra_module = THIS_MODULE,
143
144 .digestsize = BLAKE2S_128_HASH_SIZE,
145 .setkey = crypto_blake2s_setkey,
146 .init = crypto_blake2s_init,
147 .update = crypto_blake2s_update,
148 .final = crypto_blake2s_final,
149 .descsize = sizeof(struct blake2s_state),
150}, {
151 .base.cra_name = "blake2s-160",
152 .base.cra_driver_name = "blake2s-160-x86",
153 .base.cra_flags = CRYPTO_ALG_OPTIONAL_KEY,
154 .base.cra_ctxsize = sizeof(struct blake2s_tfm_ctx),
155 .base.cra_priority = 200,
156 .base.cra_blocksize = BLAKE2S_BLOCK_SIZE,
157 .base.cra_module = THIS_MODULE,
158
159 .digestsize = BLAKE2S_160_HASH_SIZE,
160 .setkey = crypto_blake2s_setkey,
161 .init = crypto_blake2s_init,
162 .update = crypto_blake2s_update,
163 .final = crypto_blake2s_final,
164 .descsize = sizeof(struct blake2s_state),
165}, {
166 .base.cra_name = "blake2s-224",
167 .base.cra_driver_name = "blake2s-224-x86",
168 .base.cra_flags = CRYPTO_ALG_OPTIONAL_KEY,
169 .base.cra_ctxsize = sizeof(struct blake2s_tfm_ctx),
170 .base.cra_priority = 200,
171 .base.cra_blocksize = BLAKE2S_BLOCK_SIZE,
172 .base.cra_module = THIS_MODULE,
173
174 .digestsize = BLAKE2S_224_HASH_SIZE,
175 .setkey = crypto_blake2s_setkey,
176 .init = crypto_blake2s_init,
177 .update = crypto_blake2s_update,
178 .final = crypto_blake2s_final,
179 .descsize = sizeof(struct blake2s_state),
180}, {
181 .base.cra_name = "blake2s-256",
182 .base.cra_driver_name = "blake2s-256-x86",
183 .base.cra_flags = CRYPTO_ALG_OPTIONAL_KEY,
184 .base.cra_ctxsize = sizeof(struct blake2s_tfm_ctx),
185 .base.cra_priority = 200,
186 .base.cra_blocksize = BLAKE2S_BLOCK_SIZE,
187 .base.cra_module = THIS_MODULE,
188
189 .digestsize = BLAKE2S_256_HASH_SIZE,
190 .setkey = crypto_blake2s_setkey,
191 .init = crypto_blake2s_init,
192 .update = crypto_blake2s_update,
193 .final = crypto_blake2s_final,
194 .descsize = sizeof(struct blake2s_state),
195}};
196
197static int __init blake2s_mod_init(void)
198{
199 if (!boot_cpu_has(X86_FEATURE_SSSE3))
200 return 0;
201
202 static_branch_enable(&blake2s_use_ssse3);
203
204 if (IS_ENABLED(CONFIG_AS_AVX512) &&
205 boot_cpu_has(X86_FEATURE_AVX) &&
206 boot_cpu_has(X86_FEATURE_AVX2) &&
207 boot_cpu_has(X86_FEATURE_AVX512F) &&
208 boot_cpu_has(X86_FEATURE_AVX512VL) &&
209 cpu_has_xfeatures(XFEATURE_MASK_SSE | XFEATURE_MASK_YMM |
210 XFEATURE_MASK_AVX512, NULL))
211 static_branch_enable(&blake2s_use_avx512);
212
213 return crypto_register_shashes(blake2s_algs, ARRAY_SIZE(blake2s_algs));
214}
215
216static void __exit blake2s_mod_exit(void)
217{
218 if (boot_cpu_has(X86_FEATURE_SSSE3))
219 crypto_unregister_shashes(blake2s_algs, ARRAY_SIZE(blake2s_algs));
220}
221
module_init(blake2s_mod_init);
module_exit(blake2s_mod_exit);

/* Allow auto-loading by generic algorithm name or x86 driver name. */
MODULE_ALIAS_CRYPTO("blake2s-128");
MODULE_ALIAS_CRYPTO("blake2s-128-x86");
MODULE_ALIAS_CRYPTO("blake2s-160");
MODULE_ALIAS_CRYPTO("blake2s-160-x86");
MODULE_ALIAS_CRYPTO("blake2s-224");
MODULE_ALIAS_CRYPTO("blake2s-224-x86");
MODULE_ALIAS_CRYPTO("blake2s-256");
MODULE_ALIAS_CRYPTO("blake2s-256-x86");
MODULE_LICENSE("GPL v2");