// SPDX-License-Identifier: GPL-2.0-only
/*
 * SM3 secure hash, as specified by OSCCA GM/T 0004-2012 SM3 and
 * described at https://tools.ietf.org/html/draft-shen-sm3-hash-01
 *
 * Copyright (C) 2017 ARM Limited or its affiliates.
 * Written by Gilad Ben-Yossef <gilad@benyossef.com>
 */

#include <crypto/internal/hash.h>
#include <linux/init.h>
#include <linux/module.h>
#include <linux/mm.h>
#include <linux/types.h>
#include <crypto/sm3.h>
#include <crypto/sm3_base.h>
#include <linux/bitops.h>
#include <asm/byteorder.h>
#include <asm/unaligned.h>

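/* SM3 digest of the zero-length message. */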
const u8 sm3_zero_message_hash[SM3_DIGEST_SIZE] = {
	0x1A, 0xB2, 0x1D, 0x83, 0x55, 0xCF, 0xA1, 0x7F,
	0x8e, 0x61, 0x19, 0x48, 0x31, 0xE8, 0x1A, 0x8F,
	0x22, 0xBE, 0xC8, 0xC7, 0x28, 0xFE, 0xFB, 0x74,
	0x7E, 0xD0, 0x35, 0xEB, 0x50, 0x82, 0xAA, 0x2B
};
EXPORT_SYMBOL_GPL(sm3_zero_message_hash);

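/* P0 permutation from the SM3 specification, used in the compression function. */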
static inline u32 p0(u32 x)
{
	return x ^ rol32(x, 9) ^ rol32(x, 17);
}

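/* P1 permutation from the SM3 specification, used in message expansion. */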
static inline u32 p1(u32 x)
{
	return x ^ rol32(x, 15) ^ rol32(x, 23);
}

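/* Boolean function FFj: XOR for rounds 0-15, majority for rounds 16-63. */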
static inline u32 ff(unsigned int n, u32 a, u32 b, u32 c)
{
	return (n < 16) ? (a ^ b ^ c) : ((a & b) | (a & c) | (b & c));
}

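/* Boolean function GGj: XOR for rounds 0-15, choose (e selects f or g) for rounds 16-63. */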
static inline u32 gg(unsigned int n, u32 e, u32 f, u32 g)
{
	return (n < 16) ? (e ^ f ^ g) : ((e & f) | ((~e) & g));
}

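/* Round constant Tj: SM3_T1 for rounds 0-15, SM3_T2 for rounds 16-63. */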
static inline u32 t(unsigned int n)
{
	return (n < 16) ? SM3_T1 : SM3_T2;
}

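/*
 * Message expansion: load the 64-byte block as big-endian words W[0..15],
 * extend to W[0..67], and derive W'[i] = W[i] ^ W[i + 4] for i = 0..63.
 */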
static void sm3_expand(u32 *t, u32 *w, u32 *wt)
{
	int i;
	unsigned int tmp;

	/* load the input */
	for (i = 0; i <= 15; i++)
		w[i] = get_unaligned_be32((__u32 *)t + i);

	for (i = 16; i <= 67; i++) {
		tmp = w[i - 16] ^ w[i - 9] ^ rol32(w[i - 3], 15);
		w[i] = p1(tmp) ^ (rol32(w[i - 13], 7)) ^ w[i - 6];
	}

	for (i = 0; i <= 63; i++)
		wt[i] = w[i] ^ w[i + 4];
}

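/*
 * Compression function CF: 64 rounds updating the working variables
 * a..h from W and W', then XOR the result back into the chaining value
 * held in m[0..7].
 */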
static void sm3_compress(u32 *w, u32 *wt, u32 *m)
{
	u32 ss1;
	u32 ss2;
	u32 tt1;
	u32 tt2;
	u32 a, b, c, d, e, f, g, h;
	int i;

	a = m[0];
	b = m[1];
	c = m[2];
	d = m[3];
	e = m[4];
	f = m[5];
	g = m[6];
	h = m[7];

	for (i = 0; i <= 63; i++) {

		ss1 = rol32((rol32(a, 12) + e + rol32(t(i), i & 31)), 7);

		ss2 = ss1 ^ rol32(a, 12);

		tt1 = ff(i, a, b, c) + d + ss2 + *wt;
		wt++;

		tt2 = gg(i, e, f, g) + h + ss1 + *w;
		w++;

		d = c;
		c = rol32(b, 9);
		b = a;
		a = tt1;
		h = g;
		g = rol32(f, 19);
		f = e;
		e = p0(tt2);
	}

	m[0] = a ^ m[0];
	m[1] = b ^ m[1];
	m[2] = c ^ m[2];
	m[3] = d ^ m[3];
	m[4] = e ^ m[4];
	m[5] = f ^ m[5];
	m[6] = g ^ m[6];
	m[7] = h ^ m[7];

	a = b = c = d = e = f = g = h = ss1 = ss2 = tt1 = tt2 = 0;
}

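/*
 * Hash one SM3_BLOCK_SIZE (64-byte) block into the state, then scrub
 * the expanded message schedule from the stack.
 */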
static void sm3_transform(struct sm3_state *sst, u8 const *src)
{
	unsigned int w[68];
	unsigned int wt[64];

	sm3_expand((u32 *)src, w, wt);
	sm3_compress(w, wt, sst->state);

	memzero_explicit(w, sizeof(w));
	memzero_explicit(wt, sizeof(wt));
}

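/* Block-processing helper passed to the sm3_base_do_update/finalize routines. */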
static void sm3_generic_block_fn(struct sm3_state *sst, u8 const *src,
				 int blocks)
{
	while (blocks--) {
		sm3_transform(sst, src);
		src += SM3_BLOCK_SIZE;
	}
}

int crypto_sm3_update(struct shash_desc *desc, const u8 *data,
		      unsigned int len)
{
	return sm3_base_do_update(desc, data, len, sm3_generic_block_fn);
}
EXPORT_SYMBOL(crypto_sm3_update);

int crypto_sm3_final(struct shash_desc *desc, u8 *out)
{
	sm3_base_do_finalize(desc, sm3_generic_block_fn);
	return sm3_base_finish(desc, out);
}
EXPORT_SYMBOL(crypto_sm3_final);

int crypto_sm3_finup(struct shash_desc *desc, const u8 *data,
		     unsigned int len, u8 *hash)
{
	sm3_base_do_update(desc, data, len, sm3_generic_block_fn);
	return crypto_sm3_final(desc, hash);
}
EXPORT_SYMBOL(crypto_sm3_finup);

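/*
 * The algorithm below is registered under cra_name "sm3" (driver
 * "sm3-generic").  A minimal sketch of how other kernel code might
 * compute an SM3 digest through the shash API (error handling
 * abbreviated; 'data' and 'len' stand for the caller's buffer):
 *
 *	struct crypto_shash *tfm = crypto_alloc_shash("sm3", 0, 0);
 *	u8 digest[SM3_DIGEST_SIZE];
 *
 *	if (!IS_ERR(tfm)) {
 *		SHASH_DESC_ON_STACK(desc, tfm);
 *
 *		desc->tfm = tfm;
 *		crypto_shash_digest(desc, data, len, digest);
 *		crypto_free_shash(tfm);
 *	}
 */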
static struct shash_alg sm3_alg = {
	.digestsize = SM3_DIGEST_SIZE,
	.init = sm3_base_init,
	.update = crypto_sm3_update,
	.final = crypto_sm3_final,
	.finup = crypto_sm3_finup,
	.descsize = sizeof(struct sm3_state),
	.base = {
		.cra_name = "sm3",
		.cra_driver_name = "sm3-generic",
		.cra_blocksize = SM3_BLOCK_SIZE,
		.cra_module = THIS_MODULE,
	}
};

static int __init sm3_generic_mod_init(void)
{
	return crypto_register_shash(&sm3_alg);
}

static void __exit sm3_generic_mod_fini(void)
{
	crypto_unregister_shash(&sm3_alg);
}

subsys_initcall(sm3_generic_mod_init);
module_exit(sm3_generic_mod_fini);

MODULE_LICENSE("GPL v2");
MODULE_DESCRIPTION("SM3 Secure Hash Algorithm");

MODULE_ALIAS_CRYPTO("sm3");
MODULE_ALIAS_CRYPTO("sm3-generic");