// SPDX-License-Identifier: GPL-2.0-only
/*
 * Copyright (c) 2012-2014, The Linux Foundation. All rights reserved.
 */

#include <linux/err.h>
#include <linux/interrupt.h>
#include <linux/types.h>
#include <crypto/scatterwalk.h>
#include <crypto/sha1.h>
#include <crypto/sha2.h>

#include "cipher.h"
#include "common.h"
#include "core.h"
#include "regs-v5.h"
#include "sha.h"
#include "aead.h"

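/* MMIO accessors for the crypto engine register file */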
static inline u32 qce_read(struct qce_device *qce, u32 offset)
{
	return readl(qce->base + offset);
}

static inline void qce_write(struct qce_device *qce, u32 offset, u32 val)
{
	writel(val, qce->base + offset);
}

static inline void qce_write_array(struct qce_device *qce, u32 offset,
				   const u32 *val, unsigned int len)
{
	int i;

	for (i = 0; i < len; i++)
		qce_write(qce, offset + i * sizeof(u32), val[i]);
}

static inline void
qce_clear_array(struct qce_device *qce, u32 offset, unsigned int len)
{
	int i;

	for (i = 0; i < len; i++)
		qce_write(qce, offset + i * sizeof(u32), 0);
}

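/*
 * Build the REG_CONFIG value: request size in bus beats derived from the
 * DMA burst size, all interrupt sources masked, the pipe pair owned by
 * this driver, and the requested endianness for data accesses.
 */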
static u32 qce_config_reg(struct qce_device *qce, int little)
{
	u32 beats = (qce->burst_size >> 3) - 1;
	u32 pipe_pair = qce->pipe_pair_id;
	u32 config;

	config = (beats << REQ_SIZE_SHIFT) & REQ_SIZE_MASK;
	config |= BIT(MASK_DOUT_INTR_SHIFT) | BIT(MASK_DIN_INTR_SHIFT) |
		  BIT(MASK_OP_DONE_INTR_SHIFT) | BIT(MASK_ERR_INTR_SHIFT);
	config |= (pipe_pair << PIPE_SET_SELECT_SHIFT) & PIPE_SET_SELECT_MASK;
	config &= ~HIGH_SPD_EN_N_SHIFT;

	if (little)
		config |= BIT(LITTLE_ENDIAN_MODE_SHIFT);

	return config;
}

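/* Copy len bytes from src into dst as an array of big-endian 32-bit words */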
void qce_cpu_to_be32p_array(__be32 *dst, const u8 *src, unsigned int len)
{
	__be32 *d = dst;
	const u8 *s = src;
	unsigned int n;

	n = len / sizeof(u32);
	for (; n > 0; n--) {
		*d = cpu_to_be32p((const __u32 *)s);
		s += sizeof(__u32);
		d++;
	}
}

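/* Clear the status register and program REG_CONFIG for big-endian operation */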
static void qce_setup_config(struct qce_device *qce)
{
	u32 config;

	/* get big endianness */
	config = qce_config_reg(qce, 0);

	/* clear status */
	qce_write(qce, REG_STATUS, 0);
	qce_write(qce, REG_CONFIG, config);
}

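/* Kick off processing; optionally have the engine dump results to memory */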
static inline void qce_crypto_go(struct qce_device *qce, bool result_dump)
{
	if (result_dump)
		qce_write(qce, REG_GOPROC, BIT(GO_SHIFT) | BIT(RESULTS_DUMP_SHIFT));
	else
		qce_write(qce, REG_GOPROC, BIT(GO_SHIFT));
}

#if defined(CONFIG_CRYPTO_DEV_QCE_SHA) || defined(CONFIG_CRYPTO_DEV_QCE_AEAD)
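/*
 * Build the AUTH_SEG_CFG value for the given algorithm flags: hash
 * algorithm and digest size, key size for the AES-based MACs, the
 * HASH/HMAC/CCM/CMAC mode, and the position of authentication relative
 * to the cipher pass.
 */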
static u32 qce_auth_cfg(unsigned long flags, u32 key_size, u32 auth_size)
{
	u32 cfg = 0;

	if (IS_CCM(flags) || IS_CMAC(flags))
		cfg |= AUTH_ALG_AES << AUTH_ALG_SHIFT;
	else
		cfg |= AUTH_ALG_SHA << AUTH_ALG_SHIFT;

	if (IS_CCM(flags) || IS_CMAC(flags)) {
		if (key_size == AES_KEYSIZE_128)
			cfg |= AUTH_KEY_SZ_AES128 << AUTH_KEY_SIZE_SHIFT;
		else if (key_size == AES_KEYSIZE_256)
			cfg |= AUTH_KEY_SZ_AES256 << AUTH_KEY_SIZE_SHIFT;
	}

	if (IS_SHA1(flags) || IS_SHA1_HMAC(flags))
		cfg |= AUTH_SIZE_SHA1 << AUTH_SIZE_SHIFT;
	else if (IS_SHA256(flags) || IS_SHA256_HMAC(flags))
		cfg |= AUTH_SIZE_SHA256 << AUTH_SIZE_SHIFT;
	else if (IS_CMAC(flags))
		cfg |= AUTH_SIZE_ENUM_16_BYTES << AUTH_SIZE_SHIFT;
	else if (IS_CCM(flags))
		cfg |= (auth_size - 1) << AUTH_SIZE_SHIFT;

	if (IS_SHA1(flags) || IS_SHA256(flags))
		cfg |= AUTH_MODE_HASH << AUTH_MODE_SHIFT;
	else if (IS_SHA1_HMAC(flags) || IS_SHA256_HMAC(flags))
		cfg |= AUTH_MODE_HMAC << AUTH_MODE_SHIFT;
	else if (IS_CCM(flags))
		cfg |= AUTH_MODE_CCM << AUTH_MODE_SHIFT;
	else if (IS_CMAC(flags))
		cfg |= AUTH_MODE_CMAC << AUTH_MODE_SHIFT;

	if (IS_SHA(flags) || IS_SHA_HMAC(flags))
		cfg |= AUTH_POS_BEFORE << AUTH_POS_SHIFT;

	if (IS_CCM(flags))
		cfg |= QCE_MAX_NONCE_WORDS << AUTH_NONCE_NUM_WORDS_SHIFT;

	return cfg;
}
#endif

#ifdef CONFIG_CRYPTO_DEV_QCE_SHA
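/*
 * Program the engine for one ahash pass: the MAC key for HMAC/CMAC, the
 * intermediate digest and byte counts for multi-block hashing, and the
 * segment sizes, then start processing.
 */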
static int qce_setup_regs_ahash(struct crypto_async_request *async_req)
{
	struct ahash_request *req = ahash_request_cast(async_req);
	struct crypto_ahash *ahash = __crypto_ahash_cast(async_req->tfm);
	struct qce_sha_reqctx *rctx = ahash_request_ctx(req);
	struct qce_alg_template *tmpl = to_ahash_tmpl(async_req->tfm);
	struct qce_device *qce = tmpl->qce;
	unsigned int digestsize = crypto_ahash_digestsize(ahash);
	unsigned int blocksize = crypto_tfm_alg_blocksize(async_req->tfm);
	__be32 auth[SHA256_DIGEST_SIZE / sizeof(__be32)] = {0};
	__be32 mackey[QCE_SHA_HMAC_KEY_SIZE / sizeof(__be32)] = {0};
	u32 auth_cfg = 0, config;
	unsigned int iv_words;

	/* if this is not the last block, the size must be a block multiple */
	if (!rctx->last_blk && req->nbytes % blocksize)
		return -EINVAL;

	qce_setup_config(qce);

	if (IS_CMAC(rctx->flags)) {
		qce_write(qce, REG_AUTH_SEG_CFG, 0);
		qce_write(qce, REG_ENCR_SEG_CFG, 0);
		qce_write(qce, REG_ENCR_SEG_SIZE, 0);
		qce_clear_array(qce, REG_AUTH_IV0, 16);
		qce_clear_array(qce, REG_AUTH_KEY0, 16);
		qce_clear_array(qce, REG_AUTH_BYTECNT0, 4);

		auth_cfg = qce_auth_cfg(rctx->flags, rctx->authklen, digestsize);
	}

	if (IS_SHA_HMAC(rctx->flags) || IS_CMAC(rctx->flags)) {
		u32 authkey_words = rctx->authklen / sizeof(u32);

		qce_cpu_to_be32p_array(mackey, rctx->authkey, rctx->authklen);
		qce_write_array(qce, REG_AUTH_KEY0, (u32 *)mackey,
				authkey_words);
	}

	if (IS_CMAC(rctx->flags))
		goto go_proc;

	if (rctx->first_blk)
		memcpy(auth, rctx->digest, digestsize);
	else
		qce_cpu_to_be32p_array(auth, rctx->digest, digestsize);

	iv_words = (IS_SHA1(rctx->flags) || IS_SHA1_HMAC(rctx->flags)) ? 5 : 8;
	qce_write_array(qce, REG_AUTH_IV0, (u32 *)auth, iv_words);

	if (rctx->first_blk)
		qce_clear_array(qce, REG_AUTH_BYTECNT0, 4);
	else
		qce_write_array(qce, REG_AUTH_BYTECNT0,
				(u32 *)rctx->byte_count, 2);

	auth_cfg = qce_auth_cfg(rctx->flags, 0, digestsize);

	if (rctx->last_blk)
		auth_cfg |= BIT(AUTH_LAST_SHIFT);
	else
		auth_cfg &= ~BIT(AUTH_LAST_SHIFT);

	if (rctx->first_blk)
		auth_cfg |= BIT(AUTH_FIRST_SHIFT);
	else
		auth_cfg &= ~BIT(AUTH_FIRST_SHIFT);

go_proc:
	qce_write(qce, REG_AUTH_SEG_CFG, auth_cfg);
	qce_write(qce, REG_AUTH_SEG_SIZE, req->nbytes);
	qce_write(qce, REG_AUTH_SEG_START, 0);
	qce_write(qce, REG_ENCR_SEG_CFG, 0);
	qce_write(qce, REG_SEG_SIZE, req->nbytes);

	/* get little endianness */
	config = qce_config_reg(qce, 1);
	qce_write(qce, REG_CONFIG, config);

	qce_crypto_go(qce, true);

	return 0;
}
#endif

#if defined(CONFIG_CRYPTO_DEV_QCE_SKCIPHER) || defined(CONFIG_CRYPTO_DEV_QCE_AEAD)
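/*
 * Build the ENCR_SEG_CFG value for the given algorithm flags: cipher
 * algorithm, key size and block mode. Returns ~0 for an unknown mode.
 */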
static u32 qce_encr_cfg(unsigned long flags, u32 aes_key_size)
{
	u32 cfg = 0;

	if (IS_AES(flags)) {
		if (aes_key_size == AES_KEYSIZE_128)
			cfg |= ENCR_KEY_SZ_AES128 << ENCR_KEY_SZ_SHIFT;
		else if (aes_key_size == AES_KEYSIZE_256)
			cfg |= ENCR_KEY_SZ_AES256 << ENCR_KEY_SZ_SHIFT;
	}

	if (IS_AES(flags))
		cfg |= ENCR_ALG_AES << ENCR_ALG_SHIFT;
	else if (IS_DES(flags) || IS_3DES(flags))
		cfg |= ENCR_ALG_DES << ENCR_ALG_SHIFT;

	if (IS_DES(flags))
		cfg |= ENCR_KEY_SZ_DES << ENCR_KEY_SZ_SHIFT;

	if (IS_3DES(flags))
		cfg |= ENCR_KEY_SZ_3DES << ENCR_KEY_SZ_SHIFT;

	switch (flags & QCE_MODE_MASK) {
	case QCE_MODE_ECB:
		cfg |= ENCR_MODE_ECB << ENCR_MODE_SHIFT;
		break;
	case QCE_MODE_CBC:
		cfg |= ENCR_MODE_CBC << ENCR_MODE_SHIFT;
		break;
	case QCE_MODE_CTR:
		cfg |= ENCR_MODE_CTR << ENCR_MODE_SHIFT;
		break;
	case QCE_MODE_XTS:
		cfg |= ENCR_MODE_XTS << ENCR_MODE_SHIFT;
		break;
	case QCE_MODE_CCM:
		cfg |= ENCR_MODE_CCM << ENCR_MODE_SHIFT;
		cfg |= LAST_CCM_XFR << LAST_CCM_SHIFT;
		break;
	default:
		return ~0;
	}

	return cfg;
}
#endif

#ifdef CONFIG_CRYPTO_DEV_QCE_SKCIPHER
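/* Reverse the IV byte order and zero-pad it to 16 bytes for XTS mode */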
static void qce_xts_swapiv(__be32 *dst, const u8 *src, unsigned int ivsize)
{
	u8 swap[QCE_AES_IV_LENGTH];
	u32 i, j;

	if (ivsize > QCE_AES_IV_LENGTH)
		return;

	memset(swap, 0, QCE_AES_IV_LENGTH);

	for (i = (QCE_AES_IV_LENGTH - ivsize), j = ivsize - 1;
	     i < QCE_AES_IV_LENGTH; i++, j--)
		swap[i] = src[j];

	qce_cpu_to_be32p_array(dst, swap, QCE_AES_IV_LENGTH);
}

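/* Program the second half of the XTS key (the tweak key) into the engine */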
static void qce_xtskey(struct qce_device *qce, const u8 *enckey,
		       unsigned int enckeylen, unsigned int cryptlen)
{
	u32 xtskey[QCE_MAX_CIPHER_KEY_SIZE / sizeof(u32)] = {0};
	unsigned int xtsklen = enckeylen / (2 * sizeof(u32));

	qce_cpu_to_be32p_array((__be32 *)xtskey, enckey + enckeylen / 2,
			       enckeylen / 2);
	qce_write_array(qce, REG_ENCR_XTS_KEY0, xtskey, xtsklen);

	/*
	 * Set the data unit size to cryptlen. Anything else causes the
	 * crypto engine to return incorrect results.
	 */
	qce_write(qce, REG_ENCR_XTS_DU_SIZE, cryptlen);
}

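/*
 * Program the engine for one skcipher request: the cipher key (plus the
 * XTS tweak key), the IV or counter, and the segment configuration and
 * sizes, then start processing.
 */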
static int qce_setup_regs_skcipher(struct crypto_async_request *async_req)
{
	struct skcipher_request *req = skcipher_request_cast(async_req);
	struct qce_cipher_reqctx *rctx = skcipher_request_ctx(req);
	struct qce_cipher_ctx *ctx = crypto_tfm_ctx(async_req->tfm);
	struct qce_alg_template *tmpl = to_cipher_tmpl(crypto_skcipher_reqtfm(req));
	struct qce_device *qce = tmpl->qce;
	__be32 enckey[QCE_MAX_CIPHER_KEY_SIZE / sizeof(__be32)] = {0};
	__be32 enciv[QCE_MAX_IV_SIZE / sizeof(__be32)] = {0};
	unsigned int enckey_words, enciv_words;
	unsigned int keylen;
	u32 encr_cfg = 0, auth_cfg = 0, config;
	unsigned int ivsize = rctx->ivsize;
	unsigned long flags = rctx->flags;

	qce_setup_config(qce);

	if (IS_XTS(flags))
		keylen = ctx->enc_keylen / 2;
	else
		keylen = ctx->enc_keylen;

	qce_cpu_to_be32p_array(enckey, ctx->enc_key, keylen);
	enckey_words = keylen / sizeof(u32);

	qce_write(qce, REG_AUTH_SEG_CFG, auth_cfg);

	encr_cfg = qce_encr_cfg(flags, keylen);

	if (IS_DES(flags)) {
		enciv_words = 2;
		enckey_words = 2;
	} else if (IS_3DES(flags)) {
		enciv_words = 2;
		enckey_words = 6;
	} else if (IS_AES(flags)) {
		if (IS_XTS(flags))
			qce_xtskey(qce, ctx->enc_key, ctx->enc_keylen,
				   rctx->cryptlen);
		enciv_words = 4;
	} else {
		return -EINVAL;
	}

	qce_write_array(qce, REG_ENCR_KEY0, (u32 *)enckey, enckey_words);

	if (!IS_ECB(flags)) {
		if (IS_XTS(flags))
			qce_xts_swapiv(enciv, rctx->iv, ivsize);
		else
			qce_cpu_to_be32p_array(enciv, rctx->iv, ivsize);

		qce_write_array(qce, REG_CNTR0_IV0, (u32 *)enciv, enciv_words);
	}

	if (IS_ENCRYPT(flags))
		encr_cfg |= BIT(ENCODE_SHIFT);

	qce_write(qce, REG_ENCR_SEG_CFG, encr_cfg);
	qce_write(qce, REG_ENCR_SEG_SIZE, rctx->cryptlen);
	qce_write(qce, REG_ENCR_SEG_START, 0);

	if (IS_CTR(flags)) {
		qce_write(qce, REG_CNTR_MASK, ~0);
		qce_write(qce, REG_CNTR_MASK0, ~0);
		qce_write(qce, REG_CNTR_MASK1, ~0);
		qce_write(qce, REG_CNTR_MASK2, ~0);
	}

	qce_write(qce, REG_SEG_SIZE, rctx->cryptlen);

	/* get little endianness */
	config = qce_config_reg(qce, 1);
	qce_write(qce, REG_CONFIG, config);

	qce_crypto_go(qce, true);

	return 0;
}
#endif

#ifdef CONFIG_CRYPTO_DEV_QCE_AEAD
static const u32 std_iv_sha1[SHA256_DIGEST_SIZE / sizeof(u32)] = {
	SHA1_H0, SHA1_H1, SHA1_H2, SHA1_H3, SHA1_H4, 0, 0, 0
};

static const u32 std_iv_sha256[SHA256_DIGEST_SIZE / sizeof(u32)] = {
	SHA256_H0, SHA256_H1, SHA256_H2, SHA256_H3,
	SHA256_H4, SHA256_H5, SHA256_H6, SHA256_H7
};

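/*
 * Copy big-endian words from src into dst in CPU byte order; returns the
 * number of 32-bit words that cover len bytes.
 */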
static unsigned int qce_be32_to_cpu_array(u32 *dst, const u8 *src, unsigned int len)
{
	u32 *d = dst;
	const u8 *s = src;
	unsigned int n;

	n = len / sizeof(u32);
	for (; n > 0; n--) {
		*d = be32_to_cpup((const __be32 *)s);
		s += sizeof(u32);
		d++;
	}
	return DIV_ROUND_UP(len, sizeof(u32));
}

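/*
 * Program the engine for one AEAD request: cipher key and IV,
 * authentication key, and the standard HMAC IV or the CCM nonce; set up
 * the encryption and authentication segments, then start processing.
 * For CCM the result dump is not requested.
 */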
static int qce_setup_regs_aead(struct crypto_async_request *async_req)
{
	struct aead_request *req = aead_request_cast(async_req);
	struct qce_aead_reqctx *rctx = aead_request_ctx(req);
	struct qce_aead_ctx *ctx = crypto_tfm_ctx(async_req->tfm);
	struct qce_alg_template *tmpl = to_aead_tmpl(crypto_aead_reqtfm(req));
	struct qce_device *qce = tmpl->qce;
	u32 enckey[QCE_MAX_CIPHER_KEY_SIZE / sizeof(u32)] = {0};
	u32 enciv[QCE_MAX_IV_SIZE / sizeof(u32)] = {0};
	u32 authkey[QCE_SHA_HMAC_KEY_SIZE / sizeof(u32)] = {0};
	u32 authiv[SHA256_DIGEST_SIZE / sizeof(u32)] = {0};
	u32 authnonce[QCE_MAX_NONCE / sizeof(u32)] = {0};
	unsigned int enc_keylen = ctx->enc_keylen;
	unsigned int auth_keylen = ctx->auth_keylen;
	unsigned int enc_ivsize = rctx->ivsize;
	unsigned int auth_ivsize = 0;
	unsigned int enckey_words, enciv_words;
	unsigned int authkey_words, authiv_words, authnonce_words;
	unsigned long flags = rctx->flags;
	u32 encr_cfg, auth_cfg, config, totallen;
	u32 iv_last_word;

	qce_setup_config(qce);

	/* Write encryption key */
	enckey_words = qce_be32_to_cpu_array(enckey, ctx->enc_key, enc_keylen);
	qce_write_array(qce, REG_ENCR_KEY0, enckey, enckey_words);

	/* Write encryption IV */
	enciv_words = qce_be32_to_cpu_array(enciv, rctx->iv, enc_ivsize);
	qce_write_array(qce, REG_CNTR0_IV0, enciv, enciv_words);

	if (IS_CCM(rctx->flags)) {
		iv_last_word = enciv[enciv_words - 1];
		qce_write(qce, REG_CNTR3_IV3, iv_last_word + 1);
		qce_write_array(qce, REG_ENCR_CCM_INT_CNTR0, (u32 *)enciv, enciv_words);
		qce_write(qce, REG_CNTR_MASK, ~0);
		qce_write(qce, REG_CNTR_MASK0, ~0);
		qce_write(qce, REG_CNTR_MASK1, ~0);
		qce_write(qce, REG_CNTR_MASK2, ~0);
	}

	/* Clear authentication IV and KEY registers of previous values */
	qce_clear_array(qce, REG_AUTH_IV0, 16);
	qce_clear_array(qce, REG_AUTH_KEY0, 16);

	/* Clear byte count */
	qce_clear_array(qce, REG_AUTH_BYTECNT0, 4);

	/* Write authentication key */
	authkey_words = qce_be32_to_cpu_array(authkey, ctx->auth_key, auth_keylen);
	qce_write_array(qce, REG_AUTH_KEY0, (u32 *)authkey, authkey_words);

	/* Write initial authentication IV only for HMAC algorithms */
	if (IS_SHA_HMAC(rctx->flags)) {
		/* Write default authentication IV */
		if (IS_SHA1_HMAC(rctx->flags)) {
			auth_ivsize = SHA1_DIGEST_SIZE;
			memcpy(authiv, std_iv_sha1, auth_ivsize);
		} else if (IS_SHA256_HMAC(rctx->flags)) {
			auth_ivsize = SHA256_DIGEST_SIZE;
			memcpy(authiv, std_iv_sha256, auth_ivsize);
		}
		authiv_words = auth_ivsize / sizeof(u32);
		qce_write_array(qce, REG_AUTH_IV0, (u32 *)authiv, authiv_words);
	} else if (IS_CCM(rctx->flags)) {
		/* Write nonce for CCM algorithms */
		authnonce_words = qce_be32_to_cpu_array(authnonce, rctx->ccm_nonce, QCE_MAX_NONCE);
		qce_write_array(qce, REG_AUTH_INFO_NONCE0, authnonce, authnonce_words);
	}

	/* Set up ENCR_SEG_CFG */
	encr_cfg = qce_encr_cfg(flags, enc_keylen);
	if (IS_ENCRYPT(flags))
		encr_cfg |= BIT(ENCODE_SHIFT);
	qce_write(qce, REG_ENCR_SEG_CFG, encr_cfg);

	/* Set up AUTH_SEG_CFG */
	auth_cfg = qce_auth_cfg(rctx->flags, auth_keylen, ctx->authsize);
	auth_cfg |= BIT(AUTH_LAST_SHIFT);
	auth_cfg |= BIT(AUTH_FIRST_SHIFT);
	if (IS_ENCRYPT(flags)) {
		if (IS_CCM(rctx->flags))
			auth_cfg |= AUTH_POS_BEFORE << AUTH_POS_SHIFT;
		else
			auth_cfg |= AUTH_POS_AFTER << AUTH_POS_SHIFT;
	} else {
		if (IS_CCM(rctx->flags))
			auth_cfg |= AUTH_POS_AFTER << AUTH_POS_SHIFT;
		else
			auth_cfg |= AUTH_POS_BEFORE << AUTH_POS_SHIFT;
	}
	qce_write(qce, REG_AUTH_SEG_CFG, auth_cfg);

	totallen = rctx->cryptlen + rctx->assoclen;

	/* Set the encryption size and start offset */
	if (IS_CCM(rctx->flags) && IS_DECRYPT(rctx->flags))
		qce_write(qce, REG_ENCR_SEG_SIZE, rctx->cryptlen + ctx->authsize);
	else
		qce_write(qce, REG_ENCR_SEG_SIZE, rctx->cryptlen);
	qce_write(qce, REG_ENCR_SEG_START, rctx->assoclen & 0xffff);

	/* Set the authentication size and start offset */
	qce_write(qce, REG_AUTH_SEG_SIZE, totallen);
	qce_write(qce, REG_AUTH_SEG_START, 0);

	/* Write total length */
	if (IS_CCM(rctx->flags) && IS_DECRYPT(rctx->flags))
		qce_write(qce, REG_SEG_SIZE, totallen + ctx->authsize);
	else
		qce_write(qce, REG_SEG_SIZE, totallen);

	/* get little endianness */
	config = qce_config_reg(qce, 1);
	qce_write(qce, REG_CONFIG, config);

	/* Start the process */
	qce_crypto_go(qce, !IS_CCM(flags));

	return 0;
}
#endif

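/* Dispatch register setup and start the engine for the given request type */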
int qce_start(struct crypto_async_request *async_req, u32 type)
{
	switch (type) {
#ifdef CONFIG_CRYPTO_DEV_QCE_SKCIPHER
	case CRYPTO_ALG_TYPE_SKCIPHER:
		return qce_setup_regs_skcipher(async_req);
#endif
#ifdef CONFIG_CRYPTO_DEV_QCE_SHA
	case CRYPTO_ALG_TYPE_AHASH:
		return qce_setup_regs_ahash(async_req);
#endif
#ifdef CONFIG_CRYPTO_DEV_QCE_AEAD
	case CRYPTO_ALG_TYPE_AEAD:
		return qce_setup_regs_aead(async_req);
#endif
	default:
		return -EINVAL;
	}
}

#define STATUS_ERRORS	\
		(BIT(SW_ERR_SHIFT) | BIT(AXI_ERR_SHIFT) | BIT(HSD_ERR_SHIFT))

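/*
 * Read REG_STATUS and translate it into an error code: -ENXIO on engine
 * errors or an incomplete operation, -EBADMSG on MAC verification failure.
 */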
int qce_check_status(struct qce_device *qce, u32 *status)
{
	int ret = 0;

	*status = qce_read(qce, REG_STATUS);

	/*
	 * Don't use the result dump status: the operation may not be
	 * complete. Instead, use the status we just read from the device.
	 * If the result_status from the result dump were used, it would
	 * need to be byte swapped, since we set the device to little endian.
	 */
	if (*status & STATUS_ERRORS || !(*status & BIT(OPERATION_DONE_SHIFT)))
		ret = -ENXIO;
	else if (*status & BIT(MAC_FAILED_SHIFT))
		ret = -EBADMSG;

	return ret;
}

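/* Decode the major/minor/step revision fields from REG_VERSION */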
void qce_get_version(struct qce_device *qce, u32 *major, u32 *minor, u32 *step)
{
	u32 val;

	val = qce_read(qce, REG_VERSION);
	*major = (val & CORE_MAJOR_REV_MASK) >> CORE_MAJOR_REV_SHIFT;
	*minor = (val & CORE_MINOR_REV_MASK) >> CORE_MINOR_REV_SHIFT;
	*step = (val & CORE_STEP_REV_MASK) >> CORE_STEP_REV_SHIFT;
}