// SPDX-License-Identifier: GPL-2.0-only
/*
 * aes-ce-glue.c - wrapper code for ARMv8 AES
 *
 * Copyright (C) 2015 Linaro Ltd <ard.biesheuvel@linaro.org>
 */

#include <asm/hwcap.h>
#include <asm/neon.h>
#include <asm/simd.h>
#include <asm/unaligned.h>
#include <crypto/aes.h>
#include <crypto/ctr.h>
#include <crypto/internal/simd.h>
#include <crypto/internal/skcipher.h>
#include <crypto/scatterwalk.h>
#include <linux/cpufeature.h>
#include <linux/module.h>
#include <crypto/xts.h>

MODULE_DESCRIPTION("AES-ECB/CBC/CTR/XTS using ARMv8 Crypto Extensions");
MODULE_AUTHOR("Ard Biesheuvel <ard.biesheuvel@linaro.org>");
MODULE_LICENSE("GPL v2");

/* defined in aes-ce-core.S */
asmlinkage u32 ce_aes_sub(u32 input);
asmlinkage void ce_aes_invert(void *dst, void *src);

asmlinkage void ce_aes_ecb_encrypt(u8 out[], u8 const in[], u32 const rk[],
				   int rounds, int blocks);
asmlinkage void ce_aes_ecb_decrypt(u8 out[], u8 const in[], u32 const rk[],
				   int rounds, int blocks);

asmlinkage void ce_aes_cbc_encrypt(u8 out[], u8 const in[], u32 const rk[],
				   int rounds, int blocks, u8 iv[]);
asmlinkage void ce_aes_cbc_decrypt(u8 out[], u8 const in[], u32 const rk[],
				   int rounds, int blocks, u8 iv[]);
asmlinkage void ce_aes_cbc_cts_encrypt(u8 out[], u8 const in[], u32 const rk[],
				       int rounds, int bytes, u8 const iv[]);
asmlinkage void ce_aes_cbc_cts_decrypt(u8 out[], u8 const in[], u32 const rk[],
				       int rounds, int bytes, u8 const iv[]);

asmlinkage void ce_aes_ctr_encrypt(u8 out[], u8 const in[], u32 const rk[],
				   int rounds, int blocks, u8 ctr[]);

asmlinkage void ce_aes_xts_encrypt(u8 out[], u8 const in[], u32 const rk1[],
				   int rounds, int bytes, u8 iv[],
				   u32 const rk2[], int first);
asmlinkage void ce_aes_xts_decrypt(u8 out[], u8 const in[], u32 const rk1[],
				   int rounds, int bytes, u8 iv[],
				   u32 const rk2[], int first);

struct aes_block {
	u8 b[AES_BLOCK_SIZE];
};

static int num_rounds(struct crypto_aes_ctx *ctx)
{
	/*
	 * # of rounds specified by AES:
	 * 128 bit key		10 rounds
	 * 192 bit key		12 rounds
	 * 256 bit key		14 rounds
	 * => n byte key	=> 6 + (n/4) rounds
	 */
	return 6 + ctx->key_length / 4;
}

static int ce_aes_expandkey(struct crypto_aes_ctx *ctx, const u8 *in_key,
			    unsigned int key_len)
{
	/*
	 * The AES key schedule round constants
	 */
	static u8 const rcon[] = {
		0x01, 0x02, 0x04, 0x08, 0x10, 0x20, 0x40, 0x80, 0x1b, 0x36,
	};

	u32 kwords = key_len / sizeof(u32);
	struct aes_block *key_enc, *key_dec;
	int i, j;

	if (key_len != AES_KEYSIZE_128 &&
	    key_len != AES_KEYSIZE_192 &&
	    key_len != AES_KEYSIZE_256)
		return -EINVAL;

	ctx->key_length = key_len;
	for (i = 0; i < kwords; i++)
		ctx->key_enc[i] = get_unaligned_le32(in_key + i * sizeof(u32));

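	/*
	 * Expand the key schedule as per FIPS-197: each iteration derives
	 * the next round key from the previous one, with ce_aes_sub()
	 * performing the SubWord substitution using the AES instructions
	 * (see aes-ce-core.S) - hence the kernel_neon_begin/end bracketing
	 * around this section.
	 */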
	kernel_neon_begin();
	for (i = 0; i < sizeof(rcon); i++) {
		u32 *rki = ctx->key_enc + (i * kwords);
		u32 *rko = rki + kwords;

		rko[0] = ror32(ce_aes_sub(rki[kwords - 1]), 8);
		rko[0] = rko[0] ^ rki[0] ^ rcon[i];
		rko[1] = rko[0] ^ rki[1];
		rko[2] = rko[1] ^ rki[2];
		rko[3] = rko[2] ^ rki[3];

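		/*
		 * AES-192 and AES-256 produce six resp. eight key words per
		 * iteration, so they complete in fewer iterations, and
		 * AES-256 applies an extra SubWord substitution halfway
		 * through each iteration.
		 */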
		if (key_len == AES_KEYSIZE_192) {
			if (i >= 7)
				break;
			rko[4] = rko[3] ^ rki[4];
			rko[5] = rko[4] ^ rki[5];
		} else if (key_len == AES_KEYSIZE_256) {
			if (i >= 6)
				break;
			rko[4] = ce_aes_sub(rko[3]) ^ rki[4];
			rko[5] = rko[4] ^ rki[5];
			rko[6] = rko[5] ^ rki[6];
			rko[7] = rko[6] ^ rki[7];
		}
	}

	/*
	 * Generate the decryption keys for the Equivalent Inverse Cipher.
	 * This involves reversing the order of the round keys, and applying
	 * the Inverse Mix Columns transformation on all but the first and
	 * the last one.
	 */
	key_enc = (struct aes_block *)ctx->key_enc;
	key_dec = (struct aes_block *)ctx->key_dec;
	j = num_rounds(ctx);

	key_dec[0] = key_enc[j];
	for (i = 1, j--; j > 0; i++, j--)
		ce_aes_invert(key_dec + i, key_enc + j);
	key_dec[i] = key_enc[0];

	kernel_neon_end();
	return 0;
}

static int ce_aes_setkey(struct crypto_skcipher *tfm, const u8 *in_key,
			 unsigned int key_len)
{
	struct crypto_aes_ctx *ctx = crypto_skcipher_ctx(tfm);
	int ret;

	ret = ce_aes_expandkey(ctx, in_key, key_len);
	if (!ret)
		return 0;

	crypto_skcipher_set_flags(tfm, CRYPTO_TFM_RES_BAD_KEY_LEN);
	return -EINVAL;
}

struct crypto_aes_xts_ctx {
	struct crypto_aes_ctx key1;
	struct crypto_aes_ctx __aligned(8) key2;
};

static int xts_set_key(struct crypto_skcipher *tfm, const u8 *in_key,
		       unsigned int key_len)
{
	struct crypto_aes_xts_ctx *ctx = crypto_skcipher_ctx(tfm);
	int ret;

	ret = xts_verify_key(tfm, in_key, key_len);
	if (ret)
		return ret;

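	/*
	 * An XTS key is twice the AES key size: the first half keys the
	 * data cipher (key1), the second half the tweak cipher (key2).
	 */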
	ret = ce_aes_expandkey(&ctx->key1, in_key, key_len / 2);
	if (!ret)
		ret = ce_aes_expandkey(&ctx->key2, &in_key[key_len / 2],
				       key_len / 2);
	if (!ret)
		return 0;

	crypto_skcipher_set_flags(tfm, CRYPTO_TFM_RES_BAD_KEY_LEN);
	return -EINVAL;
}

static int ecb_encrypt(struct skcipher_request *req)
{
	struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
	struct crypto_aes_ctx *ctx = crypto_skcipher_ctx(tfm);
	struct skcipher_walk walk;
	unsigned int blocks;
	int err;

	err = skcipher_walk_virt(&walk, req, false);

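	/*
	 * The walk exposes the data in virtually mapped chunks; any
	 * unprocessed remainder is handed back to skcipher_walk_done()
	 * so it reappears in the next step.
	 */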
	while ((blocks = (walk.nbytes / AES_BLOCK_SIZE))) {
		kernel_neon_begin();
		ce_aes_ecb_encrypt(walk.dst.virt.addr, walk.src.virt.addr,
				   ctx->key_enc, num_rounds(ctx), blocks);
		kernel_neon_end();
		err = skcipher_walk_done(&walk, walk.nbytes % AES_BLOCK_SIZE);
	}
	return err;
}

static int ecb_decrypt(struct skcipher_request *req)
{
	struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
	struct crypto_aes_ctx *ctx = crypto_skcipher_ctx(tfm);
	struct skcipher_walk walk;
	unsigned int blocks;
	int err;

	err = skcipher_walk_virt(&walk, req, false);

	while ((blocks = (walk.nbytes / AES_BLOCK_SIZE))) {
		kernel_neon_begin();
		ce_aes_ecb_decrypt(walk.dst.virt.addr, walk.src.virt.addr,
				   ctx->key_dec, num_rounds(ctx), blocks);
		kernel_neon_end();
		err = skcipher_walk_done(&walk, walk.nbytes % AES_BLOCK_SIZE);
	}
	return err;
}

static int cbc_encrypt_walk(struct skcipher_request *req,
			    struct skcipher_walk *walk)
{
	struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
	struct crypto_aes_ctx *ctx = crypto_skcipher_ctx(tfm);
	unsigned int blocks;
	int err = 0;

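	/*
	 * The chaining value lives in walk->iv and is updated in place by
	 * the assembler code, so chaining is preserved across successive
	 * calls into this helper (as in the CTS path below).
	 */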
	while ((blocks = (walk->nbytes / AES_BLOCK_SIZE))) {
		kernel_neon_begin();
		ce_aes_cbc_encrypt(walk->dst.virt.addr, walk->src.virt.addr,
				   ctx->key_enc, num_rounds(ctx), blocks,
				   walk->iv);
		kernel_neon_end();
		err = skcipher_walk_done(walk, walk->nbytes % AES_BLOCK_SIZE);
	}
	return err;
}

static int cbc_encrypt(struct skcipher_request *req)
{
	struct skcipher_walk walk;
	int err;

	err = skcipher_walk_virt(&walk, req, false);
	if (err)
		return err;
	return cbc_encrypt_walk(req, &walk);
}

static int cbc_decrypt_walk(struct skcipher_request *req,
			    struct skcipher_walk *walk)
{
	struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
	struct crypto_aes_ctx *ctx = crypto_skcipher_ctx(tfm);
	unsigned int blocks;
	int err = 0;

	while ((blocks = (walk->nbytes / AES_BLOCK_SIZE))) {
		kernel_neon_begin();
		ce_aes_cbc_decrypt(walk->dst.virt.addr, walk->src.virt.addr,
				   ctx->key_dec, num_rounds(ctx), blocks,
				   walk->iv);
		kernel_neon_end();
		err = skcipher_walk_done(walk, walk->nbytes % AES_BLOCK_SIZE);
	}
	return err;
}

static int cbc_decrypt(struct skcipher_request *req)
{
	struct skcipher_walk walk;
	int err;

	err = skcipher_walk_virt(&walk, req, false);
	if (err)
		return err;
	return cbc_decrypt_walk(req, &walk);
}

static int cts_cbc_encrypt(struct skcipher_request *req)
{
	struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
	struct crypto_aes_ctx *ctx = crypto_skcipher_ctx(tfm);
	int cbc_blocks = DIV_ROUND_UP(req->cryptlen, AES_BLOCK_SIZE) - 2;
	struct scatterlist *src = req->src, *dst = req->dst;
	struct scatterlist sg_src[2], sg_dst[2];
	struct skcipher_request subreq;
	struct skcipher_walk walk;
	int err;

	skcipher_request_set_tfm(&subreq, tfm);
	skcipher_request_set_callback(&subreq, skcipher_request_flags(req),
				      NULL, NULL);

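	/*
	 * CBC with ciphertext stealing: encrypt all but the last two blocks
	 * with plain CBC, then pass the final full block plus the partial
	 * tail to ce_aes_cbc_cts_encrypt() in a single call.
	 */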
	if (req->cryptlen <= AES_BLOCK_SIZE) {
		if (req->cryptlen < AES_BLOCK_SIZE)
			return -EINVAL;
		cbc_blocks = 1;
	}

	if (cbc_blocks > 0) {
		skcipher_request_set_crypt(&subreq, req->src, req->dst,
					   cbc_blocks * AES_BLOCK_SIZE,
					   req->iv);

		err = skcipher_walk_virt(&walk, &subreq, false) ?:
		      cbc_encrypt_walk(&subreq, &walk);
		if (err)
			return err;

		if (req->cryptlen == AES_BLOCK_SIZE)
			return 0;

		dst = src = scatterwalk_ffwd(sg_src, req->src, subreq.cryptlen);
		if (req->dst != req->src)
			dst = scatterwalk_ffwd(sg_dst, req->dst,
					       subreq.cryptlen);
	}

	/* handle ciphertext stealing */
	skcipher_request_set_crypt(&subreq, src, dst,
				   req->cryptlen - cbc_blocks * AES_BLOCK_SIZE,
				   req->iv);

	err = skcipher_walk_virt(&walk, &subreq, false);
	if (err)
		return err;

	kernel_neon_begin();
	ce_aes_cbc_cts_encrypt(walk.dst.virt.addr, walk.src.virt.addr,
			       ctx->key_enc, num_rounds(ctx), walk.nbytes,
			       walk.iv);
	kernel_neon_end();

	return skcipher_walk_done(&walk, 0);
}

static int cts_cbc_decrypt(struct skcipher_request *req)
{
	struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
	struct crypto_aes_ctx *ctx = crypto_skcipher_ctx(tfm);
	int cbc_blocks = DIV_ROUND_UP(req->cryptlen, AES_BLOCK_SIZE) - 2;
	struct scatterlist *src = req->src, *dst = req->dst;
	struct scatterlist sg_src[2], sg_dst[2];
	struct skcipher_request subreq;
	struct skcipher_walk walk;
	int err;

	skcipher_request_set_tfm(&subreq, tfm);
	skcipher_request_set_callback(&subreq, skcipher_request_flags(req),
				      NULL, NULL);

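	/*
	 * Mirror image of cts_cbc_encrypt(): plain CBC for all but the
	 * final two blocks, with the block reordering that ciphertext
	 * stealing requires handled by the assembler code.
	 */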
	if (req->cryptlen <= AES_BLOCK_SIZE) {
		if (req->cryptlen < AES_BLOCK_SIZE)
			return -EINVAL;
		cbc_blocks = 1;
	}

	if (cbc_blocks > 0) {
		skcipher_request_set_crypt(&subreq, req->src, req->dst,
					   cbc_blocks * AES_BLOCK_SIZE,
					   req->iv);

		err = skcipher_walk_virt(&walk, &subreq, false) ?:
		      cbc_decrypt_walk(&subreq, &walk);
		if (err)
			return err;

		if (req->cryptlen == AES_BLOCK_SIZE)
			return 0;

		dst = src = scatterwalk_ffwd(sg_src, req->src, subreq.cryptlen);
		if (req->dst != req->src)
			dst = scatterwalk_ffwd(sg_dst, req->dst,
					       subreq.cryptlen);
	}

	/* handle ciphertext stealing */
	skcipher_request_set_crypt(&subreq, src, dst,
				   req->cryptlen - cbc_blocks * AES_BLOCK_SIZE,
				   req->iv);

	err = skcipher_walk_virt(&walk, &subreq, false);
	if (err)
		return err;

	kernel_neon_begin();
	ce_aes_cbc_cts_decrypt(walk.dst.virt.addr, walk.src.virt.addr,
			       ctx->key_dec, num_rounds(ctx), walk.nbytes,
			       walk.iv);
	kernel_neon_end();

	return skcipher_walk_done(&walk, 0);
}

static int ctr_encrypt(struct skcipher_request *req)
{
	struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
	struct crypto_aes_ctx *ctx = crypto_skcipher_ctx(tfm);
	struct skcipher_walk walk;
	int err, blocks;

	err = skcipher_walk_virt(&walk, req, false);

	while ((blocks = (walk.nbytes / AES_BLOCK_SIZE))) {
		kernel_neon_begin();
		ce_aes_ctr_encrypt(walk.dst.virt.addr, walk.src.virt.addr,
				   ctx->key_enc, num_rounds(ctx), blocks,
				   walk.iv);
		kernel_neon_end();
		err = skcipher_walk_done(&walk, walk.nbytes % AES_BLOCK_SIZE);
	}
	if (walk.nbytes) {
		u8 __aligned(8) tail[AES_BLOCK_SIZE];
		unsigned int nbytes = walk.nbytes;
		u8 *tdst = walk.dst.virt.addr;
		u8 *tsrc = walk.src.virt.addr;

		/*
		 * Tell ce_aes_ctr_encrypt() to process a tail block: a
		 * negative block count makes it write the encrypted counter
		 * (i.e. the keystream) for one final block into 'tail',
		 * which is then XORed into the remaining bytes below.
		 */
		blocks = -1;

		kernel_neon_begin();
		ce_aes_ctr_encrypt(tail, NULL, ctx->key_enc, num_rounds(ctx),
				   blocks, walk.iv);
		kernel_neon_end();
		crypto_xor_cpy(tdst, tsrc, tail, nbytes);
		err = skcipher_walk_done(&walk, 0);
	}
	return err;
}

static void ctr_encrypt_one(struct crypto_skcipher *tfm, const u8 *src, u8 *dst)
{
	struct crypto_aes_ctx *ctx = crypto_skcipher_ctx(tfm);
	unsigned long flags;

	/*
	 * Temporarily disable interrupts to avoid races where
	 * cachelines are evicted when the CPU is interrupted
	 * to do something else.
	 */
	local_irq_save(flags);
	aes_encrypt(ctx, dst, src);
	local_irq_restore(flags);
}

static int ctr_encrypt_sync(struct skcipher_request *req)
{
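	/*
	 * This sync variant may be invoked from contexts (such as softirq)
	 * where the NEON unit cannot be used; fall back to the scalar AES
	 * library code via crypto_ctr_encrypt_walk() in that case.
	 */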
	if (!crypto_simd_usable())
		return crypto_ctr_encrypt_walk(req, ctr_encrypt_one);

	return ctr_encrypt(req);
}

static int xts_encrypt(struct skcipher_request *req)
{
	struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
	struct crypto_aes_xts_ctx *ctx = crypto_skcipher_ctx(tfm);
	int err, first, rounds = num_rounds(&ctx->key1);
	int tail = req->cryptlen % AES_BLOCK_SIZE;
	struct scatterlist sg_src[2], sg_dst[2];
	struct skcipher_request subreq;
	struct scatterlist *src, *dst;
	struct skcipher_walk walk;

	if (req->cryptlen < AES_BLOCK_SIZE)
		return -EINVAL;

	err = skcipher_walk_virt(&walk, req, false);

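	/*
	 * If a partial tail is present and the data does not fit in a
	 * single walk step, redo the walk over all but the last two blocks;
	 * the remaining block-plus-tail is then handed to the asm code in
	 * one final call so it can perform ciphertext stealing.
	 */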
	if (unlikely(tail > 0 && walk.nbytes < walk.total)) {
		int xts_blocks = DIV_ROUND_UP(req->cryptlen,
					      AES_BLOCK_SIZE) - 2;

		skcipher_walk_abort(&walk);

		skcipher_request_set_tfm(&subreq, tfm);
		skcipher_request_set_callback(&subreq,
					      skcipher_request_flags(req),
					      NULL, NULL);
		skcipher_request_set_crypt(&subreq, req->src, req->dst,
					   xts_blocks * AES_BLOCK_SIZE,
					   req->iv);
		req = &subreq;
		err = skcipher_walk_virt(&walk, req, false);
	} else {
		tail = 0;
	}

	for (first = 1; walk.nbytes >= AES_BLOCK_SIZE; first = 0) {
		int nbytes = walk.nbytes;

		if (walk.nbytes < walk.total)
			nbytes &= ~(AES_BLOCK_SIZE - 1);

		kernel_neon_begin();
		ce_aes_xts_encrypt(walk.dst.virt.addr, walk.src.virt.addr,
				   ctx->key1.key_enc, rounds, nbytes, walk.iv,
				   ctx->key2.key_enc, first);
		kernel_neon_end();
		err = skcipher_walk_done(&walk, walk.nbytes - nbytes);
	}

	if (err || likely(!tail))
		return err;

	dst = src = scatterwalk_ffwd(sg_src, req->src, req->cryptlen);
	if (req->dst != req->src)
		dst = scatterwalk_ffwd(sg_dst, req->dst, req->cryptlen);

	skcipher_request_set_crypt(req, src, dst, AES_BLOCK_SIZE + tail,
				   req->iv);

	err = skcipher_walk_virt(&walk, req, false);
	if (err)
		return err;

	kernel_neon_begin();
	ce_aes_xts_encrypt(walk.dst.virt.addr, walk.src.virt.addr,
			   ctx->key1.key_enc, rounds, walk.nbytes, walk.iv,
			   ctx->key2.key_enc, first);
	kernel_neon_end();

	return skcipher_walk_done(&walk, 0);
}

static int xts_decrypt(struct skcipher_request *req)
{
	struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
	struct crypto_aes_xts_ctx *ctx = crypto_skcipher_ctx(tfm);
	int err, first, rounds = num_rounds(&ctx->key1);
	int tail = req->cryptlen % AES_BLOCK_SIZE;
	struct scatterlist sg_src[2], sg_dst[2];
	struct skcipher_request subreq;
	struct scatterlist *src, *dst;
	struct skcipher_walk walk;

	if (req->cryptlen < AES_BLOCK_SIZE)
		return -EINVAL;

	err = skcipher_walk_virt(&walk, req, false);

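	/* Same tail handling as in xts_encrypt() above. */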
	if (unlikely(tail > 0 && walk.nbytes < walk.total)) {
		int xts_blocks = DIV_ROUND_UP(req->cryptlen,
					      AES_BLOCK_SIZE) - 2;

		skcipher_walk_abort(&walk);

		skcipher_request_set_tfm(&subreq, tfm);
		skcipher_request_set_callback(&subreq,
					      skcipher_request_flags(req),
					      NULL, NULL);
		skcipher_request_set_crypt(&subreq, req->src, req->dst,
					   xts_blocks * AES_BLOCK_SIZE,
					   req->iv);
		req = &subreq;
		err = skcipher_walk_virt(&walk, req, false);
	} else {
		tail = 0;
	}

	for (first = 1; walk.nbytes >= AES_BLOCK_SIZE; first = 0) {
		int nbytes = walk.nbytes;

		if (walk.nbytes < walk.total)
			nbytes &= ~(AES_BLOCK_SIZE - 1);

		kernel_neon_begin();
		ce_aes_xts_decrypt(walk.dst.virt.addr, walk.src.virt.addr,
				   ctx->key1.key_dec, rounds, nbytes, walk.iv,
				   ctx->key2.key_enc, first);
		kernel_neon_end();
		err = skcipher_walk_done(&walk, walk.nbytes - nbytes);
	}

	if (err || likely(!tail))
		return err;

	dst = src = scatterwalk_ffwd(sg_src, req->src, req->cryptlen);
	if (req->dst != req->src)
		dst = scatterwalk_ffwd(sg_dst, req->dst, req->cryptlen);

	skcipher_request_set_crypt(req, src, dst, AES_BLOCK_SIZE + tail,
				   req->iv);

	err = skcipher_walk_virt(&walk, req, false);
	if (err)
		return err;

	kernel_neon_begin();
	ce_aes_xts_decrypt(walk.dst.virt.addr, walk.src.virt.addr,
			   ctx->key1.key_dec, rounds, walk.nbytes, walk.iv,
			   ctx->key2.key_enc, first);
	kernel_neon_end();

	return skcipher_walk_done(&walk, 0);
}

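/*
 * The "__" prefixed algorithms are internal: they are only reachable via
 * the simd wrappers instantiated in aes_init(), which defer to an
 * asynchronous (cryptd) path when the NEON unit cannot be used directly.
 */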
static struct skcipher_alg aes_algs[] = { {
	.base.cra_name		= "__ecb(aes)",
	.base.cra_driver_name	= "__ecb-aes-ce",
	.base.cra_priority	= 300,
	.base.cra_flags		= CRYPTO_ALG_INTERNAL,
	.base.cra_blocksize	= AES_BLOCK_SIZE,
	.base.cra_ctxsize	= sizeof(struct crypto_aes_ctx),
	.base.cra_module	= THIS_MODULE,

	.min_keysize		= AES_MIN_KEY_SIZE,
	.max_keysize		= AES_MAX_KEY_SIZE,
	.setkey			= ce_aes_setkey,
	.encrypt		= ecb_encrypt,
	.decrypt		= ecb_decrypt,
}, {
	.base.cra_name		= "__cbc(aes)",
	.base.cra_driver_name	= "__cbc-aes-ce",
	.base.cra_priority	= 300,
	.base.cra_flags		= CRYPTO_ALG_INTERNAL,
	.base.cra_blocksize	= AES_BLOCK_SIZE,
	.base.cra_ctxsize	= sizeof(struct crypto_aes_ctx),
	.base.cra_module	= THIS_MODULE,

	.min_keysize		= AES_MIN_KEY_SIZE,
	.max_keysize		= AES_MAX_KEY_SIZE,
	.ivsize			= AES_BLOCK_SIZE,
	.setkey			= ce_aes_setkey,
	.encrypt		= cbc_encrypt,
	.decrypt		= cbc_decrypt,
}, {
	.base.cra_name		= "__cts(cbc(aes))",
	.base.cra_driver_name	= "__cts-cbc-aes-ce",
	.base.cra_priority	= 300,
	.base.cra_flags		= CRYPTO_ALG_INTERNAL,
	.base.cra_blocksize	= AES_BLOCK_SIZE,
	.base.cra_ctxsize	= sizeof(struct crypto_aes_ctx),
	.base.cra_module	= THIS_MODULE,

	.min_keysize		= AES_MIN_KEY_SIZE,
	.max_keysize		= AES_MAX_KEY_SIZE,
	.ivsize			= AES_BLOCK_SIZE,
	.walksize		= 2 * AES_BLOCK_SIZE,
	.setkey			= ce_aes_setkey,
	.encrypt		= cts_cbc_encrypt,
	.decrypt		= cts_cbc_decrypt,
}, {
	.base.cra_name		= "__ctr(aes)",
	.base.cra_driver_name	= "__ctr-aes-ce",
	.base.cra_priority	= 300,
	.base.cra_flags		= CRYPTO_ALG_INTERNAL,
	.base.cra_blocksize	= 1,
	.base.cra_ctxsize	= sizeof(struct crypto_aes_ctx),
	.base.cra_module	= THIS_MODULE,

	.min_keysize		= AES_MIN_KEY_SIZE,
	.max_keysize		= AES_MAX_KEY_SIZE,
	.ivsize			= AES_BLOCK_SIZE,
	.chunksize		= AES_BLOCK_SIZE,
	.setkey			= ce_aes_setkey,
	.encrypt		= ctr_encrypt,
	.decrypt		= ctr_encrypt,
}, {
	.base.cra_name		= "ctr(aes)",
	.base.cra_driver_name	= "ctr-aes-ce-sync",
	.base.cra_priority	= 300 - 1,
	.base.cra_blocksize	= 1,
	.base.cra_ctxsize	= sizeof(struct crypto_aes_ctx),
	.base.cra_module	= THIS_MODULE,

	.min_keysize		= AES_MIN_KEY_SIZE,
	.max_keysize		= AES_MAX_KEY_SIZE,
	.ivsize			= AES_BLOCK_SIZE,
	.chunksize		= AES_BLOCK_SIZE,
	.setkey			= ce_aes_setkey,
	.encrypt		= ctr_encrypt_sync,
	.decrypt		= ctr_encrypt_sync,
}, {
	.base.cra_name		= "__xts(aes)",
	.base.cra_driver_name	= "__xts-aes-ce",
	.base.cra_priority	= 300,
	.base.cra_flags		= CRYPTO_ALG_INTERNAL,
	.base.cra_blocksize	= AES_BLOCK_SIZE,
	.base.cra_ctxsize	= sizeof(struct crypto_aes_xts_ctx),
	.base.cra_module	= THIS_MODULE,

	.min_keysize		= 2 * AES_MIN_KEY_SIZE,
	.max_keysize		= 2 * AES_MAX_KEY_SIZE,
	.ivsize			= AES_BLOCK_SIZE,
	.walksize		= 2 * AES_BLOCK_SIZE,
	.setkey			= xts_set_key,
	.encrypt		= xts_encrypt,
	.decrypt		= xts_decrypt,
} };

static struct simd_skcipher_alg *aes_simd_algs[ARRAY_SIZE(aes_algs)];

static void aes_exit(void)
{
	int i;

	for (i = 0; i < ARRAY_SIZE(aes_simd_algs) && aes_simd_algs[i]; i++)
		simd_skcipher_free(aes_simd_algs[i]);

	crypto_unregister_skciphers(aes_algs, ARRAY_SIZE(aes_algs));
}

static int __init aes_init(void)
{
	struct simd_skcipher_alg *simd;
	const char *basename;
	const char *algname;
	const char *drvname;
	int err;
	int i;

	err = crypto_register_skciphers(aes_algs, ARRAY_SIZE(aes_algs));
	if (err)
		return err;

	for (i = 0; i < ARRAY_SIZE(aes_algs); i++) {
		if (!(aes_algs[i].base.cra_flags & CRYPTO_ALG_INTERNAL))
			continue;

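		/* strip the "__" prefix to derive the wrapper's names */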
		algname = aes_algs[i].base.cra_name + 2;
		drvname = aes_algs[i].base.cra_driver_name + 2;
		basename = aes_algs[i].base.cra_driver_name;
		simd = simd_skcipher_create_compat(algname, drvname, basename);
		err = PTR_ERR(simd);
		if (IS_ERR(simd))
			goto unregister_simds;

		aes_simd_algs[i] = simd;
	}

	return 0;

unregister_simds:
	aes_exit();
	return err;
}

module_cpu_feature_match(AES, aes_init);
module_exit(aes_exit);