// SPDX-License-Identifier: GPL-2.0-or-later
/*
 * CBC: Cipher Block Chaining mode
 *
 * Copyright (c) 2006-2016 Herbert Xu <herbert@gondor.apana.org.au>
 */

#include <crypto/algapi.h>
#include <crypto/internal/skcipher.h>
#include <linux/err.h>
#include <linux/init.h>
#include <linux/kernel.h>
#include <linux/log2.h>
#include <linux/module.h>

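/*
 * Out-of-place CBC encryption: XOR each plaintext block into the running
 * IV, encrypt the result into dst, and carry that ciphertext forward as
 * the IV for the next block (C[i] = E(P[i] ^ C[i-1])).  Returns the
 * number of leftover bytes (< blocksize).
 */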
static int crypto_cbc_encrypt_segment(struct skcipher_walk *walk,
				      struct crypto_skcipher *skcipher)
{
	unsigned int bsize = crypto_skcipher_blocksize(skcipher);
	void (*fn)(struct crypto_tfm *, u8 *, const u8 *);
	unsigned int nbytes = walk->nbytes;
	u8 *src = walk->src.virt.addr;
	u8 *dst = walk->dst.virt.addr;
	struct crypto_cipher *cipher;
	struct crypto_tfm *tfm;
	u8 *iv = walk->iv;

	cipher = skcipher_cipher_simple(skcipher);
	tfm = crypto_cipher_tfm(cipher);
	fn = crypto_cipher_alg(cipher)->cia_encrypt;

	do {
		crypto_xor(iv, src, bsize);
		fn(tfm, dst, iv);
		memcpy(iv, dst, bsize);

		src += bsize;
		dst += bsize;
	} while ((nbytes -= bsize) >= bsize);

	return nbytes;
}

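/*
 * In-place CBC encryption (src == dst): XOR the chaining value into the
 * source block and encrypt it in place; the freshly written ciphertext
 * then serves as the IV for the following block.
 */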
static int crypto_cbc_encrypt_inplace(struct skcipher_walk *walk,
				      struct crypto_skcipher *skcipher)
{
	unsigned int bsize = crypto_skcipher_blocksize(skcipher);
	void (*fn)(struct crypto_tfm *, u8 *, const u8 *);
	unsigned int nbytes = walk->nbytes;
	u8 *src = walk->src.virt.addr;
	struct crypto_cipher *cipher;
	struct crypto_tfm *tfm;
	u8 *iv = walk->iv;

	cipher = skcipher_cipher_simple(skcipher);
	tfm = crypto_cipher_tfm(cipher);
	fn = crypto_cipher_alg(cipher)->cia_encrypt;

	do {
		crypto_xor(src, iv, bsize);
		fn(tfm, src, src);
		iv = src;

		src += bsize;
	} while ((nbytes -= bsize) >= bsize);

	memcpy(walk->iv, iv, bsize);

	return nbytes;
}

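/*
 * Walk the request and hand each mapped chunk to the in-place or
 * out-of-place helper, depending on whether the source and destination
 * buffers coincide.
 */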
static int crypto_cbc_encrypt(struct skcipher_request *req)
{
	struct crypto_skcipher *skcipher = crypto_skcipher_reqtfm(req);
	struct skcipher_walk walk;
	int err;

	err = skcipher_walk_virt(&walk, req, false);

	while (walk.nbytes) {
		if (walk.src.virt.addr == walk.dst.virt.addr)
			err = crypto_cbc_encrypt_inplace(&walk, skcipher);
		else
			err = crypto_cbc_encrypt_segment(&walk, skcipher);
		err = skcipher_walk_done(&walk, err);
	}

	return err;
}

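/*
 * Out-of-place CBC decryption: decrypt each ciphertext block into dst
 * and XOR it with the previous ciphertext block (the IV for the first
 * block), i.e. P[i] = D(C[i]) ^ C[i-1].
 */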
static int crypto_cbc_decrypt_segment(struct skcipher_walk *walk,
				      struct crypto_skcipher *skcipher)
{
	unsigned int bsize = crypto_skcipher_blocksize(skcipher);
	void (*fn)(struct crypto_tfm *, u8 *, const u8 *);
	unsigned int nbytes = walk->nbytes;
	u8 *src = walk->src.virt.addr;
	u8 *dst = walk->dst.virt.addr;
	struct crypto_cipher *cipher;
	struct crypto_tfm *tfm;
	u8 *iv = walk->iv;

	cipher = skcipher_cipher_simple(skcipher);
	tfm = crypto_cipher_tfm(cipher);
	fn = crypto_cipher_alg(cipher)->cia_decrypt;

	do {
		fn(tfm, dst, src);
		crypto_xor(dst, iv, bsize);
		iv = src;

		src += bsize;
		dst += bsize;
	} while ((nbytes -= bsize) >= bsize);

	memcpy(walk->iv, iv, bsize);

	return nbytes;
}

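/*
 * In-place CBC decryption: the ciphertext itself is the chaining value,
 * so walk the blocks from last to first to avoid overwriting it, saving
 * the final ciphertext block up front as the next IV and XORing the
 * first block with walk->iv at the end.
 */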
static int crypto_cbc_decrypt_inplace(struct skcipher_walk *walk,
				      struct crypto_skcipher *skcipher)
{
	unsigned int bsize = crypto_skcipher_blocksize(skcipher);
	void (*fn)(struct crypto_tfm *, u8 *, const u8 *);
	unsigned int nbytes = walk->nbytes;
	u8 *src = walk->src.virt.addr;
	u8 last_iv[MAX_CIPHER_BLOCKSIZE];
	struct crypto_cipher *cipher;
	struct crypto_tfm *tfm;

	cipher = skcipher_cipher_simple(skcipher);
	tfm = crypto_cipher_tfm(cipher);
	fn = crypto_cipher_alg(cipher)->cia_decrypt;

	/* Start of the last block. */
	src += nbytes - (nbytes & (bsize - 1)) - bsize;
	memcpy(last_iv, src, bsize);

	for (;;) {
		fn(tfm, src, src);
		if ((nbytes -= bsize) < bsize)
			break;
		crypto_xor(src, src - bsize, bsize);
		src -= bsize;
	}

	crypto_xor(src, walk->iv, bsize);
	memcpy(walk->iv, last_iv, bsize);

	return nbytes;
}

static int crypto_cbc_decrypt(struct skcipher_request *req)
{
	struct crypto_skcipher *skcipher = crypto_skcipher_reqtfm(req);
	struct skcipher_walk walk;
	int err;

	err = skcipher_walk_virt(&walk, req, false);

	while (walk.nbytes) {
		if (walk.src.virt.addr == walk.dst.virt.addr)
			err = crypto_cbc_decrypt_inplace(&walk, skcipher);
		else
			err = crypto_cbc_decrypt_segment(&walk, skcipher);
		err = skcipher_walk_done(&walk, err);
	}

	return err;
}

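/*
 * Instantiate "cbc(...)" around a single-block cipher using the simple
 * skcipher template helpers.  The block size must be a power of two,
 * matching the mask arithmetic in crypto_cbc_decrypt_inplace().
 */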
static int crypto_cbc_create(struct crypto_template *tmpl, struct rtattr **tb)
{
	struct skcipher_instance *inst;
	struct crypto_alg *alg;
	int err;

	inst = skcipher_alloc_instance_simple(tmpl, tb);
	if (IS_ERR(inst))
		return PTR_ERR(inst);

	alg = skcipher_ialg_simple(inst);

	err = -EINVAL;
	if (!is_power_of_2(alg->cra_blocksize))
		goto out_free_inst;

	inst->alg.encrypt = crypto_cbc_encrypt;
	inst->alg.decrypt = crypto_cbc_decrypt;

	err = skcipher_register_instance(tmpl, inst);
	if (err) {
out_free_inst:
		inst->free(inst);
	}

	return err;
}

static struct crypto_template crypto_cbc_tmpl = {
	.name = "cbc",
	.create = crypto_cbc_create,
	.module = THIS_MODULE,
};

static int __init crypto_cbc_module_init(void)
{
	return crypto_register_template(&crypto_cbc_tmpl);
}

static void __exit crypto_cbc_module_exit(void)
{
	crypto_unregister_template(&crypto_cbc_tmpl);
}

subsys_initcall(crypto_cbc_module_init);
module_exit(crypto_cbc_module_exit);

MODULE_LICENSE("GPL");
MODULE_DESCRIPTION("CBC block cipher mode of operation");
MODULE_ALIAS_CRYPTO("cbc");