/*
 * Intel IXP4xx NPE-C crypto driver
 *
 * Copyright (C) 2008 Christian Hohnstaedt <chohnstaedt@innominate.com>
 *
 * This program is free software; you can redistribute it and/or modify it
 * under the terms of version 2 of the GNU General Public License
 * as published by the Free Software Foundation.
 *
 */

#include <linux/platform_device.h>
#include <linux/dma-mapping.h>
#include <linux/dmapool.h>
#include <linux/crypto.h>
#include <linux/kernel.h>
#include <linux/rtnetlink.h>
#include <linux/interrupt.h>
#include <linux/spinlock.h>
#include <linux/gfp.h>
#include <linux/module.h>

#include <crypto/ctr.h>
#include <crypto/des.h>
#include <crypto/aes.h>
#include <crypto/hmac.h>
#include <crypto/sha.h>
#include <crypto/algapi.h>
#include <crypto/internal/aead.h>
#include <crypto/authenc.h>
#include <crypto/scatterwalk.h>

#include <linux/soc/ixp4xx/npe.h>
#include <linux/soc/ixp4xx/qmgr.h>

#define MAX_KEYLEN 32

/* hash: cfgword + 2 * digestlen; crypt: keylen + cfgword */
#define NPE_CTX_LEN 80
#define AES_BLOCK128 16

#define NPE_OP_HASH_VERIFY   0x01
#define NPE_OP_CCM_ENABLE    0x04
#define NPE_OP_CRYPT_ENABLE  0x08
#define NPE_OP_HASH_ENABLE   0x10
#define NPE_OP_NOT_IN_PLACE  0x20
#define NPE_OP_HMAC_DISABLE  0x40
#define NPE_OP_CRYPT_ENCRYPT 0x80

#define NPE_OP_CCM_GEN_MIC   0xcc
#define NPE_OP_HASH_GEN_ICV  0x50
#define NPE_OP_ENC_GEN_KEY   0xc9

#define MOD_ECB     0x0000
#define MOD_CTR     0x1000
#define MOD_CBC_ENC 0x2000
#define MOD_CBC_DEC 0x3000
#define MOD_CCM_ENC 0x4000
#define MOD_CCM_DEC 0x5000

#define KEYLEN_128  4
#define KEYLEN_192  6
#define KEYLEN_256  8

#define CIPH_DECR   0x0000
#define CIPH_ENCR   0x0400

#define MOD_DES     0x0000
#define MOD_TDEA2   0x0100
#define MOD_3DES    0x0200
#define MOD_AES     0x0800
#define MOD_AES128  (0x0800 | KEYLEN_128)
#define MOD_AES192  (0x0900 | KEYLEN_192)
#define MOD_AES256  (0x0a00 | KEYLEN_256)

#define MAX_IVLEN   16
#define NPE_ID      2  /* NPE C */
#define NPE_QLEN    16
/* Space for registering when the first
 * NPE_QLEN crypt_ctl are busy */
#define NPE_QLEN_TOTAL 64

#define SEND_QID    29
#define RECV_QID    30

#define CTL_FLAG_UNUSED		0x0000
#define CTL_FLAG_USED		0x1000
#define CTL_FLAG_PERFORM_ABLK	0x0001
#define CTL_FLAG_GEN_ICV	0x0002
#define CTL_FLAG_GEN_REVAES	0x0004
#define CTL_FLAG_PERFORM_AEAD	0x0008
#define CTL_FLAG_MASK		0x000f

#define HMAC_PAD_BLOCKLEN SHA1_BLOCK_SIZE

#define MD5_DIGEST_SIZE   16

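/*
 * Note: buffer_desc and crypt_ctl below are shared with the NPE firmware.
 * The 16-bit fields follow the NPE's big-endian layout, hence the
 * __ARMEB__ swaps for little-endian ARM builds.
 */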
struct buffer_desc {
	u32 phys_next;
#ifdef __ARMEB__
	u16 buf_len;
	u16 pkt_len;
#else
	u16 pkt_len;
	u16 buf_len;
#endif
	dma_addr_t phys_addr;
	u32 __reserved[4];
	struct buffer_desc *next;
	enum dma_data_direction dir;
};

struct crypt_ctl {
#ifdef __ARMEB__
	u8 mode;		/* NPE_OP_* operation mode */
	u8 init_len;
	u16 reserved;
#else
	u16 reserved;
	u8 init_len;
	u8 mode;		/* NPE_OP_* operation mode */
#endif
	u8 iv[MAX_IVLEN];	/* IV for CBC mode or CTR IV for CTR mode */
	dma_addr_t icv_rev_aes;	/* icv or rev aes */
	dma_addr_t src_buf;
	dma_addr_t dst_buf;
#ifdef __ARMEB__
	u16 auth_offs;		/* Authentication start offset */
	u16 auth_len;		/* Authentication data length */
	u16 crypt_offs;		/* Cryption start offset */
	u16 crypt_len;		/* Cryption data length */
#else
	u16 auth_len;		/* Authentication data length */
	u16 auth_offs;		/* Authentication start offset */
	u16 crypt_len;		/* Cryption data length */
	u16 crypt_offs;		/* Cryption start offset */
#endif
	u32 aadAddr;		/* Additional Auth Data Addr for CCM mode */
	u32 crypto_ctx;		/* NPE Crypto Param structure address */

	/* Used by Host: 4*4 bytes */
	unsigned ctl_flags;
	union {
		struct ablkcipher_request *ablk_req;
		struct aead_request *aead_req;
		struct crypto_tfm *tfm;
	} data;
	struct buffer_desc *regist_buf;
	u8 *regist_ptr;
};

struct ablk_ctx {
	struct buffer_desc *src;
	struct buffer_desc *dst;
};

struct aead_ctx {
	struct buffer_desc *src;
	struct buffer_desc *dst;
	struct scatterlist ivlist;
	/* used when the hmac is not on one sg entry */
	u8 *hmac_virt;
	int encrypt;
};

struct ix_hash_algo {
	u32 cfgword;
	unsigned char *icv;
};

struct ix_sa_dir {
	unsigned char *npe_ctx;
	dma_addr_t npe_ctx_phys;
	int npe_ctx_idx;
	u8 npe_mode;
};

struct ixp_ctx {
	struct ix_sa_dir encrypt;
	struct ix_sa_dir decrypt;
	int authkey_len;
	u8 authkey[MAX_KEYLEN];
	int enckey_len;
	u8 enckey[MAX_KEYLEN];
	u8 salt[MAX_IVLEN];
	u8 nonce[CTR_RFC3686_NONCE_SIZE];
	unsigned salted;
	atomic_t configuring;
	struct completion completion;
};

struct ixp_alg {
	struct crypto_alg crypto;
	const struct ix_hash_algo *hash;
	u32 cfg_enc;
	u32 cfg_dec;

	int registered;
};

struct ixp_aead_alg {
	struct aead_alg crypto;
	const struct ix_hash_algo *hash;
	u32 cfg_enc;
	u32 cfg_dec;

	int registered;
};

static const struct ix_hash_algo hash_alg_md5 = {
	.cfgword	= 0xAA010004,
	.icv		= "\x01\x23\x45\x67\x89\xAB\xCD\xEF"
			  "\xFE\xDC\xBA\x98\x76\x54\x32\x10",
};
static const struct ix_hash_algo hash_alg_sha1 = {
	.cfgword	= 0x00000005,
	.icv		= "\x67\x45\x23\x01\xEF\xCD\xAB\x89\x98\xBA"
			  "\xDC\xFE\x10\x32\x54\x76\xC3\xD2\xE1\xF0",
};

static struct npe *npe_c;
static struct dma_pool *buffer_pool = NULL;
static struct dma_pool *ctx_pool = NULL;

static struct crypt_ctl *crypt_virt = NULL;
static dma_addr_t crypt_phys;

static int support_aes = 1;

#define DRIVER_NAME "ixp4xx_crypto"

static struct platform_device *pdev;

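/*
 * The crypt_ctl descriptors live in one coherent DMA block (crypt_virt /
 * crypt_phys); the helpers below translate between a descriptor's virtual
 * address and the bus address passed through the queue manager.
 */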
static inline dma_addr_t crypt_virt2phys(struct crypt_ctl *virt)
{
	return crypt_phys + (virt - crypt_virt) * sizeof(struct crypt_ctl);
}

static inline struct crypt_ctl *crypt_phys2virt(dma_addr_t phys)
{
	return crypt_virt + (phys - crypt_phys) / sizeof(struct crypt_ctl);
}

static inline u32 cipher_cfg_enc(struct crypto_tfm *tfm)
{
	return container_of(tfm->__crt_alg, struct ixp_alg, crypto)->cfg_enc;
}

static inline u32 cipher_cfg_dec(struct crypto_tfm *tfm)
{
	return container_of(tfm->__crt_alg, struct ixp_alg, crypto)->cfg_dec;
}

static inline const struct ix_hash_algo *ix_hash(struct crypto_tfm *tfm)
{
	return container_of(tfm->__crt_alg, struct ixp_alg, crypto)->hash;
}

static int setup_crypt_desc(void)
{
	struct device *dev = &pdev->dev;
	BUILD_BUG_ON(sizeof(struct crypt_ctl) != 64);
	crypt_virt = dma_alloc_coherent(dev,
					NPE_QLEN * sizeof(struct crypt_ctl),
					&crypt_phys, GFP_ATOMIC);
	if (!crypt_virt)
		return -ENOMEM;
	return 0;
}

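/*
 * Descriptor allocation: the first NPE_QLEN descriptors serve normal
 * requests; get_crypt_desc_emerg() below may also use the remainder (up to
 * NPE_QLEN_TOTAL) for configuration work such as HMAC pad hashing and
 * reverse AES key generation.
 */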
static spinlock_t desc_lock;
static struct crypt_ctl *get_crypt_desc(void)
{
	int i;
	static int idx = 0;
	unsigned long flags;

	spin_lock_irqsave(&desc_lock, flags);

	if (unlikely(!crypt_virt))
		setup_crypt_desc();
	if (unlikely(!crypt_virt)) {
		spin_unlock_irqrestore(&desc_lock, flags);
		return NULL;
	}
	i = idx;
	if (crypt_virt[i].ctl_flags == CTL_FLAG_UNUSED) {
		if (++idx >= NPE_QLEN)
			idx = 0;
		crypt_virt[i].ctl_flags = CTL_FLAG_USED;
		spin_unlock_irqrestore(&desc_lock, flags);
		return crypt_virt + i;
	} else {
		spin_unlock_irqrestore(&desc_lock, flags);
		return NULL;
	}
}

static spinlock_t emerg_lock;
static struct crypt_ctl *get_crypt_desc_emerg(void)
{
	int i;
	static int idx = NPE_QLEN;
	struct crypt_ctl *desc;
	unsigned long flags;

	desc = get_crypt_desc();
	if (desc)
		return desc;
	if (unlikely(!crypt_virt))
		return NULL;

	spin_lock_irqsave(&emerg_lock, flags);
	i = idx;
	if (crypt_virt[i].ctl_flags == CTL_FLAG_UNUSED) {
		if (++idx >= NPE_QLEN_TOTAL)
			idx = NPE_QLEN;
		crypt_virt[i].ctl_flags = CTL_FLAG_USED;
		spin_unlock_irqrestore(&emerg_lock, flags);
		return crypt_virt + i;
	} else {
		spin_unlock_irqrestore(&emerg_lock, flags);
		return NULL;
	}
}

static void free_buf_chain(struct device *dev, struct buffer_desc *buf,
			   dma_addr_t phys)
{
	while (buf) {
		struct buffer_desc *buf1;
		u32 phys1;

		buf1 = buf->next;
		phys1 = buf->phys_next;
		dma_unmap_single(dev, buf->phys_next, buf->buf_len, buf->dir);
		dma_pool_free(buffer_pool, buf, phys);
		buf = buf1;
		phys = phys1;
	}
}

static struct tasklet_struct crypto_done_tasklet;

static void finish_scattered_hmac(struct crypt_ctl *crypt)
{
	struct aead_request *req = crypt->data.aead_req;
	struct aead_ctx *req_ctx = aead_request_ctx(req);
	struct crypto_aead *tfm = crypto_aead_reqtfm(req);
	int authsize = crypto_aead_authsize(tfm);
	int decryptlen = req->assoclen + req->cryptlen - authsize;

	if (req_ctx->encrypt) {
		scatterwalk_map_and_copy(req_ctx->hmac_virt,
			req->dst, decryptlen, authsize, 1);
	}
	dma_pool_free(buffer_pool, req_ctx->hmac_virt, crypt->icv_rev_aes);
}

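/*
 * one_packet() runs in tasklet context for every entry the NPE returns on
 * RECV_QID: it frees the DMA buffer chains and either completes the
 * originating request or finishes a context-setup operation.
 */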
static void one_packet(dma_addr_t phys)
{
	struct device *dev = &pdev->dev;
	struct crypt_ctl *crypt;
	struct ixp_ctx *ctx;
	int failed;

	failed = phys & 0x1 ? -EBADMSG : 0;
	phys &= ~0x3;
	crypt = crypt_phys2virt(phys);

	switch (crypt->ctl_flags & CTL_FLAG_MASK) {
	case CTL_FLAG_PERFORM_AEAD: {
		struct aead_request *req = crypt->data.aead_req;
		struct aead_ctx *req_ctx = aead_request_ctx(req);

		free_buf_chain(dev, req_ctx->src, crypt->src_buf);
		free_buf_chain(dev, req_ctx->dst, crypt->dst_buf);
		if (req_ctx->hmac_virt) {
			finish_scattered_hmac(crypt);
		}
		req->base.complete(&req->base, failed);
		break;
	}
	case CTL_FLAG_PERFORM_ABLK: {
		struct ablkcipher_request *req = crypt->data.ablk_req;
		struct ablk_ctx *req_ctx = ablkcipher_request_ctx(req);

		if (req_ctx->dst) {
			free_buf_chain(dev, req_ctx->dst, crypt->dst_buf);
		}
		free_buf_chain(dev, req_ctx->src, crypt->src_buf);
		req->base.complete(&req->base, failed);
		break;
	}
	case CTL_FLAG_GEN_ICV:
		ctx = crypto_tfm_ctx(crypt->data.tfm);
		dma_pool_free(ctx_pool, crypt->regist_ptr,
				crypt->regist_buf->phys_addr);
		dma_pool_free(buffer_pool, crypt->regist_buf, crypt->src_buf);
		if (atomic_dec_and_test(&ctx->configuring))
			complete(&ctx->completion);
		break;
	case CTL_FLAG_GEN_REVAES:
		ctx = crypto_tfm_ctx(crypt->data.tfm);
		*(u32 *)ctx->decrypt.npe_ctx &= cpu_to_be32(~CIPH_ENCR);
		if (atomic_dec_and_test(&ctx->configuring))
			complete(&ctx->completion);
		break;
	default:
		BUG();
	}
	crypt->ctl_flags = CTL_FLAG_UNUSED;
}

static void irqhandler(void *_unused)
{
	tasklet_schedule(&crypto_done_tasklet);
}

static void crypto_done_action(unsigned long arg)
{
	int i;

	for (i = 0; i < 4; i++) {
		dma_addr_t phys = qmgr_get_entry(RECV_QID);
		if (!phys)
			return;
		one_packet(phys);
	}
	tasklet_schedule(&crypto_done_tasklet);
}

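/*
 * Bring-up: check the IXP4XX_EXP_CFG2 feature bits for hardware crypto,
 * load and query the NPE-C firmware, create the DMA pools and request the
 * send/receive queues used to talk to the NPE.
 */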
static int init_ixp_crypto(struct device *dev)
{
	int ret = -ENODEV;
	u32 msg[2] = { 0, 0 };

	if (!(~(*IXP4XX_EXP_CFG2) & (IXP4XX_FEATURE_HASH |
				IXP4XX_FEATURE_AES | IXP4XX_FEATURE_DES))) {
		printk(KERN_ERR "ixp_crypto: No HW crypto available\n");
		return ret;
	}
	npe_c = npe_request(NPE_ID);
	if (!npe_c)
		return ret;

	if (!npe_running(npe_c)) {
		ret = npe_load_firmware(npe_c, npe_name(npe_c), dev);
		if (ret)
			goto npe_release;
		if (npe_recv_message(npe_c, msg, "STATUS_MSG"))
			goto npe_error;
	} else {
		if (npe_send_message(npe_c, msg, "STATUS_MSG"))
			goto npe_error;

		if (npe_recv_message(npe_c, msg, "STATUS_MSG"))
			goto npe_error;
	}

	switch ((msg[1] >> 16) & 0xff) {
	case 3:
		printk(KERN_WARNING "Firmware of %s lacks AES support\n",
			npe_name(npe_c));
		support_aes = 0;
		break;
	case 4:
	case 5:
		support_aes = 1;
		break;
	default:
		printk(KERN_ERR "Firmware of %s lacks crypto support\n",
			npe_name(npe_c));
		ret = -ENODEV;
		goto npe_release;
	}
	/* buffer_pool will also be used to sometimes store the hmac,
	 * so assure it is large enough
	 */
	BUILD_BUG_ON(SHA1_DIGEST_SIZE > sizeof(struct buffer_desc));
	buffer_pool = dma_pool_create("buffer", dev,
			sizeof(struct buffer_desc), 32, 0);
	ret = -ENOMEM;
	if (!buffer_pool) {
		goto err;
	}
	ctx_pool = dma_pool_create("context", dev,
			NPE_CTX_LEN, 16, 0);
	if (!ctx_pool) {
		goto err;
	}
	ret = qmgr_request_queue(SEND_QID, NPE_QLEN_TOTAL, 0, 0,
				 "ixp_crypto:out", NULL);
	if (ret)
		goto err;
	ret = qmgr_request_queue(RECV_QID, NPE_QLEN, 0, 0,
				 "ixp_crypto:in", NULL);
	if (ret) {
		qmgr_release_queue(SEND_QID);
		goto err;
	}
	qmgr_set_irq(RECV_QID, QUEUE_IRQ_SRC_NOT_EMPTY, irqhandler, NULL);
	tasklet_init(&crypto_done_tasklet, crypto_done_action, 0);

	qmgr_enable_irq(RECV_QID);
	return 0;

npe_error:
	printk(KERN_ERR "%s not responding\n", npe_name(npe_c));
	ret = -EIO;
err:
	dma_pool_destroy(ctx_pool);
	dma_pool_destroy(buffer_pool);
npe_release:
	npe_release(npe_c);
	return ret;
}

static void release_ixp_crypto(struct device *dev)
{
	qmgr_disable_irq(RECV_QID);
	tasklet_kill(&crypto_done_tasklet);

	qmgr_release_queue(SEND_QID);
	qmgr_release_queue(RECV_QID);

	dma_pool_destroy(ctx_pool);
	dma_pool_destroy(buffer_pool);

	npe_release(npe_c);

	if (crypt_virt) {
		dma_free_coherent(dev,
			NPE_QLEN_TOTAL * sizeof(struct crypt_ctl),
			crypt_virt, crypt_phys);
	}
}

static void reset_sa_dir(struct ix_sa_dir *dir)
{
	memset(dir->npe_ctx, 0, NPE_CTX_LEN);
	dir->npe_ctx_idx = 0;
	dir->npe_mode = 0;
}

static int init_sa_dir(struct ix_sa_dir *dir)
{
	dir->npe_ctx = dma_pool_alloc(ctx_pool, GFP_KERNEL, &dir->npe_ctx_phys);
	if (!dir->npe_ctx) {
		return -ENOMEM;
	}
	reset_sa_dir(dir);
	return 0;
}

static void free_sa_dir(struct ix_sa_dir *dir)
{
	memset(dir->npe_ctx, 0, NPE_CTX_LEN);
	dma_pool_free(ctx_pool, dir->npe_ctx, dir->npe_ctx_phys);
}

static int init_tfm(struct crypto_tfm *tfm)
{
	struct ixp_ctx *ctx = crypto_tfm_ctx(tfm);
	int ret;

	atomic_set(&ctx->configuring, 0);
	ret = init_sa_dir(&ctx->encrypt);
	if (ret)
		return ret;
	ret = init_sa_dir(&ctx->decrypt);
	if (ret) {
		free_sa_dir(&ctx->encrypt);
	}
	return ret;
}

static int init_tfm_ablk(struct crypto_tfm *tfm)
{
	tfm->crt_ablkcipher.reqsize = sizeof(struct ablk_ctx);
	return init_tfm(tfm);
}

static int init_tfm_aead(struct crypto_aead *tfm)
{
	crypto_aead_set_reqsize(tfm, sizeof(struct aead_ctx));
	return init_tfm(crypto_aead_tfm(tfm));
}

static void exit_tfm(struct crypto_tfm *tfm)
{
	struct ixp_ctx *ctx = crypto_tfm_ctx(tfm);
	free_sa_dir(&ctx->encrypt);
	free_sa_dir(&ctx->decrypt);
}

static void exit_tfm_aead(struct crypto_aead *tfm)
{
	exit_tfm(crypto_aead_tfm(tfm));
}

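/*
 * register_chain_var() queues one NPE_OP_HASH_GEN_ICV operation: the NPE
 * hashes an HMAC ipad or opad block and stores the resulting chaining
 * variable (ICV) at 'target' inside the per-direction context.
 */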
static int register_chain_var(struct crypto_tfm *tfm, u8 xpad, u32 target,
		int init_len, u32 ctx_addr, const u8 *key, int key_len)
{
	struct ixp_ctx *ctx = crypto_tfm_ctx(tfm);
	struct crypt_ctl *crypt;
	struct buffer_desc *buf;
	int i;
	u8 *pad;
	dma_addr_t pad_phys, buf_phys;

	BUILD_BUG_ON(NPE_CTX_LEN < HMAC_PAD_BLOCKLEN);
	pad = dma_pool_alloc(ctx_pool, GFP_KERNEL, &pad_phys);
	if (!pad)
		return -ENOMEM;
	buf = dma_pool_alloc(buffer_pool, GFP_KERNEL, &buf_phys);
	if (!buf) {
		dma_pool_free(ctx_pool, pad, pad_phys);
		return -ENOMEM;
	}
	crypt = get_crypt_desc_emerg();
	if (!crypt) {
		dma_pool_free(ctx_pool, pad, pad_phys);
		dma_pool_free(buffer_pool, buf, buf_phys);
		return -EAGAIN;
	}

	memcpy(pad, key, key_len);
	memset(pad + key_len, 0, HMAC_PAD_BLOCKLEN - key_len);
	for (i = 0; i < HMAC_PAD_BLOCKLEN; i++) {
		pad[i] ^= xpad;
	}

	crypt->data.tfm = tfm;
	crypt->regist_ptr = pad;
	crypt->regist_buf = buf;

	crypt->auth_offs = 0;
	crypt->auth_len = HMAC_PAD_BLOCKLEN;
	crypt->crypto_ctx = ctx_addr;
	crypt->src_buf = buf_phys;
	crypt->icv_rev_aes = target;
	crypt->mode = NPE_OP_HASH_GEN_ICV;
	crypt->init_len = init_len;
	crypt->ctl_flags |= CTL_FLAG_GEN_ICV;

	buf->next = 0;
	buf->buf_len = HMAC_PAD_BLOCKLEN;
	buf->pkt_len = 0;
	buf->phys_addr = pad_phys;

	atomic_inc(&ctx->configuring);
	qmgr_put_entry(SEND_QID, crypt_virt2phys(crypt));
	BUG_ON(qmgr_stat_overflow(SEND_QID));
	return 0;
}

static int setup_auth(struct crypto_tfm *tfm, int encrypt, unsigned authsize,
		const u8 *key, int key_len, unsigned digest_len)
{
	u32 itarget, otarget, npe_ctx_addr;
	unsigned char *cinfo;
	int init_len, ret = 0;
	u32 cfgword;
	struct ix_sa_dir *dir;
	struct ixp_ctx *ctx = crypto_tfm_ctx(tfm);
	const struct ix_hash_algo *algo;

	dir = encrypt ? &ctx->encrypt : &ctx->decrypt;
	cinfo = dir->npe_ctx + dir->npe_ctx_idx;
	algo = ix_hash(tfm);

	/* write cfg word to cryptinfo */
	cfgword = algo->cfgword | (authsize << 6); /* (authsize/4) << 8 */
#ifndef __ARMEB__
	cfgword ^= 0xAA000000; /* change the "byte swap" flags */
#endif
	*(u32 *)cinfo = cpu_to_be32(cfgword);
	cinfo += sizeof(cfgword);

	/* write ICV to cryptinfo */
	memcpy(cinfo, algo->icv, digest_len);
	cinfo += digest_len;

	itarget = dir->npe_ctx_phys + dir->npe_ctx_idx
				+ sizeof(algo->cfgword);
	otarget = itarget + digest_len;
	init_len = cinfo - (dir->npe_ctx + dir->npe_ctx_idx);
	npe_ctx_addr = dir->npe_ctx_phys + dir->npe_ctx_idx;

	dir->npe_ctx_idx += init_len;
	dir->npe_mode |= NPE_OP_HASH_ENABLE;

	if (!encrypt)
		dir->npe_mode |= NPE_OP_HASH_VERIFY;

	ret = register_chain_var(tfm, HMAC_OPAD_VALUE, otarget,
			init_len, npe_ctx_addr, key, key_len);
	if (ret)
		return ret;
	return register_chain_var(tfm, HMAC_IPAD_VALUE, itarget,
			init_len, npe_ctx_addr, key, key_len);
}

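/*
 * For AES decryption the NPE needs the reverse (decryption) key schedule;
 * gen_rev_aes_key() asks the NPE to derive it in place from the encryption
 * key already written to the decrypt context.
 */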
static int gen_rev_aes_key(struct crypto_tfm *tfm)
{
	struct crypt_ctl *crypt;
	struct ixp_ctx *ctx = crypto_tfm_ctx(tfm);
	struct ix_sa_dir *dir = &ctx->decrypt;

	crypt = get_crypt_desc_emerg();
	if (!crypt) {
		return -EAGAIN;
	}
	*(u32 *)dir->npe_ctx |= cpu_to_be32(CIPH_ENCR);

	crypt->data.tfm = tfm;
	crypt->crypt_offs = 0;
	crypt->crypt_len = AES_BLOCK128;
	crypt->src_buf = 0;
	crypt->crypto_ctx = dir->npe_ctx_phys;
	crypt->icv_rev_aes = dir->npe_ctx_phys + sizeof(u32);
	crypt->mode = NPE_OP_ENC_GEN_KEY;
	crypt->init_len = dir->npe_ctx_idx;
	crypt->ctl_flags |= CTL_FLAG_GEN_REVAES;

	atomic_inc(&ctx->configuring);
	qmgr_put_entry(SEND_QID, crypt_virt2phys(crypt));
	BUG_ON(qmgr_stat_overflow(SEND_QID));
	return 0;
}

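/*
 * setup_cipher() writes the cipher configuration word and key into the
 * per-direction NPE context and flags weak or invalid keys via crt_flags.
 */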
static int setup_cipher(struct crypto_tfm *tfm, int encrypt,
		const u8 *key, int key_len)
{
	u8 *cinfo;
	u32 cipher_cfg;
	u32 keylen_cfg = 0;
	struct ix_sa_dir *dir;
	struct ixp_ctx *ctx = crypto_tfm_ctx(tfm);
	u32 *flags = &tfm->crt_flags;

	dir = encrypt ? &ctx->encrypt : &ctx->decrypt;
	cinfo = dir->npe_ctx;

	if (encrypt) {
		cipher_cfg = cipher_cfg_enc(tfm);
		dir->npe_mode |= NPE_OP_CRYPT_ENCRYPT;
	} else {
		cipher_cfg = cipher_cfg_dec(tfm);
	}
	if (cipher_cfg & MOD_AES) {
		switch (key_len) {
		case 16: keylen_cfg = MOD_AES128; break;
		case 24: keylen_cfg = MOD_AES192; break;
		case 32: keylen_cfg = MOD_AES256; break;
		default:
			*flags |= CRYPTO_TFM_RES_BAD_KEY_LEN;
			return -EINVAL;
		}
		cipher_cfg |= keylen_cfg;
	} else {
		u32 tmp[DES_EXPKEY_WORDS];
		if (des_ekey(tmp, key) == 0) {
			*flags |= CRYPTO_TFM_RES_WEAK_KEY;
		}
	}
	/* write cfg word to cryptinfo */
	*(u32 *)cinfo = cpu_to_be32(cipher_cfg);
	cinfo += sizeof(cipher_cfg);

	/* write cipher key to cryptinfo */
	memcpy(cinfo, key, key_len);
	/* NPE wants keylen set to DES3_EDE_KEY_SIZE even for single DES */
	if (key_len < DES3_EDE_KEY_SIZE && !(cipher_cfg & MOD_AES)) {
		memset(cinfo + key_len, 0, DES3_EDE_KEY_SIZE - key_len);
		key_len = DES3_EDE_KEY_SIZE;
	}
	dir->npe_ctx_idx = sizeof(cipher_cfg) + key_len;
	dir->npe_mode |= NPE_OP_CRYPT_ENABLE;
	if ((cipher_cfg & MOD_AES) && !encrypt) {
		return gen_rev_aes_key(tfm);
	}
	return 0;
}

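/*
 * chainup_buffers() walks a scatterlist and builds the buffer_desc chain
 * the NPE consumes, DMA-mapping each segment; 'buf' is the hook descriptor
 * whose ->next/->phys_next receive the head of the new chain.
 */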
static struct buffer_desc *chainup_buffers(struct device *dev,
		struct scatterlist *sg, unsigned nbytes,
		struct buffer_desc *buf, gfp_t flags,
		enum dma_data_direction dir)
{
	for (; nbytes > 0; sg = sg_next(sg)) {
		unsigned len = min(nbytes, sg->length);
		struct buffer_desc *next_buf;
		dma_addr_t next_buf_phys;
		void *ptr;

		nbytes -= len;
		ptr = sg_virt(sg);
		next_buf = dma_pool_alloc(buffer_pool, flags, &next_buf_phys);
		if (!next_buf) {
			buf = NULL;
			break;
		}
		sg_dma_address(sg) = dma_map_single(dev, ptr, len, dir);
		buf->next = next_buf;
		buf->phys_next = next_buf_phys;
		buf = next_buf;

		buf->phys_addr = sg_dma_address(sg);
		buf->buf_len = len;
		buf->dir = dir;
	}
	buf->next = NULL;
	buf->phys_next = 0;
	return buf;
}

static int ablk_setkey(struct crypto_ablkcipher *tfm, const u8 *key,
			unsigned int key_len)
{
	struct ixp_ctx *ctx = crypto_ablkcipher_ctx(tfm);
	u32 *flags = &tfm->base.crt_flags;
	int ret;

	init_completion(&ctx->completion);
	atomic_inc(&ctx->configuring);

	reset_sa_dir(&ctx->encrypt);
	reset_sa_dir(&ctx->decrypt);

	ctx->encrypt.npe_mode = NPE_OP_HMAC_DISABLE;
	ctx->decrypt.npe_mode = NPE_OP_HMAC_DISABLE;

	ret = setup_cipher(&tfm->base, 0, key, key_len);
	if (ret)
		goto out;
	ret = setup_cipher(&tfm->base, 1, key, key_len);
	if (ret)
		goto out;

	if (*flags & CRYPTO_TFM_RES_WEAK_KEY) {
		if (*flags & CRYPTO_TFM_REQ_FORBID_WEAK_KEYS) {
			ret = -EINVAL;
		} else {
			*flags &= ~CRYPTO_TFM_RES_WEAK_KEY;
		}
	}
out:
	if (!atomic_dec_and_test(&ctx->configuring))
		wait_for_completion(&ctx->completion);
	return ret;
}

static int ablk_des3_setkey(struct crypto_ablkcipher *tfm, const u8 *key,
			    unsigned int key_len)
{
	u32 flags = crypto_ablkcipher_get_flags(tfm);
	int err;

	err = __des3_verify_key(&flags, key);
	if (unlikely(err))
		crypto_ablkcipher_set_flags(tfm, flags);

	return ablk_setkey(tfm, key, key_len);
}

static int ablk_rfc3686_setkey(struct crypto_ablkcipher *tfm, const u8 *key,
		unsigned int key_len)
{
	struct ixp_ctx *ctx = crypto_ablkcipher_ctx(tfm);

	/* the nonce is stored in bytes at end of key */
	if (key_len < CTR_RFC3686_NONCE_SIZE)
		return -EINVAL;

	memcpy(ctx->nonce, key + (key_len - CTR_RFC3686_NONCE_SIZE),
			CTR_RFC3686_NONCE_SIZE);

	key_len -= CTR_RFC3686_NONCE_SIZE;
	return ablk_setkey(tfm, key, key_len);
}

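/*
 * ablk_perform() maps the request into buffer_desc chains, fills a
 * crypt_ctl descriptor and hands it to the NPE via SEND_QID; completion is
 * reported asynchronously from one_packet().
 */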
static int ablk_perform(struct ablkcipher_request *req, int encrypt)
{
	struct crypto_ablkcipher *tfm = crypto_ablkcipher_reqtfm(req);
	struct ixp_ctx *ctx = crypto_ablkcipher_ctx(tfm);
	unsigned ivsize = crypto_ablkcipher_ivsize(tfm);
	struct ix_sa_dir *dir;
	struct crypt_ctl *crypt;
	unsigned int nbytes = req->nbytes;
	enum dma_data_direction src_direction = DMA_BIDIRECTIONAL;
	struct ablk_ctx *req_ctx = ablkcipher_request_ctx(req);
	struct buffer_desc src_hook;
	struct device *dev = &pdev->dev;
	gfp_t flags = req->base.flags & CRYPTO_TFM_REQ_MAY_SLEEP ?
				GFP_KERNEL : GFP_ATOMIC;

	if (qmgr_stat_full(SEND_QID))
		return -EAGAIN;
	if (atomic_read(&ctx->configuring))
		return -EAGAIN;

	dir = encrypt ? &ctx->encrypt : &ctx->decrypt;

	crypt = get_crypt_desc();
	if (!crypt)
		return -ENOMEM;

	crypt->data.ablk_req = req;
	crypt->crypto_ctx = dir->npe_ctx_phys;
	crypt->mode = dir->npe_mode;
	crypt->init_len = dir->npe_ctx_idx;

	crypt->crypt_offs = 0;
	crypt->crypt_len = nbytes;

	BUG_ON(ivsize && !req->info);
	memcpy(crypt->iv, req->info, ivsize);
	if (req->src != req->dst) {
		struct buffer_desc dst_hook;
		crypt->mode |= NPE_OP_NOT_IN_PLACE;
		/* This was never tested by Intel
		 * for more than one dst buffer, I think. */
		req_ctx->dst = NULL;
		if (!chainup_buffers(dev, req->dst, nbytes, &dst_hook,
					flags, DMA_FROM_DEVICE))
			goto free_buf_dest;
		src_direction = DMA_TO_DEVICE;
		req_ctx->dst = dst_hook.next;
		crypt->dst_buf = dst_hook.phys_next;
	} else {
		req_ctx->dst = NULL;
	}
	req_ctx->src = NULL;
	if (!chainup_buffers(dev, req->src, nbytes, &src_hook,
				flags, src_direction))
		goto free_buf_src;

	req_ctx->src = src_hook.next;
	crypt->src_buf = src_hook.phys_next;
	crypt->ctl_flags |= CTL_FLAG_PERFORM_ABLK;
	qmgr_put_entry(SEND_QID, crypt_virt2phys(crypt));
	BUG_ON(qmgr_stat_overflow(SEND_QID));
	return -EINPROGRESS;

free_buf_src:
	free_buf_chain(dev, req_ctx->src, crypt->src_buf);
free_buf_dest:
	if (req->src != req->dst) {
		free_buf_chain(dev, req_ctx->dst, crypt->dst_buf);
	}
	crypt->ctl_flags = CTL_FLAG_UNUSED;
	return -ENOMEM;
}

static int ablk_encrypt(struct ablkcipher_request *req)
{
	return ablk_perform(req, 1);
}

static int ablk_decrypt(struct ablkcipher_request *req)
{
	return ablk_perform(req, 0);
}

static int ablk_rfc3686_crypt(struct ablkcipher_request *req)
{
	struct crypto_ablkcipher *tfm = crypto_ablkcipher_reqtfm(req);
	struct ixp_ctx *ctx = crypto_ablkcipher_ctx(tfm);
	u8 iv[CTR_RFC3686_BLOCK_SIZE];
	u8 *info = req->info;
	int ret;

	/* set up counter block */
	memcpy(iv, ctx->nonce, CTR_RFC3686_NONCE_SIZE);
	memcpy(iv + CTR_RFC3686_NONCE_SIZE, info, CTR_RFC3686_IV_SIZE);

	/* initialize counter portion of counter block */
	*(__be32 *)(iv + CTR_RFC3686_NONCE_SIZE + CTR_RFC3686_IV_SIZE) =
		cpu_to_be32(1);

	req->info = iv;
	ret = ablk_perform(req, 1);
	req->info = info;
	return ret;
}

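/*
 * aead_perform() handles authenc() requests: the whole assoc+payload region
 * is hashed, only the payload is ciphered, and the ICV is read from or
 * written at the end of the last buffer (or via a bounce buffer when it is
 * scattered across entries).
 */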
static int aead_perform(struct aead_request *req, int encrypt,
		int cryptoffset, int eff_cryptlen, u8 *iv)
{
	struct crypto_aead *tfm = crypto_aead_reqtfm(req);
	struct ixp_ctx *ctx = crypto_aead_ctx(tfm);
	unsigned ivsize = crypto_aead_ivsize(tfm);
	unsigned authsize = crypto_aead_authsize(tfm);
	struct ix_sa_dir *dir;
	struct crypt_ctl *crypt;
	unsigned int cryptlen;
	struct buffer_desc *buf, src_hook;
	struct aead_ctx *req_ctx = aead_request_ctx(req);
	struct device *dev = &pdev->dev;
	gfp_t flags = req->base.flags & CRYPTO_TFM_REQ_MAY_SLEEP ?
				GFP_KERNEL : GFP_ATOMIC;
	enum dma_data_direction src_direction = DMA_BIDIRECTIONAL;
	unsigned int lastlen;

	if (qmgr_stat_full(SEND_QID))
		return -EAGAIN;
	if (atomic_read(&ctx->configuring))
		return -EAGAIN;

	if (encrypt) {
		dir = &ctx->encrypt;
		cryptlen = req->cryptlen;
	} else {
		dir = &ctx->decrypt;
		/* req->cryptlen includes the authsize when decrypting */
		cryptlen = req->cryptlen - authsize;
		eff_cryptlen -= authsize;
	}
	crypt = get_crypt_desc();
	if (!crypt)
		return -ENOMEM;

	crypt->data.aead_req = req;
	crypt->crypto_ctx = dir->npe_ctx_phys;
	crypt->mode = dir->npe_mode;
	crypt->init_len = dir->npe_ctx_idx;

	crypt->crypt_offs = cryptoffset;
	crypt->crypt_len = eff_cryptlen;

	crypt->auth_offs = 0;
	crypt->auth_len = req->assoclen + cryptlen;
	BUG_ON(ivsize && !req->iv);
	memcpy(crypt->iv, req->iv, ivsize);

	buf = chainup_buffers(dev, req->src, crypt->auth_len,
			      &src_hook, flags, src_direction);
	req_ctx->src = src_hook.next;
	crypt->src_buf = src_hook.phys_next;
	if (!buf)
		goto free_buf_src;

	lastlen = buf->buf_len;
	if (lastlen >= authsize)
		crypt->icv_rev_aes = buf->phys_addr +
				     buf->buf_len - authsize;

	req_ctx->dst = NULL;

	if (req->src != req->dst) {
		struct buffer_desc dst_hook;

		crypt->mode |= NPE_OP_NOT_IN_PLACE;
		src_direction = DMA_TO_DEVICE;

		buf = chainup_buffers(dev, req->dst, crypt->auth_len,
				      &dst_hook, flags, DMA_FROM_DEVICE);
		req_ctx->dst = dst_hook.next;
		crypt->dst_buf = dst_hook.phys_next;

		if (!buf)
			goto free_buf_dst;

		if (encrypt) {
			lastlen = buf->buf_len;
			if (lastlen >= authsize)
				crypt->icv_rev_aes = buf->phys_addr +
						     buf->buf_len - authsize;
		}
	}

	if (unlikely(lastlen < authsize)) {
		/* The 12 hmac bytes are scattered,
		 * we need to copy them into a safe buffer */
		req_ctx->hmac_virt = dma_pool_alloc(buffer_pool, flags,
				&crypt->icv_rev_aes);
		if (unlikely(!req_ctx->hmac_virt))
			goto free_buf_dst;
		if (!encrypt) {
			scatterwalk_map_and_copy(req_ctx->hmac_virt,
				req->src, cryptlen, authsize, 0);
		}
		req_ctx->encrypt = encrypt;
	} else {
		req_ctx->hmac_virt = NULL;
	}

	crypt->ctl_flags |= CTL_FLAG_PERFORM_AEAD;
	qmgr_put_entry(SEND_QID, crypt_virt2phys(crypt));
	BUG_ON(qmgr_stat_overflow(SEND_QID));
	return -EINPROGRESS;

free_buf_dst:
	free_buf_chain(dev, req_ctx->dst, crypt->dst_buf);
free_buf_src:
	free_buf_chain(dev, req_ctx->src, crypt->src_buf);
	crypt->ctl_flags = CTL_FLAG_UNUSED;
	return -ENOMEM;
}

static int aead_setup(struct crypto_aead *tfm, unsigned int authsize)
{
	struct ixp_ctx *ctx = crypto_aead_ctx(tfm);
	u32 *flags = &tfm->base.crt_flags;
	unsigned digest_len = crypto_aead_maxauthsize(tfm);
	int ret;

	if (!ctx->enckey_len && !ctx->authkey_len)
		return 0;
	init_completion(&ctx->completion);
	atomic_inc(&ctx->configuring);

	reset_sa_dir(&ctx->encrypt);
	reset_sa_dir(&ctx->decrypt);

	ret = setup_cipher(&tfm->base, 0, ctx->enckey, ctx->enckey_len);
	if (ret)
		goto out;
	ret = setup_cipher(&tfm->base, 1, ctx->enckey, ctx->enckey_len);
	if (ret)
		goto out;
	ret = setup_auth(&tfm->base, 0, authsize, ctx->authkey,
			ctx->authkey_len, digest_len);
	if (ret)
		goto out;
	ret = setup_auth(&tfm->base, 1, authsize, ctx->authkey,
			ctx->authkey_len, digest_len);
	if (ret)
		goto out;

	if (*flags & CRYPTO_TFM_RES_WEAK_KEY) {
		if (*flags & CRYPTO_TFM_REQ_FORBID_WEAK_KEYS) {
			ret = -EINVAL;
			goto out;
		} else {
			*flags &= ~CRYPTO_TFM_RES_WEAK_KEY;
		}
	}
out:
	if (!atomic_dec_and_test(&ctx->configuring))
		wait_for_completion(&ctx->completion);
	return ret;
}

static int aead_setauthsize(struct crypto_aead *tfm, unsigned int authsize)
{
	int max = crypto_aead_maxauthsize(tfm) >> 2;

	if ((authsize >> 2) < 1 || (authsize >> 2) > max || (authsize & 3))
		return -EINVAL;
	return aead_setup(tfm, authsize);
}

static int aead_setkey(struct crypto_aead *tfm, const u8 *key,
			unsigned int keylen)
{
	struct ixp_ctx *ctx = crypto_aead_ctx(tfm);
	struct crypto_authenc_keys keys;

	if (crypto_authenc_extractkeys(&keys, key, keylen) != 0)
		goto badkey;

	if (keys.authkeylen > sizeof(ctx->authkey))
		goto badkey;

	if (keys.enckeylen > sizeof(ctx->enckey))
		goto badkey;

	memcpy(ctx->authkey, keys.authkey, keys.authkeylen);
	memcpy(ctx->enckey, keys.enckey, keys.enckeylen);
	ctx->authkey_len = keys.authkeylen;
	ctx->enckey_len = keys.enckeylen;

	memzero_explicit(&keys, sizeof(keys));
	return aead_setup(tfm, crypto_aead_authsize(tfm));
badkey:
	crypto_aead_set_flags(tfm, CRYPTO_TFM_RES_BAD_KEY_LEN);
	memzero_explicit(&keys, sizeof(keys));
	return -EINVAL;
}

static int des3_aead_setkey(struct crypto_aead *tfm, const u8 *key,
			    unsigned int keylen)
{
	struct ixp_ctx *ctx = crypto_aead_ctx(tfm);
	u32 flags = CRYPTO_TFM_RES_BAD_KEY_LEN;
	struct crypto_authenc_keys keys;
	int err;

	err = crypto_authenc_extractkeys(&keys, key, keylen);
	if (unlikely(err))
		goto badkey;

	err = -EINVAL;
	if (keys.authkeylen > sizeof(ctx->authkey))
		goto badkey;

	if (keys.enckeylen != DES3_EDE_KEY_SIZE)
		goto badkey;

	flags = crypto_aead_get_flags(tfm);
	err = __des3_verify_key(&flags, keys.enckey);
	if (unlikely(err))
		goto badkey;

	memcpy(ctx->authkey, keys.authkey, keys.authkeylen);
	memcpy(ctx->enckey, keys.enckey, keys.enckeylen);
	ctx->authkey_len = keys.authkeylen;
	ctx->enckey_len = keys.enckeylen;

	memzero_explicit(&keys, sizeof(keys));
	return aead_setup(tfm, crypto_aead_authsize(tfm));
badkey:
	crypto_aead_set_flags(tfm, flags);
	memzero_explicit(&keys, sizeof(keys));
	return err;
}

static int aead_encrypt(struct aead_request *req)
{
	return aead_perform(req, 1, req->assoclen, req->cryptlen, req->iv);
}

static int aead_decrypt(struct aead_request *req)
{
	return aead_perform(req, 0, req->assoclen, req->cryptlen, req->iv);
}

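/*
 * Algorithm templates. Each entry carries the NPE configuration words for
 * the encrypt and decrypt directions; callbacks left empty here
 * (setkey/encrypt/decrypt) are filled with the generic ablk_* handlers at
 * module init.
 */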
static struct ixp_alg ixp4xx_algos[] = {
{
	.crypto	= {
		.cra_name	= "cbc(des)",
		.cra_blocksize	= DES_BLOCK_SIZE,
		.cra_u		= { .ablkcipher = {
			.min_keysize	= DES_KEY_SIZE,
			.max_keysize	= DES_KEY_SIZE,
			.ivsize		= DES_BLOCK_SIZE,
			}
		}
	},
	.cfg_enc = CIPH_ENCR | MOD_DES | MOD_CBC_ENC | KEYLEN_192,
	.cfg_dec = CIPH_DECR | MOD_DES | MOD_CBC_DEC | KEYLEN_192,

}, {
	.crypto	= {
		.cra_name	= "ecb(des)",
		.cra_blocksize	= DES_BLOCK_SIZE,
		.cra_u		= { .ablkcipher = {
			.min_keysize	= DES_KEY_SIZE,
			.max_keysize	= DES_KEY_SIZE,
			}
		}
	},
	.cfg_enc = CIPH_ENCR | MOD_DES | MOD_ECB | KEYLEN_192,
	.cfg_dec = CIPH_DECR | MOD_DES | MOD_ECB | KEYLEN_192,
}, {
	.crypto	= {
		.cra_name	= "cbc(des3_ede)",
		.cra_blocksize	= DES3_EDE_BLOCK_SIZE,
		.cra_u		= { .ablkcipher = {
			.min_keysize	= DES3_EDE_KEY_SIZE,
			.max_keysize	= DES3_EDE_KEY_SIZE,
			.ivsize		= DES3_EDE_BLOCK_SIZE,
			.setkey		= ablk_des3_setkey,
			}
		}
	},
	.cfg_enc = CIPH_ENCR | MOD_3DES | MOD_CBC_ENC | KEYLEN_192,
	.cfg_dec = CIPH_DECR | MOD_3DES | MOD_CBC_DEC | KEYLEN_192,
}, {
	.crypto	= {
		.cra_name	= "ecb(des3_ede)",
		.cra_blocksize	= DES3_EDE_BLOCK_SIZE,
		.cra_u		= { .ablkcipher = {
			.min_keysize	= DES3_EDE_KEY_SIZE,
			.max_keysize	= DES3_EDE_KEY_SIZE,
			.setkey		= ablk_des3_setkey,
			}
		}
	},
	.cfg_enc = CIPH_ENCR | MOD_3DES | MOD_ECB | KEYLEN_192,
	.cfg_dec = CIPH_DECR | MOD_3DES | MOD_ECB | KEYLEN_192,
}, {
	.crypto	= {
		.cra_name	= "cbc(aes)",
		.cra_blocksize	= AES_BLOCK_SIZE,
		.cra_u		= { .ablkcipher = {
			.min_keysize	= AES_MIN_KEY_SIZE,
			.max_keysize	= AES_MAX_KEY_SIZE,
			.ivsize		= AES_BLOCK_SIZE,
			}
		}
	},
	.cfg_enc = CIPH_ENCR | MOD_AES | MOD_CBC_ENC,
	.cfg_dec = CIPH_DECR | MOD_AES | MOD_CBC_DEC,
}, {
	.crypto	= {
		.cra_name	= "ecb(aes)",
		.cra_blocksize	= AES_BLOCK_SIZE,
		.cra_u		= { .ablkcipher = {
			.min_keysize	= AES_MIN_KEY_SIZE,
			.max_keysize	= AES_MAX_KEY_SIZE,
			}
		}
	},
	.cfg_enc = CIPH_ENCR | MOD_AES | MOD_ECB,
	.cfg_dec = CIPH_DECR | MOD_AES | MOD_ECB,
}, {
	.crypto	= {
		.cra_name	= "ctr(aes)",
		.cra_blocksize	= AES_BLOCK_SIZE,
		.cra_u		= { .ablkcipher = {
			.min_keysize	= AES_MIN_KEY_SIZE,
			.max_keysize	= AES_MAX_KEY_SIZE,
			.ivsize		= AES_BLOCK_SIZE,
			}
		}
	},
	.cfg_enc = CIPH_ENCR | MOD_AES | MOD_CTR,
	.cfg_dec = CIPH_ENCR | MOD_AES | MOD_CTR,
}, {
	.crypto	= {
		.cra_name	= "rfc3686(ctr(aes))",
		.cra_blocksize	= AES_BLOCK_SIZE,
		.cra_u		= { .ablkcipher = {
			.min_keysize	= AES_MIN_KEY_SIZE,
			.max_keysize	= AES_MAX_KEY_SIZE,
			.ivsize		= AES_BLOCK_SIZE,
			.setkey		= ablk_rfc3686_setkey,
			.encrypt	= ablk_rfc3686_crypt,
			.decrypt	= ablk_rfc3686_crypt }
		}
	},
	.cfg_enc = CIPH_ENCR | MOD_AES | MOD_CTR,
	.cfg_dec = CIPH_ENCR | MOD_AES | MOD_CTR,
} };

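/*
 * AEAD (authenc) templates, paired with the MD5 or SHA1 ICV tables above.
 *
 * Illustrative only (not part of this driver): once registered, a kernel
 * user would reach these through the regular AEAD API, roughly:
 *
 *	struct crypto_aead *tfm;
 *	struct aead_request *req;
 *
 *	tfm = crypto_alloc_aead("authenc(hmac(sha1),cbc(aes))", 0, 0);
 *	crypto_aead_setkey(tfm, authenc_key_blob, authenc_key_blob_len);
 *	crypto_aead_setauthsize(tfm, 12);
 *	req = aead_request_alloc(tfm, GFP_KERNEL);
 *	aead_request_set_ad(req, assoclen);
 *	aead_request_set_crypt(req, src_sg, dst_sg, cryptlen, iv);
 *	crypto_aead_encrypt(req);	-- completes asynchronously here
 */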
static struct ixp_aead_alg ixp4xx_aeads[] = {
{
	.crypto	= {
		.base = {
			.cra_name	= "authenc(hmac(md5),cbc(des))",
			.cra_blocksize	= DES_BLOCK_SIZE,
		},
		.ivsize		= DES_BLOCK_SIZE,
		.maxauthsize	= MD5_DIGEST_SIZE,
	},
	.hash = &hash_alg_md5,
	.cfg_enc = CIPH_ENCR | MOD_DES | MOD_CBC_ENC | KEYLEN_192,
	.cfg_dec = CIPH_DECR | MOD_DES | MOD_CBC_DEC | KEYLEN_192,
}, {
	.crypto	= {
		.base = {
			.cra_name	= "authenc(hmac(md5),cbc(des3_ede))",
			.cra_blocksize	= DES3_EDE_BLOCK_SIZE,
		},
		.ivsize		= DES3_EDE_BLOCK_SIZE,
		.maxauthsize	= MD5_DIGEST_SIZE,
		.setkey		= des3_aead_setkey,
	},
	.hash = &hash_alg_md5,
	.cfg_enc = CIPH_ENCR | MOD_3DES | MOD_CBC_ENC | KEYLEN_192,
	.cfg_dec = CIPH_DECR | MOD_3DES | MOD_CBC_DEC | KEYLEN_192,
}, {
	.crypto	= {
		.base = {
			.cra_name	= "authenc(hmac(sha1),cbc(des))",
			.cra_blocksize	= DES_BLOCK_SIZE,
		},
		.ivsize		= DES_BLOCK_SIZE,
		.maxauthsize	= SHA1_DIGEST_SIZE,
	},
	.hash = &hash_alg_sha1,
	.cfg_enc = CIPH_ENCR | MOD_DES | MOD_CBC_ENC | KEYLEN_192,
	.cfg_dec = CIPH_DECR | MOD_DES | MOD_CBC_DEC | KEYLEN_192,
}, {
	.crypto	= {
		.base = {
			.cra_name	= "authenc(hmac(sha1),cbc(des3_ede))",
			.cra_blocksize	= DES3_EDE_BLOCK_SIZE,
		},
		.ivsize		= DES3_EDE_BLOCK_SIZE,
		.maxauthsize	= SHA1_DIGEST_SIZE,
		.setkey		= des3_aead_setkey,
	},
	.hash = &hash_alg_sha1,
	.cfg_enc = CIPH_ENCR | MOD_3DES | MOD_CBC_ENC | KEYLEN_192,
	.cfg_dec = CIPH_DECR | MOD_3DES | MOD_CBC_DEC | KEYLEN_192,
}, {
	.crypto	= {
		.base = {
			.cra_name	= "authenc(hmac(md5),cbc(aes))",
			.cra_blocksize	= AES_BLOCK_SIZE,
		},
		.ivsize		= AES_BLOCK_SIZE,
		.maxauthsize	= MD5_DIGEST_SIZE,
	},
	.hash = &hash_alg_md5,
	.cfg_enc = CIPH_ENCR | MOD_AES | MOD_CBC_ENC,
	.cfg_dec = CIPH_DECR | MOD_AES | MOD_CBC_DEC,
}, {
	.crypto	= {
		.base = {
			.cra_name	= "authenc(hmac(sha1),cbc(aes))",
			.cra_blocksize	= AES_BLOCK_SIZE,
		},
		.ivsize		= AES_BLOCK_SIZE,
		.maxauthsize	= SHA1_DIGEST_SIZE,
	},
	.hash = &hash_alg_sha1,
	.cfg_enc = CIPH_ENCR | MOD_AES | MOD_CBC_ENC,
	.cfg_dec = CIPH_DECR | MOD_AES | MOD_CBC_DEC,
} };

#define IXP_POSTFIX "-ixp4xx"

static const struct platform_device_info ixp_dev_info __initdata = {
	.name		= DRIVER_NAME,
	.id		= 0,
	.dma_mask	= DMA_BIT_MASK(32),
};

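/*
 * Module init: register a platform device to get a struct device for DMA,
 * initialise the NPE and queues, then register every ablkcipher and AEAD
 * template whose requirements (e.g. AES firmware support) are met.
 */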
static int __init ixp_module_init(void)
{
	int num = ARRAY_SIZE(ixp4xx_algos);
	int i, err;

	pdev = platform_device_register_full(&ixp_dev_info);
	if (IS_ERR(pdev))
		return PTR_ERR(pdev);

	spin_lock_init(&desc_lock);
	spin_lock_init(&emerg_lock);

	err = init_ixp_crypto(&pdev->dev);
	if (err) {
		platform_device_unregister(pdev);
		return err;
	}
	for (i = 0; i < num; i++) {
		struct crypto_alg *cra = &ixp4xx_algos[i].crypto;

		if (snprintf(cra->cra_driver_name, CRYPTO_MAX_ALG_NAME,
			"%s"IXP_POSTFIX, cra->cra_name) >=
			CRYPTO_MAX_ALG_NAME)
		{
			continue;
		}
		if (!support_aes && (ixp4xx_algos[i].cfg_enc & MOD_AES)) {
			continue;
		}

		/* block ciphers */
		cra->cra_type = &crypto_ablkcipher_type;
		cra->cra_flags = CRYPTO_ALG_TYPE_ABLKCIPHER |
				 CRYPTO_ALG_KERN_DRIVER_ONLY |
				 CRYPTO_ALG_ASYNC;
		if (!cra->cra_ablkcipher.setkey)
			cra->cra_ablkcipher.setkey = ablk_setkey;
		if (!cra->cra_ablkcipher.encrypt)
			cra->cra_ablkcipher.encrypt = ablk_encrypt;
		if (!cra->cra_ablkcipher.decrypt)
			cra->cra_ablkcipher.decrypt = ablk_decrypt;
		cra->cra_init = init_tfm_ablk;

		cra->cra_ctxsize = sizeof(struct ixp_ctx);
		cra->cra_module = THIS_MODULE;
		cra->cra_alignmask = 3;
		cra->cra_priority = 300;
		cra->cra_exit = exit_tfm;
		if (crypto_register_alg(cra))
			printk(KERN_ERR "Failed to register '%s'\n",
				cra->cra_name);
		else
			ixp4xx_algos[i].registered = 1;
	}

	for (i = 0; i < ARRAY_SIZE(ixp4xx_aeads); i++) {
		struct aead_alg *cra = &ixp4xx_aeads[i].crypto;

		if (snprintf(cra->base.cra_driver_name, CRYPTO_MAX_ALG_NAME,
			     "%s"IXP_POSTFIX, cra->base.cra_name) >=
		    CRYPTO_MAX_ALG_NAME)
			continue;
		if (!support_aes && (ixp4xx_algos[i].cfg_enc & MOD_AES))
			continue;

		/* authenc */
		cra->base.cra_flags = CRYPTO_ALG_KERN_DRIVER_ONLY |
				      CRYPTO_ALG_ASYNC;
		cra->setkey = cra->setkey ?: aead_setkey;
		cra->setauthsize = aead_setauthsize;
		cra->encrypt = aead_encrypt;
		cra->decrypt = aead_decrypt;
		cra->init = init_tfm_aead;
		cra->exit = exit_tfm_aead;

		cra->base.cra_ctxsize = sizeof(struct ixp_ctx);
		cra->base.cra_module = THIS_MODULE;
		cra->base.cra_alignmask = 3;
		cra->base.cra_priority = 300;

		if (crypto_register_aead(cra))
			printk(KERN_ERR "Failed to register '%s'\n",
				cra->base.cra_driver_name);
		else
			ixp4xx_aeads[i].registered = 1;
	}
	return 0;
}

static void __exit ixp_module_exit(void)
{
	int num = ARRAY_SIZE(ixp4xx_algos);
	int i;

	for (i = 0; i < ARRAY_SIZE(ixp4xx_aeads); i++) {
		if (ixp4xx_aeads[i].registered)
			crypto_unregister_aead(&ixp4xx_aeads[i].crypto);
	}

	for (i = 0; i < num; i++) {
		if (ixp4xx_algos[i].registered)
			crypto_unregister_alg(&ixp4xx_algos[i].crypto);
	}
	release_ixp_crypto(&pdev->dev);
	platform_device_unregister(pdev);
}

module_init(ixp_module_init);
module_exit(ixp_module_exit);

MODULE_LICENSE("GPL");
MODULE_AUTHOR("Christian Hohnstaedt <chohnstaedt@innominate.com>");
MODULE_DESCRIPTION("IXP4xx hardware crypto");