blob: 78b0b7c17205b346fb89405aed40ad7788e43e1a [file] [log] [blame]
/*
 * caam - Freescale FSL CAAM support for crypto API
 *
 * Copyright 2008-2011 Freescale Semiconductor, Inc.
 * Copyright 2016 NXP
 *
 * Based on talitos crypto API driver.
 *
 * relationship of job descriptors to shared descriptors (SteveC Dec 10 2008):
 *
 * ---------------                     ---------------
 * | JobDesc #1  |-------------------->|  ShareDesc  |
 * | *(packet 1) |                     |   (PDB)     |
 * ---------------      |------------->|  (hashKey)  |
 *       .              |              | (cipherKey) |
 *       .              |    |-------->| (operation) |
 * ---------------      |    |         ---------------
 * | JobDesc #2  |------|    |
 * | *(packet 2) |           |
 * ---------------           |
 *       .                   |
 *       .                   |
 * ---------------           |
 * | JobDesc #3  |------------
 * | *(packet 3) |
 * ---------------
 *
 * The SharedDesc never changes for a connection unless rekeyed, but
 * each packet will likely be in a different place. So all we need
 * to know to process the packet is where the input is, where the
 * output goes, and what context we want to process with. Context is
 * in the SharedDesc, packet references in the JobDesc.
 *
 * So, a job desc looks like:
 *
 * ---------------------
 * | Header            |
 * | ShareDesc Pointer |
 * | SEQ_OUT_PTR       |
 * | (output buffer)   |
 * | (output length)   |
 * | SEQ_IN_PTR        |
 * | (input buffer)    |
 * | (input length)    |
 * ---------------------
 */
47
48#include "compat.h"
49
50#include "regs.h"
51#include "intern.h"
52#include "desc_constr.h"
53#include "jr.h"
54#include "error.h"
Yuan Kanga299c832012-06-22 19:48:46 -050055#include "sg_sw_sec4.h"
Yuan Kang4c1ec1f2012-06-22 19:48:45 -050056#include "key_gen.h"
Horia Geantă8cea7b62016-11-22 15:44:09 +020057#include "caamalg_desc.h"
Kim Phillips8e8ec592011-03-13 16:54:26 +080058
59/*
60 * crypto alg
61 */
62#define CAAM_CRA_PRIORITY 3000
63/* max key is sum of AES_MAX_KEY_SIZE, max split key size */
64#define CAAM_MAX_KEY_SIZE (AES_MAX_KEY_SIZE + \
Catalin Vasiledaebc462014-10-31 12:45:37 +020065 CTR_RFC3686_NONCE_SIZE + \
Kim Phillips8e8ec592011-03-13 16:54:26 +080066 SHA512_DIGEST_SIZE * 2)
Kim Phillips8e8ec592011-03-13 16:54:26 +080067
Herbert Xuf2147b82015-06-16 13:54:23 +080068#define AEAD_DESC_JOB_IO_LEN (DESC_JOB_IO_LEN + CAAM_CMD_SZ * 2)
69#define GCM_DESC_JOB_IO_LEN (AEAD_DESC_JOB_IO_LEN + \
70 CAAM_CMD_SZ * 4)
Herbert Xu479bcc72015-07-30 17:53:17 +080071#define AUTHENC_DESC_JOB_IO_LEN (AEAD_DESC_JOB_IO_LEN + \
72 CAAM_CMD_SZ * 5)
Herbert Xuf2147b82015-06-16 13:54:23 +080073
Herbert Xu87e51b02015-06-18 14:25:55 +080074#define DESC_MAX_USED_BYTES (CAAM_DESC_BYTES_MAX - DESC_JOB_IO_LEN)
75#define DESC_MAX_USED_LEN (DESC_MAX_USED_BYTES / CAAM_CMD_SZ)
Kim Phillips4427b1b2011-05-14 22:08:17 -050076
Kim Phillips8e8ec592011-03-13 16:54:26 +080077#ifdef DEBUG
78/* for print_hex_dumps with line references */
Kim Phillips8e8ec592011-03-13 16:54:26 +080079#define debug(format, arg...) printk(format, arg)
80#else
81#define debug(format, arg...)
82#endif
Catalin Vasile5ecf8ef2016-09-22 11:57:58 +030083
84#ifdef DEBUG
85#include <linux/highmem.h>
86
87static void dbg_dump_sg(const char *level, const char *prefix_str,
88 int prefix_type, int rowsize, int groupsize,
Horia Geantă00fef2b2016-11-09 10:46:16 +020089 struct scatterlist *sg, size_t tlen, bool ascii)
Catalin Vasile5ecf8ef2016-09-22 11:57:58 +030090{
91 struct scatterlist *it;
92 void *it_page;
93 size_t len;
94 void *buf;
95
96 for (it = sg; it != NULL && tlen > 0 ; it = sg_next(sg)) {
97 /*
98 * make sure the scatterlist's page
99 * has a valid virtual memory mapping
100 */
101 it_page = kmap_atomic(sg_page(it));
102 if (unlikely(!it_page)) {
103 printk(KERN_ERR "dbg_dump_sg: kmap failed\n");
104 return;
105 }
106
107 buf = it_page + it->offset;
Arnd Bergmannd69985a2016-10-25 23:29:10 +0200108 len = min_t(size_t, tlen, it->length);
Catalin Vasile5ecf8ef2016-09-22 11:57:58 +0300109 print_hex_dump(level, prefix_str, prefix_type, rowsize,
110 groupsize, buf, len, ascii);
111 tlen -= len;
112
113 kunmap_atomic(it_page);
114 }
115}
116#endif
117
/* driver-global list of registered algorithms (managed elsewhere in this file) */
static struct list_head alg_list;
Kim Phillips8e8ec592011-03-13 16:54:26 +0800119
/* CAAM-specific parameters attached to each algorithm template */
struct caam_alg_entry {
	int class1_alg_type;	/* class 1 (cipher) OP_ALG_* algorithm type */
	int class2_alg_type;	/* class 2 (auth/hash) OP_ALG_* algorithm type */
	bool rfc3686;		/* true for RFC3686 (CTR + nonce) variants */
	bool geniv;		/* true when the IV is generated by the device */
};
126
/* AEAD algorithm instance: crypto API alg plus CAAM template parameters */
struct caam_aead_alg {
	struct aead_alg aead;		/* generic crypto API AEAD algorithm */
	struct caam_alg_entry caam;	/* CAAM descriptor-construction info */
	bool registered;		/* set once registered with the crypto API */
};
132
Yuan Kangacdca312011-07-15 11:21:42 +0800133/*
Kim Phillips8e8ec592011-03-13 16:54:26 +0800134 * per-session context
135 */
struct caam_ctx {
	struct device *jrdev;			/* job ring device used for DMA mapping */
	u32 sh_desc_enc[DESC_MAX_USED_LEN];	/* encrypt shared descriptor */
	u32 sh_desc_dec[DESC_MAX_USED_LEN];	/* decrypt shared descriptor */
	u32 sh_desc_givenc[DESC_MAX_USED_LEN];	/* givencrypt shared descriptor */
	dma_addr_t sh_desc_enc_dma;		/* DMA address of sh_desc_enc */
	dma_addr_t sh_desc_dec_dma;		/* DMA address of sh_desc_dec */
	dma_addr_t sh_desc_givenc_dma;		/* DMA address of sh_desc_givenc */
	u8 key[CAAM_MAX_KEY_SIZE];		/* auth split key + cipher key (see aead_setkey) */
	dma_addr_t key_dma;			/* DMA address of key[] */
	struct alginfo adata;			/* authentication algorithm details */
	struct alginfo cdata;			/* cipher algorithm details */
	unsigned int authsize;			/* ICV (MAC) length in bytes */
};
150
Horia Geantaae4a8252014-03-14 17:46:52 +0200151static int aead_null_set_sh_desc(struct crypto_aead *aead)
152{
Horia Geantaae4a8252014-03-14 17:46:52 +0200153 struct caam_ctx *ctx = crypto_aead_ctx(aead);
154 struct device *jrdev = ctx->jrdev;
Horia Geantaae4a8252014-03-14 17:46:52 +0200155 u32 *desc;
Horia Geantă4cbe79c2016-11-22 15:44:06 +0200156 int rem_bytes = CAAM_DESC_BYTES_MAX - AEAD_DESC_JOB_IO_LEN -
157 ctx->adata.keylen_pad;
Horia Geantaae4a8252014-03-14 17:46:52 +0200158
159 /*
160 * Job Descriptor and Shared Descriptors
161 * must all fit into the 64-word Descriptor h/w Buffer
162 */
Horia Geantă4cbe79c2016-11-22 15:44:06 +0200163 if (rem_bytes >= DESC_AEAD_NULL_ENC_LEN) {
Horia Geantădb576562016-11-22 15:44:04 +0200164 ctx->adata.key_inline = true;
165 ctx->adata.key = (uintptr_t)ctx->key;
166 } else {
167 ctx->adata.key_inline = false;
168 ctx->adata.key = ctx->key_dma;
169 }
Horia Geantaae4a8252014-03-14 17:46:52 +0200170
Herbert Xu479bcc72015-07-30 17:53:17 +0800171 /* aead_encrypt shared descriptor */
Horia Geantaae4a8252014-03-14 17:46:52 +0200172 desc = ctx->sh_desc_enc;
Horia Geantă8cea7b62016-11-22 15:44:09 +0200173 cnstr_shdsc_aead_null_encap(desc, &ctx->adata, ctx->authsize);
Horia Geantaae4a8252014-03-14 17:46:52 +0200174 ctx->sh_desc_enc_dma = dma_map_single(jrdev, desc,
175 desc_bytes(desc),
176 DMA_TO_DEVICE);
177 if (dma_mapping_error(jrdev, ctx->sh_desc_enc_dma)) {
178 dev_err(jrdev, "unable to map shared descriptor\n");
179 return -ENOMEM;
180 }
Horia Geantaae4a8252014-03-14 17:46:52 +0200181
182 /*
183 * Job Descriptor and Shared Descriptors
184 * must all fit into the 64-word Descriptor h/w Buffer
185 */
Horia Geantă4cbe79c2016-11-22 15:44:06 +0200186 if (rem_bytes >= DESC_AEAD_NULL_DEC_LEN) {
Horia Geantădb576562016-11-22 15:44:04 +0200187 ctx->adata.key_inline = true;
188 ctx->adata.key = (uintptr_t)ctx->key;
189 } else {
190 ctx->adata.key_inline = false;
191 ctx->adata.key = ctx->key_dma;
192 }
Horia Geantaae4a8252014-03-14 17:46:52 +0200193
Herbert Xu479bcc72015-07-30 17:53:17 +0800194 /* aead_decrypt shared descriptor */
Horia Geantă8cea7b62016-11-22 15:44:09 +0200195 desc = ctx->sh_desc_dec;
196 cnstr_shdsc_aead_null_decap(desc, &ctx->adata, ctx->authsize);
Horia Geantaae4a8252014-03-14 17:46:52 +0200197 ctx->sh_desc_dec_dma = dma_map_single(jrdev, desc,
198 desc_bytes(desc),
199 DMA_TO_DEVICE);
200 if (dma_mapping_error(jrdev, ctx->sh_desc_dec_dma)) {
201 dev_err(jrdev, "unable to map shared descriptor\n");
202 return -ENOMEM;
203 }
Horia Geantaae4a8252014-03-14 17:46:52 +0200204
205 return 0;
206}
207
/*
 * aead_set_sh_desc - (re)build the encrypt, decrypt and - for IV-generating
 * transforms - givencrypt shared descriptors for an authenc-style AEAD, and
 * DMA-map each one.
 *
 * Per descriptor, desc_inline_query() decides whether the auth split key
 * and/or cipher key fit inline in the descriptor or must be referenced by
 * DMA address.
 *
 * Returns 0 on success, -EINVAL if a descriptor cannot fit in the 64-word
 * buffer, or -ENOMEM on a DMA mapping failure.
 */
static int aead_set_sh_desc(struct crypto_aead *aead)
{
	struct caam_aead_alg *alg = container_of(crypto_aead_alg(aead),
						 struct caam_aead_alg, aead);
	unsigned int ivsize = crypto_aead_ivsize(aead);
	struct caam_ctx *ctx = crypto_aead_ctx(aead);
	struct device *jrdev = ctx->jrdev;
	u32 ctx1_iv_off = 0;
	u32 *desc, *nonce = NULL;
	u32 inl_mask;
	unsigned int data_len[2];
	const bool ctr_mode = ((ctx->cdata.algtype & OP_ALG_AAI_MASK) ==
			       OP_ALG_AAI_CTR_MOD128);
	const bool is_rfc3686 = alg->caam.rfc3686;

	/* nothing to build until the ICV length has been set */
	if (!ctx->authsize)
		return 0;

	/* NULL encryption / decryption */
	if (!ctx->cdata.keylen)
		return aead_null_set_sh_desc(aead);

	/*
	 * AES-CTR needs to load IV in CONTEXT1 reg
	 * at an offset of 128bits (16bytes)
	 * CONTEXT1[255:128] = IV
	 */
	if (ctr_mode)
		ctx1_iv_off = 16;

	/*
	 * RFC3686 specific:
	 * CONTEXT1[255:128] = {NONCE, IV, COUNTER}
	 * The nonce sits at the tail of the cipher key in ctx->key
	 * (stored there by the setkey path).
	 */
	if (is_rfc3686) {
		ctx1_iv_off = 16 + CTR_RFC3686_NONCE_SIZE;
		nonce = (u32 *)((void *)ctx->key + ctx->adata.keylen_pad +
				ctx->cdata.keylen - CTR_RFC3686_NONCE_SIZE);
	}

	/* key lengths consulted by desc_inline_query(): [0]=auth, [1]=cipher */
	data_len[0] = ctx->adata.keylen_pad;
	data_len[1] = ctx->cdata.keylen;

	/* IV-generating transforms use the givencrypt descriptor instead */
	if (alg->caam.geniv)
		goto skip_enc;

	/*
	 * Job Descriptor and Shared Descriptors
	 * must all fit into the 64-word Descriptor h/w Buffer
	 */
	if (desc_inline_query(DESC_AEAD_ENC_LEN +
			      (is_rfc3686 ? DESC_AEAD_CTR_RFC3686_LEN : 0),
			      AUTHENC_DESC_JOB_IO_LEN, data_len, &inl_mask,
			      ARRAY_SIZE(data_len)) < 0)
		return -EINVAL;

	/* bit 0 of inl_mask: auth key fits inline; bit 1: cipher key does */
	if (inl_mask & 1)
		ctx->adata.key = (uintptr_t)ctx->key;
	else
		ctx->adata.key = ctx->key_dma;

	if (inl_mask & 2)
		ctx->cdata.key = (uintptr_t)(ctx->key + ctx->adata.keylen_pad);
	else
		ctx->cdata.key = ctx->key_dma + ctx->adata.keylen_pad;

	ctx->adata.key_inline = !!(inl_mask & 1);
	ctx->cdata.key_inline = !!(inl_mask & 2);

	/* aead_encrypt shared descriptor */
	desc = ctx->sh_desc_enc;
	cnstr_shdsc_aead_encap(desc, &ctx->cdata, &ctx->adata, ctx->authsize,
			       is_rfc3686, nonce, ctx1_iv_off);
	ctx->sh_desc_enc_dma = dma_map_single(jrdev, desc,
					      desc_bytes(desc),
					      DMA_TO_DEVICE);
	if (dma_mapping_error(jrdev, ctx->sh_desc_enc_dma)) {
		dev_err(jrdev, "unable to map shared descriptor\n");
		return -ENOMEM;
	}

skip_enc:
	/*
	 * Job Descriptor and Shared Descriptors
	 * must all fit into the 64-word Descriptor h/w Buffer
	 */
	if (desc_inline_query(DESC_AEAD_DEC_LEN +
			      (is_rfc3686 ? DESC_AEAD_CTR_RFC3686_LEN : 0),
			      AUTHENC_DESC_JOB_IO_LEN, data_len, &inl_mask,
			      ARRAY_SIZE(data_len)) < 0)
		return -EINVAL;

	if (inl_mask & 1)
		ctx->adata.key = (uintptr_t)ctx->key;
	else
		ctx->adata.key = ctx->key_dma;

	if (inl_mask & 2)
		ctx->cdata.key = (uintptr_t)(ctx->key + ctx->adata.keylen_pad);
	else
		ctx->cdata.key = ctx->key_dma + ctx->adata.keylen_pad;

	ctx->adata.key_inline = !!(inl_mask & 1);
	ctx->cdata.key_inline = !!(inl_mask & 2);

	/* aead_decrypt shared descriptor */
	desc = ctx->sh_desc_dec;
	cnstr_shdsc_aead_decap(desc, &ctx->cdata, &ctx->adata, ivsize,
			       ctx->authsize, alg->caam.geniv, is_rfc3686,
			       nonce, ctx1_iv_off);
	ctx->sh_desc_dec_dma = dma_map_single(jrdev, desc,
					      desc_bytes(desc),
					      DMA_TO_DEVICE);
	if (dma_mapping_error(jrdev, ctx->sh_desc_dec_dma)) {
		dev_err(jrdev, "unable to map shared descriptor\n");
		return -ENOMEM;
	}

	if (!alg->caam.geniv)
		goto skip_givenc;

	/*
	 * Job Descriptor and Shared Descriptors
	 * must all fit into the 64-word Descriptor h/w Buffer
	 */
	if (desc_inline_query(DESC_AEAD_GIVENC_LEN +
			      (is_rfc3686 ? DESC_AEAD_CTR_RFC3686_LEN : 0),
			      AUTHENC_DESC_JOB_IO_LEN, data_len, &inl_mask,
			      ARRAY_SIZE(data_len)) < 0)
		return -EINVAL;

	if (inl_mask & 1)
		ctx->adata.key = (uintptr_t)ctx->key;
	else
		ctx->adata.key = ctx->key_dma;

	if (inl_mask & 2)
		ctx->cdata.key = (uintptr_t)(ctx->key + ctx->adata.keylen_pad);
	else
		ctx->cdata.key = ctx->key_dma + ctx->adata.keylen_pad;

	ctx->adata.key_inline = !!(inl_mask & 1);
	ctx->cdata.key_inline = !!(inl_mask & 2);

	/* aead_givencrypt shared descriptor (stored in the encrypt slot) */
	desc = ctx->sh_desc_enc;
	cnstr_shdsc_aead_givencap(desc, &ctx->cdata, &ctx->adata, ivsize,
				  ctx->authsize, is_rfc3686, nonce,
				  ctx1_iv_off);
	ctx->sh_desc_enc_dma = dma_map_single(jrdev, desc,
					      desc_bytes(desc),
					      DMA_TO_DEVICE);
	if (dma_mapping_error(jrdev, ctx->sh_desc_enc_dma)) {
		dev_err(jrdev, "unable to map shared descriptor\n");
		return -ENOMEM;
	}

skip_givenc:
	return 0;
}
368
Yuan Kang0e479302011-07-15 11:21:41 +0800369static int aead_setauthsize(struct crypto_aead *authenc,
Kim Phillips8e8ec592011-03-13 16:54:26 +0800370 unsigned int authsize)
371{
372 struct caam_ctx *ctx = crypto_aead_ctx(authenc);
373
374 ctx->authsize = authsize;
Yuan Kang1acebad2011-07-15 11:21:42 +0800375 aead_set_sh_desc(authenc);
Kim Phillips8e8ec592011-03-13 16:54:26 +0800376
377 return 0;
378}
379
Tudor Ambarus3ef8d942014-10-23 16:11:23 +0300380static int gcm_set_sh_desc(struct crypto_aead *aead)
381{
Tudor Ambarus3ef8d942014-10-23 16:11:23 +0300382 struct caam_ctx *ctx = crypto_aead_ctx(aead);
383 struct device *jrdev = ctx->jrdev;
Tudor Ambarus3ef8d942014-10-23 16:11:23 +0300384 u32 *desc;
Horia Geantă4cbe79c2016-11-22 15:44:06 +0200385 int rem_bytes = CAAM_DESC_BYTES_MAX - GCM_DESC_JOB_IO_LEN -
386 ctx->cdata.keylen;
Tudor Ambarus3ef8d942014-10-23 16:11:23 +0300387
Horia Geantădb576562016-11-22 15:44:04 +0200388 if (!ctx->cdata.keylen || !ctx->authsize)
Tudor Ambarus3ef8d942014-10-23 16:11:23 +0300389 return 0;
390
391 /*
392 * AES GCM encrypt shared descriptor
393 * Job Descriptor and Shared Descriptor
394 * must fit into the 64-word Descriptor h/w Buffer
395 */
Horia Geantă4cbe79c2016-11-22 15:44:06 +0200396 if (rem_bytes >= DESC_GCM_ENC_LEN) {
Horia Geantădb576562016-11-22 15:44:04 +0200397 ctx->cdata.key_inline = true;
398 ctx->cdata.key = (uintptr_t)ctx->key;
399 } else {
400 ctx->cdata.key_inline = false;
401 ctx->cdata.key = ctx->key_dma;
402 }
Tudor Ambarus3ef8d942014-10-23 16:11:23 +0300403
404 desc = ctx->sh_desc_enc;
Horia Geantă8cea7b62016-11-22 15:44:09 +0200405 cnstr_shdsc_gcm_encap(desc, &ctx->cdata, ctx->authsize);
Tudor Ambarus3ef8d942014-10-23 16:11:23 +0300406 ctx->sh_desc_enc_dma = dma_map_single(jrdev, desc,
407 desc_bytes(desc),
408 DMA_TO_DEVICE);
409 if (dma_mapping_error(jrdev, ctx->sh_desc_enc_dma)) {
410 dev_err(jrdev, "unable to map shared descriptor\n");
411 return -ENOMEM;
412 }
Tudor Ambarus3ef8d942014-10-23 16:11:23 +0300413
414 /*
415 * Job Descriptor and Shared Descriptors
416 * must all fit into the 64-word Descriptor h/w Buffer
417 */
Horia Geantă4cbe79c2016-11-22 15:44:06 +0200418 if (rem_bytes >= DESC_GCM_DEC_LEN) {
Horia Geantădb576562016-11-22 15:44:04 +0200419 ctx->cdata.key_inline = true;
420 ctx->cdata.key = (uintptr_t)ctx->key;
421 } else {
422 ctx->cdata.key_inline = false;
423 ctx->cdata.key = ctx->key_dma;
424 }
Tudor Ambarus3ef8d942014-10-23 16:11:23 +0300425
426 desc = ctx->sh_desc_dec;
Horia Geantă8cea7b62016-11-22 15:44:09 +0200427 cnstr_shdsc_gcm_decap(desc, &ctx->cdata, ctx->authsize);
Tudor Ambarus3ef8d942014-10-23 16:11:23 +0300428 ctx->sh_desc_dec_dma = dma_map_single(jrdev, desc,
429 desc_bytes(desc),
430 DMA_TO_DEVICE);
431 if (dma_mapping_error(jrdev, ctx->sh_desc_dec_dma)) {
432 dev_err(jrdev, "unable to map shared descriptor\n");
433 return -ENOMEM;
434 }
Tudor Ambarus3ef8d942014-10-23 16:11:23 +0300435
436 return 0;
437}
438
439static int gcm_setauthsize(struct crypto_aead *authenc, unsigned int authsize)
440{
441 struct caam_ctx *ctx = crypto_aead_ctx(authenc);
442
443 ctx->authsize = authsize;
444 gcm_set_sh_desc(authenc);
445
446 return 0;
447}
448
Tudor Ambarusbac68f22014-10-23 16:14:03 +0300449static int rfc4106_set_sh_desc(struct crypto_aead *aead)
450{
Tudor Ambarusbac68f22014-10-23 16:14:03 +0300451 struct caam_ctx *ctx = crypto_aead_ctx(aead);
452 struct device *jrdev = ctx->jrdev;
Tudor Ambarusbac68f22014-10-23 16:14:03 +0300453 u32 *desc;
Horia Geantă4cbe79c2016-11-22 15:44:06 +0200454 int rem_bytes = CAAM_DESC_BYTES_MAX - GCM_DESC_JOB_IO_LEN -
455 ctx->cdata.keylen;
Tudor Ambarusbac68f22014-10-23 16:14:03 +0300456
Horia Geantădb576562016-11-22 15:44:04 +0200457 if (!ctx->cdata.keylen || !ctx->authsize)
Tudor Ambarusbac68f22014-10-23 16:14:03 +0300458 return 0;
459
460 /*
461 * RFC4106 encrypt shared descriptor
462 * Job Descriptor and Shared Descriptor
463 * must fit into the 64-word Descriptor h/w Buffer
464 */
Horia Geantă4cbe79c2016-11-22 15:44:06 +0200465 if (rem_bytes >= DESC_RFC4106_ENC_LEN) {
Horia Geantădb576562016-11-22 15:44:04 +0200466 ctx->cdata.key_inline = true;
467 ctx->cdata.key = (uintptr_t)ctx->key;
468 } else {
469 ctx->cdata.key_inline = false;
470 ctx->cdata.key = ctx->key_dma;
471 }
Tudor Ambarusbac68f22014-10-23 16:14:03 +0300472
473 desc = ctx->sh_desc_enc;
Horia Geantă8cea7b62016-11-22 15:44:09 +0200474 cnstr_shdsc_rfc4106_encap(desc, &ctx->cdata, ctx->authsize);
Tudor Ambarusbac68f22014-10-23 16:14:03 +0300475 ctx->sh_desc_enc_dma = dma_map_single(jrdev, desc,
476 desc_bytes(desc),
477 DMA_TO_DEVICE);
478 if (dma_mapping_error(jrdev, ctx->sh_desc_enc_dma)) {
479 dev_err(jrdev, "unable to map shared descriptor\n");
480 return -ENOMEM;
481 }
Tudor Ambarusbac68f22014-10-23 16:14:03 +0300482
483 /*
484 * Job Descriptor and Shared Descriptors
485 * must all fit into the 64-word Descriptor h/w Buffer
486 */
Horia Geantă4cbe79c2016-11-22 15:44:06 +0200487 if (rem_bytes >= DESC_RFC4106_DEC_LEN) {
Horia Geantădb576562016-11-22 15:44:04 +0200488 ctx->cdata.key_inline = true;
489 ctx->cdata.key = (uintptr_t)ctx->key;
490 } else {
491 ctx->cdata.key_inline = false;
492 ctx->cdata.key = ctx->key_dma;
493 }
Tudor Ambarusbac68f22014-10-23 16:14:03 +0300494
495 desc = ctx->sh_desc_dec;
Horia Geantă8cea7b62016-11-22 15:44:09 +0200496 cnstr_shdsc_rfc4106_decap(desc, &ctx->cdata, ctx->authsize);
Tudor Ambarusbac68f22014-10-23 16:14:03 +0300497 ctx->sh_desc_dec_dma = dma_map_single(jrdev, desc,
498 desc_bytes(desc),
499 DMA_TO_DEVICE);
500 if (dma_mapping_error(jrdev, ctx->sh_desc_dec_dma)) {
501 dev_err(jrdev, "unable to map shared descriptor\n");
502 return -ENOMEM;
503 }
Tudor Ambarusbac68f22014-10-23 16:14:03 +0300504
Tudor Ambarusbac68f22014-10-23 16:14:03 +0300505 return 0;
506}
507
508static int rfc4106_setauthsize(struct crypto_aead *authenc,
509 unsigned int authsize)
510{
511 struct caam_ctx *ctx = crypto_aead_ctx(authenc);
512
513 ctx->authsize = authsize;
514 rfc4106_set_sh_desc(authenc);
515
516 return 0;
517}
518
Tudor Ambarus5d0429a2014-10-30 18:55:07 +0200519static int rfc4543_set_sh_desc(struct crypto_aead *aead)
520{
Tudor Ambarus5d0429a2014-10-30 18:55:07 +0200521 struct caam_ctx *ctx = crypto_aead_ctx(aead);
522 struct device *jrdev = ctx->jrdev;
Tudor Ambarus5d0429a2014-10-30 18:55:07 +0200523 u32 *desc;
Horia Geantă4cbe79c2016-11-22 15:44:06 +0200524 int rem_bytes = CAAM_DESC_BYTES_MAX - GCM_DESC_JOB_IO_LEN -
525 ctx->cdata.keylen;
Tudor Ambarus5d0429a2014-10-30 18:55:07 +0200526
Horia Geantădb576562016-11-22 15:44:04 +0200527 if (!ctx->cdata.keylen || !ctx->authsize)
Tudor Ambarus5d0429a2014-10-30 18:55:07 +0200528 return 0;
529
530 /*
531 * RFC4543 encrypt shared descriptor
532 * Job Descriptor and Shared Descriptor
533 * must fit into the 64-word Descriptor h/w Buffer
534 */
Horia Geantă4cbe79c2016-11-22 15:44:06 +0200535 if (rem_bytes >= DESC_RFC4543_ENC_LEN) {
Horia Geantădb576562016-11-22 15:44:04 +0200536 ctx->cdata.key_inline = true;
537 ctx->cdata.key = (uintptr_t)ctx->key;
538 } else {
539 ctx->cdata.key_inline = false;
540 ctx->cdata.key = ctx->key_dma;
541 }
Tudor Ambarus5d0429a2014-10-30 18:55:07 +0200542
543 desc = ctx->sh_desc_enc;
Horia Geantă8cea7b62016-11-22 15:44:09 +0200544 cnstr_shdsc_rfc4543_encap(desc, &ctx->cdata, ctx->authsize);
Tudor Ambarus5d0429a2014-10-30 18:55:07 +0200545 ctx->sh_desc_enc_dma = dma_map_single(jrdev, desc,
546 desc_bytes(desc),
547 DMA_TO_DEVICE);
548 if (dma_mapping_error(jrdev, ctx->sh_desc_enc_dma)) {
549 dev_err(jrdev, "unable to map shared descriptor\n");
550 return -ENOMEM;
551 }
Tudor Ambarus5d0429a2014-10-30 18:55:07 +0200552
553 /*
554 * Job Descriptor and Shared Descriptors
555 * must all fit into the 64-word Descriptor h/w Buffer
556 */
Horia Geantă4cbe79c2016-11-22 15:44:06 +0200557 if (rem_bytes >= DESC_RFC4543_DEC_LEN) {
Horia Geantădb576562016-11-22 15:44:04 +0200558 ctx->cdata.key_inline = true;
559 ctx->cdata.key = (uintptr_t)ctx->key;
560 } else {
561 ctx->cdata.key_inline = false;
562 ctx->cdata.key = ctx->key_dma;
563 }
Tudor Ambarus5d0429a2014-10-30 18:55:07 +0200564
565 desc = ctx->sh_desc_dec;
Horia Geantă8cea7b62016-11-22 15:44:09 +0200566 cnstr_shdsc_rfc4543_decap(desc, &ctx->cdata, ctx->authsize);
Tudor Ambarus5d0429a2014-10-30 18:55:07 +0200567 ctx->sh_desc_dec_dma = dma_map_single(jrdev, desc,
568 desc_bytes(desc),
569 DMA_TO_DEVICE);
570 if (dma_mapping_error(jrdev, ctx->sh_desc_dec_dma)) {
571 dev_err(jrdev, "unable to map shared descriptor\n");
572 return -ENOMEM;
573 }
Tudor Ambarus5d0429a2014-10-30 18:55:07 +0200574
Tudor Ambarus5d0429a2014-10-30 18:55:07 +0200575 return 0;
576}
577
578static int rfc4543_setauthsize(struct crypto_aead *authenc,
579 unsigned int authsize)
580{
581 struct caam_ctx *ctx = crypto_aead_ctx(authenc);
582
583 ctx->authsize = authsize;
584 rfc4543_set_sh_desc(authenc);
585
586 return 0;
587}
588
Yuan Kang4c1ec1f2012-06-22 19:48:45 -0500589static u32 gen_split_aead_key(struct caam_ctx *ctx, const u8 *key_in,
590 u32 authkeylen)
Kim Phillips8e8ec592011-03-13 16:54:26 +0800591{
Horia Geantădb576562016-11-22 15:44:04 +0200592 return gen_split_key(ctx->jrdev, ctx->key, &ctx->adata, key_in,
Horia Geantă488ebc32016-11-22 15:44:05 +0200593 authkeylen);
Kim Phillips8e8ec592011-03-13 16:54:26 +0800594}
595
/*
 * aead_setkey - split the authenc key blob into auth + cipher keys,
 * derive the MDHA split key, store split key || cipher key in ctx->key,
 * DMA-map it, and rebuild the shared descriptors.
 *
 * Returns 0 on success, -EINVAL for a malformed/oversized key, -ENOMEM
 * on DMA mapping failure, or an error from descriptor construction.
 */
static int aead_setkey(struct crypto_aead *aead,
			       const u8 *key, unsigned int keylen)
{
	/* Sizes for MDHA pads (*not* keys): MD5, SHA1, 224, 256, 384, 512 */
	static const u8 mdpadlen[] = { 16, 20, 32, 32, 64, 64 };
	struct caam_ctx *ctx = crypto_aead_ctx(aead);
	struct device *jrdev = ctx->jrdev;
	struct crypto_authenc_keys keys;
	int ret = 0;

	if (crypto_authenc_extractkeys(&keys, key, keylen) != 0)
		goto badkey;

	/* Pick class 2 key length from algorithm submask */
	ctx->adata.keylen = mdpadlen[(ctx->adata.algtype &
				      OP_ALG_ALGSEL_SUBMASK) >>
				     OP_ALG_ALGSEL_SHIFT] * 2;
	ctx->adata.keylen_pad = ALIGN(ctx->adata.keylen, 16);

	/* padded split key + cipher key must fit the ctx->key buffer */
	if (ctx->adata.keylen_pad + keys.enckeylen > CAAM_MAX_KEY_SIZE)
		goto badkey;

#ifdef DEBUG
	printk(KERN_ERR "keylen %d enckeylen %d authkeylen %d\n",
	       keys.authkeylen + keys.enckeylen, keys.enckeylen,
	       keys.authkeylen);
	printk(KERN_ERR "split_key_len %d split_key_pad_len %d\n",
	       ctx->adata.keylen, ctx->adata.keylen_pad);
	print_hex_dump(KERN_ERR, "key in @"__stringify(__LINE__)": ",
		       DUMP_PREFIX_ADDRESS, 16, 4, key, keylen, 1);
#endif

	/* derive the MDHA split key into the head of ctx->key */
	ret = gen_split_aead_key(ctx, keys.authkey, keys.authkeylen);
	if (ret) {
		goto badkey;
	}

	/* postpend encryption key to auth split key */
	memcpy(ctx->key + ctx->adata.keylen_pad, keys.enckey, keys.enckeylen);

	ctx->key_dma = dma_map_single(jrdev, ctx->key, ctx->adata.keylen_pad +
				      keys.enckeylen, DMA_TO_DEVICE);
	if (dma_mapping_error(jrdev, ctx->key_dma)) {
		dev_err(jrdev, "unable to map key i/o memory\n");
		return -ENOMEM;
	}
#ifdef DEBUG
	print_hex_dump(KERN_ERR, "ctx.key@"__stringify(__LINE__)": ",
		       DUMP_PREFIX_ADDRESS, 16, 4, ctx->key,
		       ctx->adata.keylen_pad + keys.enckeylen, 1);
#endif

	ctx->cdata.keylen = keys.enckeylen;

	ret = aead_set_sh_desc(aead);
	if (ret) {
		/* undo the key mapping if descriptor construction failed */
		dma_unmap_single(jrdev, ctx->key_dma, ctx->adata.keylen_pad +
				 keys.enckeylen, DMA_TO_DEVICE);
	}

	return ret;
badkey:
	crypto_aead_set_flags(aead, CRYPTO_TFM_RES_BAD_KEY_LEN);
	return -EINVAL;
}
661
Tudor Ambarus3ef8d942014-10-23 16:11:23 +0300662static int gcm_setkey(struct crypto_aead *aead,
663 const u8 *key, unsigned int keylen)
664{
665 struct caam_ctx *ctx = crypto_aead_ctx(aead);
666 struct device *jrdev = ctx->jrdev;
667 int ret = 0;
668
669#ifdef DEBUG
670 print_hex_dump(KERN_ERR, "key in @"__stringify(__LINE__)": ",
671 DUMP_PREFIX_ADDRESS, 16, 4, key, keylen, 1);
672#endif
673
674 memcpy(ctx->key, key, keylen);
675 ctx->key_dma = dma_map_single(jrdev, ctx->key, keylen,
676 DMA_TO_DEVICE);
677 if (dma_mapping_error(jrdev, ctx->key_dma)) {
678 dev_err(jrdev, "unable to map key i/o memory\n");
679 return -ENOMEM;
680 }
Horia Geantădb576562016-11-22 15:44:04 +0200681 ctx->cdata.keylen = keylen;
Tudor Ambarus3ef8d942014-10-23 16:11:23 +0300682
683 ret = gcm_set_sh_desc(aead);
684 if (ret) {
Horia Geantădb576562016-11-22 15:44:04 +0200685 dma_unmap_single(jrdev, ctx->key_dma, ctx->cdata.keylen,
Tudor Ambarus3ef8d942014-10-23 16:11:23 +0300686 DMA_TO_DEVICE);
687 }
688
689 return ret;
690}
691
/*
 * rfc4106_setkey - store the RFC4106 key material (AES key + 4-byte salt),
 * DMA-map the AES key portion, and rebuild the shared descriptors.
 * Returns 0, -EINVAL for an undersized key, or a negative errno.
 */
static int rfc4106_setkey(struct crypto_aead *aead,
			  const u8 *key, unsigned int keylen)
{
	struct caam_ctx *ctx = crypto_aead_ctx(aead);
	struct device *jrdev = ctx->jrdev;
	int ret = 0;

	/* key must at least contain the 4-byte salt */
	if (keylen < 4)
		return -EINVAL;

#ifdef DEBUG
	print_hex_dump(KERN_ERR, "key in @"__stringify(__LINE__)": ",
		       DUMP_PREFIX_ADDRESS, 16, 4, key, keylen, 1);
#endif

	memcpy(ctx->key, key, keylen);

	/*
	 * The last four bytes of the key material are used as the salt value
	 * in the nonce. Update the AES key length.
	 */
	ctx->cdata.keylen = keylen - 4;

	/*
	 * NOTE(review): only keylen - 4 bytes are DMA-mapped; the trailing
	 * salt stays only in ctx->key - presumably the descriptor embeds it
	 * inline, confirm against cnstr_shdsc_rfc4106_*().
	 */
	ctx->key_dma = dma_map_single(jrdev, ctx->key, ctx->cdata.keylen,
				      DMA_TO_DEVICE);
	if (dma_mapping_error(jrdev, ctx->key_dma)) {
		dev_err(jrdev, "unable to map key i/o memory\n");
		return -ENOMEM;
	}

	ret = rfc4106_set_sh_desc(aead);
	if (ret) {
		dma_unmap_single(jrdev, ctx->key_dma, ctx->cdata.keylen,
				 DMA_TO_DEVICE);
	}

	return ret;
}
730
/*
 * rfc4543_setkey - store the RFC4543 key material (AES key + 4-byte salt),
 * DMA-map the AES key portion, and rebuild the shared descriptors.
 * Returns 0, -EINVAL for an undersized key, or a negative errno.
 */
static int rfc4543_setkey(struct crypto_aead *aead,
			  const u8 *key, unsigned int keylen)
{
	struct caam_ctx *ctx = crypto_aead_ctx(aead);
	struct device *jrdev = ctx->jrdev;
	int ret = 0;

	/* key must at least contain the 4-byte salt */
	if (keylen < 4)
		return -EINVAL;

#ifdef DEBUG
	print_hex_dump(KERN_ERR, "key in @"__stringify(__LINE__)": ",
		       DUMP_PREFIX_ADDRESS, 16, 4, key, keylen, 1);
#endif

	memcpy(ctx->key, key, keylen);

	/*
	 * The last four bytes of the key material are used as the salt value
	 * in the nonce. Update the AES key length.
	 */
	ctx->cdata.keylen = keylen - 4;

	/*
	 * NOTE(review): only keylen - 4 bytes are DMA-mapped; the trailing
	 * salt stays only in ctx->key - presumably the descriptor embeds it
	 * inline, confirm against cnstr_shdsc_rfc4543_*().
	 */
	ctx->key_dma = dma_map_single(jrdev, ctx->key, ctx->cdata.keylen,
				      DMA_TO_DEVICE);
	if (dma_mapping_error(jrdev, ctx->key_dma)) {
		dev_err(jrdev, "unable to map key i/o memory\n");
		return -ENOMEM;
	}

	ret = rfc4543_set_sh_desc(aead);
	if (ret) {
		dma_unmap_single(jrdev, ctx->key_dma, ctx->cdata.keylen,
				 DMA_TO_DEVICE);
	}

	return ret;
}
769
Yuan Kangacdca312011-07-15 11:21:42 +0800770static int ablkcipher_setkey(struct crypto_ablkcipher *ablkcipher,
771 const u8 *key, unsigned int keylen)
772{
773 struct caam_ctx *ctx = crypto_ablkcipher_ctx(ablkcipher);
Catalin Vasilea5f57cf2014-10-31 12:45:36 +0200774 struct crypto_tfm *tfm = crypto_ablkcipher_tfm(ablkcipher);
775 const char *alg_name = crypto_tfm_alg_name(tfm);
Yuan Kangacdca312011-07-15 11:21:42 +0800776 struct device *jrdev = ctx->jrdev;
Horia Geantă8cea7b62016-11-22 15:44:09 +0200777 unsigned int ivsize = crypto_ablkcipher_ivsize(ablkcipher);
Yuan Kangacdca312011-07-15 11:21:42 +0800778 u32 *desc;
Catalin Vasile2b22f6c2014-10-31 12:45:35 +0200779 u32 ctx1_iv_off = 0;
Horia Geantădb576562016-11-22 15:44:04 +0200780 const bool ctr_mode = ((ctx->cdata.algtype & OP_ALG_AAI_MASK) ==
Catalin Vasile2b22f6c2014-10-31 12:45:35 +0200781 OP_ALG_AAI_CTR_MOD128);
Catalin Vasilea5f57cf2014-10-31 12:45:36 +0200782 const bool is_rfc3686 = (ctr_mode &&
783 (strstr(alg_name, "rfc3686") != NULL));
Yuan Kangacdca312011-07-15 11:21:42 +0800784
Horia Geantă8cea7b62016-11-22 15:44:09 +0200785 memcpy(ctx->key, key, keylen);
Yuan Kangacdca312011-07-15 11:21:42 +0800786#ifdef DEBUG
Alex Porosanu514df282013-08-14 18:56:45 +0300787 print_hex_dump(KERN_ERR, "key in @"__stringify(__LINE__)": ",
Yuan Kangacdca312011-07-15 11:21:42 +0800788 DUMP_PREFIX_ADDRESS, 16, 4, key, keylen, 1);
789#endif
Catalin Vasile2b22f6c2014-10-31 12:45:35 +0200790 /*
791 * AES-CTR needs to load IV in CONTEXT1 reg
792 * at an offset of 128bits (16bytes)
793 * CONTEXT1[255:128] = IV
794 */
795 if (ctr_mode)
796 ctx1_iv_off = 16;
Yuan Kangacdca312011-07-15 11:21:42 +0800797
Catalin Vasilea5f57cf2014-10-31 12:45:36 +0200798 /*
799 * RFC3686 specific:
800 * | CONTEXT1[255:128] = {NONCE, IV, COUNTER}
801 * | *key = {KEY, NONCE}
802 */
803 if (is_rfc3686) {
804 ctx1_iv_off = 16 + CTR_RFC3686_NONCE_SIZE;
805 keylen -= CTR_RFC3686_NONCE_SIZE;
806 }
807
Yuan Kangacdca312011-07-15 11:21:42 +0800808 ctx->key_dma = dma_map_single(jrdev, ctx->key, keylen,
809 DMA_TO_DEVICE);
810 if (dma_mapping_error(jrdev, ctx->key_dma)) {
811 dev_err(jrdev, "unable to map key i/o memory\n");
812 return -ENOMEM;
813 }
Horia Geantădb576562016-11-22 15:44:04 +0200814 ctx->cdata.keylen = keylen;
815 ctx->cdata.key = (uintptr_t)ctx->key;
816 ctx->cdata.key_inline = true;
Yuan Kangacdca312011-07-15 11:21:42 +0800817
818 /* ablkcipher_encrypt shared descriptor */
819 desc = ctx->sh_desc_enc;
Horia Geantă8cea7b62016-11-22 15:44:09 +0200820 cnstr_shdsc_ablkcipher_encap(desc, &ctx->cdata, ivsize, is_rfc3686,
821 ctx1_iv_off);
Yuan Kangacdca312011-07-15 11:21:42 +0800822 ctx->sh_desc_enc_dma = dma_map_single(jrdev, desc,
823 desc_bytes(desc),
824 DMA_TO_DEVICE);
825 if (dma_mapping_error(jrdev, ctx->sh_desc_enc_dma)) {
826 dev_err(jrdev, "unable to map shared descriptor\n");
827 return -ENOMEM;
828 }
Horia Geantă8cea7b62016-11-22 15:44:09 +0200829
Yuan Kangacdca312011-07-15 11:21:42 +0800830 /* ablkcipher_decrypt shared descriptor */
831 desc = ctx->sh_desc_dec;
Horia Geantă8cea7b62016-11-22 15:44:09 +0200832 cnstr_shdsc_ablkcipher_decap(desc, &ctx->cdata, ivsize, is_rfc3686,
833 ctx1_iv_off);
Yuan Kangacdca312011-07-15 11:21:42 +0800834 ctx->sh_desc_dec_dma = dma_map_single(jrdev, desc,
835 desc_bytes(desc),
836 DMA_TO_DEVICE);
Horia Geanta71c65f72014-07-11 15:34:48 +0300837 if (dma_mapping_error(jrdev, ctx->sh_desc_dec_dma)) {
Yuan Kangacdca312011-07-15 11:21:42 +0800838 dev_err(jrdev, "unable to map shared descriptor\n");
839 return -ENOMEM;
840 }
841
Catalin Vasile7222d1a2014-10-31 12:45:38 +0200842 /* ablkcipher_givencrypt shared descriptor */
843 desc = ctx->sh_desc_givenc;
Horia Geantă8cea7b62016-11-22 15:44:09 +0200844 cnstr_shdsc_ablkcipher_givencap(desc, &ctx->cdata, ivsize, is_rfc3686,
845 ctx1_iv_off);
Catalin Vasile7222d1a2014-10-31 12:45:38 +0200846 ctx->sh_desc_givenc_dma = dma_map_single(jrdev, desc,
847 desc_bytes(desc),
848 DMA_TO_DEVICE);
849 if (dma_mapping_error(jrdev, ctx->sh_desc_givenc_dma)) {
850 dev_err(jrdev, "unable to map shared descriptor\n");
851 return -ENOMEM;
852 }
Yuan Kangacdca312011-07-15 11:21:42 +0800853
Horia Geantă8cea7b62016-11-22 15:44:09 +0200854 return 0;
Yuan Kangacdca312011-07-15 11:21:42 +0800855}
856
Catalin Vasilec6415a62015-10-02 13:13:18 +0300857static int xts_ablkcipher_setkey(struct crypto_ablkcipher *ablkcipher,
858 const u8 *key, unsigned int keylen)
859{
860 struct caam_ctx *ctx = crypto_ablkcipher_ctx(ablkcipher);
861 struct device *jrdev = ctx->jrdev;
Horia Geantă8cea7b62016-11-22 15:44:09 +0200862 u32 *desc;
Catalin Vasilec6415a62015-10-02 13:13:18 +0300863
864 if (keylen != 2 * AES_MIN_KEY_SIZE && keylen != 2 * AES_MAX_KEY_SIZE) {
865 crypto_ablkcipher_set_flags(ablkcipher,
866 CRYPTO_TFM_RES_BAD_KEY_LEN);
867 dev_err(jrdev, "key size mismatch\n");
868 return -EINVAL;
869 }
870
871 memcpy(ctx->key, key, keylen);
872 ctx->key_dma = dma_map_single(jrdev, ctx->key, keylen, DMA_TO_DEVICE);
873 if (dma_mapping_error(jrdev, ctx->key_dma)) {
874 dev_err(jrdev, "unable to map key i/o memory\n");
875 return -ENOMEM;
876 }
Horia Geantădb576562016-11-22 15:44:04 +0200877 ctx->cdata.keylen = keylen;
878 ctx->cdata.key = (uintptr_t)ctx->key;
879 ctx->cdata.key_inline = true;
Catalin Vasilec6415a62015-10-02 13:13:18 +0300880
881 /* xts_ablkcipher_encrypt shared descriptor */
882 desc = ctx->sh_desc_enc;
Horia Geantă8cea7b62016-11-22 15:44:09 +0200883 cnstr_shdsc_xts_ablkcipher_encap(desc, &ctx->cdata);
Catalin Vasilec6415a62015-10-02 13:13:18 +0300884 ctx->sh_desc_enc_dma = dma_map_single(jrdev, desc, desc_bytes(desc),
885 DMA_TO_DEVICE);
886 if (dma_mapping_error(jrdev, ctx->sh_desc_enc_dma)) {
887 dev_err(jrdev, "unable to map shared descriptor\n");
888 return -ENOMEM;
889 }
Catalin Vasilec6415a62015-10-02 13:13:18 +0300890
891 /* xts_ablkcipher_decrypt shared descriptor */
892 desc = ctx->sh_desc_dec;
Horia Geantă8cea7b62016-11-22 15:44:09 +0200893 cnstr_shdsc_xts_ablkcipher_decap(desc, &ctx->cdata);
Catalin Vasilec6415a62015-10-02 13:13:18 +0300894 ctx->sh_desc_dec_dma = dma_map_single(jrdev, desc, desc_bytes(desc),
895 DMA_TO_DEVICE);
896 if (dma_mapping_error(jrdev, ctx->sh_desc_dec_dma)) {
897 dma_unmap_single(jrdev, ctx->sh_desc_enc_dma,
898 desc_bytes(ctx->sh_desc_enc), DMA_TO_DEVICE);
899 dev_err(jrdev, "unable to map shared descriptor\n");
900 return -ENOMEM;
901 }
Catalin Vasilec6415a62015-10-02 13:13:18 +0300902
903 return 0;
904}
905
/*
 * aead_edesc - s/w-extended aead descriptor
 * @src_nents: number of segments in input scatterlist
 * @dst_nents: number of segments in output scatterlist
 * @sec4_sg_bytes: length of dma mapped sec4_sg space
 * @sec4_sg_dma: bus physical mapped address of h/w link table
 * @sec4_sg: pointer to h/w link table
 * @hw_desc: the h/w job descriptor followed by any referenced link tables
 *
 * One of these is allocated per request by aead_edesc_alloc(); the
 * flexible hw_desc[] tail holds the job descriptor and, after it, the
 * sec4_sg link-table entries that @sec4_sg points into.  Freed by the
 * job-ring completion callbacks after aead_unmap().
 */
struct aead_edesc {
	int src_nents;
	int dst_nents;
	int sec4_sg_bytes;
	dma_addr_t sec4_sg_dma;
	struct sec4_sg_entry *sec4_sg;
	u32 hw_desc[];
};
923
Yuan Kangacdca312011-07-15 11:21:42 +0800924/*
925 * ablkcipher_edesc - s/w-extended ablkcipher descriptor
926 * @src_nents: number of segments in input scatterlist
927 * @dst_nents: number of segments in output scatterlist
928 * @iv_dma: dma address of iv for checking continuity and link table
Yuan Kanga299c832012-06-22 19:48:46 -0500929 * @sec4_sg_bytes: length of dma mapped sec4_sg space
930 * @sec4_sg_dma: bus physical mapped address of h/w link table
Horia Geantă4ca7c7d2016-11-09 10:46:18 +0200931 * @sec4_sg: pointer to h/w link table
Yuan Kangacdca312011-07-15 11:21:42 +0800932 * @hw_desc: the h/w job descriptor followed by any referenced link tables
933 */
934struct ablkcipher_edesc {
935 int src_nents;
936 int dst_nents;
937 dma_addr_t iv_dma;
Yuan Kanga299c832012-06-22 19:48:46 -0500938 int sec4_sg_bytes;
939 dma_addr_t sec4_sg_dma;
940 struct sec4_sg_entry *sec4_sg;
Yuan Kangacdca312011-07-15 11:21:42 +0800941 u32 hw_desc[0];
942};
943
Yuan Kang1acebad2011-07-15 11:21:42 +0800944static void caam_unmap(struct device *dev, struct scatterlist *src,
Yuan Kang643b39b2012-06-22 19:48:49 -0500945 struct scatterlist *dst, int src_nents,
LABBE Corentin13fb8fd2015-09-23 13:55:27 +0200946 int dst_nents,
Yuan Kanga299c832012-06-22 19:48:46 -0500947 dma_addr_t iv_dma, int ivsize, dma_addr_t sec4_sg_dma,
948 int sec4_sg_bytes)
Kim Phillips8e8ec592011-03-13 16:54:26 +0800949{
Yuan Kang643b39b2012-06-22 19:48:49 -0500950 if (dst != src) {
LABBE Corentin13fb8fd2015-09-23 13:55:27 +0200951 dma_unmap_sg(dev, src, src_nents ? : 1, DMA_TO_DEVICE);
952 dma_unmap_sg(dev, dst, dst_nents ? : 1, DMA_FROM_DEVICE);
Kim Phillips8e8ec592011-03-13 16:54:26 +0800953 } else {
LABBE Corentin13fb8fd2015-09-23 13:55:27 +0200954 dma_unmap_sg(dev, src, src_nents ? : 1, DMA_BIDIRECTIONAL);
Kim Phillips8e8ec592011-03-13 16:54:26 +0800955 }
956
Yuan Kang1acebad2011-07-15 11:21:42 +0800957 if (iv_dma)
958 dma_unmap_single(dev, iv_dma, ivsize, DMA_TO_DEVICE);
Yuan Kanga299c832012-06-22 19:48:46 -0500959 if (sec4_sg_bytes)
960 dma_unmap_single(dev, sec4_sg_dma, sec4_sg_bytes,
Kim Phillips8e8ec592011-03-13 16:54:26 +0800961 DMA_TO_DEVICE);
962}
963
Yuan Kang1acebad2011-07-15 11:21:42 +0800964static void aead_unmap(struct device *dev,
965 struct aead_edesc *edesc,
966 struct aead_request *req)
967{
Herbert Xuf2147b82015-06-16 13:54:23 +0800968 caam_unmap(dev, req->src, req->dst,
LABBE Corentin13fb8fd2015-09-23 13:55:27 +0200969 edesc->src_nents, edesc->dst_nents, 0, 0,
Herbert Xuf2147b82015-06-16 13:54:23 +0800970 edesc->sec4_sg_dma, edesc->sec4_sg_bytes);
971}
972
Yuan Kangacdca312011-07-15 11:21:42 +0800973static void ablkcipher_unmap(struct device *dev,
974 struct ablkcipher_edesc *edesc,
975 struct ablkcipher_request *req)
976{
977 struct crypto_ablkcipher *ablkcipher = crypto_ablkcipher_reqtfm(req);
978 int ivsize = crypto_ablkcipher_ivsize(ablkcipher);
979
980 caam_unmap(dev, req->src, req->dst,
LABBE Corentin13fb8fd2015-09-23 13:55:27 +0200981 edesc->src_nents, edesc->dst_nents,
982 edesc->iv_dma, ivsize,
Yuan Kang643b39b2012-06-22 19:48:49 -0500983 edesc->sec4_sg_dma, edesc->sec4_sg_bytes);
Yuan Kangacdca312011-07-15 11:21:42 +0800984}
985
Yuan Kang0e479302011-07-15 11:21:41 +0800986static void aead_encrypt_done(struct device *jrdev, u32 *desc, u32 err,
Kim Phillips8e8ec592011-03-13 16:54:26 +0800987 void *context)
988{
Yuan Kang0e479302011-07-15 11:21:41 +0800989 struct aead_request *req = context;
990 struct aead_edesc *edesc;
Herbert Xuf2147b82015-06-16 13:54:23 +0800991
992#ifdef DEBUG
993 dev_err(jrdev, "%s %d: err 0x%x\n", __func__, __LINE__, err);
994#endif
995
996 edesc = container_of(desc, struct aead_edesc, hw_desc[0]);
997
998 if (err)
999 caam_jr_strstatus(jrdev, err);
1000
1001 aead_unmap(jrdev, edesc, req);
1002
1003 kfree(edesc);
1004
1005 aead_request_complete(req, err);
1006}
1007
Yuan Kang0e479302011-07-15 11:21:41 +08001008static void aead_decrypt_done(struct device *jrdev, u32 *desc, u32 err,
Kim Phillips8e8ec592011-03-13 16:54:26 +08001009 void *context)
1010{
Yuan Kang0e479302011-07-15 11:21:41 +08001011 struct aead_request *req = context;
1012 struct aead_edesc *edesc;
Herbert Xuf2147b82015-06-16 13:54:23 +08001013
1014#ifdef DEBUG
1015 dev_err(jrdev, "%s %d: err 0x%x\n", __func__, __LINE__, err);
1016#endif
1017
1018 edesc = container_of(desc, struct aead_edesc, hw_desc[0]);
1019
1020 if (err)
1021 caam_jr_strstatus(jrdev, err);
1022
1023 aead_unmap(jrdev, edesc, req);
1024
1025 /*
1026 * verify hw auth check passed else return -EBADMSG
1027 */
1028 if ((err & JRSTA_CCBERR_ERRID_MASK) == JRSTA_CCBERR_ERRID_ICVCHK)
1029 err = -EBADMSG;
1030
1031 kfree(edesc);
1032
1033 aead_request_complete(req, err);
1034}
1035
/*
 * ablkcipher_encrypt_done - job-ring completion callback for
 * ablkcipher encrypt.  Recovers the extended descriptor, reports any
 * hardware status, dumps the result under DEBUG, then unmaps, frees
 * and completes the request with the raw status word.
 */
static void ablkcipher_encrypt_done(struct device *jrdev, u32 *desc, u32 err,
				    void *context)
{
	struct ablkcipher_request *req = context;
	struct ablkcipher_edesc *edesc;
#ifdef DEBUG
	/* Only needed for the debug dumps below */
	struct crypto_ablkcipher *ablkcipher = crypto_ablkcipher_reqtfm(req);
	int ivsize = crypto_ablkcipher_ivsize(ablkcipher);

	dev_err(jrdev, "%s %d: err 0x%x\n", __func__, __LINE__, err);
#endif

	/* hw_desc[0] is the first field after the s/w bookkeeping */
	edesc = container_of(desc, struct ablkcipher_edesc, hw_desc[0]);

	if (err)
		caam_jr_strstatus(jrdev, err);

#ifdef DEBUG
	/* 100 caps the dump length for multi-segment data */
	print_hex_dump(KERN_ERR, "dstiv @"__stringify(__LINE__)": ",
		       DUMP_PREFIX_ADDRESS, 16, 4, req->info,
		       edesc->src_nents > 1 ? 100 : ivsize, 1);
	dbg_dump_sg(KERN_ERR, "dst @"__stringify(__LINE__)": ",
		    DUMP_PREFIX_ADDRESS, 16, 4, req->dst,
		    edesc->dst_nents > 1 ? 100 : req->nbytes, 1);
#endif

	ablkcipher_unmap(jrdev, edesc, req);
	kfree(edesc);

	ablkcipher_request_complete(req, err);
}
1067
/*
 * ablkcipher_decrypt_done - job-ring completion callback for
 * ablkcipher decrypt.  Same flow as ablkcipher_encrypt_done(); block
 * ciphers have no ICV, so no status translation is needed.
 */
static void ablkcipher_decrypt_done(struct device *jrdev, u32 *desc, u32 err,
				    void *context)
{
	struct ablkcipher_request *req = context;
	struct ablkcipher_edesc *edesc;
#ifdef DEBUG
	/* Only needed for the debug dumps below */
	struct crypto_ablkcipher *ablkcipher = crypto_ablkcipher_reqtfm(req);
	int ivsize = crypto_ablkcipher_ivsize(ablkcipher);

	dev_err(jrdev, "%s %d: err 0x%x\n", __func__, __LINE__, err);
#endif

	edesc = container_of(desc, struct ablkcipher_edesc, hw_desc[0]);
	if (err)
		caam_jr_strstatus(jrdev, err);

#ifdef DEBUG
	/* 100 caps the dump length for multi-segment data */
	print_hex_dump(KERN_ERR, "dstiv @"__stringify(__LINE__)": ",
		       DUMP_PREFIX_ADDRESS, 16, 4, req->info,
		       ivsize, 1);
	dbg_dump_sg(KERN_ERR, "dst @"__stringify(__LINE__)": ",
		    DUMP_PREFIX_ADDRESS, 16, 4, req->dst,
		    edesc->dst_nents > 1 ? 100 : req->nbytes, 1);
#endif

	ablkcipher_unmap(jrdev, edesc, req);
	kfree(edesc);

	ablkcipher_request_complete(req, err);
}
1098
/*
 * Fill in aead job descriptor
 *
 * Builds the per-request job descriptor in edesc->hw_desc: a shared-
 * descriptor header (encrypt or decrypt variant), the SEQ IN/OUT
 * pointers (direct scatterlist address when contiguous, otherwise the
 * sec4 link table with LDST_SGF set), and finally loads the associated
 * data length into MATH REG3 for the shared descriptor to consume.
 */
static void init_aead_job(struct aead_request *req,
			  struct aead_edesc *edesc,
			  bool all_contig, bool encrypt)
{
	struct crypto_aead *aead = crypto_aead_reqtfm(req);
	struct caam_ctx *ctx = crypto_aead_ctx(aead);
	int authsize = ctx->authsize;
	u32 *desc = edesc->hw_desc;
	u32 out_options, in_options;
	dma_addr_t dst_dma, src_dma;
	int len, sec4_sg_index = 0;
	dma_addr_t ptr;
	u32 *sh_desc;

	/* Select the pre-built shared descriptor for this direction */
	sh_desc = encrypt ? ctx->sh_desc_enc : ctx->sh_desc_dec;
	ptr = encrypt ? ctx->sh_desc_enc_dma : ctx->sh_desc_dec_dma;

	len = desc_len(sh_desc);
	init_job_desc_shared(desc, ptr, len, HDR_SHARE_DEFER | HDR_REVERSE);

	if (all_contig) {
		/* Single-segment source: point at it directly */
		src_dma = sg_dma_address(req->src);
		in_options = 0;
	} else {
		/* Multi-segment: go through the sec4 link table */
		src_dma = edesc->sec4_sg_dma;
		sec4_sg_index += edesc->src_nents;
		in_options = LDST_SGF;
	}

	append_seq_in_ptr(desc, src_dma, req->assoclen + req->cryptlen,
			  in_options);

	/* In-place operation by default: output aliases the input */
	dst_dma = src_dma;
	out_options = in_options;

	if (unlikely(req->src != req->dst)) {
		if (!edesc->dst_nents) {
			dst_dma = sg_dma_address(req->dst);
		} else {
			/* dst entries follow the src entries in the table */
			dst_dma = edesc->sec4_sg_dma +
				  sec4_sg_index *
				  sizeof(struct sec4_sg_entry);
			out_options = LDST_SGF;
		}
	}

	/* Encrypt appends the ICV; decrypt strips it */
	if (encrypt)
		append_seq_out_ptr(desc, dst_dma,
				   req->assoclen + req->cryptlen + authsize,
				   out_options);
	else
		append_seq_out_ptr(desc, dst_dma,
				   req->assoclen + req->cryptlen - authsize,
				   out_options);

	/* REG3 = assoclen */
	append_math_add_imm_u32(desc, REG3, ZERO, IMM, req->assoclen);
}
1160
/*
 * init_gcm_job - extend the generic aead job descriptor with the GCM
 * IV (and, for rfc4106/rfc4543, the nonce salt stored after the AES
 * key in ctx->key).  A 12-byte IV marks "generic" GCM; other sizes
 * imply the IPsec variants that carry a 4-byte salt.
 */
static void init_gcm_job(struct aead_request *req,
			 struct aead_edesc *edesc,
			 bool all_contig, bool encrypt)
{
	struct crypto_aead *aead = crypto_aead_reqtfm(req);
	struct caam_ctx *ctx = crypto_aead_ctx(aead);
	unsigned int ivsize = crypto_aead_ivsize(aead);
	u32 *desc = edesc->hw_desc;
	bool generic_gcm = (ivsize == 12);
	unsigned int last;

	init_aead_job(req, edesc, all_contig, encrypt);

	/* BUG This should not be specific to generic GCM. */
	last = 0;
	if (encrypt && generic_gcm && !(req->assoclen + req->cryptlen))
		last = FIFOLD_TYPE_LAST1;

	/* Read GCM IV; the FIFO load is always 12 bytes: salt + IV */
	append_cmd(desc, CMD_FIFO_LOAD | FIFOLD_CLASS_CLASS1 | IMMEDIATE |
		   FIFOLD_TYPE_IV | FIFOLD_TYPE_FLUSH1 | 12 | last);
	/* Append Salt (kept right after the AES key in ctx->key) */
	if (!generic_gcm)
		append_data(desc, ctx->key + ctx->cdata.keylen, 4);
	/* Append IV */
	append_data(desc, req->iv, ivsize);
	/* End of blank commands */
}
1189
/*
 * init_authenc_job - extend the generic aead job descriptor for
 * authenc (cipher + HMAC) algorithms by loading the IV into the
 * CONTEXT1 register at the mode-dependent offset.  The IV load is
 * skipped when the shared descriptor generates the IV itself
 * (alg->caam.geniv, except rfc3686 encrypt which still supplies it).
 */
static void init_authenc_job(struct aead_request *req,
			     struct aead_edesc *edesc,
			     bool all_contig, bool encrypt)
{
	struct crypto_aead *aead = crypto_aead_reqtfm(req);
	struct caam_aead_alg *alg = container_of(crypto_aead_alg(aead),
						 struct caam_aead_alg, aead);
	unsigned int ivsize = crypto_aead_ivsize(aead);
	struct caam_ctx *ctx = crypto_aead_ctx(aead);
	const bool ctr_mode = ((ctx->cdata.algtype & OP_ALG_AAI_MASK) ==
			       OP_ALG_AAI_CTR_MOD128);
	const bool is_rfc3686 = alg->caam.rfc3686;
	u32 *desc = edesc->hw_desc;
	u32 ivoffset = 0;

	/*
	 * AES-CTR needs to load IV in CONTEXT1 reg
	 * at an offset of 128bits (16bytes)
	 * CONTEXT1[255:128] = IV
	 */
	if (ctr_mode)
		ivoffset = 16;

	/*
	 * RFC3686 specific:
	 *	CONTEXT1[255:128] = {NONCE, IV, COUNTER}
	 */
	if (is_rfc3686)
		ivoffset = 16 + CTR_RFC3686_NONCE_SIZE;

	init_aead_job(req, edesc, all_contig, encrypt);

	if (ivsize && ((is_rfc3686 && encrypt) || !alg->caam.geniv))
		append_load_as_imm(desc, req->iv, ivsize,
				   LDST_CLASS_1_CCB |
				   LDST_SRCDST_BYTE_CONTEXT |
				   (ivoffset << LDST_OFFSET_SHIFT));
}
1228
/*
 * Fill in ablkcipher job descriptor
 *
 * The IV is prepended to the input sequence (SEQ IN spans
 * nbytes + ivsize).  When the IV and data are not physically
 * contiguous, both go through the sec4 link table, in which case the
 * table holds one IV entry followed by the source segments; output
 * entries follow after those.
 */
static void init_ablkcipher_job(u32 *sh_desc, dma_addr_t ptr,
				struct ablkcipher_edesc *edesc,
				struct ablkcipher_request *req,
				bool iv_contig)
{
	struct crypto_ablkcipher *ablkcipher = crypto_ablkcipher_reqtfm(req);
	int ivsize = crypto_ablkcipher_ivsize(ablkcipher);
	u32 *desc = edesc->hw_desc;
	u32 out_options = 0, in_options;
	dma_addr_t dst_dma, src_dma;
	int len, sec4_sg_index = 0;

#ifdef DEBUG
	print_hex_dump(KERN_ERR, "presciv@"__stringify(__LINE__)": ",
		       DUMP_PREFIX_ADDRESS, 16, 4, req->info,
		       ivsize, 1);
	printk(KERN_ERR "asked=%d, nbytes%d\n", (int)edesc->src_nents ? 100 : req->nbytes, req->nbytes);
	dbg_dump_sg(KERN_ERR, "src @"__stringify(__LINE__)": ",
		    DUMP_PREFIX_ADDRESS, 16, 4, req->src,
		    edesc->src_nents ? 100 : req->nbytes, 1);
#endif

	len = desc_len(sh_desc);
	init_job_desc_shared(desc, ptr, len, HDR_SHARE_DEFER | HDR_REVERSE);

	if (iv_contig) {
		/* IV buffer is contiguous with the data */
		src_dma = edesc->iv_dma;
		in_options = 0;
	} else {
		/* Table layout: [IV][src segments]... hence the +1 */
		src_dma = edesc->sec4_sg_dma;
		sec4_sg_index += edesc->src_nents + 1;
		in_options = LDST_SGF;
	}
	append_seq_in_ptr(desc, src_dma, req->nbytes + ivsize, in_options);

	if (likely(req->src == req->dst)) {
		if (!edesc->src_nents && iv_contig) {
			dst_dma = sg_dma_address(req->src);
		} else {
			/* Skip the leading IV entry in the link table */
			dst_dma = edesc->sec4_sg_dma +
				sizeof(struct sec4_sg_entry);
			out_options = LDST_SGF;
		}
	} else {
		if (!edesc->dst_nents) {
			dst_dma = sg_dma_address(req->dst);
		} else {
			/* dst entries follow IV + src entries */
			dst_dma = edesc->sec4_sg_dma +
				sec4_sg_index * sizeof(struct sec4_sg_entry);
			out_options = LDST_SGF;
		}
	}
	append_seq_out_ptr(desc, dst_dma, req->nbytes, out_options);
}
1286
/*
 * Fill in ablkcipher givencrypt job descriptor
 *
 * Unlike init_ablkcipher_job(), the IV here is *generated* by the
 * shared descriptor, so it is part of the output sequence
 * (nbytes + ivsize) rather than the input.
 */
static void init_ablkcipher_giv_job(u32 *sh_desc, dma_addr_t ptr,
				    struct ablkcipher_edesc *edesc,
				    struct ablkcipher_request *req,
				    bool iv_contig)
{
	struct crypto_ablkcipher *ablkcipher = crypto_ablkcipher_reqtfm(req);
	int ivsize = crypto_ablkcipher_ivsize(ablkcipher);
	u32 *desc = edesc->hw_desc;
	u32 out_options, in_options;
	dma_addr_t dst_dma, src_dma;
	int len, sec4_sg_index = 0;

#ifdef DEBUG
	print_hex_dump(KERN_ERR, "presciv@" __stringify(__LINE__) ": ",
		       DUMP_PREFIX_ADDRESS, 16, 4, req->info,
		       ivsize, 1);
	dbg_dump_sg(KERN_ERR, "src @" __stringify(__LINE__) ": ",
		    DUMP_PREFIX_ADDRESS, 16, 4, req->src,
		    edesc->src_nents ? 100 : req->nbytes, 1);
#endif

	len = desc_len(sh_desc);
	init_job_desc_shared(desc, ptr, len, HDR_SHARE_DEFER | HDR_REVERSE);

	if (!edesc->src_nents) {
		/* Single-segment source: point at it directly */
		src_dma = sg_dma_address(req->src);
		in_options = 0;
	} else {
		src_dma = edesc->sec4_sg_dma;
		sec4_sg_index += edesc->src_nents;
		in_options = LDST_SGF;
	}
	append_seq_in_ptr(desc, src_dma, req->nbytes, in_options);

	if (iv_contig) {
		/* IV buffer contiguous with dst: write IV + data directly */
		dst_dma = edesc->iv_dma;
		out_options = 0;
	} else {
		/* Output entries follow the source entries in the table */
		dst_dma = edesc->sec4_sg_dma +
			  sec4_sg_index * sizeof(struct sec4_sg_entry);
		out_options = LDST_SGF;
	}
	append_seq_out_ptr(desc, dst_dma, req->nbytes + ivsize, out_options);
}
1334
/*
 * allocate and map the aead extended descriptor
 *
 * Counts the scatterlist segments that need hardware link-table
 * entries, allocates one buffer holding {aead_edesc | job descriptor
 * space (desc_bytes) | sec4 link table}, DMA-maps the scatterlists
 * (bidirectional when in-place) and, if a link table is needed,
 * populates and maps it.  *all_contig_ptr tells the caller whether
 * the source can be addressed without the table.
 *
 * Returns the edesc or ERR_PTR(-ENOMEM); on error all mappings taken
 * so far are released.
 */
static struct aead_edesc *aead_edesc_alloc(struct aead_request *req,
					   int desc_bytes, bool *all_contig_ptr,
					   bool encrypt)
{
	struct crypto_aead *aead = crypto_aead_reqtfm(req);
	struct caam_ctx *ctx = crypto_aead_ctx(aead);
	struct device *jrdev = ctx->jrdev;
	/* May sleep only if the caller allows it */
	gfp_t flags = (req->base.flags & (CRYPTO_TFM_REQ_MAY_BACKLOG |
		       CRYPTO_TFM_REQ_MAY_SLEEP)) ? GFP_KERNEL : GFP_ATOMIC;
	int src_nents, dst_nents = 0;
	struct aead_edesc *edesc;
	int sgc;
	bool all_contig = true;
	int sec4_sg_index, sec4_sg_len = 0, sec4_sg_bytes;
	unsigned int authsize = ctx->authsize;

	if (unlikely(req->dst != req->src)) {
		/* Output grows (encrypt, +ICV) or shrinks (decrypt, -ICV) */
		src_nents = sg_count(req->src, req->assoclen + req->cryptlen);
		dst_nents = sg_count(req->dst,
				     req->assoclen + req->cryptlen +
					(encrypt ? authsize : (-authsize)));
	} else {
		src_nents = sg_count(req->src,
				     req->assoclen + req->cryptlen +
					(encrypt ? authsize : 0));
	}

	/* Check if data are contiguous. */
	all_contig = !src_nents;
	if (!all_contig)
		sec4_sg_len = src_nents;

	sec4_sg_len += dst_nents;

	sec4_sg_bytes = sec4_sg_len * sizeof(struct sec4_sg_entry);

	/* allocate space for base edesc and hw desc commands, link tables */
	edesc = kzalloc(sizeof(*edesc) + desc_bytes + sec4_sg_bytes,
			GFP_DMA | flags);
	if (!edesc) {
		dev_err(jrdev, "could not allocate extended descriptor\n");
		return ERR_PTR(-ENOMEM);
	}

	if (likely(req->src == req->dst)) {
		/* In-place: one bidirectional mapping covers both */
		sgc = dma_map_sg(jrdev, req->src, src_nents ? : 1,
				 DMA_BIDIRECTIONAL);
		if (unlikely(!sgc)) {
			dev_err(jrdev, "unable to map source\n");
			kfree(edesc);
			return ERR_PTR(-ENOMEM);
		}
	} else {
		sgc = dma_map_sg(jrdev, req->src, src_nents ? : 1,
				 DMA_TO_DEVICE);
		if (unlikely(!sgc)) {
			dev_err(jrdev, "unable to map source\n");
			kfree(edesc);
			return ERR_PTR(-ENOMEM);
		}

		sgc = dma_map_sg(jrdev, req->dst, dst_nents ? : 1,
				 DMA_FROM_DEVICE);
		if (unlikely(!sgc)) {
			dev_err(jrdev, "unable to map destination\n");
			dma_unmap_sg(jrdev, req->src, src_nents ? : 1,
				     DMA_TO_DEVICE);
			kfree(edesc);
			return ERR_PTR(-ENOMEM);
		}
	}

	edesc->src_nents = src_nents;
	edesc->dst_nents = dst_nents;
	/* Link table lives right after the job-descriptor space */
	edesc->sec4_sg = (void *)edesc + sizeof(struct aead_edesc) +
			 desc_bytes;
	*all_contig_ptr = all_contig;

	sec4_sg_index = 0;
	if (!all_contig) {
		sg_to_sec4_sg_last(req->src, src_nents,
				   edesc->sec4_sg + sec4_sg_index, 0);
		sec4_sg_index += src_nents;
	}
	if (dst_nents) {
		sg_to_sec4_sg_last(req->dst, dst_nents,
				   edesc->sec4_sg + sec4_sg_index, 0);
	}

	/* No link table needed: skip mapping it */
	if (!sec4_sg_bytes)
		return edesc;

	edesc->sec4_sg_dma = dma_map_single(jrdev, edesc->sec4_sg,
					    sec4_sg_bytes, DMA_TO_DEVICE);
	if (dma_mapping_error(jrdev, edesc->sec4_sg_dma)) {
		dev_err(jrdev, "unable to map S/G table\n");
		/* sec4_sg_bytes still 0 in edesc, so aead_unmap skips it */
		aead_unmap(jrdev, edesc, req);
		kfree(edesc);
		return ERR_PTR(-ENOMEM);
	}

	edesc->sec4_sg_bytes = sec4_sg_bytes;

	return edesc;
}
1443
1444static int gcm_encrypt(struct aead_request *req)
Kim Phillips8e8ec592011-03-13 16:54:26 +08001445{
Yuan Kang0e479302011-07-15 11:21:41 +08001446 struct aead_edesc *edesc;
Kim Phillips8e8ec592011-03-13 16:54:26 +08001447 struct crypto_aead *aead = crypto_aead_reqtfm(req);
Kim Phillips8e8ec592011-03-13 16:54:26 +08001448 struct caam_ctx *ctx = crypto_aead_ctx(aead);
1449 struct device *jrdev = ctx->jrdev;
Yuan Kang1acebad2011-07-15 11:21:42 +08001450 bool all_contig;
Kim Phillips8e8ec592011-03-13 16:54:26 +08001451 u32 *desc;
Yuan Kang1acebad2011-07-15 11:21:42 +08001452 int ret = 0;
1453
Kim Phillips8e8ec592011-03-13 16:54:26 +08001454 /* allocate extended descriptor */
Herbert Xuf2147b82015-06-16 13:54:23 +08001455 edesc = aead_edesc_alloc(req, GCM_DESC_JOB_IO_LEN, &all_contig, true);
Kim Phillips8e8ec592011-03-13 16:54:26 +08001456 if (IS_ERR(edesc))
1457 return PTR_ERR(edesc);
1458
Yuan Kang1acebad2011-07-15 11:21:42 +08001459 /* Create and submit job descriptor */
Herbert Xuf2147b82015-06-16 13:54:23 +08001460 init_gcm_job(req, edesc, all_contig, true);
Yuan Kang1acebad2011-07-15 11:21:42 +08001461#ifdef DEBUG
Alex Porosanu514df282013-08-14 18:56:45 +03001462 print_hex_dump(KERN_ERR, "aead jobdesc@"__stringify(__LINE__)": ",
Yuan Kang1acebad2011-07-15 11:21:42 +08001463 DUMP_PREFIX_ADDRESS, 16, 4, edesc->hw_desc,
1464 desc_bytes(edesc->hw_desc), 1);
1465#endif
1466
Kim Phillips8e8ec592011-03-13 16:54:26 +08001467 desc = edesc->hw_desc;
Yuan Kang1acebad2011-07-15 11:21:42 +08001468 ret = caam_jr_enqueue(jrdev, desc, aead_encrypt_done, req);
1469 if (!ret) {
1470 ret = -EINPROGRESS;
1471 } else {
1472 aead_unmap(jrdev, edesc, req);
1473 kfree(edesc);
1474 }
Kim Phillips8e8ec592011-03-13 16:54:26 +08001475
Yuan Kang1acebad2011-07-15 11:21:42 +08001476 return ret;
Kim Phillips8e8ec592011-03-13 16:54:26 +08001477}
1478
Herbert Xu46218752015-07-09 07:17:33 +08001479static int ipsec_gcm_encrypt(struct aead_request *req)
1480{
1481 if (req->assoclen < 8)
1482 return -EINVAL;
1483
1484 return gcm_encrypt(req);
1485}
1486
/*
 * aead_encrypt - authenc (single-pass IPsec-ESP style) AEAD encrypt entry.
 *
 * Allocates the extended descriptor (which also performs the DMA mappings),
 * builds the hardware encrypt job descriptor and enqueues it on the job
 * ring.  Returns -EINPROGRESS when the job was accepted (completion is
 * reported asynchronously through aead_encrypt_done), a negative errno
 * otherwise; on enqueue failure all resources are unwound here.
 */
static int aead_encrypt(struct aead_request *req)
{
	struct aead_edesc *edesc;
	struct crypto_aead *aead = crypto_aead_reqtfm(req);
	struct caam_ctx *ctx = crypto_aead_ctx(aead);
	struct device *jrdev = ctx->jrdev;
	bool all_contig;
	u32 *desc;
	int ret = 0;

	/* allocate extended descriptor (maps src/dst and reserves HW desc) */
	edesc = aead_edesc_alloc(req, AUTHENC_DESC_JOB_IO_LEN,
				 &all_contig, true);
	if (IS_ERR(edesc))
		return PTR_ERR(edesc);

	/* Create and submit job descriptor (true => encrypt direction) */
	init_authenc_job(req, edesc, all_contig, true);
#ifdef DEBUG
	print_hex_dump(KERN_ERR, "aead jobdesc@"__stringify(__LINE__)": ",
		       DUMP_PREFIX_ADDRESS, 16, 4, edesc->hw_desc,
		       desc_bytes(edesc->hw_desc), 1);
#endif

	desc = edesc->hw_desc;
	ret = caam_jr_enqueue(jrdev, desc, aead_encrypt_done, req);
	if (!ret) {
		/* job accepted by the ring; result delivered via callback */
		ret = -EINPROGRESS;
	} else {
		/* enqueue failed: undo mappings and free the descriptor */
		aead_unmap(jrdev, edesc, req);
		kfree(edesc);
	}

	return ret;
}
1522
/*
 * gcm_decrypt - GCM AEAD decrypt entry point.
 *
 * Mirrors gcm_encrypt: allocate/map the extended descriptor, build the
 * decrypt job (encrypt argument false) and enqueue it.  Returns
 * -EINPROGRESS on successful enqueue (aead_decrypt_done completes the
 * request), negative errno otherwise with resources unwound.
 */
static int gcm_decrypt(struct aead_request *req)
{
	struct aead_edesc *edesc;
	struct crypto_aead *aead = crypto_aead_reqtfm(req);
	struct caam_ctx *ctx = crypto_aead_ctx(aead);
	struct device *jrdev = ctx->jrdev;
	bool all_contig;
	u32 *desc;
	int ret = 0;

	/* allocate extended descriptor */
	edesc = aead_edesc_alloc(req, GCM_DESC_JOB_IO_LEN, &all_contig, false);
	if (IS_ERR(edesc))
		return PTR_ERR(edesc);

	/* Create and submit job descriptor (false => decrypt direction) */
	init_gcm_job(req, edesc, all_contig, false);
#ifdef DEBUG
	print_hex_dump(KERN_ERR, "aead jobdesc@"__stringify(__LINE__)": ",
		       DUMP_PREFIX_ADDRESS, 16, 4, edesc->hw_desc,
		       desc_bytes(edesc->hw_desc), 1);
#endif

	desc = edesc->hw_desc;
	ret = caam_jr_enqueue(jrdev, desc, aead_decrypt_done, req);
	if (!ret) {
		/* job accepted; completion arrives asynchronously */
		ret = -EINPROGRESS;
	} else {
		/* enqueue failed: undo mappings and free the descriptor */
		aead_unmap(jrdev, edesc, req);
		kfree(edesc);
	}

	return ret;
}
Yuan Kang0e479302011-07-15 11:21:41 +08001557
Herbert Xu46218752015-07-09 07:17:33 +08001558static int ipsec_gcm_decrypt(struct aead_request *req)
1559{
1560 if (req->assoclen < 8)
1561 return -EINVAL;
1562
1563 return gcm_decrypt(req);
1564}
1565
/*
 * aead_decrypt - authenc (single-pass IPsec-ESP style) AEAD decrypt entry.
 *
 * Same flow as aead_encrypt but builds the decrypt job descriptor.
 * Returns -EINPROGRESS on successful enqueue (aead_decrypt_done completes
 * the request), negative errno otherwise with resources unwound here.
 */
static int aead_decrypt(struct aead_request *req)
{
	struct aead_edesc *edesc;
	struct crypto_aead *aead = crypto_aead_reqtfm(req);
	struct caam_ctx *ctx = crypto_aead_ctx(aead);
	struct device *jrdev = ctx->jrdev;
	bool all_contig;
	u32 *desc;
	int ret = 0;

#ifdef DEBUG
	/* dump AAD + ciphertext scatterlist before processing */
	dbg_dump_sg(KERN_ERR, "dec src@"__stringify(__LINE__)": ",
		    DUMP_PREFIX_ADDRESS, 16, 4, req->src,
		    req->assoclen + req->cryptlen, 1);
#endif

	/* allocate extended descriptor (maps src/dst and reserves HW desc) */
	edesc = aead_edesc_alloc(req, AUTHENC_DESC_JOB_IO_LEN,
				 &all_contig, false);
	if (IS_ERR(edesc))
		return PTR_ERR(edesc);

	/* Create and submit job descriptor (false => decrypt direction) */
	init_authenc_job(req, edesc, all_contig, false);
#ifdef DEBUG
	print_hex_dump(KERN_ERR, "aead jobdesc@"__stringify(__LINE__)": ",
		       DUMP_PREFIX_ADDRESS, 16, 4, edesc->hw_desc,
		       desc_bytes(edesc->hw_desc), 1);
#endif

	desc = edesc->hw_desc;
	ret = caam_jr_enqueue(jrdev, desc, aead_decrypt_done, req);
	if (!ret) {
		/* job accepted; completion arrives asynchronously */
		ret = -EINPROGRESS;
	} else {
		/* enqueue failed: undo mappings and free the descriptor */
		aead_unmap(jrdev, edesc, req);
		kfree(edesc);
	}

	return ret;
}
1607
/*
 * allocate and map the ablkcipher extended descriptor for ablkcipher
 *
 * Maps req->src/req->dst and the IV for DMA, decides whether the IV can be
 * chained contiguously in front of the source, and builds the sec4 S/G
 * table that the hardware descriptor will reference.  On any failure every
 * mapping made so far is undone and ERR_PTR(-ENOMEM) is returned; on
 * success the caller owns the returned edesc (freed in the completion
 * callback or on enqueue failure).
 */
static struct ablkcipher_edesc *ablkcipher_edesc_alloc(struct ablkcipher_request
						       *req, int desc_bytes,
						       bool *iv_contig_out)
{
	struct crypto_ablkcipher *ablkcipher = crypto_ablkcipher_reqtfm(req);
	struct caam_ctx *ctx = crypto_ablkcipher_ctx(ablkcipher);
	struct device *jrdev = ctx->jrdev;
	/* may sleep only if the caller allowed it via request flags */
	gfp_t flags = (req->base.flags & (CRYPTO_TFM_REQ_MAY_BACKLOG |
					  CRYPTO_TFM_REQ_MAY_SLEEP)) ?
		       GFP_KERNEL : GFP_ATOMIC;
	int src_nents, dst_nents = 0, sec4_sg_bytes;
	struct ablkcipher_edesc *edesc;
	dma_addr_t iv_dma = 0;
	bool iv_contig = false;
	int sgc;
	int ivsize = crypto_ablkcipher_ivsize(ablkcipher);
	int sec4_sg_index;

	src_nents = sg_count(req->src, req->nbytes);

	/* dst_nents stays 0 for in-place operation */
	if (req->dst != req->src)
		dst_nents = sg_count(req->dst, req->nbytes);

	if (likely(req->src == req->dst)) {
		/* in-place: one bidirectional mapping covers both */
		sgc = dma_map_sg(jrdev, req->src, src_nents ? : 1,
				 DMA_BIDIRECTIONAL);
		if (unlikely(!sgc)) {
			dev_err(jrdev, "unable to map source\n");
			return ERR_PTR(-ENOMEM);
		}
	} else {
		sgc = dma_map_sg(jrdev, req->src, src_nents ? : 1,
				 DMA_TO_DEVICE);
		if (unlikely(!sgc)) {
			dev_err(jrdev, "unable to map source\n");
			return ERR_PTR(-ENOMEM);
		}

		sgc = dma_map_sg(jrdev, req->dst, dst_nents ? : 1,
				 DMA_FROM_DEVICE);
		if (unlikely(!sgc)) {
			dev_err(jrdev, "unable to map destination\n");
			/* unwind the source mapping made just above */
			dma_unmap_sg(jrdev, req->src, src_nents ? : 1,
				     DMA_TO_DEVICE);
			return ERR_PTR(-ENOMEM);
		}
	}

	iv_dma = dma_map_single(jrdev, req->info, ivsize, DMA_TO_DEVICE);
	if (dma_mapping_error(jrdev, iv_dma)) {
		dev_err(jrdev, "unable to map IV\n");
		/* iv not mapped yet, hence 0 for the iv arguments */
		caam_unmap(jrdev, req->src, req->dst, src_nents, dst_nents, 0,
			   0, 0, 0);
		return ERR_PTR(-ENOMEM);
	}

	/*
	 * Check if iv can be contiguous with source and destination.
	 * If so, include it. If not, create scatterlist.
	 */
	if (!src_nents && iv_dma + ivsize == sg_dma_address(req->src))
		iv_contig = true;
	else
		src_nents = src_nents ? : 1;
	/* one extra entry for the IV when it is not contiguous */
	sec4_sg_bytes = ((iv_contig ? 0 : 1) + src_nents + dst_nents) *
			sizeof(struct sec4_sg_entry);

	/* allocate space for base edesc and hw desc commands, link tables */
	edesc = kzalloc(sizeof(*edesc) + desc_bytes + sec4_sg_bytes,
			GFP_DMA | flags);
	if (!edesc) {
		dev_err(jrdev, "could not allocate extended descriptor\n");
		caam_unmap(jrdev, req->src, req->dst, src_nents, dst_nents,
			   iv_dma, ivsize, 0, 0);
		return ERR_PTR(-ENOMEM);
	}

	edesc->src_nents = src_nents;
	edesc->dst_nents = dst_nents;
	edesc->sec4_sg_bytes = sec4_sg_bytes;
	/* S/G table lives right after the hw descriptor in the same alloc */
	edesc->sec4_sg = (void *)edesc + sizeof(struct ablkcipher_edesc) +
			 desc_bytes;

	sec4_sg_index = 0;
	if (!iv_contig) {
		/* table layout: [IV entry][src entries...] */
		dma_to_sec4_sg_one(edesc->sec4_sg, iv_dma, ivsize, 0);
		sg_to_sec4_sg_last(req->src, src_nents,
				   edesc->sec4_sg + 1, 0);
		sec4_sg_index += 1 + src_nents;
	}

	if (dst_nents) {
		/* separate dst entries appended after IV/src */
		sg_to_sec4_sg_last(req->dst, dst_nents,
			edesc->sec4_sg + sec4_sg_index, 0);
	}

	edesc->sec4_sg_dma = dma_map_single(jrdev, edesc->sec4_sg,
					    sec4_sg_bytes, DMA_TO_DEVICE);
	if (dma_mapping_error(jrdev, edesc->sec4_sg_dma)) {
		dev_err(jrdev, "unable to map S/G table\n");
		caam_unmap(jrdev, req->src, req->dst, src_nents, dst_nents,
			   iv_dma, ivsize, 0, 0);
		kfree(edesc);
		return ERR_PTR(-ENOMEM);
	}

	edesc->iv_dma = iv_dma;

#ifdef DEBUG
	print_hex_dump(KERN_ERR, "ablkcipher sec4_sg@"__stringify(__LINE__)": ",
		       DUMP_PREFIX_ADDRESS, 16, 4, edesc->sec4_sg,
		       sec4_sg_bytes, 1);
#endif

	*iv_contig_out = iv_contig;
	return edesc;
}
1728
/*
 * ablkcipher_encrypt - symmetric-cipher encrypt entry point.
 *
 * Allocates/maps the extended descriptor, builds the job descriptor from
 * the precomputed shared encrypt descriptor (ctx->sh_desc_enc) and
 * enqueues it.  Returns -EINPROGRESS on successful enqueue
 * (ablkcipher_encrypt_done completes the request), negative errno
 * otherwise with resources unwound here.
 */
static int ablkcipher_encrypt(struct ablkcipher_request *req)
{
	struct ablkcipher_edesc *edesc;
	struct crypto_ablkcipher *ablkcipher = crypto_ablkcipher_reqtfm(req);
	struct caam_ctx *ctx = crypto_ablkcipher_ctx(ablkcipher);
	struct device *jrdev = ctx->jrdev;
	bool iv_contig;
	u32 *desc;
	int ret = 0;

	/* allocate extended descriptor */
	edesc = ablkcipher_edesc_alloc(req, DESC_JOB_IO_LEN *
				       CAAM_CMD_SZ, &iv_contig);
	if (IS_ERR(edesc))
		return PTR_ERR(edesc);

	/* Create and submit job descriptor*/
	init_ablkcipher_job(ctx->sh_desc_enc,
			    ctx->sh_desc_enc_dma, edesc, req, iv_contig);
#ifdef DEBUG
	print_hex_dump(KERN_ERR, "ablkcipher jobdesc@"__stringify(__LINE__)": ",
		       DUMP_PREFIX_ADDRESS, 16, 4, edesc->hw_desc,
		       desc_bytes(edesc->hw_desc), 1);
#endif
	desc = edesc->hw_desc;
	ret = caam_jr_enqueue(jrdev, desc, ablkcipher_encrypt_done, req);

	if (!ret) {
		/* job accepted; completion arrives asynchronously */
		ret = -EINPROGRESS;
	} else {
		/* enqueue failed: undo mappings and free the descriptor */
		ablkcipher_unmap(jrdev, edesc, req);
		kfree(edesc);
	}

	return ret;
}
1765
/*
 * ablkcipher_decrypt - symmetric-cipher decrypt entry point.
 *
 * Same flow as ablkcipher_encrypt but uses the shared decrypt descriptor
 * (ctx->sh_desc_dec).  Returns -EINPROGRESS on successful enqueue
 * (ablkcipher_decrypt_done completes the request), negative errno
 * otherwise with resources unwound here.
 */
static int ablkcipher_decrypt(struct ablkcipher_request *req)
{
	struct ablkcipher_edesc *edesc;
	struct crypto_ablkcipher *ablkcipher = crypto_ablkcipher_reqtfm(req);
	struct caam_ctx *ctx = crypto_ablkcipher_ctx(ablkcipher);
	struct device *jrdev = ctx->jrdev;
	bool iv_contig;
	u32 *desc;
	int ret = 0;

	/* allocate extended descriptor */
	edesc = ablkcipher_edesc_alloc(req, DESC_JOB_IO_LEN *
				       CAAM_CMD_SZ, &iv_contig);
	if (IS_ERR(edesc))
		return PTR_ERR(edesc);

	/* Create and submit job descriptor*/
	init_ablkcipher_job(ctx->sh_desc_dec,
			    ctx->sh_desc_dec_dma, edesc, req, iv_contig);
	desc = edesc->hw_desc;
#ifdef DEBUG
	print_hex_dump(KERN_ERR, "ablkcipher jobdesc@"__stringify(__LINE__)": ",
		       DUMP_PREFIX_ADDRESS, 16, 4, edesc->hw_desc,
		       desc_bytes(edesc->hw_desc), 1);
#endif

	ret = caam_jr_enqueue(jrdev, desc, ablkcipher_decrypt_done, req);
	if (!ret) {
		/* job accepted; completion arrives asynchronously */
		ret = -EINPROGRESS;
	} else {
		/* enqueue failed: undo mappings and free the descriptor */
		ablkcipher_unmap(jrdev, edesc, req);
		kfree(edesc);
	}

	return ret;
}
1802
/*
 * allocate and map the ablkcipher extended descriptor
 * for ablkcipher givencrypt
 *
 * Like ablkcipher_edesc_alloc(), but the IV comes from greq->giv (the IV
 * the hardware will generate into) and the contiguity check is performed
 * against the destination instead of the source, so the generated IV can
 * be written contiguously in front of the ciphertext.  S/G table layout is
 * therefore [src entries...][IV entry][dst entries...].
 */
static struct ablkcipher_edesc *ablkcipher_giv_edesc_alloc(
				struct skcipher_givcrypt_request *greq,
				int desc_bytes,
				bool *iv_contig_out)
{
	struct ablkcipher_request *req = &greq->creq;
	struct crypto_ablkcipher *ablkcipher = crypto_ablkcipher_reqtfm(req);
	struct caam_ctx *ctx = crypto_ablkcipher_ctx(ablkcipher);
	struct device *jrdev = ctx->jrdev;
	/* may sleep only if the caller allowed it via request flags */
	gfp_t flags = (req->base.flags & (CRYPTO_TFM_REQ_MAY_BACKLOG |
					  CRYPTO_TFM_REQ_MAY_SLEEP)) ?
		       GFP_KERNEL : GFP_ATOMIC;
	int src_nents, dst_nents = 0, sec4_sg_bytes;
	struct ablkcipher_edesc *edesc;
	dma_addr_t iv_dma = 0;
	bool iv_contig = false;
	int sgc;
	int ivsize = crypto_ablkcipher_ivsize(ablkcipher);
	int sec4_sg_index;

	src_nents = sg_count(req->src, req->nbytes);

	/* dst_nents stays 0 for in-place operation */
	if (unlikely(req->dst != req->src))
		dst_nents = sg_count(req->dst, req->nbytes);

	if (likely(req->src == req->dst)) {
		/* in-place: one bidirectional mapping covers both */
		sgc = dma_map_sg(jrdev, req->src, src_nents ? : 1,
				 DMA_BIDIRECTIONAL);
		if (unlikely(!sgc)) {
			dev_err(jrdev, "unable to map source\n");
			return ERR_PTR(-ENOMEM);
		}
	} else {
		sgc = dma_map_sg(jrdev, req->src, src_nents ? : 1,
				 DMA_TO_DEVICE);
		if (unlikely(!sgc)) {
			dev_err(jrdev, "unable to map source\n");
			return ERR_PTR(-ENOMEM);
		}

		sgc = dma_map_sg(jrdev, req->dst, dst_nents ? : 1,
				 DMA_FROM_DEVICE);
		if (unlikely(!sgc)) {
			dev_err(jrdev, "unable to map destination\n");
			/* unwind the source mapping made just above */
			dma_unmap_sg(jrdev, req->src, src_nents ? : 1,
				     DMA_TO_DEVICE);
			return ERR_PTR(-ENOMEM);
		}
	}

	/*
	 * Check if iv can be contiguous with source and destination.
	 * If so, include it. If not, create scatterlist.
	 */
	iv_dma = dma_map_single(jrdev, greq->giv, ivsize, DMA_TO_DEVICE);
	if (dma_mapping_error(jrdev, iv_dma)) {
		dev_err(jrdev, "unable to map IV\n");
		/* iv not mapped yet, hence 0 for the iv arguments */
		caam_unmap(jrdev, req->src, req->dst, src_nents, dst_nents, 0,
			   0, 0, 0);
		return ERR_PTR(-ENOMEM);
	}

	/* contiguity is checked against dst: IV is an output here */
	if (!dst_nents && iv_dma + ivsize == sg_dma_address(req->dst))
		iv_contig = true;
	else
		dst_nents = dst_nents ? : 1;
	/* one extra entry for the IV when it is not contiguous */
	sec4_sg_bytes = ((iv_contig ? 0 : 1) + src_nents + dst_nents) *
			sizeof(struct sec4_sg_entry);

	/* allocate space for base edesc and hw desc commands, link tables */
	edesc = kzalloc(sizeof(*edesc) + desc_bytes + sec4_sg_bytes,
			GFP_DMA | flags);
	if (!edesc) {
		dev_err(jrdev, "could not allocate extended descriptor\n");
		caam_unmap(jrdev, req->src, req->dst, src_nents, dst_nents,
			   iv_dma, ivsize, 0, 0);
		return ERR_PTR(-ENOMEM);
	}

	edesc->src_nents = src_nents;
	edesc->dst_nents = dst_nents;
	edesc->sec4_sg_bytes = sec4_sg_bytes;
	/* S/G table lives right after the hw descriptor in the same alloc */
	edesc->sec4_sg = (void *)edesc + sizeof(struct ablkcipher_edesc) +
			 desc_bytes;

	sec4_sg_index = 0;
	if (src_nents) {
		sg_to_sec4_sg_last(req->src, src_nents, edesc->sec4_sg, 0);
		sec4_sg_index += src_nents;
	}

	if (!iv_contig) {
		/* IV entry followed by the dst entries */
		dma_to_sec4_sg_one(edesc->sec4_sg + sec4_sg_index,
				   iv_dma, ivsize, 0);
		sec4_sg_index += 1;
		sg_to_sec4_sg_last(req->dst, dst_nents,
				   edesc->sec4_sg + sec4_sg_index, 0);
	}

	edesc->sec4_sg_dma = dma_map_single(jrdev, edesc->sec4_sg,
					    sec4_sg_bytes, DMA_TO_DEVICE);
	if (dma_mapping_error(jrdev, edesc->sec4_sg_dma)) {
		dev_err(jrdev, "unable to map S/G table\n");
		caam_unmap(jrdev, req->src, req->dst, src_nents, dst_nents,
			   iv_dma, ivsize, 0, 0);
		kfree(edesc);
		return ERR_PTR(-ENOMEM);
	}
	edesc->iv_dma = iv_dma;

#ifdef DEBUG
	print_hex_dump(KERN_ERR,
		       "ablkcipher sec4_sg@" __stringify(__LINE__) ": ",
		       DUMP_PREFIX_ADDRESS, 16, 4, edesc->sec4_sg,
		       sec4_sg_bytes, 1);
#endif

	*iv_contig_out = iv_contig;
	return edesc;
}
1927
/*
 * ablkcipher_givencrypt - encrypt with hardware-generated IV.
 *
 * Uses the givencrypt shared descriptor (ctx->sh_desc_givenc) and the
 * givencrypt variant of the edesc allocator, which maps greq->giv as the
 * IV buffer.  Returns -EINPROGRESS on successful enqueue
 * (ablkcipher_encrypt_done completes the request), negative errno
 * otherwise with resources unwound here.
 */
static int ablkcipher_givencrypt(struct skcipher_givcrypt_request *creq)
{
	struct ablkcipher_request *req = &creq->creq;
	struct ablkcipher_edesc *edesc;
	struct crypto_ablkcipher *ablkcipher = crypto_ablkcipher_reqtfm(req);
	struct caam_ctx *ctx = crypto_ablkcipher_ctx(ablkcipher);
	struct device *jrdev = ctx->jrdev;
	bool iv_contig;
	u32 *desc;
	int ret = 0;

	/* allocate extended descriptor */
	edesc = ablkcipher_giv_edesc_alloc(creq, DESC_JOB_IO_LEN *
				       CAAM_CMD_SZ, &iv_contig);
	if (IS_ERR(edesc))
		return PTR_ERR(edesc);

	/* Create and submit job descriptor*/
	init_ablkcipher_giv_job(ctx->sh_desc_givenc, ctx->sh_desc_givenc_dma,
				edesc, req, iv_contig);
#ifdef DEBUG
	print_hex_dump(KERN_ERR,
		       "ablkcipher jobdesc@" __stringify(__LINE__) ": ",
		       DUMP_PREFIX_ADDRESS, 16, 4, edesc->hw_desc,
		       desc_bytes(edesc->hw_desc), 1);
#endif
	desc = edesc->hw_desc;
	ret = caam_jr_enqueue(jrdev, desc, ablkcipher_encrypt_done, req);

	if (!ret) {
		/* job accepted; completion arrives asynchronously */
		ret = -EINPROGRESS;
	} else {
		/* enqueue failed: undo mappings and free the descriptor */
		ablkcipher_unmap(jrdev, edesc, req);
		kfree(edesc);
	}

	return ret;
}
1966
Yuan Kang885e9e22011-07-15 11:21:41 +08001967#define template_aead template_u.aead
Yuan Kangacdca312011-07-15 11:21:42 +08001968#define template_ablkcipher template_u.ablkcipher
Kim Phillips8e8ec592011-03-13 16:54:26 +08001969struct caam_alg_template {
1970 char name[CRYPTO_MAX_ALG_NAME];
1971 char driver_name[CRYPTO_MAX_ALG_NAME];
1972 unsigned int blocksize;
Yuan Kang885e9e22011-07-15 11:21:41 +08001973 u32 type;
1974 union {
1975 struct ablkcipher_alg ablkcipher;
Yuan Kang885e9e22011-07-15 11:21:41 +08001976 } template_u;
Kim Phillips8e8ec592011-03-13 16:54:26 +08001977 u32 class1_alg_type;
1978 u32 class2_alg_type;
Kim Phillips8e8ec592011-03-13 16:54:26 +08001979};
1980
/*
 * Table of (abl)kcipher algorithms registered by this driver.  Entries of
 * type CRYPTO_ALG_TYPE_GIVCIPHER additionally provide a givencrypt hook so
 * the hardware generates the IV itself.
 */
static struct caam_alg_template driver_algs[] = {
	/* ablkcipher descriptor */
	{
		.name = "cbc(aes)",
		.driver_name = "cbc-aes-caam",
		.blocksize = AES_BLOCK_SIZE,
		.type = CRYPTO_ALG_TYPE_GIVCIPHER,
		.template_ablkcipher = {
			.setkey = ablkcipher_setkey,
			.encrypt = ablkcipher_encrypt,
			.decrypt = ablkcipher_decrypt,
			.givencrypt = ablkcipher_givencrypt,
			.geniv = "<built-in>",
			.min_keysize = AES_MIN_KEY_SIZE,
			.max_keysize = AES_MAX_KEY_SIZE,
			.ivsize = AES_BLOCK_SIZE,
		},
		.class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_CBC,
	},
	{
		.name = "cbc(des3_ede)",
		.driver_name = "cbc-3des-caam",
		.blocksize = DES3_EDE_BLOCK_SIZE,
		.type = CRYPTO_ALG_TYPE_GIVCIPHER,
		.template_ablkcipher = {
			.setkey = ablkcipher_setkey,
			.encrypt = ablkcipher_encrypt,
			.decrypt = ablkcipher_decrypt,
			.givencrypt = ablkcipher_givencrypt,
			.geniv = "<built-in>",
			.min_keysize = DES3_EDE_KEY_SIZE,
			.max_keysize = DES3_EDE_KEY_SIZE,
			.ivsize = DES3_EDE_BLOCK_SIZE,
		},
		.class1_alg_type = OP_ALG_ALGSEL_3DES | OP_ALG_AAI_CBC,
	},
	{
		.name = "cbc(des)",
		.driver_name = "cbc-des-caam",
		.blocksize = DES_BLOCK_SIZE,
		.type = CRYPTO_ALG_TYPE_GIVCIPHER,
		.template_ablkcipher = {
			.setkey = ablkcipher_setkey,
			.encrypt = ablkcipher_encrypt,
			.decrypt = ablkcipher_decrypt,
			.givencrypt = ablkcipher_givencrypt,
			.geniv = "<built-in>",
			.min_keysize = DES_KEY_SIZE,
			.max_keysize = DES_KEY_SIZE,
			.ivsize = DES_BLOCK_SIZE,
		},
		.class1_alg_type = OP_ALG_ALGSEL_DES | OP_ALG_AAI_CBC,
	},
	{
		/* stream-cipher-like: blocksize 1, software chainiv geniv */
		.name = "ctr(aes)",
		.driver_name = "ctr-aes-caam",
		.blocksize = 1,
		.type = CRYPTO_ALG_TYPE_ABLKCIPHER,
		.template_ablkcipher = {
			.setkey = ablkcipher_setkey,
			.encrypt = ablkcipher_encrypt,
			.decrypt = ablkcipher_decrypt,
			.geniv = "chainiv",
			.min_keysize = AES_MIN_KEY_SIZE,
			.max_keysize = AES_MAX_KEY_SIZE,
			.ivsize = AES_BLOCK_SIZE,
		},
		.class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_CTR_MOD128,
	},
	{
		/* rfc3686: CTR with nonce folded into the key material */
		.name = "rfc3686(ctr(aes))",
		.driver_name = "rfc3686-ctr-aes-caam",
		.blocksize = 1,
		.type = CRYPTO_ALG_TYPE_GIVCIPHER,
		.template_ablkcipher = {
			.setkey = ablkcipher_setkey,
			.encrypt = ablkcipher_encrypt,
			.decrypt = ablkcipher_decrypt,
			.givencrypt = ablkcipher_givencrypt,
			.geniv = "<built-in>",
			.min_keysize = AES_MIN_KEY_SIZE +
				       CTR_RFC3686_NONCE_SIZE,
			.max_keysize = AES_MAX_KEY_SIZE +
				       CTR_RFC3686_NONCE_SIZE,
			.ivsize = CTR_RFC3686_IV_SIZE,
		},
		.class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_CTR_MOD128,
	},
	{
		/* XTS uses two AES keys, hence the doubled key sizes */
		.name = "xts(aes)",
		.driver_name = "xts-aes-caam",
		.blocksize = AES_BLOCK_SIZE,
		.type = CRYPTO_ALG_TYPE_ABLKCIPHER,
		.template_ablkcipher = {
			.setkey = xts_ablkcipher_setkey,
			.encrypt = ablkcipher_encrypt,
			.decrypt = ablkcipher_decrypt,
			.geniv = "eseqiv",
			.min_keysize = 2 * AES_MIN_KEY_SIZE,
			.max_keysize = 2 * AES_MAX_KEY_SIZE,
			.ivsize = AES_BLOCK_SIZE,
		},
		.class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_XTS,
	},
};
2086
Herbert Xuf2147b82015-06-16 13:54:23 +08002087static struct caam_aead_alg driver_aeads[] = {
2088 {
2089 .aead = {
2090 .base = {
2091 .cra_name = "rfc4106(gcm(aes))",
2092 .cra_driver_name = "rfc4106-gcm-aes-caam",
2093 .cra_blocksize = 1,
2094 },
2095 .setkey = rfc4106_setkey,
2096 .setauthsize = rfc4106_setauthsize,
Herbert Xu46218752015-07-09 07:17:33 +08002097 .encrypt = ipsec_gcm_encrypt,
2098 .decrypt = ipsec_gcm_decrypt,
Herbert Xuf2147b82015-06-16 13:54:23 +08002099 .ivsize = 8,
2100 .maxauthsize = AES_BLOCK_SIZE,
2101 },
2102 .caam = {
2103 .class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_GCM,
2104 },
2105 },
2106 {
2107 .aead = {
2108 .base = {
2109 .cra_name = "rfc4543(gcm(aes))",
2110 .cra_driver_name = "rfc4543-gcm-aes-caam",
2111 .cra_blocksize = 1,
2112 },
2113 .setkey = rfc4543_setkey,
2114 .setauthsize = rfc4543_setauthsize,
Herbert Xu46218752015-07-09 07:17:33 +08002115 .encrypt = ipsec_gcm_encrypt,
2116 .decrypt = ipsec_gcm_decrypt,
Herbert Xuf2147b82015-06-16 13:54:23 +08002117 .ivsize = 8,
2118 .maxauthsize = AES_BLOCK_SIZE,
2119 },
2120 .caam = {
2121 .class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_GCM,
2122 },
2123 },
2124 /* Galois Counter Mode */
2125 {
2126 .aead = {
2127 .base = {
2128 .cra_name = "gcm(aes)",
2129 .cra_driver_name = "gcm-aes-caam",
2130 .cra_blocksize = 1,
2131 },
2132 .setkey = gcm_setkey,
2133 .setauthsize = gcm_setauthsize,
2134 .encrypt = gcm_encrypt,
2135 .decrypt = gcm_decrypt,
2136 .ivsize = 12,
2137 .maxauthsize = AES_BLOCK_SIZE,
2138 },
2139 .caam = {
2140 .class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_GCM,
2141 },
2142 },
Herbert Xu479bcc72015-07-30 17:53:17 +08002143 /* single-pass ipsec_esp descriptor */
2144 {
2145 .aead = {
2146 .base = {
2147 .cra_name = "authenc(hmac(md5),"
2148 "ecb(cipher_null))",
2149 .cra_driver_name = "authenc-hmac-md5-"
2150 "ecb-cipher_null-caam",
2151 .cra_blocksize = NULL_BLOCK_SIZE,
2152 },
2153 .setkey = aead_setkey,
2154 .setauthsize = aead_setauthsize,
2155 .encrypt = aead_encrypt,
2156 .decrypt = aead_decrypt,
2157 .ivsize = NULL_IV_SIZE,
2158 .maxauthsize = MD5_DIGEST_SIZE,
2159 },
2160 .caam = {
2161 .class2_alg_type = OP_ALG_ALGSEL_MD5 |
2162 OP_ALG_AAI_HMAC_PRECOMP,
Herbert Xu479bcc72015-07-30 17:53:17 +08002163 },
2164 },
2165 {
2166 .aead = {
2167 .base = {
2168 .cra_name = "authenc(hmac(sha1),"
2169 "ecb(cipher_null))",
2170 .cra_driver_name = "authenc-hmac-sha1-"
2171 "ecb-cipher_null-caam",
2172 .cra_blocksize = NULL_BLOCK_SIZE,
2173 },
2174 .setkey = aead_setkey,
2175 .setauthsize = aead_setauthsize,
2176 .encrypt = aead_encrypt,
2177 .decrypt = aead_decrypt,
2178 .ivsize = NULL_IV_SIZE,
2179 .maxauthsize = SHA1_DIGEST_SIZE,
2180 },
2181 .caam = {
2182 .class2_alg_type = OP_ALG_ALGSEL_SHA1 |
2183 OP_ALG_AAI_HMAC_PRECOMP,
Herbert Xu479bcc72015-07-30 17:53:17 +08002184 },
2185 },
2186 {
2187 .aead = {
2188 .base = {
2189 .cra_name = "authenc(hmac(sha224),"
2190 "ecb(cipher_null))",
2191 .cra_driver_name = "authenc-hmac-sha224-"
2192 "ecb-cipher_null-caam",
2193 .cra_blocksize = NULL_BLOCK_SIZE,
2194 },
2195 .setkey = aead_setkey,
2196 .setauthsize = aead_setauthsize,
2197 .encrypt = aead_encrypt,
2198 .decrypt = aead_decrypt,
2199 .ivsize = NULL_IV_SIZE,
2200 .maxauthsize = SHA224_DIGEST_SIZE,
2201 },
2202 .caam = {
2203 .class2_alg_type = OP_ALG_ALGSEL_SHA224 |
2204 OP_ALG_AAI_HMAC_PRECOMP,
Herbert Xu479bcc72015-07-30 17:53:17 +08002205 },
2206 },
2207 {
2208 .aead = {
2209 .base = {
2210 .cra_name = "authenc(hmac(sha256),"
2211 "ecb(cipher_null))",
2212 .cra_driver_name = "authenc-hmac-sha256-"
2213 "ecb-cipher_null-caam",
2214 .cra_blocksize = NULL_BLOCK_SIZE,
2215 },
2216 .setkey = aead_setkey,
2217 .setauthsize = aead_setauthsize,
2218 .encrypt = aead_encrypt,
2219 .decrypt = aead_decrypt,
2220 .ivsize = NULL_IV_SIZE,
2221 .maxauthsize = SHA256_DIGEST_SIZE,
2222 },
2223 .caam = {
2224 .class2_alg_type = OP_ALG_ALGSEL_SHA256 |
2225 OP_ALG_AAI_HMAC_PRECOMP,
Herbert Xu479bcc72015-07-30 17:53:17 +08002226 },
2227 },
2228 {
2229 .aead = {
2230 .base = {
2231 .cra_name = "authenc(hmac(sha384),"
2232 "ecb(cipher_null))",
2233 .cra_driver_name = "authenc-hmac-sha384-"
2234 "ecb-cipher_null-caam",
2235 .cra_blocksize = NULL_BLOCK_SIZE,
2236 },
2237 .setkey = aead_setkey,
2238 .setauthsize = aead_setauthsize,
2239 .encrypt = aead_encrypt,
2240 .decrypt = aead_decrypt,
2241 .ivsize = NULL_IV_SIZE,
2242 .maxauthsize = SHA384_DIGEST_SIZE,
2243 },
2244 .caam = {
2245 .class2_alg_type = OP_ALG_ALGSEL_SHA384 |
2246 OP_ALG_AAI_HMAC_PRECOMP,
Herbert Xu479bcc72015-07-30 17:53:17 +08002247 },
2248 },
2249 {
2250 .aead = {
2251 .base = {
2252 .cra_name = "authenc(hmac(sha512),"
2253 "ecb(cipher_null))",
2254 .cra_driver_name = "authenc-hmac-sha512-"
2255 "ecb-cipher_null-caam",
2256 .cra_blocksize = NULL_BLOCK_SIZE,
2257 },
2258 .setkey = aead_setkey,
2259 .setauthsize = aead_setauthsize,
2260 .encrypt = aead_encrypt,
2261 .decrypt = aead_decrypt,
2262 .ivsize = NULL_IV_SIZE,
2263 .maxauthsize = SHA512_DIGEST_SIZE,
2264 },
2265 .caam = {
2266 .class2_alg_type = OP_ALG_ALGSEL_SHA512 |
2267 OP_ALG_AAI_HMAC_PRECOMP,
Herbert Xu479bcc72015-07-30 17:53:17 +08002268 },
2269 },
2270 {
2271 .aead = {
2272 .base = {
2273 .cra_name = "authenc(hmac(md5),cbc(aes))",
2274 .cra_driver_name = "authenc-hmac-md5-"
2275 "cbc-aes-caam",
2276 .cra_blocksize = AES_BLOCK_SIZE,
2277 },
2278 .setkey = aead_setkey,
2279 .setauthsize = aead_setauthsize,
2280 .encrypt = aead_encrypt,
2281 .decrypt = aead_decrypt,
2282 .ivsize = AES_BLOCK_SIZE,
2283 .maxauthsize = MD5_DIGEST_SIZE,
2284 },
2285 .caam = {
2286 .class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_CBC,
2287 .class2_alg_type = OP_ALG_ALGSEL_MD5 |
2288 OP_ALG_AAI_HMAC_PRECOMP,
Herbert Xu479bcc72015-07-30 17:53:17 +08002289 },
2290 },
2291 {
2292 .aead = {
2293 .base = {
2294 .cra_name = "echainiv(authenc(hmac(md5),"
2295 "cbc(aes)))",
2296 .cra_driver_name = "echainiv-authenc-hmac-md5-"
2297 "cbc-aes-caam",
2298 .cra_blocksize = AES_BLOCK_SIZE,
2299 },
2300 .setkey = aead_setkey,
2301 .setauthsize = aead_setauthsize,
2302 .encrypt = aead_encrypt,
Horia Geantă8b18e232016-08-29 14:52:14 +03002303 .decrypt = aead_decrypt,
Herbert Xu479bcc72015-07-30 17:53:17 +08002304 .ivsize = AES_BLOCK_SIZE,
2305 .maxauthsize = MD5_DIGEST_SIZE,
2306 },
2307 .caam = {
2308 .class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_CBC,
2309 .class2_alg_type = OP_ALG_ALGSEL_MD5 |
2310 OP_ALG_AAI_HMAC_PRECOMP,
Herbert Xu479bcc72015-07-30 17:53:17 +08002311 .geniv = true,
2312 },
2313 },
2314 {
2315 .aead = {
2316 .base = {
2317 .cra_name = "authenc(hmac(sha1),cbc(aes))",
2318 .cra_driver_name = "authenc-hmac-sha1-"
2319 "cbc-aes-caam",
2320 .cra_blocksize = AES_BLOCK_SIZE,
2321 },
2322 .setkey = aead_setkey,
2323 .setauthsize = aead_setauthsize,
2324 .encrypt = aead_encrypt,
2325 .decrypt = aead_decrypt,
2326 .ivsize = AES_BLOCK_SIZE,
2327 .maxauthsize = SHA1_DIGEST_SIZE,
2328 },
2329 .caam = {
2330 .class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_CBC,
2331 .class2_alg_type = OP_ALG_ALGSEL_SHA1 |
2332 OP_ALG_AAI_HMAC_PRECOMP,
Herbert Xu479bcc72015-07-30 17:53:17 +08002333 },
2334 },
2335 {
2336 .aead = {
2337 .base = {
2338 .cra_name = "echainiv(authenc(hmac(sha1),"
2339 "cbc(aes)))",
2340 .cra_driver_name = "echainiv-authenc-"
2341 "hmac-sha1-cbc-aes-caam",
2342 .cra_blocksize = AES_BLOCK_SIZE,
2343 },
2344 .setkey = aead_setkey,
2345 .setauthsize = aead_setauthsize,
2346 .encrypt = aead_encrypt,
Horia Geantă8b18e232016-08-29 14:52:14 +03002347 .decrypt = aead_decrypt,
Herbert Xu479bcc72015-07-30 17:53:17 +08002348 .ivsize = AES_BLOCK_SIZE,
2349 .maxauthsize = SHA1_DIGEST_SIZE,
2350 },
2351 .caam = {
2352 .class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_CBC,
2353 .class2_alg_type = OP_ALG_ALGSEL_SHA1 |
2354 OP_ALG_AAI_HMAC_PRECOMP,
Herbert Xu479bcc72015-07-30 17:53:17 +08002355 .geniv = true,
2356 },
2357 },
2358 {
2359 .aead = {
2360 .base = {
2361 .cra_name = "authenc(hmac(sha224),cbc(aes))",
2362 .cra_driver_name = "authenc-hmac-sha224-"
2363 "cbc-aes-caam",
2364 .cra_blocksize = AES_BLOCK_SIZE,
2365 },
2366 .setkey = aead_setkey,
2367 .setauthsize = aead_setauthsize,
2368 .encrypt = aead_encrypt,
2369 .decrypt = aead_decrypt,
2370 .ivsize = AES_BLOCK_SIZE,
2371 .maxauthsize = SHA224_DIGEST_SIZE,
2372 },
2373 .caam = {
2374 .class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_CBC,
2375 .class2_alg_type = OP_ALG_ALGSEL_SHA224 |
2376 OP_ALG_AAI_HMAC_PRECOMP,
Herbert Xu479bcc72015-07-30 17:53:17 +08002377 },
2378 },
2379 {
2380 .aead = {
2381 .base = {
2382 .cra_name = "echainiv(authenc(hmac(sha224),"
2383 "cbc(aes)))",
2384 .cra_driver_name = "echainiv-authenc-"
2385 "hmac-sha224-cbc-aes-caam",
2386 .cra_blocksize = AES_BLOCK_SIZE,
2387 },
2388 .setkey = aead_setkey,
2389 .setauthsize = aead_setauthsize,
2390 .encrypt = aead_encrypt,
Horia Geantă8b18e232016-08-29 14:52:14 +03002391 .decrypt = aead_decrypt,
Herbert Xu479bcc72015-07-30 17:53:17 +08002392 .ivsize = AES_BLOCK_SIZE,
2393 .maxauthsize = SHA224_DIGEST_SIZE,
2394 },
2395 .caam = {
2396 .class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_CBC,
2397 .class2_alg_type = OP_ALG_ALGSEL_SHA224 |
2398 OP_ALG_AAI_HMAC_PRECOMP,
Herbert Xu479bcc72015-07-30 17:53:17 +08002399 .geniv = true,
2400 },
2401 },
2402 {
2403 .aead = {
2404 .base = {
2405 .cra_name = "authenc(hmac(sha256),cbc(aes))",
2406 .cra_driver_name = "authenc-hmac-sha256-"
2407 "cbc-aes-caam",
2408 .cra_blocksize = AES_BLOCK_SIZE,
2409 },
2410 .setkey = aead_setkey,
2411 .setauthsize = aead_setauthsize,
2412 .encrypt = aead_encrypt,
2413 .decrypt = aead_decrypt,
2414 .ivsize = AES_BLOCK_SIZE,
2415 .maxauthsize = SHA256_DIGEST_SIZE,
2416 },
2417 .caam = {
2418 .class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_CBC,
2419 .class2_alg_type = OP_ALG_ALGSEL_SHA256 |
2420 OP_ALG_AAI_HMAC_PRECOMP,
Herbert Xu479bcc72015-07-30 17:53:17 +08002421 },
2422 },
2423 {
2424 .aead = {
2425 .base = {
2426 .cra_name = "echainiv(authenc(hmac(sha256),"
2427 "cbc(aes)))",
2428 .cra_driver_name = "echainiv-authenc-"
2429 "hmac-sha256-cbc-aes-caam",
2430 .cra_blocksize = AES_BLOCK_SIZE,
2431 },
2432 .setkey = aead_setkey,
2433 .setauthsize = aead_setauthsize,
2434 .encrypt = aead_encrypt,
Horia Geantă8b18e232016-08-29 14:52:14 +03002435 .decrypt = aead_decrypt,
Herbert Xu479bcc72015-07-30 17:53:17 +08002436 .ivsize = AES_BLOCK_SIZE,
2437 .maxauthsize = SHA256_DIGEST_SIZE,
2438 },
2439 .caam = {
2440 .class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_CBC,
2441 .class2_alg_type = OP_ALG_ALGSEL_SHA256 |
2442 OP_ALG_AAI_HMAC_PRECOMP,
Herbert Xu479bcc72015-07-30 17:53:17 +08002443 .geniv = true,
2444 },
2445 },
2446 {
2447 .aead = {
2448 .base = {
2449 .cra_name = "authenc(hmac(sha384),cbc(aes))",
2450 .cra_driver_name = "authenc-hmac-sha384-"
2451 "cbc-aes-caam",
2452 .cra_blocksize = AES_BLOCK_SIZE,
2453 },
2454 .setkey = aead_setkey,
2455 .setauthsize = aead_setauthsize,
2456 .encrypt = aead_encrypt,
2457 .decrypt = aead_decrypt,
2458 .ivsize = AES_BLOCK_SIZE,
2459 .maxauthsize = SHA384_DIGEST_SIZE,
2460 },
2461 .caam = {
2462 .class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_CBC,
2463 .class2_alg_type = OP_ALG_ALGSEL_SHA384 |
2464 OP_ALG_AAI_HMAC_PRECOMP,
Herbert Xu479bcc72015-07-30 17:53:17 +08002465 },
2466 },
2467 {
2468 .aead = {
2469 .base = {
2470 .cra_name = "echainiv(authenc(hmac(sha384),"
2471 "cbc(aes)))",
2472 .cra_driver_name = "echainiv-authenc-"
2473 "hmac-sha384-cbc-aes-caam",
2474 .cra_blocksize = AES_BLOCK_SIZE,
2475 },
2476 .setkey = aead_setkey,
2477 .setauthsize = aead_setauthsize,
2478 .encrypt = aead_encrypt,
Horia Geantă8b18e232016-08-29 14:52:14 +03002479 .decrypt = aead_decrypt,
Herbert Xu479bcc72015-07-30 17:53:17 +08002480 .ivsize = AES_BLOCK_SIZE,
2481 .maxauthsize = SHA384_DIGEST_SIZE,
2482 },
2483 .caam = {
2484 .class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_CBC,
2485 .class2_alg_type = OP_ALG_ALGSEL_SHA384 |
2486 OP_ALG_AAI_HMAC_PRECOMP,
Herbert Xu479bcc72015-07-30 17:53:17 +08002487 .geniv = true,
2488 },
2489 },
2490 {
2491 .aead = {
2492 .base = {
2493 .cra_name = "authenc(hmac(sha512),cbc(aes))",
2494 .cra_driver_name = "authenc-hmac-sha512-"
2495 "cbc-aes-caam",
2496 .cra_blocksize = AES_BLOCK_SIZE,
2497 },
2498 .setkey = aead_setkey,
2499 .setauthsize = aead_setauthsize,
2500 .encrypt = aead_encrypt,
2501 .decrypt = aead_decrypt,
2502 .ivsize = AES_BLOCK_SIZE,
2503 .maxauthsize = SHA512_DIGEST_SIZE,
2504 },
2505 .caam = {
2506 .class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_CBC,
2507 .class2_alg_type = OP_ALG_ALGSEL_SHA512 |
2508 OP_ALG_AAI_HMAC_PRECOMP,
Herbert Xu479bcc72015-07-30 17:53:17 +08002509 },
2510 },
2511 {
2512 .aead = {
2513 .base = {
2514 .cra_name = "echainiv(authenc(hmac(sha512),"
2515 "cbc(aes)))",
2516 .cra_driver_name = "echainiv-authenc-"
2517 "hmac-sha512-cbc-aes-caam",
2518 .cra_blocksize = AES_BLOCK_SIZE,
2519 },
2520 .setkey = aead_setkey,
2521 .setauthsize = aead_setauthsize,
2522 .encrypt = aead_encrypt,
Horia Geantă8b18e232016-08-29 14:52:14 +03002523 .decrypt = aead_decrypt,
Herbert Xu479bcc72015-07-30 17:53:17 +08002524 .ivsize = AES_BLOCK_SIZE,
2525 .maxauthsize = SHA512_DIGEST_SIZE,
2526 },
2527 .caam = {
2528 .class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_CBC,
2529 .class2_alg_type = OP_ALG_ALGSEL_SHA512 |
2530 OP_ALG_AAI_HMAC_PRECOMP,
Herbert Xu479bcc72015-07-30 17:53:17 +08002531 .geniv = true,
2532 },
2533 },
2534 {
2535 .aead = {
2536 .base = {
2537 .cra_name = "authenc(hmac(md5),cbc(des3_ede))",
2538 .cra_driver_name = "authenc-hmac-md5-"
2539 "cbc-des3_ede-caam",
2540 .cra_blocksize = DES3_EDE_BLOCK_SIZE,
2541 },
2542 .setkey = aead_setkey,
2543 .setauthsize = aead_setauthsize,
2544 .encrypt = aead_encrypt,
2545 .decrypt = aead_decrypt,
2546 .ivsize = DES3_EDE_BLOCK_SIZE,
2547 .maxauthsize = MD5_DIGEST_SIZE,
2548 },
2549 .caam = {
2550 .class1_alg_type = OP_ALG_ALGSEL_3DES | OP_ALG_AAI_CBC,
2551 .class2_alg_type = OP_ALG_ALGSEL_MD5 |
2552 OP_ALG_AAI_HMAC_PRECOMP,
Herbert Xu479bcc72015-07-30 17:53:17 +08002553 }
2554 },
2555 {
2556 .aead = {
2557 .base = {
2558 .cra_name = "echainiv(authenc(hmac(md5),"
2559 "cbc(des3_ede)))",
2560 .cra_driver_name = "echainiv-authenc-hmac-md5-"
2561 "cbc-des3_ede-caam",
2562 .cra_blocksize = DES3_EDE_BLOCK_SIZE,
2563 },
2564 .setkey = aead_setkey,
2565 .setauthsize = aead_setauthsize,
2566 .encrypt = aead_encrypt,
Horia Geantă8b18e232016-08-29 14:52:14 +03002567 .decrypt = aead_decrypt,
Herbert Xu479bcc72015-07-30 17:53:17 +08002568 .ivsize = DES3_EDE_BLOCK_SIZE,
2569 .maxauthsize = MD5_DIGEST_SIZE,
2570 },
2571 .caam = {
2572 .class1_alg_type = OP_ALG_ALGSEL_3DES | OP_ALG_AAI_CBC,
2573 .class2_alg_type = OP_ALG_ALGSEL_MD5 |
2574 OP_ALG_AAI_HMAC_PRECOMP,
Herbert Xu479bcc72015-07-30 17:53:17 +08002575 .geniv = true,
2576 }
2577 },
2578 {
2579 .aead = {
2580 .base = {
2581 .cra_name = "authenc(hmac(sha1),"
2582 "cbc(des3_ede))",
2583 .cra_driver_name = "authenc-hmac-sha1-"
2584 "cbc-des3_ede-caam",
2585 .cra_blocksize = DES3_EDE_BLOCK_SIZE,
2586 },
2587 .setkey = aead_setkey,
2588 .setauthsize = aead_setauthsize,
2589 .encrypt = aead_encrypt,
2590 .decrypt = aead_decrypt,
2591 .ivsize = DES3_EDE_BLOCK_SIZE,
2592 .maxauthsize = SHA1_DIGEST_SIZE,
2593 },
2594 .caam = {
2595 .class1_alg_type = OP_ALG_ALGSEL_3DES | OP_ALG_AAI_CBC,
2596 .class2_alg_type = OP_ALG_ALGSEL_SHA1 |
2597 OP_ALG_AAI_HMAC_PRECOMP,
Herbert Xu479bcc72015-07-30 17:53:17 +08002598 },
2599 },
2600 {
2601 .aead = {
2602 .base = {
2603 .cra_name = "echainiv(authenc(hmac(sha1),"
2604 "cbc(des3_ede)))",
2605 .cra_driver_name = "echainiv-authenc-"
2606 "hmac-sha1-"
2607 "cbc-des3_ede-caam",
2608 .cra_blocksize = DES3_EDE_BLOCK_SIZE,
2609 },
2610 .setkey = aead_setkey,
2611 .setauthsize = aead_setauthsize,
2612 .encrypt = aead_encrypt,
Horia Geantă8b18e232016-08-29 14:52:14 +03002613 .decrypt = aead_decrypt,
Herbert Xu479bcc72015-07-30 17:53:17 +08002614 .ivsize = DES3_EDE_BLOCK_SIZE,
2615 .maxauthsize = SHA1_DIGEST_SIZE,
2616 },
2617 .caam = {
2618 .class1_alg_type = OP_ALG_ALGSEL_3DES | OP_ALG_AAI_CBC,
2619 .class2_alg_type = OP_ALG_ALGSEL_SHA1 |
2620 OP_ALG_AAI_HMAC_PRECOMP,
Herbert Xu479bcc72015-07-30 17:53:17 +08002621 .geniv = true,
2622 },
2623 },
2624 {
2625 .aead = {
2626 .base = {
2627 .cra_name = "authenc(hmac(sha224),"
2628 "cbc(des3_ede))",
2629 .cra_driver_name = "authenc-hmac-sha224-"
2630 "cbc-des3_ede-caam",
2631 .cra_blocksize = DES3_EDE_BLOCK_SIZE,
2632 },
2633 .setkey = aead_setkey,
2634 .setauthsize = aead_setauthsize,
2635 .encrypt = aead_encrypt,
2636 .decrypt = aead_decrypt,
2637 .ivsize = DES3_EDE_BLOCK_SIZE,
2638 .maxauthsize = SHA224_DIGEST_SIZE,
2639 },
2640 .caam = {
2641 .class1_alg_type = OP_ALG_ALGSEL_3DES | OP_ALG_AAI_CBC,
2642 .class2_alg_type = OP_ALG_ALGSEL_SHA224 |
2643 OP_ALG_AAI_HMAC_PRECOMP,
Herbert Xu479bcc72015-07-30 17:53:17 +08002644 },
2645 },
2646 {
2647 .aead = {
2648 .base = {
2649 .cra_name = "echainiv(authenc(hmac(sha224),"
2650 "cbc(des3_ede)))",
2651 .cra_driver_name = "echainiv-authenc-"
2652 "hmac-sha224-"
2653 "cbc-des3_ede-caam",
2654 .cra_blocksize = DES3_EDE_BLOCK_SIZE,
2655 },
2656 .setkey = aead_setkey,
2657 .setauthsize = aead_setauthsize,
2658 .encrypt = aead_encrypt,
Horia Geantă8b18e232016-08-29 14:52:14 +03002659 .decrypt = aead_decrypt,
Herbert Xu479bcc72015-07-30 17:53:17 +08002660 .ivsize = DES3_EDE_BLOCK_SIZE,
2661 .maxauthsize = SHA224_DIGEST_SIZE,
2662 },
2663 .caam = {
2664 .class1_alg_type = OP_ALG_ALGSEL_3DES | OP_ALG_AAI_CBC,
2665 .class2_alg_type = OP_ALG_ALGSEL_SHA224 |
2666 OP_ALG_AAI_HMAC_PRECOMP,
Herbert Xu479bcc72015-07-30 17:53:17 +08002667 .geniv = true,
2668 },
2669 },
2670 {
2671 .aead = {
2672 .base = {
2673 .cra_name = "authenc(hmac(sha256),"
2674 "cbc(des3_ede))",
2675 .cra_driver_name = "authenc-hmac-sha256-"
2676 "cbc-des3_ede-caam",
2677 .cra_blocksize = DES3_EDE_BLOCK_SIZE,
2678 },
2679 .setkey = aead_setkey,
2680 .setauthsize = aead_setauthsize,
2681 .encrypt = aead_encrypt,
2682 .decrypt = aead_decrypt,
2683 .ivsize = DES3_EDE_BLOCK_SIZE,
2684 .maxauthsize = SHA256_DIGEST_SIZE,
2685 },
2686 .caam = {
2687 .class1_alg_type = OP_ALG_ALGSEL_3DES | OP_ALG_AAI_CBC,
2688 .class2_alg_type = OP_ALG_ALGSEL_SHA256 |
2689 OP_ALG_AAI_HMAC_PRECOMP,
Herbert Xu479bcc72015-07-30 17:53:17 +08002690 },
2691 },
2692 {
2693 .aead = {
2694 .base = {
2695 .cra_name = "echainiv(authenc(hmac(sha256),"
2696 "cbc(des3_ede)))",
2697 .cra_driver_name = "echainiv-authenc-"
2698 "hmac-sha256-"
2699 "cbc-des3_ede-caam",
2700 .cra_blocksize = DES3_EDE_BLOCK_SIZE,
2701 },
2702 .setkey = aead_setkey,
2703 .setauthsize = aead_setauthsize,
2704 .encrypt = aead_encrypt,
Horia Geantă8b18e232016-08-29 14:52:14 +03002705 .decrypt = aead_decrypt,
Herbert Xu479bcc72015-07-30 17:53:17 +08002706 .ivsize = DES3_EDE_BLOCK_SIZE,
2707 .maxauthsize = SHA256_DIGEST_SIZE,
2708 },
2709 .caam = {
2710 .class1_alg_type = OP_ALG_ALGSEL_3DES | OP_ALG_AAI_CBC,
2711 .class2_alg_type = OP_ALG_ALGSEL_SHA256 |
2712 OP_ALG_AAI_HMAC_PRECOMP,
Herbert Xu479bcc72015-07-30 17:53:17 +08002713 .geniv = true,
2714 },
2715 },
2716 {
2717 .aead = {
2718 .base = {
2719 .cra_name = "authenc(hmac(sha384),"
2720 "cbc(des3_ede))",
2721 .cra_driver_name = "authenc-hmac-sha384-"
2722 "cbc-des3_ede-caam",
2723 .cra_blocksize = DES3_EDE_BLOCK_SIZE,
2724 },
2725 .setkey = aead_setkey,
2726 .setauthsize = aead_setauthsize,
2727 .encrypt = aead_encrypt,
2728 .decrypt = aead_decrypt,
2729 .ivsize = DES3_EDE_BLOCK_SIZE,
2730 .maxauthsize = SHA384_DIGEST_SIZE,
2731 },
2732 .caam = {
2733 .class1_alg_type = OP_ALG_ALGSEL_3DES | OP_ALG_AAI_CBC,
2734 .class2_alg_type = OP_ALG_ALGSEL_SHA384 |
2735 OP_ALG_AAI_HMAC_PRECOMP,
Herbert Xu479bcc72015-07-30 17:53:17 +08002736 },
2737 },
2738 {
2739 .aead = {
2740 .base = {
2741 .cra_name = "echainiv(authenc(hmac(sha384),"
2742 "cbc(des3_ede)))",
2743 .cra_driver_name = "echainiv-authenc-"
2744 "hmac-sha384-"
2745 "cbc-des3_ede-caam",
2746 .cra_blocksize = DES3_EDE_BLOCK_SIZE,
2747 },
2748 .setkey = aead_setkey,
2749 .setauthsize = aead_setauthsize,
2750 .encrypt = aead_encrypt,
Horia Geantă8b18e232016-08-29 14:52:14 +03002751 .decrypt = aead_decrypt,
Herbert Xu479bcc72015-07-30 17:53:17 +08002752 .ivsize = DES3_EDE_BLOCK_SIZE,
2753 .maxauthsize = SHA384_DIGEST_SIZE,
2754 },
2755 .caam = {
2756 .class1_alg_type = OP_ALG_ALGSEL_3DES | OP_ALG_AAI_CBC,
2757 .class2_alg_type = OP_ALG_ALGSEL_SHA384 |
2758 OP_ALG_AAI_HMAC_PRECOMP,
Herbert Xu479bcc72015-07-30 17:53:17 +08002759 .geniv = true,
2760 },
2761 },
2762 {
2763 .aead = {
2764 .base = {
2765 .cra_name = "authenc(hmac(sha512),"
2766 "cbc(des3_ede))",
2767 .cra_driver_name = "authenc-hmac-sha512-"
2768 "cbc-des3_ede-caam",
2769 .cra_blocksize = DES3_EDE_BLOCK_SIZE,
2770 },
2771 .setkey = aead_setkey,
2772 .setauthsize = aead_setauthsize,
2773 .encrypt = aead_encrypt,
2774 .decrypt = aead_decrypt,
2775 .ivsize = DES3_EDE_BLOCK_SIZE,
2776 .maxauthsize = SHA512_DIGEST_SIZE,
2777 },
2778 .caam = {
2779 .class1_alg_type = OP_ALG_ALGSEL_3DES | OP_ALG_AAI_CBC,
2780 .class2_alg_type = OP_ALG_ALGSEL_SHA512 |
2781 OP_ALG_AAI_HMAC_PRECOMP,
Herbert Xu479bcc72015-07-30 17:53:17 +08002782 },
2783 },
2784 {
2785 .aead = {
2786 .base = {
2787 .cra_name = "echainiv(authenc(hmac(sha512),"
2788 "cbc(des3_ede)))",
2789 .cra_driver_name = "echainiv-authenc-"
2790 "hmac-sha512-"
2791 "cbc-des3_ede-caam",
2792 .cra_blocksize = DES3_EDE_BLOCK_SIZE,
2793 },
2794 .setkey = aead_setkey,
2795 .setauthsize = aead_setauthsize,
2796 .encrypt = aead_encrypt,
Horia Geantă8b18e232016-08-29 14:52:14 +03002797 .decrypt = aead_decrypt,
Herbert Xu479bcc72015-07-30 17:53:17 +08002798 .ivsize = DES3_EDE_BLOCK_SIZE,
2799 .maxauthsize = SHA512_DIGEST_SIZE,
2800 },
2801 .caam = {
2802 .class1_alg_type = OP_ALG_ALGSEL_3DES | OP_ALG_AAI_CBC,
2803 .class2_alg_type = OP_ALG_ALGSEL_SHA512 |
2804 OP_ALG_AAI_HMAC_PRECOMP,
Herbert Xu479bcc72015-07-30 17:53:17 +08002805 .geniv = true,
2806 },
2807 },
2808 {
2809 .aead = {
2810 .base = {
2811 .cra_name = "authenc(hmac(md5),cbc(des))",
2812 .cra_driver_name = "authenc-hmac-md5-"
2813 "cbc-des-caam",
2814 .cra_blocksize = DES_BLOCK_SIZE,
2815 },
2816 .setkey = aead_setkey,
2817 .setauthsize = aead_setauthsize,
2818 .encrypt = aead_encrypt,
2819 .decrypt = aead_decrypt,
2820 .ivsize = DES_BLOCK_SIZE,
2821 .maxauthsize = MD5_DIGEST_SIZE,
2822 },
2823 .caam = {
2824 .class1_alg_type = OP_ALG_ALGSEL_DES | OP_ALG_AAI_CBC,
2825 .class2_alg_type = OP_ALG_ALGSEL_MD5 |
2826 OP_ALG_AAI_HMAC_PRECOMP,
Herbert Xu479bcc72015-07-30 17:53:17 +08002827 },
2828 },
2829 {
2830 .aead = {
2831 .base = {
2832 .cra_name = "echainiv(authenc(hmac(md5),"
2833 "cbc(des)))",
2834 .cra_driver_name = "echainiv-authenc-hmac-md5-"
2835 "cbc-des-caam",
2836 .cra_blocksize = DES_BLOCK_SIZE,
2837 },
2838 .setkey = aead_setkey,
2839 .setauthsize = aead_setauthsize,
2840 .encrypt = aead_encrypt,
Horia Geantă8b18e232016-08-29 14:52:14 +03002841 .decrypt = aead_decrypt,
Herbert Xu479bcc72015-07-30 17:53:17 +08002842 .ivsize = DES_BLOCK_SIZE,
2843 .maxauthsize = MD5_DIGEST_SIZE,
2844 },
2845 .caam = {
2846 .class1_alg_type = OP_ALG_ALGSEL_DES | OP_ALG_AAI_CBC,
2847 .class2_alg_type = OP_ALG_ALGSEL_MD5 |
2848 OP_ALG_AAI_HMAC_PRECOMP,
Herbert Xu479bcc72015-07-30 17:53:17 +08002849 .geniv = true,
2850 },
2851 },
2852 {
2853 .aead = {
2854 .base = {
2855 .cra_name = "authenc(hmac(sha1),cbc(des))",
2856 .cra_driver_name = "authenc-hmac-sha1-"
2857 "cbc-des-caam",
2858 .cra_blocksize = DES_BLOCK_SIZE,
2859 },
2860 .setkey = aead_setkey,
2861 .setauthsize = aead_setauthsize,
2862 .encrypt = aead_encrypt,
2863 .decrypt = aead_decrypt,
2864 .ivsize = DES_BLOCK_SIZE,
2865 .maxauthsize = SHA1_DIGEST_SIZE,
2866 },
2867 .caam = {
2868 .class1_alg_type = OP_ALG_ALGSEL_DES | OP_ALG_AAI_CBC,
2869 .class2_alg_type = OP_ALG_ALGSEL_SHA1 |
2870 OP_ALG_AAI_HMAC_PRECOMP,
Herbert Xu479bcc72015-07-30 17:53:17 +08002871 },
2872 },
2873 {
2874 .aead = {
2875 .base = {
2876 .cra_name = "echainiv(authenc(hmac(sha1),"
2877 "cbc(des)))",
2878 .cra_driver_name = "echainiv-authenc-"
2879 "hmac-sha1-cbc-des-caam",
2880 .cra_blocksize = DES_BLOCK_SIZE,
2881 },
2882 .setkey = aead_setkey,
2883 .setauthsize = aead_setauthsize,
2884 .encrypt = aead_encrypt,
Horia Geantă8b18e232016-08-29 14:52:14 +03002885 .decrypt = aead_decrypt,
Herbert Xu479bcc72015-07-30 17:53:17 +08002886 .ivsize = DES_BLOCK_SIZE,
2887 .maxauthsize = SHA1_DIGEST_SIZE,
2888 },
2889 .caam = {
2890 .class1_alg_type = OP_ALG_ALGSEL_DES | OP_ALG_AAI_CBC,
2891 .class2_alg_type = OP_ALG_ALGSEL_SHA1 |
2892 OP_ALG_AAI_HMAC_PRECOMP,
Herbert Xu479bcc72015-07-30 17:53:17 +08002893 .geniv = true,
2894 },
2895 },
2896 {
2897 .aead = {
2898 .base = {
2899 .cra_name = "authenc(hmac(sha224),cbc(des))",
2900 .cra_driver_name = "authenc-hmac-sha224-"
2901 "cbc-des-caam",
2902 .cra_blocksize = DES_BLOCK_SIZE,
2903 },
2904 .setkey = aead_setkey,
2905 .setauthsize = aead_setauthsize,
2906 .encrypt = aead_encrypt,
2907 .decrypt = aead_decrypt,
2908 .ivsize = DES_BLOCK_SIZE,
2909 .maxauthsize = SHA224_DIGEST_SIZE,
2910 },
2911 .caam = {
2912 .class1_alg_type = OP_ALG_ALGSEL_DES | OP_ALG_AAI_CBC,
2913 .class2_alg_type = OP_ALG_ALGSEL_SHA224 |
2914 OP_ALG_AAI_HMAC_PRECOMP,
Herbert Xu479bcc72015-07-30 17:53:17 +08002915 },
2916 },
2917 {
2918 .aead = {
2919 .base = {
2920 .cra_name = "echainiv(authenc(hmac(sha224),"
2921 "cbc(des)))",
2922 .cra_driver_name = "echainiv-authenc-"
2923 "hmac-sha224-cbc-des-caam",
2924 .cra_blocksize = DES_BLOCK_SIZE,
2925 },
2926 .setkey = aead_setkey,
2927 .setauthsize = aead_setauthsize,
2928 .encrypt = aead_encrypt,
Horia Geantă8b18e232016-08-29 14:52:14 +03002929 .decrypt = aead_decrypt,
Herbert Xu479bcc72015-07-30 17:53:17 +08002930 .ivsize = DES_BLOCK_SIZE,
2931 .maxauthsize = SHA224_DIGEST_SIZE,
2932 },
2933 .caam = {
2934 .class1_alg_type = OP_ALG_ALGSEL_DES | OP_ALG_AAI_CBC,
2935 .class2_alg_type = OP_ALG_ALGSEL_SHA224 |
2936 OP_ALG_AAI_HMAC_PRECOMP,
Herbert Xu479bcc72015-07-30 17:53:17 +08002937 .geniv = true,
2938 },
2939 },
2940 {
2941 .aead = {
2942 .base = {
2943 .cra_name = "authenc(hmac(sha256),cbc(des))",
2944 .cra_driver_name = "authenc-hmac-sha256-"
2945 "cbc-des-caam",
2946 .cra_blocksize = DES_BLOCK_SIZE,
2947 },
2948 .setkey = aead_setkey,
2949 .setauthsize = aead_setauthsize,
2950 .encrypt = aead_encrypt,
2951 .decrypt = aead_decrypt,
2952 .ivsize = DES_BLOCK_SIZE,
2953 .maxauthsize = SHA256_DIGEST_SIZE,
2954 },
2955 .caam = {
2956 .class1_alg_type = OP_ALG_ALGSEL_DES | OP_ALG_AAI_CBC,
2957 .class2_alg_type = OP_ALG_ALGSEL_SHA256 |
2958 OP_ALG_AAI_HMAC_PRECOMP,
Herbert Xu479bcc72015-07-30 17:53:17 +08002959 },
2960 },
2961 {
2962 .aead = {
2963 .base = {
2964 .cra_name = "echainiv(authenc(hmac(sha256),"
2965 "cbc(des)))",
2966 .cra_driver_name = "echainiv-authenc-"
2967 "hmac-sha256-cbc-des-caam",
2968 .cra_blocksize = DES_BLOCK_SIZE,
2969 },
2970 .setkey = aead_setkey,
2971 .setauthsize = aead_setauthsize,
2972 .encrypt = aead_encrypt,
Horia Geantă8b18e232016-08-29 14:52:14 +03002973 .decrypt = aead_decrypt,
Herbert Xu479bcc72015-07-30 17:53:17 +08002974 .ivsize = DES_BLOCK_SIZE,
2975 .maxauthsize = SHA256_DIGEST_SIZE,
2976 },
2977 .caam = {
2978 .class1_alg_type = OP_ALG_ALGSEL_DES | OP_ALG_AAI_CBC,
2979 .class2_alg_type = OP_ALG_ALGSEL_SHA256 |
2980 OP_ALG_AAI_HMAC_PRECOMP,
Herbert Xu479bcc72015-07-30 17:53:17 +08002981 .geniv = true,
2982 },
2983 },
2984 {
2985 .aead = {
2986 .base = {
2987 .cra_name = "authenc(hmac(sha384),cbc(des))",
2988 .cra_driver_name = "authenc-hmac-sha384-"
2989 "cbc-des-caam",
2990 .cra_blocksize = DES_BLOCK_SIZE,
2991 },
2992 .setkey = aead_setkey,
2993 .setauthsize = aead_setauthsize,
2994 .encrypt = aead_encrypt,
2995 .decrypt = aead_decrypt,
2996 .ivsize = DES_BLOCK_SIZE,
2997 .maxauthsize = SHA384_DIGEST_SIZE,
2998 },
2999 .caam = {
3000 .class1_alg_type = OP_ALG_ALGSEL_DES | OP_ALG_AAI_CBC,
3001 .class2_alg_type = OP_ALG_ALGSEL_SHA384 |
3002 OP_ALG_AAI_HMAC_PRECOMP,
Herbert Xu479bcc72015-07-30 17:53:17 +08003003 },
3004 },
3005 {
3006 .aead = {
3007 .base = {
3008 .cra_name = "echainiv(authenc(hmac(sha384),"
3009 "cbc(des)))",
3010 .cra_driver_name = "echainiv-authenc-"
3011 "hmac-sha384-cbc-des-caam",
3012 .cra_blocksize = DES_BLOCK_SIZE,
3013 },
3014 .setkey = aead_setkey,
3015 .setauthsize = aead_setauthsize,
3016 .encrypt = aead_encrypt,
Horia Geantă8b18e232016-08-29 14:52:14 +03003017 .decrypt = aead_decrypt,
Herbert Xu479bcc72015-07-30 17:53:17 +08003018 .ivsize = DES_BLOCK_SIZE,
3019 .maxauthsize = SHA384_DIGEST_SIZE,
3020 },
3021 .caam = {
3022 .class1_alg_type = OP_ALG_ALGSEL_DES | OP_ALG_AAI_CBC,
3023 .class2_alg_type = OP_ALG_ALGSEL_SHA384 |
3024 OP_ALG_AAI_HMAC_PRECOMP,
Herbert Xu479bcc72015-07-30 17:53:17 +08003025 .geniv = true,
3026 },
3027 },
3028 {
3029 .aead = {
3030 .base = {
3031 .cra_name = "authenc(hmac(sha512),cbc(des))",
3032 .cra_driver_name = "authenc-hmac-sha512-"
3033 "cbc-des-caam",
3034 .cra_blocksize = DES_BLOCK_SIZE,
3035 },
3036 .setkey = aead_setkey,
3037 .setauthsize = aead_setauthsize,
3038 .encrypt = aead_encrypt,
3039 .decrypt = aead_decrypt,
3040 .ivsize = DES_BLOCK_SIZE,
3041 .maxauthsize = SHA512_DIGEST_SIZE,
3042 },
3043 .caam = {
3044 .class1_alg_type = OP_ALG_ALGSEL_DES | OP_ALG_AAI_CBC,
3045 .class2_alg_type = OP_ALG_ALGSEL_SHA512 |
3046 OP_ALG_AAI_HMAC_PRECOMP,
Herbert Xu479bcc72015-07-30 17:53:17 +08003047 },
3048 },
3049 {
3050 .aead = {
3051 .base = {
3052 .cra_name = "echainiv(authenc(hmac(sha512),"
3053 "cbc(des)))",
3054 .cra_driver_name = "echainiv-authenc-"
3055 "hmac-sha512-cbc-des-caam",
3056 .cra_blocksize = DES_BLOCK_SIZE,
3057 },
3058 .setkey = aead_setkey,
3059 .setauthsize = aead_setauthsize,
3060 .encrypt = aead_encrypt,
Horia Geantă8b18e232016-08-29 14:52:14 +03003061 .decrypt = aead_decrypt,
Herbert Xu479bcc72015-07-30 17:53:17 +08003062 .ivsize = DES_BLOCK_SIZE,
3063 .maxauthsize = SHA512_DIGEST_SIZE,
3064 },
3065 .caam = {
3066 .class1_alg_type = OP_ALG_ALGSEL_DES | OP_ALG_AAI_CBC,
3067 .class2_alg_type = OP_ALG_ALGSEL_SHA512 |
3068 OP_ALG_AAI_HMAC_PRECOMP,
Herbert Xu479bcc72015-07-30 17:53:17 +08003069 .geniv = true,
3070 },
3071 },
3072 {
3073 .aead = {
3074 .base = {
3075 .cra_name = "authenc(hmac(md5),"
3076 "rfc3686(ctr(aes)))",
3077 .cra_driver_name = "authenc-hmac-md5-"
3078 "rfc3686-ctr-aes-caam",
3079 .cra_blocksize = 1,
3080 },
3081 .setkey = aead_setkey,
3082 .setauthsize = aead_setauthsize,
3083 .encrypt = aead_encrypt,
3084 .decrypt = aead_decrypt,
3085 .ivsize = CTR_RFC3686_IV_SIZE,
3086 .maxauthsize = MD5_DIGEST_SIZE,
3087 },
3088 .caam = {
3089 .class1_alg_type = OP_ALG_ALGSEL_AES |
3090 OP_ALG_AAI_CTR_MOD128,
3091 .class2_alg_type = OP_ALG_ALGSEL_MD5 |
3092 OP_ALG_AAI_HMAC_PRECOMP,
Herbert Xu479bcc72015-07-30 17:53:17 +08003093 .rfc3686 = true,
3094 },
3095 },
3096 {
3097 .aead = {
3098 .base = {
3099 .cra_name = "seqiv(authenc("
3100 "hmac(md5),rfc3686(ctr(aes))))",
3101 .cra_driver_name = "seqiv-authenc-hmac-md5-"
3102 "rfc3686-ctr-aes-caam",
3103 .cra_blocksize = 1,
3104 },
3105 .setkey = aead_setkey,
3106 .setauthsize = aead_setauthsize,
3107 .encrypt = aead_encrypt,
Horia Geantă8b18e232016-08-29 14:52:14 +03003108 .decrypt = aead_decrypt,
Herbert Xu479bcc72015-07-30 17:53:17 +08003109 .ivsize = CTR_RFC3686_IV_SIZE,
3110 .maxauthsize = MD5_DIGEST_SIZE,
3111 },
3112 .caam = {
3113 .class1_alg_type = OP_ALG_ALGSEL_AES |
3114 OP_ALG_AAI_CTR_MOD128,
3115 .class2_alg_type = OP_ALG_ALGSEL_MD5 |
3116 OP_ALG_AAI_HMAC_PRECOMP,
Herbert Xu479bcc72015-07-30 17:53:17 +08003117 .rfc3686 = true,
3118 .geniv = true,
3119 },
3120 },
3121 {
3122 .aead = {
3123 .base = {
3124 .cra_name = "authenc(hmac(sha1),"
3125 "rfc3686(ctr(aes)))",
3126 .cra_driver_name = "authenc-hmac-sha1-"
3127 "rfc3686-ctr-aes-caam",
3128 .cra_blocksize = 1,
3129 },
3130 .setkey = aead_setkey,
3131 .setauthsize = aead_setauthsize,
3132 .encrypt = aead_encrypt,
3133 .decrypt = aead_decrypt,
3134 .ivsize = CTR_RFC3686_IV_SIZE,
3135 .maxauthsize = SHA1_DIGEST_SIZE,
3136 },
3137 .caam = {
3138 .class1_alg_type = OP_ALG_ALGSEL_AES |
3139 OP_ALG_AAI_CTR_MOD128,
3140 .class2_alg_type = OP_ALG_ALGSEL_SHA1 |
3141 OP_ALG_AAI_HMAC_PRECOMP,
Herbert Xu479bcc72015-07-30 17:53:17 +08003142 .rfc3686 = true,
3143 },
3144 },
3145 {
3146 .aead = {
3147 .base = {
3148 .cra_name = "seqiv(authenc("
3149 "hmac(sha1),rfc3686(ctr(aes))))",
3150 .cra_driver_name = "seqiv-authenc-hmac-sha1-"
3151 "rfc3686-ctr-aes-caam",
3152 .cra_blocksize = 1,
3153 },
3154 .setkey = aead_setkey,
3155 .setauthsize = aead_setauthsize,
3156 .encrypt = aead_encrypt,
Horia Geantă8b18e232016-08-29 14:52:14 +03003157 .decrypt = aead_decrypt,
Herbert Xu479bcc72015-07-30 17:53:17 +08003158 .ivsize = CTR_RFC3686_IV_SIZE,
3159 .maxauthsize = SHA1_DIGEST_SIZE,
3160 },
3161 .caam = {
3162 .class1_alg_type = OP_ALG_ALGSEL_AES |
3163 OP_ALG_AAI_CTR_MOD128,
3164 .class2_alg_type = OP_ALG_ALGSEL_SHA1 |
3165 OP_ALG_AAI_HMAC_PRECOMP,
Herbert Xu479bcc72015-07-30 17:53:17 +08003166 .rfc3686 = true,
3167 .geniv = true,
3168 },
3169 },
3170 {
3171 .aead = {
3172 .base = {
3173 .cra_name = "authenc(hmac(sha224),"
3174 "rfc3686(ctr(aes)))",
3175 .cra_driver_name = "authenc-hmac-sha224-"
3176 "rfc3686-ctr-aes-caam",
3177 .cra_blocksize = 1,
3178 },
3179 .setkey = aead_setkey,
3180 .setauthsize = aead_setauthsize,
3181 .encrypt = aead_encrypt,
3182 .decrypt = aead_decrypt,
3183 .ivsize = CTR_RFC3686_IV_SIZE,
3184 .maxauthsize = SHA224_DIGEST_SIZE,
3185 },
3186 .caam = {
3187 .class1_alg_type = OP_ALG_ALGSEL_AES |
3188 OP_ALG_AAI_CTR_MOD128,
3189 .class2_alg_type = OP_ALG_ALGSEL_SHA224 |
3190 OP_ALG_AAI_HMAC_PRECOMP,
Herbert Xu479bcc72015-07-30 17:53:17 +08003191 .rfc3686 = true,
3192 },
3193 },
3194 {
3195 .aead = {
3196 .base = {
3197 .cra_name = "seqiv(authenc("
3198 "hmac(sha224),rfc3686(ctr(aes))))",
3199 .cra_driver_name = "seqiv-authenc-hmac-sha224-"
3200 "rfc3686-ctr-aes-caam",
3201 .cra_blocksize = 1,
3202 },
3203 .setkey = aead_setkey,
3204 .setauthsize = aead_setauthsize,
3205 .encrypt = aead_encrypt,
Horia Geantă8b18e232016-08-29 14:52:14 +03003206 .decrypt = aead_decrypt,
Herbert Xu479bcc72015-07-30 17:53:17 +08003207 .ivsize = CTR_RFC3686_IV_SIZE,
3208 .maxauthsize = SHA224_DIGEST_SIZE,
3209 },
3210 .caam = {
3211 .class1_alg_type = OP_ALG_ALGSEL_AES |
3212 OP_ALG_AAI_CTR_MOD128,
3213 .class2_alg_type = OP_ALG_ALGSEL_SHA224 |
3214 OP_ALG_AAI_HMAC_PRECOMP,
Herbert Xu479bcc72015-07-30 17:53:17 +08003215 .rfc3686 = true,
3216 .geniv = true,
3217 },
3218 },
3219 {
3220 .aead = {
3221 .base = {
3222 .cra_name = "authenc(hmac(sha256),"
3223 "rfc3686(ctr(aes)))",
3224 .cra_driver_name = "authenc-hmac-sha256-"
3225 "rfc3686-ctr-aes-caam",
3226 .cra_blocksize = 1,
3227 },
3228 .setkey = aead_setkey,
3229 .setauthsize = aead_setauthsize,
3230 .encrypt = aead_encrypt,
3231 .decrypt = aead_decrypt,
3232 .ivsize = CTR_RFC3686_IV_SIZE,
3233 .maxauthsize = SHA256_DIGEST_SIZE,
3234 },
3235 .caam = {
3236 .class1_alg_type = OP_ALG_ALGSEL_AES |
3237 OP_ALG_AAI_CTR_MOD128,
3238 .class2_alg_type = OP_ALG_ALGSEL_SHA256 |
3239 OP_ALG_AAI_HMAC_PRECOMP,
Herbert Xu479bcc72015-07-30 17:53:17 +08003240 .rfc3686 = true,
3241 },
3242 },
3243 {
3244 .aead = {
3245 .base = {
3246 .cra_name = "seqiv(authenc(hmac(sha256),"
3247 "rfc3686(ctr(aes))))",
3248 .cra_driver_name = "seqiv-authenc-hmac-sha256-"
3249 "rfc3686-ctr-aes-caam",
3250 .cra_blocksize = 1,
3251 },
3252 .setkey = aead_setkey,
3253 .setauthsize = aead_setauthsize,
3254 .encrypt = aead_encrypt,
Horia Geantă8b18e232016-08-29 14:52:14 +03003255 .decrypt = aead_decrypt,
Herbert Xu479bcc72015-07-30 17:53:17 +08003256 .ivsize = CTR_RFC3686_IV_SIZE,
3257 .maxauthsize = SHA256_DIGEST_SIZE,
3258 },
3259 .caam = {
3260 .class1_alg_type = OP_ALG_ALGSEL_AES |
3261 OP_ALG_AAI_CTR_MOD128,
3262 .class2_alg_type = OP_ALG_ALGSEL_SHA256 |
3263 OP_ALG_AAI_HMAC_PRECOMP,
Herbert Xu479bcc72015-07-30 17:53:17 +08003264 .rfc3686 = true,
3265 .geniv = true,
3266 },
3267 },
3268 {
3269 .aead = {
3270 .base = {
3271 .cra_name = "authenc(hmac(sha384),"
3272 "rfc3686(ctr(aes)))",
3273 .cra_driver_name = "authenc-hmac-sha384-"
3274 "rfc3686-ctr-aes-caam",
3275 .cra_blocksize = 1,
3276 },
3277 .setkey = aead_setkey,
3278 .setauthsize = aead_setauthsize,
3279 .encrypt = aead_encrypt,
3280 .decrypt = aead_decrypt,
3281 .ivsize = CTR_RFC3686_IV_SIZE,
3282 .maxauthsize = SHA384_DIGEST_SIZE,
3283 },
3284 .caam = {
3285 .class1_alg_type = OP_ALG_ALGSEL_AES |
3286 OP_ALG_AAI_CTR_MOD128,
3287 .class2_alg_type = OP_ALG_ALGSEL_SHA384 |
3288 OP_ALG_AAI_HMAC_PRECOMP,
Herbert Xu479bcc72015-07-30 17:53:17 +08003289 .rfc3686 = true,
3290 },
3291 },
3292 {
3293 .aead = {
3294 .base = {
3295 .cra_name = "seqiv(authenc(hmac(sha384),"
3296 "rfc3686(ctr(aes))))",
3297 .cra_driver_name = "seqiv-authenc-hmac-sha384-"
3298 "rfc3686-ctr-aes-caam",
3299 .cra_blocksize = 1,
3300 },
3301 .setkey = aead_setkey,
3302 .setauthsize = aead_setauthsize,
3303 .encrypt = aead_encrypt,
Horia Geantă8b18e232016-08-29 14:52:14 +03003304 .decrypt = aead_decrypt,
Herbert Xu479bcc72015-07-30 17:53:17 +08003305 .ivsize = CTR_RFC3686_IV_SIZE,
3306 .maxauthsize = SHA384_DIGEST_SIZE,
3307 },
3308 .caam = {
3309 .class1_alg_type = OP_ALG_ALGSEL_AES |
3310 OP_ALG_AAI_CTR_MOD128,
3311 .class2_alg_type = OP_ALG_ALGSEL_SHA384 |
3312 OP_ALG_AAI_HMAC_PRECOMP,
Herbert Xu479bcc72015-07-30 17:53:17 +08003313 .rfc3686 = true,
3314 .geniv = true,
3315 },
3316 },
3317 {
3318 .aead = {
3319 .base = {
3320 .cra_name = "authenc(hmac(sha512),"
3321 "rfc3686(ctr(aes)))",
3322 .cra_driver_name = "authenc-hmac-sha512-"
3323 "rfc3686-ctr-aes-caam",
3324 .cra_blocksize = 1,
3325 },
3326 .setkey = aead_setkey,
3327 .setauthsize = aead_setauthsize,
3328 .encrypt = aead_encrypt,
3329 .decrypt = aead_decrypt,
3330 .ivsize = CTR_RFC3686_IV_SIZE,
3331 .maxauthsize = SHA512_DIGEST_SIZE,
3332 },
3333 .caam = {
3334 .class1_alg_type = OP_ALG_ALGSEL_AES |
3335 OP_ALG_AAI_CTR_MOD128,
3336 .class2_alg_type = OP_ALG_ALGSEL_SHA512 |
3337 OP_ALG_AAI_HMAC_PRECOMP,
Herbert Xu479bcc72015-07-30 17:53:17 +08003338 .rfc3686 = true,
3339 },
3340 },
3341 {
3342 .aead = {
3343 .base = {
3344 .cra_name = "seqiv(authenc(hmac(sha512),"
3345 "rfc3686(ctr(aes))))",
3346 .cra_driver_name = "seqiv-authenc-hmac-sha512-"
3347 "rfc3686-ctr-aes-caam",
3348 .cra_blocksize = 1,
3349 },
3350 .setkey = aead_setkey,
3351 .setauthsize = aead_setauthsize,
3352 .encrypt = aead_encrypt,
Horia Geantă8b18e232016-08-29 14:52:14 +03003353 .decrypt = aead_decrypt,
Herbert Xu479bcc72015-07-30 17:53:17 +08003354 .ivsize = CTR_RFC3686_IV_SIZE,
3355 .maxauthsize = SHA512_DIGEST_SIZE,
3356 },
3357 .caam = {
3358 .class1_alg_type = OP_ALG_ALGSEL_AES |
3359 OP_ALG_AAI_CTR_MOD128,
3360 .class2_alg_type = OP_ALG_ALGSEL_SHA512 |
3361 OP_ALG_AAI_HMAC_PRECOMP,
Herbert Xu479bcc72015-07-30 17:53:17 +08003362 .rfc3686 = true,
3363 .geniv = true,
3364 },
3365 },
Herbert Xuf2147b82015-06-16 13:54:23 +08003366};
3367
/*
 * caam_crypto_alg - registration wrapper pairing a generic crypto API
 * algorithm with the CAAM-specific parameters needed to build its
 * shared descriptors.
 */
struct caam_crypto_alg {
	struct crypto_alg crypto_alg;	/* algorithm exposed to the crypto API */
	struct list_head entry;		/* node in the driver's list of registered algs */
	struct caam_alg_entry caam;	/* CAAM class 1/2 OPERATION parameters */
};
3373
3374static int caam_init_common(struct caam_ctx *ctx, struct caam_alg_entry *caam)
3375{
3376 ctx->jrdev = caam_jr_alloc();
3377 if (IS_ERR(ctx->jrdev)) {
3378 pr_err("Job Ring Device allocation for transform failed\n");
3379 return PTR_ERR(ctx->jrdev);
3380 }
3381
3382 /* copy descriptor header template value */
Horia Geantădb576562016-11-22 15:44:04 +02003383 ctx->cdata.algtype = OP_TYPE_CLASS1_ALG | caam->class1_alg_type;
3384 ctx->adata.algtype = OP_TYPE_CLASS2_ALG | caam->class2_alg_type;
Herbert Xuf2147b82015-06-16 13:54:23 +08003385
3386 return 0;
3387}
3388
Kim Phillips8e8ec592011-03-13 16:54:26 +08003389static int caam_cra_init(struct crypto_tfm *tfm)
3390{
3391 struct crypto_alg *alg = tfm->__crt_alg;
3392 struct caam_crypto_alg *caam_alg =
3393 container_of(alg, struct caam_crypto_alg, crypto_alg);
3394 struct caam_ctx *ctx = crypto_tfm_ctx(tfm);
Kim Phillips8e8ec592011-03-13 16:54:26 +08003395
Herbert Xuf2147b82015-06-16 13:54:23 +08003396 return caam_init_common(ctx, &caam_alg->caam);
Kim Phillips8e8ec592011-03-13 16:54:26 +08003397}
3398
Herbert Xuf2147b82015-06-16 13:54:23 +08003399static int caam_aead_init(struct crypto_aead *tfm)
Kim Phillips8e8ec592011-03-13 16:54:26 +08003400{
Herbert Xuf2147b82015-06-16 13:54:23 +08003401 struct aead_alg *alg = crypto_aead_alg(tfm);
3402 struct caam_aead_alg *caam_alg =
3403 container_of(alg, struct caam_aead_alg, aead);
3404 struct caam_ctx *ctx = crypto_aead_ctx(tfm);
Kim Phillips8e8ec592011-03-13 16:54:26 +08003405
Herbert Xuf2147b82015-06-16 13:54:23 +08003406 return caam_init_common(ctx, &caam_alg->caam);
3407}
3408
3409static void caam_exit_common(struct caam_ctx *ctx)
3410{
Yuan Kang1acebad2011-07-15 11:21:42 +08003411 if (ctx->sh_desc_enc_dma &&
3412 !dma_mapping_error(ctx->jrdev, ctx->sh_desc_enc_dma))
3413 dma_unmap_single(ctx->jrdev, ctx->sh_desc_enc_dma,
3414 desc_bytes(ctx->sh_desc_enc), DMA_TO_DEVICE);
3415 if (ctx->sh_desc_dec_dma &&
3416 !dma_mapping_error(ctx->jrdev, ctx->sh_desc_dec_dma))
3417 dma_unmap_single(ctx->jrdev, ctx->sh_desc_dec_dma,
3418 desc_bytes(ctx->sh_desc_dec), DMA_TO_DEVICE);
3419 if (ctx->sh_desc_givenc_dma &&
3420 !dma_mapping_error(ctx->jrdev, ctx->sh_desc_givenc_dma))
3421 dma_unmap_single(ctx->jrdev, ctx->sh_desc_givenc_dma,
3422 desc_bytes(ctx->sh_desc_givenc),
Kim Phillips4427b1b2011-05-14 22:08:17 -05003423 DMA_TO_DEVICE);
Horia Geantaec31eed2014-03-14 17:48:30 +02003424 if (ctx->key_dma &&
3425 !dma_mapping_error(ctx->jrdev, ctx->key_dma))
3426 dma_unmap_single(ctx->jrdev, ctx->key_dma,
Horia Geantădb576562016-11-22 15:44:04 +02003427 ctx->cdata.keylen + ctx->adata.keylen_pad,
Horia Geantaec31eed2014-03-14 17:48:30 +02003428 DMA_TO_DEVICE);
Ruchika Guptacfc6f112013-10-25 12:01:03 +05303429
3430 caam_jr_free(ctx->jrdev);
Kim Phillips8e8ec592011-03-13 16:54:26 +08003431}
3432
/* crypto_alg ->cra_exit hook: delegate to the common teardown */
static void caam_cra_exit(struct crypto_tfm *tfm)
{
	struct caam_ctx *ctx = crypto_tfm_ctx(tfm);

	caam_exit_common(ctx);
}
3437
/* aead_alg ->exit hook: delegate to the common teardown */
static void caam_aead_exit(struct crypto_aead *tfm)
{
	struct caam_ctx *ctx = crypto_aead_ctx(tfm);

	caam_exit_common(ctx);
}
3442
Kim Phillips8e8ec592011-03-13 16:54:26 +08003443static void __exit caam_algapi_exit(void)
3444{
3445
Kim Phillips8e8ec592011-03-13 16:54:26 +08003446 struct caam_crypto_alg *t_alg, *n;
Herbert Xuf2147b82015-06-16 13:54:23 +08003447 int i;
3448
3449 for (i = 0; i < ARRAY_SIZE(driver_aeads); i++) {
3450 struct caam_aead_alg *t_alg = driver_aeads + i;
3451
3452 if (t_alg->registered)
3453 crypto_unregister_aead(&t_alg->aead);
3454 }
Kim Phillips8e8ec592011-03-13 16:54:26 +08003455
Ruchika Guptacfc6f112013-10-25 12:01:03 +05303456 if (!alg_list.next)
Kim Phillips8e8ec592011-03-13 16:54:26 +08003457 return;
3458
Ruchika Guptacfc6f112013-10-25 12:01:03 +05303459 list_for_each_entry_safe(t_alg, n, &alg_list, entry) {
Kim Phillips8e8ec592011-03-13 16:54:26 +08003460 crypto_unregister_alg(&t_alg->crypto_alg);
3461 list_del(&t_alg->entry);
3462 kfree(t_alg);
3463 }
Kim Phillips8e8ec592011-03-13 16:54:26 +08003464}
3465
Ruchika Guptacfc6f112013-10-25 12:01:03 +05303466static struct caam_crypto_alg *caam_alg_alloc(struct caam_alg_template
Kim Phillips8e8ec592011-03-13 16:54:26 +08003467 *template)
3468{
3469 struct caam_crypto_alg *t_alg;
3470 struct crypto_alg *alg;
3471
Fabio Estevam9c4f9732015-08-21 13:52:00 -03003472 t_alg = kzalloc(sizeof(*t_alg), GFP_KERNEL);
Kim Phillips8e8ec592011-03-13 16:54:26 +08003473 if (!t_alg) {
Ruchika Guptacfc6f112013-10-25 12:01:03 +05303474 pr_err("failed to allocate t_alg\n");
Kim Phillips8e8ec592011-03-13 16:54:26 +08003475 return ERR_PTR(-ENOMEM);
3476 }
3477
3478 alg = &t_alg->crypto_alg;
3479
3480 snprintf(alg->cra_name, CRYPTO_MAX_ALG_NAME, "%s", template->name);
3481 snprintf(alg->cra_driver_name, CRYPTO_MAX_ALG_NAME, "%s",
3482 template->driver_name);
3483 alg->cra_module = THIS_MODULE;
3484 alg->cra_init = caam_cra_init;
3485 alg->cra_exit = caam_cra_exit;
3486 alg->cra_priority = CAAM_CRA_PRIORITY;
Kim Phillips8e8ec592011-03-13 16:54:26 +08003487 alg->cra_blocksize = template->blocksize;
3488 alg->cra_alignmask = 0;
Kim Phillips8e8ec592011-03-13 16:54:26 +08003489 alg->cra_ctxsize = sizeof(struct caam_ctx);
Nikos Mavrogiannopoulosd912bb72011-11-01 13:39:56 +01003490 alg->cra_flags = CRYPTO_ALG_ASYNC | CRYPTO_ALG_KERN_DRIVER_ONLY |
3491 template->type;
Yuan Kang885e9e22011-07-15 11:21:41 +08003492 switch (template->type) {
Catalin Vasile7222d1a2014-10-31 12:45:38 +02003493 case CRYPTO_ALG_TYPE_GIVCIPHER:
3494 alg->cra_type = &crypto_givcipher_type;
3495 alg->cra_ablkcipher = template->template_ablkcipher;
3496 break;
Yuan Kangacdca312011-07-15 11:21:42 +08003497 case CRYPTO_ALG_TYPE_ABLKCIPHER:
3498 alg->cra_type = &crypto_ablkcipher_type;
3499 alg->cra_ablkcipher = template->template_ablkcipher;
3500 break;
Yuan Kang885e9e22011-07-15 11:21:41 +08003501 }
Kim Phillips8e8ec592011-03-13 16:54:26 +08003502
Herbert Xuf2147b82015-06-16 13:54:23 +08003503 t_alg->caam.class1_alg_type = template->class1_alg_type;
3504 t_alg->caam.class2_alg_type = template->class2_alg_type;
Kim Phillips8e8ec592011-03-13 16:54:26 +08003505
3506 return t_alg;
3507}
3508
Herbert Xuf2147b82015-06-16 13:54:23 +08003509static void caam_aead_alg_init(struct caam_aead_alg *t_alg)
3510{
3511 struct aead_alg *alg = &t_alg->aead;
3512
3513 alg->base.cra_module = THIS_MODULE;
3514 alg->base.cra_priority = CAAM_CRA_PRIORITY;
3515 alg->base.cra_ctxsize = sizeof(struct caam_ctx);
Herbert Xu5e4b8c12015-08-13 17:29:06 +08003516 alg->base.cra_flags = CRYPTO_ALG_ASYNC | CRYPTO_ALG_KERN_DRIVER_ONLY;
Herbert Xuf2147b82015-06-16 13:54:23 +08003517
3518 alg->init = caam_aead_init;
3519 alg->exit = caam_aead_exit;
3520}
3521
Kim Phillips8e8ec592011-03-13 16:54:26 +08003522static int __init caam_algapi_init(void)
3523{
Ruchika Gupta35af6402014-07-07 10:42:12 +05303524 struct device_node *dev_node;
3525 struct platform_device *pdev;
3526 struct device *ctrldev;
Victoria Milhoanbf834902015-08-05 11:28:48 -07003527 struct caam_drv_private *priv;
Kim Phillips8e8ec592011-03-13 16:54:26 +08003528 int i = 0, err = 0;
Victoria Milhoanbf834902015-08-05 11:28:48 -07003529 u32 cha_vid, cha_inst, des_inst, aes_inst, md_inst;
3530 unsigned int md_limit = SHA512_DIGEST_SIZE;
Herbert Xuf2147b82015-06-16 13:54:23 +08003531 bool registered = false;
Kim Phillips8e8ec592011-03-13 16:54:26 +08003532
Ruchika Gupta35af6402014-07-07 10:42:12 +05303533 dev_node = of_find_compatible_node(NULL, NULL, "fsl,sec-v4.0");
3534 if (!dev_node) {
3535 dev_node = of_find_compatible_node(NULL, NULL, "fsl,sec4.0");
3536 if (!dev_node)
3537 return -ENODEV;
3538 }
3539
3540 pdev = of_find_device_by_node(dev_node);
3541 if (!pdev) {
3542 of_node_put(dev_node);
3543 return -ENODEV;
3544 }
3545
3546 ctrldev = &pdev->dev;
3547 priv = dev_get_drvdata(ctrldev);
3548 of_node_put(dev_node);
3549
3550 /*
3551 * If priv is NULL, it's probably because the caam driver wasn't
3552 * properly initialized (e.g. RNG4 init failed). Thus, bail out here.
3553 */
3554 if (!priv)
3555 return -ENODEV;
3556
3557
Ruchika Guptacfc6f112013-10-25 12:01:03 +05303558 INIT_LIST_HEAD(&alg_list);
Kim Phillips8e8ec592011-03-13 16:54:26 +08003559
Victoria Milhoanbf834902015-08-05 11:28:48 -07003560 /*
3561 * Register crypto algorithms the device supports.
3562 * First, detect presence and attributes of DES, AES, and MD blocks.
3563 */
3564 cha_vid = rd_reg32(&priv->ctrl->perfmon.cha_id_ls);
3565 cha_inst = rd_reg32(&priv->ctrl->perfmon.cha_num_ls);
3566 des_inst = (cha_inst & CHA_ID_LS_DES_MASK) >> CHA_ID_LS_DES_SHIFT;
3567 aes_inst = (cha_inst & CHA_ID_LS_AES_MASK) >> CHA_ID_LS_AES_SHIFT;
3568 md_inst = (cha_inst & CHA_ID_LS_MD_MASK) >> CHA_ID_LS_MD_SHIFT;
Kim Phillips8e8ec592011-03-13 16:54:26 +08003569
Victoria Milhoanbf834902015-08-05 11:28:48 -07003570 /* If MD is present, limit digest size based on LP256 */
3571 if (md_inst && ((cha_vid & CHA_ID_LS_MD_MASK) == CHA_ID_LS_MD_LP256))
3572 md_limit = SHA256_DIGEST_SIZE;
3573
3574 for (i = 0; i < ARRAY_SIZE(driver_algs); i++) {
3575 struct caam_crypto_alg *t_alg;
3576 struct caam_alg_template *alg = driver_algs + i;
3577 u32 alg_sel = alg->class1_alg_type & OP_ALG_ALGSEL_MASK;
3578
3579 /* Skip DES algorithms if not supported by device */
3580 if (!des_inst &&
3581 ((alg_sel == OP_ALG_ALGSEL_3DES) ||
3582 (alg_sel == OP_ALG_ALGSEL_DES)))
3583 continue;
3584
3585 /* Skip AES algorithms if not supported by device */
3586 if (!aes_inst && (alg_sel == OP_ALG_ALGSEL_AES))
3587 continue;
3588
3589 t_alg = caam_alg_alloc(alg);
Kim Phillips8e8ec592011-03-13 16:54:26 +08003590 if (IS_ERR(t_alg)) {
3591 err = PTR_ERR(t_alg);
Victoria Milhoanbf834902015-08-05 11:28:48 -07003592 pr_warn("%s alg allocation failed\n", alg->driver_name);
Kim Phillips8e8ec592011-03-13 16:54:26 +08003593 continue;
3594 }
3595
3596 err = crypto_register_alg(&t_alg->crypto_alg);
3597 if (err) {
Ruchika Guptacfc6f112013-10-25 12:01:03 +05303598 pr_warn("%s alg registration failed\n",
Kim Phillips8e8ec592011-03-13 16:54:26 +08003599 t_alg->crypto_alg.cra_driver_name);
3600 kfree(t_alg);
Herbert Xuf2147b82015-06-16 13:54:23 +08003601 continue;
3602 }
3603
3604 list_add_tail(&t_alg->entry, &alg_list);
3605 registered = true;
Kim Phillips8e8ec592011-03-13 16:54:26 +08003606 }
Herbert Xuf2147b82015-06-16 13:54:23 +08003607
3608 for (i = 0; i < ARRAY_SIZE(driver_aeads); i++) {
3609 struct caam_aead_alg *t_alg = driver_aeads + i;
Victoria Milhoanbf834902015-08-05 11:28:48 -07003610 u32 c1_alg_sel = t_alg->caam.class1_alg_type &
3611 OP_ALG_ALGSEL_MASK;
3612 u32 c2_alg_sel = t_alg->caam.class2_alg_type &
3613 OP_ALG_ALGSEL_MASK;
3614 u32 alg_aai = t_alg->caam.class1_alg_type & OP_ALG_AAI_MASK;
3615
3616 /* Skip DES algorithms if not supported by device */
3617 if (!des_inst &&
3618 ((c1_alg_sel == OP_ALG_ALGSEL_3DES) ||
3619 (c1_alg_sel == OP_ALG_ALGSEL_DES)))
3620 continue;
3621
3622 /* Skip AES algorithms if not supported by device */
3623 if (!aes_inst && (c1_alg_sel == OP_ALG_ALGSEL_AES))
3624 continue;
3625
3626 /*
3627 * Check support for AES algorithms not available
3628 * on LP devices.
3629 */
3630 if ((cha_vid & CHA_ID_LS_AES_MASK) == CHA_ID_LS_AES_LP)
3631 if (alg_aai == OP_ALG_AAI_GCM)
3632 continue;
3633
3634 /*
3635 * Skip algorithms requiring message digests
3636 * if MD or MD size is not supported by device.
3637 */
3638 if (c2_alg_sel &&
3639 (!md_inst || (t_alg->aead.maxauthsize > md_limit)))
3640 continue;
Herbert Xuf2147b82015-06-16 13:54:23 +08003641
3642 caam_aead_alg_init(t_alg);
3643
3644 err = crypto_register_aead(&t_alg->aead);
3645 if (err) {
3646 pr_warn("%s alg registration failed\n",
3647 t_alg->aead.base.cra_driver_name);
3648 continue;
3649 }
3650
3651 t_alg->registered = true;
3652 registered = true;
3653 }
3654
3655 if (registered)
Ruchika Guptacfc6f112013-10-25 12:01:03 +05303656 pr_info("caam algorithms registered in /proc/crypto\n");
Kim Phillips8e8ec592011-03-13 16:54:26 +08003657
3658 return err;
3659}
3660
/* Standard module hookup and metadata */
module_init(caam_algapi_init);
module_exit(caam_algapi_exit);

MODULE_LICENSE("GPL");
MODULE_DESCRIPTION("FSL CAAM support for crypto API");
MODULE_AUTHOR("Freescale Semiconductor - NMG/STC");