blob: d679c8675f4ad1994b8e3e71b47fe318c611babe [file] [log] [blame]
Jussi Kivilinna937c30d2011-11-09 16:26:25 +02001/*
2 * Glue Code for SSE2 assembler versions of Serpent Cipher
3 *
4 * Copyright (c) 2011 Jussi Kivilinna <jussi.kivilinna@mbnet.fi>
5 *
6 * Glue code based on aesni-intel_glue.c by:
7 * Copyright (C) 2008, Intel Corp.
8 * Author: Huang Ying <ying.huang@intel.com>
9 *
10 * CBC & ECB parts based on code (crypto/cbc.c,ecb.c) by:
11 * Copyright (c) 2006 Herbert Xu <herbert@gondor.apana.org.au>
12 * CTR part based on code (crypto/ctr.c) by:
13 * (C) Copyright IBM Corp. 2007 - Joy Latten <latten@us.ibm.com>
14 *
15 * This program is free software; you can redistribute it and/or modify
16 * it under the terms of the GNU General Public License as published by
17 * the Free Software Foundation; either version 2 of the License, or
18 * (at your option) any later version.
19 *
20 * This program is distributed in the hope that it will be useful,
21 * but WITHOUT ANY WARRANTY; without even the implied warranty of
22 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
23 * GNU General Public License for more details.
24 *
25 * You should have received a copy of the GNU General Public License
26 * along with this program; if not, write to the Free Software
27 * Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307
28 * USA
29 *
30 */
31
32#include <linux/module.h>
33#include <linux/hardirq.h>
34#include <linux/types.h>
35#include <linux/crypto.h>
36#include <linux/err.h>
37#include <crypto/algapi.h>
38#include <crypto/serpent.h>
39#include <crypto/cryptd.h>
40#include <crypto/b128ops.h>
41#include <crypto/ctr.h>
Jussi Kivilinna18482052011-11-09 16:26:36 +020042#include <crypto/lrw.h>
Jussi Kivilinna5962f8b2011-11-09 16:26:41 +020043#include <crypto/xts.h>
Jussi Kivilinnad4af0e92012-06-18 14:07:45 +030044#include <asm/crypto/serpent-sse2.h>
Jussi Kivilinnaffaf9152012-06-18 14:06:58 +030045#include <asm/crypto/ablk_helper.h>
Jussi Kivilinna596d8752012-06-18 14:07:19 +030046#include <asm/crypto/glue_helper.h>
Jussi Kivilinna937c30d2011-11-09 16:26:25 +020047
Jussi Kivilinnae81792f2012-06-18 14:07:14 +030048static void serpent_decrypt_cbc_xway(void *ctx, u128 *dst, const u128 *src)
49{
50 u128 ivs[SERPENT_PARALLEL_BLOCKS - 1];
51 unsigned int j;
52
53 for (j = 0; j < SERPENT_PARALLEL_BLOCKS - 1; j++)
54 ivs[j] = src[j];
55
56 serpent_dec_blk_xway(ctx, (u8 *)dst, (u8 *)src);
57
58 for (j = 0; j < SERPENT_PARALLEL_BLOCKS - 1; j++)
59 u128_xor(dst + (j + 1), dst + (j + 1), ivs + j);
60}
61
62static void serpent_crypt_ctr(void *ctx, u128 *dst, const u128 *src, u128 *iv)
63{
64 be128 ctrblk;
65
66 u128_to_be128(&ctrblk, iv);
67 u128_inc(iv);
68
69 __serpent_encrypt(ctx, (u8 *)&ctrblk, (u8 *)&ctrblk);
70 u128_xor(dst, src, (u128 *)&ctrblk);
71}
72
73static void serpent_crypt_ctr_xway(void *ctx, u128 *dst, const u128 *src,
74 u128 *iv)
75{
76 be128 ctrblks[SERPENT_PARALLEL_BLOCKS];
77 unsigned int i;
78
79 for (i = 0; i < SERPENT_PARALLEL_BLOCKS; i++) {
80 if (dst != src)
81 dst[i] = src[i];
82
83 u128_to_be128(&ctrblks[i], iv);
84 u128_inc(iv);
85 }
86
87 serpent_enc_blk_xway_xor(ctx, (u8 *)dst, (u8 *)ctrblks);
88}
89
/*
 * ECB encryption dispatch table for the common glue layer: use the SSE2
 * multi-block routine for full SERPENT_PARALLEL_BLOCKS batches, fall
 * back to the generic one-block implementation for the remainder.
 */
static const struct common_glue_ctx serpent_enc = {
	.num_funcs = 2,
	.fpu_blocks_limit = SERPENT_PARALLEL_BLOCKS,

	.funcs = { {
		.num_blocks = SERPENT_PARALLEL_BLOCKS,
		.fn_u = { .ecb = GLUE_FUNC_CAST(serpent_enc_blk_xway) }
	}, {
		.num_blocks = 1,
		.fn_u = { .ecb = GLUE_FUNC_CAST(__serpent_encrypt) }
	} }
};
102
/*
 * CTR dispatch table: multi-block SSE2 path for full batches, single
 * counter block otherwise.
 */
static const struct common_glue_ctx serpent_ctr = {
	.num_funcs = 2,
	.fpu_blocks_limit = SERPENT_PARALLEL_BLOCKS,

	.funcs = { {
		.num_blocks = SERPENT_PARALLEL_BLOCKS,
		.fn_u = { .ctr = GLUE_CTR_FUNC_CAST(serpent_crypt_ctr_xway) }
	}, {
		.num_blocks = 1,
		.fn_u = { .ctr = GLUE_CTR_FUNC_CAST(serpent_crypt_ctr) }
	} }
};
115
/*
 * ECB decryption dispatch table: SSE2 multi-block fast path plus a
 * one-block fallback.
 */
static const struct common_glue_ctx serpent_dec = {
	.num_funcs = 2,
	.fpu_blocks_limit = SERPENT_PARALLEL_BLOCKS,

	.funcs = { {
		.num_blocks = SERPENT_PARALLEL_BLOCKS,
		.fn_u = { .ecb = GLUE_FUNC_CAST(serpent_dec_blk_xway) }
	}, {
		.num_blocks = 1,
		.fn_u = { .ecb = GLUE_FUNC_CAST(__serpent_decrypt) }
	} }
};
128
/*
 * CBC decryption dispatch table: the multi-block entry handles the
 * CBC chaining itself (see serpent_decrypt_cbc_xway); the one-block
 * entry is the raw cipher, with chaining done by the glue layer.
 */
static const struct common_glue_ctx serpent_dec_cbc = {
	.num_funcs = 2,
	.fpu_blocks_limit = SERPENT_PARALLEL_BLOCKS,

	.funcs = { {
		.num_blocks = SERPENT_PARALLEL_BLOCKS,
		.fn_u = { .cbc = GLUE_CBC_FUNC_CAST(serpent_decrypt_cbc_xway) }
	}, {
		.num_blocks = 1,
		.fn_u = { .cbc = GLUE_CBC_FUNC_CAST(__serpent_decrypt) }
	} }
};
141
/* ECB encryption entry point; scatterlist walk done by the glue layer. */
static int ecb_encrypt(struct blkcipher_desc *desc, struct scatterlist *dst,
		       struct scatterlist *src, unsigned int nbytes)
{
	return glue_ecb_crypt_128bit(&serpent_enc, desc, dst, src, nbytes);
}
147
/* ECB decryption entry point; scatterlist walk done by the glue layer. */
static int ecb_decrypt(struct blkcipher_desc *desc, struct scatterlist *dst,
		       struct scatterlist *src, unsigned int nbytes)
{
	return glue_ecb_crypt_128bit(&serpent_dec, desc, dst, src, nbytes);
}
153
/*
 * CBC encryption entry point. CBC encryption is inherently serial
 * (each block depends on the previous ciphertext), so only the
 * one-block cipher function is used here.
 */
static int cbc_encrypt(struct blkcipher_desc *desc, struct scatterlist *dst,
		       struct scatterlist *src, unsigned int nbytes)
{
	return glue_cbc_encrypt_128bit(GLUE_FUNC_CAST(__serpent_encrypt), desc,
				       dst, src, nbytes);
}
160
/* CBC decryption entry point; parallelizable, so uses the dispatch table. */
static int cbc_decrypt(struct blkcipher_desc *desc, struct scatterlist *dst,
		       struct scatterlist *src, unsigned int nbytes)
{
	return glue_cbc_decrypt_128bit(&serpent_dec_cbc, desc, dst, src,
				       nbytes);
}
167
/* CTR entry point; encryption and decryption are the same operation. */
static int ctr_crypt(struct blkcipher_desc *desc, struct scatterlist *dst,
		     struct scatterlist *src, unsigned int nbytes)
{
	return glue_ctr_crypt_128bit(&serpent_ctr, desc, dst, src, nbytes);
}
173
/*
 * Delegate the "is it worth enabling the FPU for @nbytes?" decision to
 * the common glue helper; returns the (possibly updated) FPU state that
 * must eventually be passed to serpent_fpu_end().
 */
static inline bool serpent_fpu_begin(bool fpu_enabled, unsigned int nbytes)
{
	return glue_fpu_begin(SERPENT_BLOCK_SIZE, SERPENT_PARALLEL_BLOCKS,
			      NULL, fpu_enabled, nbytes);
}
179
/* Release the FPU if serpent_fpu_begin() enabled it. */
static inline void serpent_fpu_end(bool fpu_enabled)
{
	glue_fpu_end(fpu_enabled);
}
184
/*
 * Per-request state shared with the LRW/XTS crypt callbacks: the Serpent
 * key context plus the running FPU state, so the FPU can stay enabled
 * across multiple callback invocations within one request.
 */
struct crypt_priv {
	struct serpent_ctx *ctx;	/* Serpent key schedule */
	bool fpu_enabled;		/* true while the kernel FPU is held */
};
189
190static void encrypt_callback(void *priv, u8 *srcdst, unsigned int nbytes)
191{
192 const unsigned int bsize = SERPENT_BLOCK_SIZE;
193 struct crypt_priv *ctx = priv;
194 int i;
195
196 ctx->fpu_enabled = serpent_fpu_begin(ctx->fpu_enabled, nbytes);
197
198 if (nbytes == bsize * SERPENT_PARALLEL_BLOCKS) {
199 serpent_enc_blk_xway(ctx->ctx, srcdst, srcdst);
200 return;
201 }
202
203 for (i = 0; i < nbytes / bsize; i++, srcdst += bsize)
204 __serpent_encrypt(ctx->ctx, srcdst, srcdst);
205}
206
207static void decrypt_callback(void *priv, u8 *srcdst, unsigned int nbytes)
208{
209 const unsigned int bsize = SERPENT_BLOCK_SIZE;
210 struct crypt_priv *ctx = priv;
211 int i;
212
213 ctx->fpu_enabled = serpent_fpu_begin(ctx->fpu_enabled, nbytes);
214
215 if (nbytes == bsize * SERPENT_PARALLEL_BLOCKS) {
216 serpent_dec_blk_xway(ctx->ctx, srcdst, srcdst);
217 return;
218 }
219
220 for (i = 0; i < nbytes / bsize; i++, srcdst += bsize)
221 __serpent_decrypt(ctx->ctx, srcdst, srcdst);
222}
223
/* LRW context: tweak multiplication table plus the Serpent key schedule. */
struct serpent_lrw_ctx {
	struct lrw_table_ctx lrw_table;
	struct serpent_ctx serpent_ctx;
};
228
229static int lrw_serpent_setkey(struct crypto_tfm *tfm, const u8 *key,
230 unsigned int keylen)
231{
232 struct serpent_lrw_ctx *ctx = crypto_tfm_ctx(tfm);
233 int err;
234
235 err = __serpent_setkey(&ctx->serpent_ctx, key, keylen -
236 SERPENT_BLOCK_SIZE);
237 if (err)
238 return err;
239
240 return lrw_init_table(&ctx->lrw_table, key + keylen -
241 SERPENT_BLOCK_SIZE);
242}
243
244static int lrw_encrypt(struct blkcipher_desc *desc, struct scatterlist *dst,
245 struct scatterlist *src, unsigned int nbytes)
246{
247 struct serpent_lrw_ctx *ctx = crypto_blkcipher_ctx(desc->tfm);
248 be128 buf[SERPENT_PARALLEL_BLOCKS];
249 struct crypt_priv crypt_ctx = {
250 .ctx = &ctx->serpent_ctx,
251 .fpu_enabled = false,
252 };
253 struct lrw_crypt_req req = {
254 .tbuf = buf,
255 .tbuflen = sizeof(buf),
256
257 .table_ctx = &ctx->lrw_table,
258 .crypt_ctx = &crypt_ctx,
259 .crypt_fn = encrypt_callback,
260 };
261 int ret;
262
Jussi Kivilinnad3564332011-11-09 19:44:12 +0200263 desc->flags &= ~CRYPTO_TFM_REQ_MAY_SLEEP;
Jussi Kivilinna18482052011-11-09 16:26:36 +0200264 ret = lrw_crypt(desc, dst, src, nbytes, &req);
265 serpent_fpu_end(crypt_ctx.fpu_enabled);
266
267 return ret;
268}
269
270static int lrw_decrypt(struct blkcipher_desc *desc, struct scatterlist *dst,
271 struct scatterlist *src, unsigned int nbytes)
272{
273 struct serpent_lrw_ctx *ctx = crypto_blkcipher_ctx(desc->tfm);
274 be128 buf[SERPENT_PARALLEL_BLOCKS];
275 struct crypt_priv crypt_ctx = {
276 .ctx = &ctx->serpent_ctx,
277 .fpu_enabled = false,
278 };
279 struct lrw_crypt_req req = {
280 .tbuf = buf,
281 .tbuflen = sizeof(buf),
282
283 .table_ctx = &ctx->lrw_table,
284 .crypt_ctx = &crypt_ctx,
285 .crypt_fn = decrypt_callback,
286 };
287 int ret;
288
Jussi Kivilinnad3564332011-11-09 19:44:12 +0200289 desc->flags &= ~CRYPTO_TFM_REQ_MAY_SLEEP;
Jussi Kivilinna18482052011-11-09 16:26:36 +0200290 ret = lrw_crypt(desc, dst, src, nbytes, &req);
291 serpent_fpu_end(crypt_ctx.fpu_enabled);
292
293 return ret;
294}
295
/* Free the LRW multiplication table when the transform is torn down. */
static void lrw_exit_tfm(struct crypto_tfm *tfm)
{
	struct serpent_lrw_ctx *ctx = crypto_tfm_ctx(tfm);

	lrw_free_table(&ctx->lrw_table);
}
302
/* XTS context: independent key schedules for the tweak and data ciphers. */
struct serpent_xts_ctx {
	struct serpent_ctx tweak_ctx;
	struct serpent_ctx crypt_ctx;
};
307
308static int xts_serpent_setkey(struct crypto_tfm *tfm, const u8 *key,
309 unsigned int keylen)
310{
311 struct serpent_xts_ctx *ctx = crypto_tfm_ctx(tfm);
312 u32 *flags = &tfm->crt_flags;
313 int err;
314
315 /* key consists of keys of equal size concatenated, therefore
316 * the length must be even
317 */
318 if (keylen % 2) {
319 *flags |= CRYPTO_TFM_RES_BAD_KEY_LEN;
320 return -EINVAL;
321 }
322
323 /* first half of xts-key is for crypt */
324 err = __serpent_setkey(&ctx->crypt_ctx, key, keylen / 2);
325 if (err)
326 return err;
327
328 /* second half of xts-key is for tweak */
329 return __serpent_setkey(&ctx->tweak_ctx, key + keylen / 2, keylen / 2);
330}
331
332static int xts_encrypt(struct blkcipher_desc *desc, struct scatterlist *dst,
333 struct scatterlist *src, unsigned int nbytes)
334{
335 struct serpent_xts_ctx *ctx = crypto_blkcipher_ctx(desc->tfm);
336 be128 buf[SERPENT_PARALLEL_BLOCKS];
337 struct crypt_priv crypt_ctx = {
338 .ctx = &ctx->crypt_ctx,
339 .fpu_enabled = false,
340 };
341 struct xts_crypt_req req = {
342 .tbuf = buf,
343 .tbuflen = sizeof(buf),
344
345 .tweak_ctx = &ctx->tweak_ctx,
346 .tweak_fn = XTS_TWEAK_CAST(__serpent_encrypt),
347 .crypt_ctx = &crypt_ctx,
348 .crypt_fn = encrypt_callback,
349 };
350 int ret;
351
Jussi Kivilinnad3564332011-11-09 19:44:12 +0200352 desc->flags &= ~CRYPTO_TFM_REQ_MAY_SLEEP;
Jussi Kivilinna5962f8b2011-11-09 16:26:41 +0200353 ret = xts_crypt(desc, dst, src, nbytes, &req);
354 serpent_fpu_end(crypt_ctx.fpu_enabled);
355
356 return ret;
357}
358
359static int xts_decrypt(struct blkcipher_desc *desc, struct scatterlist *dst,
360 struct scatterlist *src, unsigned int nbytes)
361{
362 struct serpent_xts_ctx *ctx = crypto_blkcipher_ctx(desc->tfm);
363 be128 buf[SERPENT_PARALLEL_BLOCKS];
364 struct crypt_priv crypt_ctx = {
365 .ctx = &ctx->crypt_ctx,
366 .fpu_enabled = false,
367 };
368 struct xts_crypt_req req = {
369 .tbuf = buf,
370 .tbuflen = sizeof(buf),
371
372 .tweak_ctx = &ctx->tweak_ctx,
373 .tweak_fn = XTS_TWEAK_CAST(__serpent_encrypt),
374 .crypt_ctx = &crypt_ctx,
375 .crypt_fn = decrypt_callback,
376 };
377 int ret;
378
Jussi Kivilinnad3564332011-11-09 19:44:12 +0200379 desc->flags &= ~CRYPTO_TFM_REQ_MAY_SLEEP;
Jussi Kivilinna5962f8b2011-11-09 16:26:41 +0200380 ret = xts_crypt(desc, dst, src, nbytes, &req);
381 serpent_fpu_end(crypt_ctx.fpu_enabled);
382
383 return ret;
384}
385
/*
 * All ten algorithm registrations in one array:
 *
 *   [0..4]  "__"-prefixed synchronous blkcipher implementations
 *           (ecb/cbc/ctr/lrw/xts).  Registered with priority 0 so they
 *           are never selected directly; they are the inner ciphers
 *           driven by the async wrappers below.
 *   [5..9]  ablkcipher front-ends at priority 400 that forward to the
 *           inner implementations via the ablk helper (presumably
 *           through cryptd when the FPU is unusable — see
 *           asm/crypto/ablk_helper.h).
 */
static struct crypto_alg serpent_algs[10] = { {
	/* [0] internal synchronous ECB */
	.cra_name		= "__ecb-serpent-sse2",
	.cra_driver_name	= "__driver-ecb-serpent-sse2",
	.cra_priority		= 0,
	.cra_flags		= CRYPTO_ALG_TYPE_BLKCIPHER,
	.cra_blocksize		= SERPENT_BLOCK_SIZE,
	.cra_ctxsize		= sizeof(struct serpent_ctx),
	.cra_alignmask		= 0,
	.cra_type		= &crypto_blkcipher_type,
	.cra_module		= THIS_MODULE,
	.cra_list		= LIST_HEAD_INIT(serpent_algs[0].cra_list),
	.cra_u = {
		.blkcipher = {
			.min_keysize	= SERPENT_MIN_KEY_SIZE,
			.max_keysize	= SERPENT_MAX_KEY_SIZE,
			.setkey		= serpent_setkey,
			.encrypt	= ecb_encrypt,
			.decrypt	= ecb_decrypt,
		},
	},
}, {
	/* [1] internal synchronous CBC */
	.cra_name		= "__cbc-serpent-sse2",
	.cra_driver_name	= "__driver-cbc-serpent-sse2",
	.cra_priority		= 0,
	.cra_flags		= CRYPTO_ALG_TYPE_BLKCIPHER,
	.cra_blocksize		= SERPENT_BLOCK_SIZE,
	.cra_ctxsize		= sizeof(struct serpent_ctx),
	.cra_alignmask		= 0,
	.cra_type		= &crypto_blkcipher_type,
	.cra_module		= THIS_MODULE,
	.cra_list		= LIST_HEAD_INIT(serpent_algs[1].cra_list),
	.cra_u = {
		.blkcipher = {
			.min_keysize	= SERPENT_MIN_KEY_SIZE,
			.max_keysize	= SERPENT_MAX_KEY_SIZE,
			.setkey		= serpent_setkey,
			.encrypt	= cbc_encrypt,
			.decrypt	= cbc_decrypt,
		},
	},
}, {
	/* [2] internal synchronous CTR (stream mode: blocksize 1) */
	.cra_name		= "__ctr-serpent-sse2",
	.cra_driver_name	= "__driver-ctr-serpent-sse2",
	.cra_priority		= 0,
	.cra_flags		= CRYPTO_ALG_TYPE_BLKCIPHER,
	.cra_blocksize		= 1,
	.cra_ctxsize		= sizeof(struct serpent_ctx),
	.cra_alignmask		= 0,
	.cra_type		= &crypto_blkcipher_type,
	.cra_module		= THIS_MODULE,
	.cra_list		= LIST_HEAD_INIT(serpent_algs[2].cra_list),
	.cra_u = {
		.blkcipher = {
			.min_keysize	= SERPENT_MIN_KEY_SIZE,
			.max_keysize	= SERPENT_MAX_KEY_SIZE,
			.ivsize		= SERPENT_BLOCK_SIZE,
			.setkey		= serpent_setkey,
			.encrypt	= ctr_crypt,
			.decrypt	= ctr_crypt,
		},
	},
}, {
	/* [3] internal synchronous LRW (key includes one extra tweak block) */
	.cra_name		= "__lrw-serpent-sse2",
	.cra_driver_name	= "__driver-lrw-serpent-sse2",
	.cra_priority		= 0,
	.cra_flags		= CRYPTO_ALG_TYPE_BLKCIPHER,
	.cra_blocksize		= SERPENT_BLOCK_SIZE,
	.cra_ctxsize		= sizeof(struct serpent_lrw_ctx),
	.cra_alignmask		= 0,
	.cra_type		= &crypto_blkcipher_type,
	.cra_module		= THIS_MODULE,
	.cra_list		= LIST_HEAD_INIT(serpent_algs[3].cra_list),
	.cra_exit		= lrw_exit_tfm,
	.cra_u = {
		.blkcipher = {
			.min_keysize	= SERPENT_MIN_KEY_SIZE +
					  SERPENT_BLOCK_SIZE,
			.max_keysize	= SERPENT_MAX_KEY_SIZE +
					  SERPENT_BLOCK_SIZE,
			.ivsize		= SERPENT_BLOCK_SIZE,
			.setkey		= lrw_serpent_setkey,
			.encrypt	= lrw_encrypt,
			.decrypt	= lrw_decrypt,
		},
	},
}, {
	/* [4] internal synchronous XTS (two concatenated keys) */
	.cra_name		= "__xts-serpent-sse2",
	.cra_driver_name	= "__driver-xts-serpent-sse2",
	.cra_priority		= 0,
	.cra_flags		= CRYPTO_ALG_TYPE_BLKCIPHER,
	.cra_blocksize		= SERPENT_BLOCK_SIZE,
	.cra_ctxsize		= sizeof(struct serpent_xts_ctx),
	.cra_alignmask		= 0,
	.cra_type		= &crypto_blkcipher_type,
	.cra_module		= THIS_MODULE,
	.cra_list		= LIST_HEAD_INIT(serpent_algs[4].cra_list),
	.cra_u = {
		.blkcipher = {
			.min_keysize	= SERPENT_MIN_KEY_SIZE * 2,
			.max_keysize	= SERPENT_MAX_KEY_SIZE * 2,
			.ivsize		= SERPENT_BLOCK_SIZE,
			.setkey		= xts_serpent_setkey,
			.encrypt	= xts_encrypt,
			.decrypt	= xts_decrypt,
		},
	},
}, {
	/* [5] public async ECB wrapper */
	.cra_name		= "ecb(serpent)",
	.cra_driver_name	= "ecb-serpent-sse2",
	.cra_priority		= 400,
	.cra_flags		= CRYPTO_ALG_TYPE_ABLKCIPHER | CRYPTO_ALG_ASYNC,
	.cra_blocksize		= SERPENT_BLOCK_SIZE,
	.cra_ctxsize		= sizeof(struct async_helper_ctx),
	.cra_alignmask		= 0,
	.cra_type		= &crypto_ablkcipher_type,
	.cra_module		= THIS_MODULE,
	.cra_list		= LIST_HEAD_INIT(serpent_algs[5].cra_list),
	.cra_init		= ablk_init,
	.cra_exit		= ablk_exit,
	.cra_u = {
		.ablkcipher = {
			.min_keysize	= SERPENT_MIN_KEY_SIZE,
			.max_keysize	= SERPENT_MAX_KEY_SIZE,
			.setkey		= ablk_set_key,
			.encrypt	= ablk_encrypt,
			.decrypt	= ablk_decrypt,
		},
	},
}, {
	/* [6] public async CBC wrapper */
	.cra_name		= "cbc(serpent)",
	.cra_driver_name	= "cbc-serpent-sse2",
	.cra_priority		= 400,
	.cra_flags		= CRYPTO_ALG_TYPE_ABLKCIPHER | CRYPTO_ALG_ASYNC,
	.cra_blocksize		= SERPENT_BLOCK_SIZE,
	.cra_ctxsize		= sizeof(struct async_helper_ctx),
	.cra_alignmask		= 0,
	.cra_type		= &crypto_ablkcipher_type,
	.cra_module		= THIS_MODULE,
	.cra_list		= LIST_HEAD_INIT(serpent_algs[6].cra_list),
	.cra_init		= ablk_init,
	.cra_exit		= ablk_exit,
	.cra_u = {
		.ablkcipher = {
			.min_keysize	= SERPENT_MIN_KEY_SIZE,
			.max_keysize	= SERPENT_MAX_KEY_SIZE,
			.ivsize		= SERPENT_BLOCK_SIZE,
			.setkey		= ablk_set_key,
			/* encrypt uses the __ variant (NOTE(review):
			 * presumably the synchronous path; confirm against
			 * ablk_helper) while decrypt goes through the
			 * regular async path. */
			.encrypt	= __ablk_encrypt,
			.decrypt	= ablk_decrypt,
		},
	},
}, {
	/* [7] public async CTR wrapper */
	.cra_name		= "ctr(serpent)",
	.cra_driver_name	= "ctr-serpent-sse2",
	.cra_priority		= 400,
	.cra_flags		= CRYPTO_ALG_TYPE_ABLKCIPHER | CRYPTO_ALG_ASYNC,
	.cra_blocksize		= 1,
	.cra_ctxsize		= sizeof(struct async_helper_ctx),
	.cra_alignmask		= 0,
	.cra_type		= &crypto_ablkcipher_type,
	.cra_module		= THIS_MODULE,
	.cra_list		= LIST_HEAD_INIT(serpent_algs[7].cra_list),
	.cra_init		= ablk_init,
	.cra_exit		= ablk_exit,
	.cra_u = {
		.ablkcipher = {
			.min_keysize	= SERPENT_MIN_KEY_SIZE,
			.max_keysize	= SERPENT_MAX_KEY_SIZE,
			.ivsize		= SERPENT_BLOCK_SIZE,
			.setkey		= ablk_set_key,
			.encrypt	= ablk_encrypt,
			/* CTR decryption is the same keystream xor as
			 * encryption, hence ablk_encrypt here too. */
			.decrypt	= ablk_encrypt,
			.geniv		= "chainiv",
		},
	},
}, {
	/* [8] public async LRW wrapper */
	.cra_name		= "lrw(serpent)",
	.cra_driver_name	= "lrw-serpent-sse2",
	.cra_priority		= 400,
	.cra_flags		= CRYPTO_ALG_TYPE_ABLKCIPHER | CRYPTO_ALG_ASYNC,
	.cra_blocksize		= SERPENT_BLOCK_SIZE,
	.cra_ctxsize		= sizeof(struct async_helper_ctx),
	.cra_alignmask		= 0,
	.cra_type		= &crypto_ablkcipher_type,
	.cra_module		= THIS_MODULE,
	.cra_list		= LIST_HEAD_INIT(serpent_algs[8].cra_list),
	.cra_init		= ablk_init,
	.cra_exit		= ablk_exit,
	.cra_u = {
		.ablkcipher = {
			.min_keysize	= SERPENT_MIN_KEY_SIZE +
					  SERPENT_BLOCK_SIZE,
			.max_keysize	= SERPENT_MAX_KEY_SIZE +
					  SERPENT_BLOCK_SIZE,
			.ivsize		= SERPENT_BLOCK_SIZE,
			.setkey		= ablk_set_key,
			.encrypt	= ablk_encrypt,
			.decrypt	= ablk_decrypt,
		},
	},
}, {
	/* [9] public async XTS wrapper */
	.cra_name		= "xts(serpent)",
	.cra_driver_name	= "xts-serpent-sse2",
	.cra_priority		= 400,
	.cra_flags		= CRYPTO_ALG_TYPE_ABLKCIPHER | CRYPTO_ALG_ASYNC,
	.cra_blocksize		= SERPENT_BLOCK_SIZE,
	.cra_ctxsize		= sizeof(struct async_helper_ctx),
	.cra_alignmask		= 0,
	.cra_type		= &crypto_ablkcipher_type,
	.cra_module		= THIS_MODULE,
	.cra_list		= LIST_HEAD_INIT(serpent_algs[9].cra_list),
	.cra_init		= ablk_init,
	.cra_exit		= ablk_exit,
	.cra_u = {
		.ablkcipher = {
			.min_keysize	= SERPENT_MIN_KEY_SIZE * 2,
			.max_keysize	= SERPENT_MAX_KEY_SIZE * 2,
			.ivsize		= SERPENT_BLOCK_SIZE,
			.setkey		= ablk_set_key,
			.encrypt	= ablk_encrypt,
			.decrypt	= ablk_decrypt,
		},
	},
} };
Jussi Kivilinna5962f8b2011-11-09 16:26:41 +0200610
/*
 * Module init: refuse to load on CPUs without SSE2, otherwise register
 * all ten algorithm variants in one go.
 */
static int __init serpent_sse2_init(void)
{
	if (!cpu_has_xmm2) {
		printk(KERN_INFO "SSE2 instructions are not detected.\n");
		return -ENODEV;
	}

	return crypto_register_algs(serpent_algs, ARRAY_SIZE(serpent_algs));
}
620
/* Module exit: unregister everything serpent_sse2_init() registered. */
static void __exit serpent_sse2_exit(void)
{
	crypto_unregister_algs(serpent_algs, ARRAY_SIZE(serpent_algs));
}
625
module_init(serpent_sse2_init);
module_exit(serpent_sse2_exit);

MODULE_DESCRIPTION("Serpent Cipher Algorithm, SSE2 optimized");
MODULE_LICENSE("GPL");
/* Generic alias so "serpent" requests can load this module. */
MODULE_ALIAS("serpent");