/* SPDX-License-Identifier: GPL-2.0 */
/*
 * Software async crypto daemon
 *
 * Added AEAD support to cryptd.
 *    Authors: Tadeusz Struk (tadeusz.struk@intel.com)
 *             Adrian Hoban <adrian.hoban@intel.com>
 *             Gabriele Paoloni <gabriele.paoloni@intel.com>
 *             Aidan O'Mahony (aidan.o.mahony@intel.com)
 *    Copyright (c) 2010, Intel Corporation.
 */

#ifndef _CRYPTO_CRYPT_H
#define _CRYPTO_CRYPT_H

#include <linux/kernel.h>
#include <crypto/aead.h>
#include <crypto/hash.h>
#include <crypto/skcipher.h>

struct cryptd_skcipher {
	struct crypto_skcipher base;
};

/* alg_name should be algorithm to be cryptd-ed */
struct cryptd_skcipher *cryptd_alloc_skcipher(const char *alg_name,
					      u32 type, u32 mask);
struct crypto_skcipher *cryptd_skcipher_child(struct cryptd_skcipher *tfm);
/* Must be called without moving CPUs. */
bool cryptd_skcipher_queued(struct cryptd_skcipher *tfm);
void cryptd_free_skcipher(struct cryptd_skcipher *tfm);
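
/*
 * Illustrative sketch of typical use (not part of this header; the
 * algorithm name and zero type/mask flags are placeholders chosen for
 * the example):
 *
 *	struct cryptd_skcipher *ctfm;
 *
 *	ctfm = cryptd_alloc_skcipher("cbc(aes)", 0, 0);
 *	if (IS_ERR(ctfm))
 *		return PTR_ERR(ctfm);
 *
 * Requests are then issued against &ctfm->base with the regular
 * skcipher request API; cryptd_skcipher_child(ctfm) exposes the
 * underlying transform, and cryptd_free_skcipher(ctfm) releases it.
 */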

struct cryptd_ahash {
	struct crypto_ahash base;
};

static inline struct cryptd_ahash *__cryptd_ahash_cast(
	struct crypto_ahash *tfm)
{
	return (struct cryptd_ahash *)tfm;
}

/* alg_name should be algorithm to be cryptd-ed */
struct cryptd_ahash *cryptd_alloc_ahash(const char *alg_name,
					u32 type, u32 mask);
struct crypto_shash *cryptd_ahash_child(struct cryptd_ahash *tfm);
struct shash_desc *cryptd_shash_desc(struct ahash_request *req);
/* Must be called without moving CPUs. */
bool cryptd_ahash_queued(struct cryptd_ahash *tfm);
void cryptd_free_ahash(struct cryptd_ahash *tfm);
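
/*
 * Illustrative sketch (the hash algorithm name is only an example):
 *
 *	struct cryptd_ahash *chash;
 *
 *	chash = cryptd_alloc_ahash("sha256", 0, 0);
 *	if (IS_ERR(chash))
 *		return PTR_ERR(chash);
 *
 * Hash requests go through &chash->base; cryptd_ahash_child(chash)
 * returns the synchronous shash doing the actual work, and
 * cryptd_free_ahash(chash) drops the handle.
 */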

struct cryptd_aead {
	struct crypto_aead base;
};

static inline struct cryptd_aead *__cryptd_aead_cast(
	struct crypto_aead *tfm)
{
	return (struct cryptd_aead *)tfm;
}

struct cryptd_aead *cryptd_alloc_aead(const char *alg_name,
				      u32 type, u32 mask);

struct crypto_aead *cryptd_aead_child(struct cryptd_aead *tfm);
/* Must be called without moving CPUs. */
bool cryptd_aead_queued(struct cryptd_aead *tfm);

void cryptd_free_aead(struct cryptd_aead *tfm);
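
/*
 * Illustrative sketch (the AEAD algorithm name is a placeholder; real
 * callers typically wrap an internal driver implementation):
 *
 *	struct cryptd_aead *caead;
 *
 *	caead = cryptd_alloc_aead("gcm(aes)", 0, 0);
 *	if (IS_ERR(caead))
 *		return PTR_ERR(caead);
 *
 * AEAD requests are set up on &caead->base; cryptd_aead_child(caead)
 * gives the wrapped transform, and cryptd_free_aead(caead) frees it.
 */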

#endif