/* SPDX-License-Identifier: GPL-2.0 */
/*
 * Software async crypto daemon
 *
 * Added AEAD support to cryptd.
 *    Authors: Tadeusz Struk (tadeusz.struk@intel.com)
 *             Adrian Hoban <adrian.hoban@intel.com>
 *             Gabriele Paoloni <gabriele.paoloni@intel.com>
 *             Aidan O'Mahony (aidan.o.mahony@intel.com)
 *    Copyright (c) 2010, Intel Corporation.
 */

#ifndef _CRYPTO_CRYPT_H
#define _CRYPTO_CRYPT_H

#include <linux/kernel.h>
#include <crypto/aead.h>
#include <crypto/hash.h>
#include <crypto/skcipher.h>

21 | struct cryptd_ablkcipher { | |
22 | struct crypto_ablkcipher base; | |
23 | }; | |
24 | ||
/*
 * Downcast a generic crypto_ablkcipher to the cryptd wrapper.  Safe because
 * struct cryptd_ablkcipher has the base handle as its first member; only
 * valid for tfms obtained from cryptd_alloc_ablkcipher().
 */
static inline struct cryptd_ablkcipher *__cryptd_ablkcipher_cast(
	struct crypto_ablkcipher *tfm)
{
	return (struct cryptd_ablkcipher *)tfm;
}
30 | ||
31 | /* alg_name should be algorithm to be cryptd-ed */ | |
32 | struct cryptd_ablkcipher *cryptd_alloc_ablkcipher(const char *alg_name, | |
33 | u32 type, u32 mask); | |
34 | struct crypto_blkcipher *cryptd_ablkcipher_child(struct cryptd_ablkcipher *tfm); | |
81760ea6 | 35 | bool cryptd_ablkcipher_queued(struct cryptd_ablkcipher *tfm); |
1cac2cbc HY |
36 | void cryptd_free_ablkcipher(struct cryptd_ablkcipher *tfm); |
37 | ||
4e0958d1 HX |
38 | struct cryptd_skcipher { |
39 | struct crypto_skcipher base; | |
40 | }; | |
41 | ||
42 | struct cryptd_skcipher *cryptd_alloc_skcipher(const char *alg_name, | |
43 | u32 type, u32 mask); | |
44 | struct crypto_skcipher *cryptd_skcipher_child(struct cryptd_skcipher *tfm); | |
45 | /* Must be called without moving CPUs. */ | |
46 | bool cryptd_skcipher_queued(struct cryptd_skcipher *tfm); | |
47 | void cryptd_free_skcipher(struct cryptd_skcipher *tfm); | |
48 | ||
ace13663 HY |
49 | struct cryptd_ahash { |
50 | struct crypto_ahash base; | |
51 | }; | |
52 | ||
/*
 * Downcast a generic crypto_ahash to the cryptd wrapper.  Only valid for
 * tfms obtained from cryptd_alloc_ahash().
 */
static inline struct cryptd_ahash *__cryptd_ahash_cast(
	struct crypto_ahash *tfm)
{
	return (struct cryptd_ahash *)tfm;
}
58 | ||
59 | /* alg_name should be algorithm to be cryptd-ed */ | |
60 | struct cryptd_ahash *cryptd_alloc_ahash(const char *alg_name, | |
61 | u32 type, u32 mask); | |
62 | struct crypto_shash *cryptd_ahash_child(struct cryptd_ahash *tfm); | |
0e1227d3 | 63 | struct shash_desc *cryptd_shash_desc(struct ahash_request *req); |
81760ea6 HX |
64 | /* Must be called without moving CPUs. */ |
65 | bool cryptd_ahash_queued(struct cryptd_ahash *tfm); | |
ace13663 HY |
66 | void cryptd_free_ahash(struct cryptd_ahash *tfm); |
67 | ||
298c926c AH |
68 | struct cryptd_aead { |
69 | struct crypto_aead base; | |
70 | }; | |
71 | ||
/*
 * Downcast a generic crypto_aead to the cryptd wrapper.  Only valid for
 * tfms obtained from cryptd_alloc_aead().
 */
static inline struct cryptd_aead *__cryptd_aead_cast(
	struct crypto_aead *tfm)
{
	return (struct cryptd_aead *)tfm;
}
77 | ||
78 | struct cryptd_aead *cryptd_alloc_aead(const char *alg_name, | |
79 | u32 type, u32 mask); | |
80 | ||
81 | struct crypto_aead *cryptd_aead_child(struct cryptd_aead *tfm); | |
81760ea6 HX |
82 | /* Must be called without moving CPUs. */ |
83 | bool cryptd_aead_queued(struct cryptd_aead *tfm); | |
298c926c AH |
84 | |
85 | void cryptd_free_aead(struct cryptd_aead *tfm); | |
86 | ||
#endif /* _CRYPTO_CRYPT_H */