/* SPDX-License-Identifier: GPL-2.0-or-later */
/*
 * AEAD: Authenticated Encryption with Associated Data
 *
 * Copyright (c) 2007-2015 Herbert Xu <herbert@gondor.apana.org.au>
 */

#ifndef _CRYPTO_INTERNAL_AEAD_H
#define _CRYPTO_INTERNAL_AEAD_H

#include <crypto/aead.h>
#include <crypto/algapi.h>
#include <linux/stddef.h>
#include <linux/types.h>

struct rtattr;

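/*
 * An AEAD template instance.  The anonymous union overlays the generic
 * crypto_instance with the AEAD-specific aead_alg: head[] pads out the
 * members of aead_alg that precede its embedded crypto_alg, so the
 * crypto_alg at the start of s.base coincides with alg.base and the
 * same object can be viewed either as a crypto_instance or as an
 * aead_alg.
 */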
struct aead_instance {
        void (*free)(struct aead_instance *inst);
        union {
                struct {
                        char head[offsetof(struct aead_alg, base)];
                        struct crypto_instance base;
                } s;
                struct aead_alg alg;
        };
};

struct crypto_aead_spawn {
        struct crypto_spawn base;
};

struct aead_queue {
        struct crypto_queue base;
};

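/*
 * crypto_aead_ctx() returns the transform-private context that follows
 * struct crypto_aead; the _dma variant returns the same context
 * realigned for drivers that DMA into it.
 */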
static inline void *crypto_aead_ctx(struct crypto_aead *tfm)
{
        return crypto_tfm_ctx(&tfm->base);
}

static inline void *crypto_aead_ctx_dma(struct crypto_aead *tfm)
{
        return crypto_tfm_ctx_dma(&tfm->base);
}

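/*
 * Conversions between the AEAD view (struct aead_instance) and the
 * generic view (struct crypto_instance) of the same template instance;
 * both rely on the union layout above.
 */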
static inline struct crypto_instance *aead_crypto_instance(
        struct aead_instance *inst)
{
        return container_of(&inst->alg.base, struct crypto_instance, alg);
}

static inline struct aead_instance *aead_instance(struct crypto_instance *inst)
{
        return container_of(&inst->alg, struct aead_instance, alg.base);
}

static inline struct aead_instance *aead_alg_instance(struct crypto_aead *aead)
{
        return aead_instance(crypto_tfm_alg_instance(&aead->base));
}

static inline void *aead_instance_ctx(struct aead_instance *inst)
{
        return crypto_instance_ctx(aead_crypto_instance(inst));
}

static inline void *aead_request_ctx(struct aead_request *req)
{
        return req->__ctx;
}

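/*
 * aead_request_ctx_dma() re-aligns the request context for drivers
 * that DMA into it.  The extra space this may consume must have been
 * reserved with crypto_aead_set_reqsize_dma() below.
 */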
static inline void *aead_request_ctx_dma(struct aead_request *req)
{
        unsigned int align = crypto_dma_align();

        if (align <= crypto_tfm_ctx_alignment())
                align = 1;

        return PTR_ALIGN(aead_request_ctx(req), align);
}

static inline void aead_request_complete(struct aead_request *req, int err)
{
        crypto_request_complete(&req->base, err);
}

static inline u32 aead_request_flags(struct aead_request *req)
{
        return req->base.flags;
}

static inline struct aead_request *aead_request_cast(
        struct crypto_async_request *req)
{
        return container_of(req, struct aead_request, base);
}

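/*
 * Spawn management for templates built on top of an AEAD algorithm:
 * crypto_grab_aead() takes a reference on the named algorithm,
 * crypto_spawn_aead() instantiates a transform from it and
 * crypto_drop_aead() releases the reference again.
 */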
int crypto_grab_aead(struct crypto_aead_spawn *spawn,
                     struct crypto_instance *inst,
                     const char *name, u32 type, u32 mask);

static inline void crypto_drop_aead(struct crypto_aead_spawn *spawn)
{
        crypto_drop_spawn(&spawn->base);
}

static inline struct aead_alg *crypto_spawn_aead_alg(
        struct crypto_aead_spawn *spawn)
{
        return container_of(spawn->base.alg, struct aead_alg, base);
}

static inline struct crypto_aead *crypto_spawn_aead(
        struct crypto_aead_spawn *spawn)
{
        return crypto_spawn_tfm2(&spawn->base);
}

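/*
 * An AEAD implementation declares how much per-request context it needs
 * with crypto_aead_set_reqsize(); that context is then available via
 * aead_request_ctx() in its encrypt/decrypt handlers.  Illustrative
 * sketch only (my_aead_init, my_aead_encrypt and struct my_req_ctx are
 * made-up names, not part of this API):
 *
 *      static int my_aead_init(struct crypto_aead *tfm)
 *      {
 *              crypto_aead_set_reqsize(tfm, sizeof(struct my_req_ctx));
 *              return 0;
 *      }
 *
 *      static int my_aead_encrypt(struct aead_request *req)
 *      {
 *              struct my_req_ctx *rctx = aead_request_ctx(req);
 *              ...
 *      }
 */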
static inline void crypto_aead_set_reqsize(struct crypto_aead *aead,
                                           unsigned int reqsize)
{
        aead->reqsize = reqsize;
}

static inline void crypto_aead_set_reqsize_dma(struct crypto_aead *aead,
                                               unsigned int reqsize)
{
        reqsize += crypto_dma_align() & ~(crypto_tfm_ctx_alignment() - 1);
        aead->reqsize = reqsize;
}

static inline void aead_init_queue(struct aead_queue *queue,
                                   unsigned int max_qlen)
{
        crypto_init_queue(&queue->base, max_qlen);
}

static inline unsigned int crypto_aead_alg_chunksize(struct aead_alg *alg)
{
        return alg->chunksize;
}

/**
 * crypto_aead_chunksize() - obtain chunk size
 * @tfm: cipher handle
 *
 * The block size is set to one for ciphers such as CCM.  However,
 * you still need to provide incremental updates in multiples of
 * the underlying block size as the IV does not have sub-block
 * granularity.  This is known in this API as the chunk size.
 *
 * Return: chunk size in bytes
 */
static inline unsigned int crypto_aead_chunksize(struct crypto_aead *tfm)
{
        return crypto_aead_alg_chunksize(crypto_aead_alg(tfm));
}

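/* Registration of standalone AEAD algorithms and of template instances. */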
int crypto_register_aead(struct aead_alg *alg);
void crypto_unregister_aead(struct aead_alg *alg);
int crypto_register_aeads(struct aead_alg *algs, int count);
void crypto_unregister_aeads(struct aead_alg *algs, int count);
int aead_register_instance(struct crypto_template *tmpl,
                           struct aead_instance *inst);

#endif /* _CRYPTO_INTERNAL_AEAD_H */