Merge tag 'metag-for-v4.8' of git://git.kernel.org/pub/scm/linux/kernel/git/jhogan...
[linux-block.git] / include / crypto / internal / aead.h
CommitLineData
5b6d2d7f
HX
1/*
2 * AEAD: Authenticated Encryption with Associated Data
3 *
b0d955ba 4 * Copyright (c) 2007-2015 Herbert Xu <herbert@gondor.apana.org.au>
5b6d2d7f
HX
5 *
6 * This program is free software; you can redistribute it and/or modify it
7 * under the terms of the GNU General Public License as published by the Free
8 * Software Foundation; either version 2 of the License, or (at your option)
9 * any later version.
10 *
11 */
12
13#ifndef _CRYPTO_INTERNAL_AEAD_H
14#define _CRYPTO_INTERNAL_AEAD_H
15
16#include <crypto/aead.h>
17#include <crypto/algapi.h>
f5d8660a 18#include <linux/stddef.h>
5b6d2d7f
HX
19#include <linux/types.h>
20
21struct rtattr;
22
/*
 * An instantiated AEAD algorithm template.
 *
 * The anonymous union overlays the generic crypto_instance with the
 * aead_alg it describes: @s.head reserves exactly the bytes of
 * struct aead_alg that precede its embedded crypto_alg (base), so
 * @s.base.alg and @alg.base occupy the same storage.  The same memory
 * can therefore be viewed either as a crypto_instance or as an
 * aead_alg.
 */
struct aead_instance {
	/* Optional destructor invoked when the instance is freed. */
	void (*free)(struct aead_instance *inst);
	union {
		struct {
			/* Padding so that @base overlays @alg.base. */
			char head[offsetof(struct aead_alg, base)];
			struct crypto_instance base;
		} s;
		struct aead_alg alg;
	};
};
33
5b6d2d7f
HX
/* Reference held by a template instance to an underlying AEAD algorithm. */
struct crypto_aead_spawn {
	struct crypto_spawn base;
};
37
2c11a3f9
HX
/* Request queue specialized for AEAD requests (wraps crypto_queue). */
struct aead_queue {
	struct crypto_queue base;
};
41
5d1d65f8
HX
42static inline void *crypto_aead_ctx(struct crypto_aead *tfm)
43{
44 return crypto_tfm_ctx(&tfm->base);
45}
46
63293c61
HX
47static inline struct crypto_instance *aead_crypto_instance(
48 struct aead_instance *inst)
49{
50 return container_of(&inst->alg.base, struct crypto_instance, alg);
51}
52
53static inline struct aead_instance *aead_instance(struct crypto_instance *inst)
54{
55 return container_of(&inst->alg, struct aead_instance, alg.base);
56}
57
5c98d620
HX
58static inline struct aead_instance *aead_alg_instance(struct crypto_aead *aead)
59{
b0d955ba 60 return aead_instance(crypto_tfm_alg_instance(&aead->base));
5c98d620
HX
61}
62
63293c61
HX
/* Return the private context area of an AEAD template instance. */
static inline void *aead_instance_ctx(struct aead_instance *inst)
{
	struct crypto_instance *base = aead_crypto_instance(inst);

	return crypto_instance_ctx(base);
}
67
5d1d65f8
HX
/* Return the per-request context area reserved for the implementation. */
static inline void *aead_request_ctx(struct aead_request *req)
{
	return req->__ctx;
}
72
73static inline void aead_request_complete(struct aead_request *req, int err)
74{
75 req->base.complete(&req->base, err);
76}
77
/* Return the flags of the underlying asynchronous request. */
static inline u32 aead_request_flags(struct aead_request *req)
{
	return req->base.flags;
}
82
53a0bd71
TS
83static inline struct aead_request *aead_request_cast(
84 struct crypto_async_request *req)
85{
86 return container_of(req, struct aead_request, base);
87}
88
5b6d2d7f
HX
89static inline void crypto_set_aead_spawn(
90 struct crypto_aead_spawn *spawn, struct crypto_instance *inst)
91{
92 crypto_set_spawn(&spawn->base, inst);
93}
94
d29ce988
HX
/* Look up an AEAD algorithm by @name/@type/@mask and bind it to @spawn. */
int crypto_grab_aead(struct crypto_aead_spawn *spawn, const char *name,
		     u32 type, u32 mask);
97
5b6d2d7f
HX
98static inline void crypto_drop_aead(struct crypto_aead_spawn *spawn)
99{
100 crypto_drop_spawn(&spawn->base);
101}
102
63293c61
HX
103static inline struct aead_alg *crypto_spawn_aead_alg(
104 struct crypto_aead_spawn *spawn)
105{
106 return container_of(spawn->base.alg, struct aead_alg, base);
107}
108
5b6d2d7f
HX
109static inline struct crypto_aead *crypto_spawn_aead(
110 struct crypto_aead_spawn *spawn)
111{
5d1d65f8 112 return crypto_spawn_tfm2(&spawn->base);
5b6d2d7f
HX
113}
114
21b70134
HX
/* Record the per-request context size required by the implementation. */
static inline void crypto_aead_set_reqsize(struct crypto_aead *aead,
					   unsigned int reqsize)
{
	aead->reqsize = reqsize;
}
120
30e4c010
HX
/* Return the maximum authentication tag size supported by @alg. */
static inline unsigned int crypto_aead_alg_maxauthsize(struct aead_alg *alg)
{
	return alg->maxauthsize;
}
125
f5695259
HX
/* Return the maximum authentication tag size of the transform's algorithm. */
static inline unsigned int crypto_aead_maxauthsize(struct crypto_aead *aead)
{
	struct aead_alg *alg = crypto_aead_alg(aead);

	return crypto_aead_alg_maxauthsize(alg);
}
130
2c11a3f9
HX
131static inline void aead_init_queue(struct aead_queue *queue,
132 unsigned int max_qlen)
133{
134 crypto_init_queue(&queue->base, max_qlen);
135}
136
137static inline int aead_enqueue_request(struct aead_queue *queue,
138 struct aead_request *request)
139{
140 return crypto_enqueue_request(&queue->base, &request->base);
141}
142
143static inline struct aead_request *aead_dequeue_request(
144 struct aead_queue *queue)
145{
146 struct crypto_async_request *req;
147
148 req = crypto_dequeue_request(&queue->base);
149
150 return req ? container_of(req, struct aead_request, base) : NULL;
151}
152
153static inline struct aead_request *aead_get_backlog(struct aead_queue *queue)
154{
155 struct crypto_async_request *req;
156
157 req = crypto_get_backlog(&queue->base);
158
159 return req ? container_of(req, struct aead_request, base) : NULL;
160}
161
7a530aa9
HX
/* Return the algorithm's chunk size (see crypto_aead_chunksize() below). */
static inline unsigned int crypto_aead_alg_chunksize(struct aead_alg *alg)
{
	return alg->chunksize;
}
166
/**
 * crypto_aead_chunksize() - obtain chunk size
 * @tfm: cipher handle
 *
 * Ciphers such as CCM advertise a block size of one, yet incremental
 * updates must still be supplied in multiples of the underlying block
 * size because the IV has no sub-block granularity.  That granularity
 * is exposed by this API as the chunk size.
 *
 * Return: chunk size in bytes
 */
static inline unsigned int crypto_aead_chunksize(struct crypto_aead *tfm)
{
	struct aead_alg *alg = crypto_aead_alg(tfm);

	return crypto_aead_alg_chunksize(alg);
}
182
/* Register/unregister a single AEAD algorithm with the crypto core. */
int crypto_register_aead(struct aead_alg *alg);
void crypto_unregister_aead(struct aead_alg *alg);
/* Register/unregister an array of @count AEAD algorithms. */
int crypto_register_aeads(struct aead_alg *algs, int count);
void crypto_unregister_aeads(struct aead_alg *algs, int count);
/* Register an instantiated AEAD template instance under @tmpl. */
int aead_register_instance(struct crypto_template *tmpl,
			   struct aead_instance *inst);
189
5b6d2d7f
HX
190#endif /* _CRYPTO_INTERNAL_AEAD_H */
191