/* SPDX-License-Identifier: GPL-2.0-or-later */
/*
 * Symmetric key ciphers.
 *
 * Copyright (c) 2007 Herbert Xu <herbert@gondor.apana.org.au>
 */

#ifndef _CRYPTO_INTERNAL_SKCIPHER_H
#define _CRYPTO_INTERNAL_SKCIPHER_H

#include <crypto/algapi.h>
#include <crypto/skcipher.h>
#include <linux/list.h>
#include <linux/types.h>

struct aead_request;
struct rtattr;

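/*
 * The anonymous union below lets the same object be viewed either as a
 * generic crypto_instance or as a skcipher_alg: "head" pads s.base out to
 * offsetof(struct skcipher_alg, base), so the crypto_alg embedded at the
 * start of s.base overlays alg.base.
 */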
struct skcipher_instance {
	void (*free)(struct skcipher_instance *inst);
	union {
		struct {
			char head[offsetof(struct skcipher_alg, base)];
			struct crypto_instance base;
		} s;
		struct skcipher_alg alg;
	};
};

struct crypto_skcipher_spawn {
	struct crypto_spawn base;
};

struct skcipher_walk {
	union {
		struct {
			struct page *page;
			unsigned long offset;
		} phys;

		struct {
			u8 *page;
			void *addr;
		} virt;
	} src, dst;

	struct scatter_walk in;
	unsigned int nbytes;

	struct scatter_walk out;
	unsigned int total;

	struct list_head buffers;

	u8 *page;
	u8 *buffer;
	u8 *oiv;
	void *iv;

	unsigned int ivsize;

	int flags;
	unsigned int blocksize;
	unsigned int stride;
	unsigned int alignmask;
};

static inline struct crypto_instance *skcipher_crypto_instance(
	struct skcipher_instance *inst)
{
	return &inst->s.base;
}

static inline struct skcipher_instance *skcipher_alg_instance(
	struct crypto_skcipher *skcipher)
{
	return container_of(crypto_skcipher_alg(skcipher),
			    struct skcipher_instance, alg);
}

static inline void *skcipher_instance_ctx(struct skcipher_instance *inst)
{
	return crypto_instance_ctx(skcipher_crypto_instance(inst));
}

static inline void skcipher_request_complete(struct skcipher_request *req, int err)
{
	req->base.complete(&req->base, err);
}

static inline void crypto_set_skcipher_spawn(
	struct crypto_skcipher_spawn *spawn, struct crypto_instance *inst)
{
	crypto_set_spawn(&spawn->base, inst);
}

int crypto_grab_skcipher(struct crypto_skcipher_spawn *spawn, const char *name,
			 u32 type, u32 mask);

static inline void crypto_drop_skcipher(struct crypto_skcipher_spawn *spawn)
{
	crypto_drop_spawn(&spawn->base);
}

static inline struct skcipher_alg *crypto_skcipher_spawn_alg(
	struct crypto_skcipher_spawn *spawn)
{
	return container_of(spawn->base.alg, struct skcipher_alg, base);
}

static inline struct skcipher_alg *crypto_spawn_skcipher_alg(
	struct crypto_skcipher_spawn *spawn)
{
	return crypto_skcipher_spawn_alg(spawn);
}

static inline struct crypto_skcipher *crypto_spawn_skcipher(
	struct crypto_skcipher_spawn *spawn)
{
	return crypto_spawn_tfm2(&spawn->base);
}

static inline void crypto_skcipher_set_reqsize(
	struct crypto_skcipher *skcipher, unsigned int reqsize)
{
	skcipher->reqsize = reqsize;
}

int crypto_register_skcipher(struct skcipher_alg *alg);
void crypto_unregister_skcipher(struct skcipher_alg *alg);
int crypto_register_skciphers(struct skcipher_alg *algs, int count);
void crypto_unregister_skciphers(struct skcipher_alg *algs, int count);
int skcipher_register_instance(struct crypto_template *tmpl,
			       struct skcipher_instance *inst);

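/*
 * Hedged usage sketch (not part of the original header): a template's
 * ->create() callback typically allocates a skcipher_instance whose
 * instance context holds a crypto_skcipher_spawn, binds the spawn to the
 * underlying algorithm with crypto_grab_skcipher(), and finally calls
 * skcipher_register_instance().  All "example_*" names are hypothetical;
 * type/mask handling is simplified to 0, and <linux/slab.h> is assumed
 * for kzalloc()/kfree().
 */
static int example_create(struct crypto_template *tmpl, struct rtattr **tb)
{
	struct crypto_skcipher_spawn *spawn;
	struct skcipher_instance *inst;
	struct skcipher_alg *alg;
	const char *name;
	int err;

	inst = kzalloc(sizeof(*inst) + sizeof(*spawn), GFP_KERNEL);
	if (!inst)
		return -ENOMEM;
	spawn = skcipher_instance_ctx(inst);

	name = crypto_attr_alg_name(tb[1]);
	if (IS_ERR(name)) {
		err = PTR_ERR(name);
		goto err_free_inst;
	}

	crypto_set_skcipher_spawn(spawn, skcipher_crypto_instance(inst));
	err = crypto_grab_skcipher(spawn, name, 0, 0);
	if (err)
		goto err_free_inst;

	alg = crypto_spawn_skcipher_alg(spawn);
	/* ... fill in inst->alg (names, sizes, callbacks) from alg ... */

	err = skcipher_register_instance(tmpl, inst);
	if (err)
		goto err_drop_spawn;
	return 0;

err_drop_spawn:
	crypto_drop_skcipher(spawn);
err_free_inst:
	kfree(inst);
	return err;
}
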
int skcipher_walk_done(struct skcipher_walk *walk, int err);
int skcipher_walk_virt(struct skcipher_walk *walk,
		       struct skcipher_request *req,
		       bool atomic);
void skcipher_walk_atomise(struct skcipher_walk *walk);
int skcipher_walk_async(struct skcipher_walk *walk,
			struct skcipher_request *req);
int skcipher_walk_aead(struct skcipher_walk *walk, struct aead_request *req,
		       bool atomic);
int skcipher_walk_aead_encrypt(struct skcipher_walk *walk,
			       struct aead_request *req, bool atomic);
int skcipher_walk_aead_decrypt(struct skcipher_walk *walk,
			       struct aead_request *req, bool atomic);
void skcipher_walk_complete(struct skcipher_walk *walk, int err);

static inline void skcipher_walk_abort(struct skcipher_walk *walk)
{
	skcipher_walk_done(walk, -ECANCELED);
}

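/*
 * Hedged usage sketch (not part of the original header): an skcipher
 * implementation's ->encrypt() typically iterates over the request with
 * skcipher_walk_virt(), processing walk.nbytes contiguous bytes per step
 * and reporting the number of unprocessed bytes to skcipher_walk_done().
 * The byte-wise XOR "cipher" and the "example_*" name are hypothetical.
 */
static int example_encrypt(struct skcipher_request *req)
{
	struct skcipher_walk walk;
	int err;

	err = skcipher_walk_virt(&walk, req, false);

	while (walk.nbytes) {
		const u8 *src = walk.src.virt.addr;
		u8 *dst = walk.dst.virt.addr;
		unsigned int i;

		/* Toy transform: XOR every byte with a fixed pad. */
		for (i = 0; i < walk.nbytes; i++)
			dst[i] = src[i] ^ 0xaa;

		/* Everything was consumed this step, so 0 bytes remain. */
		err = skcipher_walk_done(&walk, 0);
	}

	return err;
}
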
static inline void *crypto_skcipher_ctx(struct crypto_skcipher *tfm)
{
	return crypto_tfm_ctx(&tfm->base);
}

static inline void *skcipher_request_ctx(struct skcipher_request *req)
{
	return req->__ctx;
}

static inline u32 skcipher_request_flags(struct skcipher_request *req)
{
	return req->base.flags;
}

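/*
 * Hedged usage sketch (not part of the original header): an instance that
 * wraps a child skcipher typically instantiates the child from its spawn
 * in ->init and sizes the request context so that skcipher_request_ctx()
 * has room for a subrequest aimed at the child.  The "example_*" names
 * are hypothetical.
 */
struct example_ctx {
	struct crypto_skcipher *child;
};

static int example_init_tfm(struct crypto_skcipher *tfm)
{
	struct skcipher_instance *inst = skcipher_alg_instance(tfm);
	struct crypto_skcipher_spawn *spawn = skcipher_instance_ctx(inst);
	struct example_ctx *ctx = crypto_skcipher_ctx(tfm);
	struct crypto_skcipher *child;

	child = crypto_spawn_skcipher(spawn);
	if (IS_ERR(child))
		return PTR_ERR(child);

	ctx->child = child;
	crypto_skcipher_set_reqsize(tfm, sizeof(struct skcipher_request) +
					 crypto_skcipher_reqsize(child));
	return 0;
}

static void example_exit_tfm(struct crypto_skcipher *tfm)
{
	struct example_ctx *ctx = crypto_skcipher_ctx(tfm);

	crypto_free_skcipher(ctx->child);
}
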
static inline unsigned int crypto_skcipher_alg_min_keysize(
	struct skcipher_alg *alg)
{
	return alg->min_keysize;
}

static inline unsigned int crypto_skcipher_alg_max_keysize(
	struct skcipher_alg *alg)
{
	return alg->max_keysize;
}

static inline unsigned int crypto_skcipher_alg_walksize(
	struct skcipher_alg *alg)
{
	return alg->walksize;
}

/**
 * crypto_skcipher_walksize() - obtain walk size
 * @tfm: cipher handle
 *
 * In some cases, algorithms can only perform optimally when operating on
 * multiple blocks in parallel. This is reflected by the walksize, which
 * must be a multiple of the chunksize (or equal to it if the concern does
 * not apply).
 *
 * Return: walk size in bytes
 */
static inline unsigned int crypto_skcipher_walksize(
	struct crypto_skcipher *tfm)
{
	return crypto_skcipher_alg_walksize(crypto_skcipher_alg(tfm));
}

/* Helpers for simple block cipher modes of operation */
struct skcipher_ctx_simple {
	struct crypto_cipher *cipher;	/* underlying block cipher */
};

static inline struct crypto_cipher *
skcipher_cipher_simple(struct crypto_skcipher *tfm)
{
	struct skcipher_ctx_simple *ctx = crypto_skcipher_ctx(tfm);

	return ctx->cipher;
}

struct skcipher_instance *
skcipher_alloc_instance_simple(struct crypto_template *tmpl, struct rtattr **tb,
			       struct crypto_alg **cipher_alg_ret);

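/*
 * Hedged usage sketch (not part of the original header), modeled on how a
 * simple mode such as ECB can build on the helper above: the template's
 * ->create() lets skcipher_alloc_instance_simple() handle spawn setup and
 * the inst->alg defaults, tweaks whatever differs, and registers the
 * instance.  The "example_*" name is hypothetical.
 */
static int example_simple_create(struct crypto_template *tmpl,
				 struct rtattr **tb)
{
	struct skcipher_instance *inst;
	struct crypto_alg *alg;
	int err;

	inst = skcipher_alloc_instance_simple(tmpl, tb, &alg);
	if (IS_ERR(inst))
		return PTR_ERR(inst);

	inst->alg.ivsize = 0;	/* e.g. ECB takes no IV */

	err = skcipher_register_instance(tmpl, inst);
	if (err)
		inst->free(inst);

	crypto_mod_put(alg);	/* drop the reference returned via &alg */
	return err;
}
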
#endif /* _CRYPTO_INTERNAL_SKCIPHER_H */