crypto: remove CRYPTO_TFM_RES_WEAK_KEY
crypto/ctr.c

// SPDX-License-Identifier: GPL-2.0-or-later
/*
 * CTR: Counter mode
 *
 * (C) Copyright IBM Corp. 2007 - Joy Latten <latten@us.ibm.com>
 */

#include <crypto/algapi.h>
#include <crypto/ctr.h>
#include <crypto/internal/skcipher.h>
#include <linux/err.h>
#include <linux/init.h>
#include <linux/kernel.h>
#include <linux/module.h>
#include <linux/slab.h>

struct crypto_rfc3686_ctx {
	struct crypto_skcipher *child;
	u8 nonce[CTR_RFC3686_NONCE_SIZE];
};

struct crypto_rfc3686_req_ctx {
	u8 iv[CTR_RFC3686_BLOCK_SIZE];
	struct skcipher_request subreq CRYPTO_MINALIGN_ATTR;
};

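/*
 * Handle the final, possibly partial, block: generate one keystream
 * block into an aligned stack buffer and XOR just walk->nbytes bytes
 * of it into the output. The counter block is still incremented once
 * so walk->iv stays consistent.
 */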
static void crypto_ctr_crypt_final(struct skcipher_walk *walk,
				   struct crypto_cipher *tfm)
{
	unsigned int bsize = crypto_cipher_blocksize(tfm);
	unsigned long alignmask = crypto_cipher_alignmask(tfm);
	u8 *ctrblk = walk->iv;
	u8 tmp[MAX_CIPHER_BLOCKSIZE + MAX_CIPHER_ALIGNMASK];
	u8 *keystream = PTR_ALIGN(tmp + 0, alignmask + 1);
	u8 *src = walk->src.virt.addr;
	u8 *dst = walk->dst.virt.addr;
	unsigned int nbytes = walk->nbytes;

	crypto_cipher_encrypt_one(tfm, keystream, ctrblk);
	crypto_xor_cpy(dst, keystream, src, nbytes);

	crypto_inc(ctrblk, bsize);
}

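/*
 * Out-of-place path for full blocks: the destination buffer doubles as
 * the keystream buffer, so each counter block is encrypted straight
 * into dst and then XORed with src.
 */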
static int crypto_ctr_crypt_segment(struct skcipher_walk *walk,
				    struct crypto_cipher *tfm)
{
	void (*fn)(struct crypto_tfm *, u8 *, const u8 *) =
		   crypto_cipher_alg(tfm)->cia_encrypt;
	unsigned int bsize = crypto_cipher_blocksize(tfm);
	u8 *ctrblk = walk->iv;
	u8 *src = walk->src.virt.addr;
	u8 *dst = walk->dst.virt.addr;
	unsigned int nbytes = walk->nbytes;

	do {
		/* create keystream */
		fn(crypto_cipher_tfm(tfm), dst, ctrblk);
		crypto_xor(dst, src, bsize);

		/* increment counter in counterblock */
		crypto_inc(ctrblk, bsize);

		src += bsize;
		dst += bsize;
	} while ((nbytes -= bsize) >= bsize);

	return nbytes;
}

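/*
 * In-place path for full blocks: src and dst alias, so the keystream
 * must be generated in an aligned temporary buffer before it is XORed
 * into the data.
 */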
static int crypto_ctr_crypt_inplace(struct skcipher_walk *walk,
				    struct crypto_cipher *tfm)
{
	void (*fn)(struct crypto_tfm *, u8 *, const u8 *) =
		   crypto_cipher_alg(tfm)->cia_encrypt;
	unsigned int bsize = crypto_cipher_blocksize(tfm);
	unsigned long alignmask = crypto_cipher_alignmask(tfm);
	unsigned int nbytes = walk->nbytes;
	u8 *ctrblk = walk->iv;
	u8 *src = walk->src.virt.addr;
	u8 tmp[MAX_CIPHER_BLOCKSIZE + MAX_CIPHER_ALIGNMASK];
	u8 *keystream = PTR_ALIGN(tmp + 0, alignmask + 1);

	do {
		/* create keystream */
		fn(crypto_cipher_tfm(tfm), keystream, ctrblk);
		crypto_xor(src, keystream, bsize);

		/* increment counter in counterblock */
		crypto_inc(ctrblk, bsize);

		src += bsize;
	} while ((nbytes -= bsize) >= bsize);

	return nbytes;
}

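/*
 * Top-level handler for both directions. CTR turns the block cipher's
 * encrypt primitive into a keystream XOR, so encryption and decryption
 * are the same operation and share this function.
 */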
static int crypto_ctr_crypt(struct skcipher_request *req)
{
	struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
	struct crypto_cipher *cipher = skcipher_cipher_simple(tfm);
	const unsigned int bsize = crypto_cipher_blocksize(cipher);
	struct skcipher_walk walk;
	unsigned int nbytes;
	int err;

	err = skcipher_walk_virt(&walk, req, false);

	while (walk.nbytes >= bsize) {
		if (walk.src.virt.addr == walk.dst.virt.addr)
			nbytes = crypto_ctr_crypt_inplace(&walk, cipher);
		else
			nbytes = crypto_ctr_crypt_segment(&walk, cipher);

		err = skcipher_walk_done(&walk, nbytes);
	}

	if (walk.nbytes) {
		crypto_ctr_crypt_final(&walk, cipher);
		err = skcipher_walk_done(&walk, 0);
	}

	return err;
}

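/*
 * The "ctr" template wraps a plain block cipher, e.g. "ctr(aes)", and
 * exposes it as a stream cipher via the simple-skcipher helpers.
 */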
static int crypto_ctr_create(struct crypto_template *tmpl, struct rtattr **tb)
{
	struct skcipher_instance *inst;
	struct crypto_alg *alg;
	int err;

	inst = skcipher_alloc_instance_simple(tmpl, tb);
	if (IS_ERR(inst))
		return PTR_ERR(inst);

	alg = skcipher_ialg_simple(inst);

	/* Block size must be >= 4 bytes. */
	err = -EINVAL;
	if (alg->cra_blocksize < 4)
		goto out_free_inst;

	/* If this is false we'd fail the alignment of crypto_inc. */
	if (alg->cra_blocksize % 4)
		goto out_free_inst;

	/* CTR mode is a stream cipher. */
	inst->alg.base.cra_blocksize = 1;

	/*
	 * To simplify the implementation, configure the skcipher walk to only
	 * give a partial block at the very end, never earlier.
	 */
	inst->alg.chunksize = alg->cra_blocksize;

	inst->alg.encrypt = crypto_ctr_crypt;
	inst->alg.decrypt = crypto_ctr_crypt;

	err = skcipher_register_instance(tmpl, inst);
	if (err) {
out_free_inst:
		inst->free(inst);
	}

	return err;
}

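/*
 * rfc3686 keys carry the nonce inline: the last CTR_RFC3686_NONCE_SIZE
 * (4) bytes of the key material are the nonce and the rest is the
 * child cipher's key, so e.g. AES-128 under rfc3686 takes a 20-byte key.
 */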
static int crypto_rfc3686_setkey(struct crypto_skcipher *parent,
				 const u8 *key, unsigned int keylen)
{
	struct crypto_rfc3686_ctx *ctx = crypto_skcipher_ctx(parent);
	struct crypto_skcipher *child = ctx->child;
	int err;

	/* the nonce is stored in the last CTR_RFC3686_NONCE_SIZE bytes of the key */
	if (keylen < CTR_RFC3686_NONCE_SIZE)
		return -EINVAL;

	memcpy(ctx->nonce, key + (keylen - CTR_RFC3686_NONCE_SIZE),
	       CTR_RFC3686_NONCE_SIZE);

	keylen -= CTR_RFC3686_NONCE_SIZE;

	crypto_skcipher_clear_flags(child, CRYPTO_TFM_REQ_MASK);
	crypto_skcipher_set_flags(child, crypto_skcipher_get_flags(parent) &
					 CRYPTO_TFM_REQ_MASK);
	err = crypto_skcipher_setkey(child, key, keylen);
	crypto_skcipher_set_flags(parent, crypto_skcipher_get_flags(child) &
					  CRYPTO_TFM_RES_MASK);

	return err;
}

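/*
 * RFC 3686 counter block layout (CTR_RFC3686_BLOCK_SIZE = 16 bytes):
 *   bytes  0..3   nonce, taken from the key in setkey()
 *   bytes  4..11  per-request IV
 *   bytes 12..15  big-endian block counter, starting at 1
 */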
static int crypto_rfc3686_crypt(struct skcipher_request *req)
{
	struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
	struct crypto_rfc3686_ctx *ctx = crypto_skcipher_ctx(tfm);
	struct crypto_skcipher *child = ctx->child;
	unsigned long align = crypto_skcipher_alignmask(tfm);
	struct crypto_rfc3686_req_ctx *rctx =
		(void *)PTR_ALIGN((u8 *)skcipher_request_ctx(req), align + 1);
	struct skcipher_request *subreq = &rctx->subreq;
	u8 *iv = rctx->iv;

	/* set up counter block */
	memcpy(iv, ctx->nonce, CTR_RFC3686_NONCE_SIZE);
	memcpy(iv + CTR_RFC3686_NONCE_SIZE, req->iv, CTR_RFC3686_IV_SIZE);

	/* initialize counter portion of counter block */
	*(__be32 *)(iv + CTR_RFC3686_NONCE_SIZE + CTR_RFC3686_IV_SIZE) =
		cpu_to_be32(1);

	skcipher_request_set_tfm(subreq, child);
	skcipher_request_set_callback(subreq, req->base.flags,
				      req->base.complete, req->base.data);
	skcipher_request_set_crypt(subreq, req->src, req->dst,
				   req->cryptlen, iv);

	return crypto_skcipher_encrypt(subreq);
}

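/*
 * The per-request context (counter block plus the child's subrequest)
 * lives in the tail of the skcipher request, so the request size must
 * cover alignment padding as well as the child's own request size.
 */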
static int crypto_rfc3686_init_tfm(struct crypto_skcipher *tfm)
{
	struct skcipher_instance *inst = skcipher_alg_instance(tfm);
	struct crypto_skcipher_spawn *spawn = skcipher_instance_ctx(inst);
	struct crypto_rfc3686_ctx *ctx = crypto_skcipher_ctx(tfm);
	struct crypto_skcipher *cipher;
	unsigned long align;
	unsigned int reqsize;

	cipher = crypto_spawn_skcipher(spawn);
	if (IS_ERR(cipher))
		return PTR_ERR(cipher);

	ctx->child = cipher;

	align = crypto_skcipher_alignmask(tfm);
	align &= ~(crypto_tfm_ctx_alignment() - 1);
	reqsize = align + sizeof(struct crypto_rfc3686_req_ctx) +
		  crypto_skcipher_reqsize(cipher);
	crypto_skcipher_set_reqsize(tfm, reqsize);

	return 0;
}

static void crypto_rfc3686_exit_tfm(struct crypto_skcipher *tfm)
{
	struct crypto_rfc3686_ctx *ctx = crypto_skcipher_ctx(tfm);

	crypto_free_skcipher(ctx->child);
}

static void crypto_rfc3686_free(struct skcipher_instance *inst)
{
	struct crypto_skcipher_spawn *spawn = skcipher_instance_ctx(inst);

	crypto_drop_skcipher(spawn);
	kfree(inst);
}

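/*
 * The "rfc3686" template wraps an existing CTR implementation, which
 * must look like a stream cipher with a 16-byte IV, and layers the
 * RFC 3686 nonce/IV/counter convention on top of it.
 */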
static int crypto_rfc3686_create(struct crypto_template *tmpl,
				 struct rtattr **tb)
{
	struct crypto_attr_type *algt;
	struct skcipher_instance *inst;
	struct skcipher_alg *alg;
	struct crypto_skcipher_spawn *spawn;
	const char *cipher_name;
	u32 mask;
	int err;

	algt = crypto_get_attr_type(tb);
	if (IS_ERR(algt))
		return PTR_ERR(algt);

	if ((algt->type ^ CRYPTO_ALG_TYPE_SKCIPHER) & algt->mask)
		return -EINVAL;

	cipher_name = crypto_attr_alg_name(tb[1]);
	if (IS_ERR(cipher_name))
		return PTR_ERR(cipher_name);

	inst = kzalloc(sizeof(*inst) + sizeof(*spawn), GFP_KERNEL);
	if (!inst)
		return -ENOMEM;

	mask = crypto_requires_sync(algt->type, algt->mask) |
		crypto_requires_off(algt->type, algt->mask,
				    CRYPTO_ALG_NEED_FALLBACK);

	spawn = skcipher_instance_ctx(inst);

	crypto_set_skcipher_spawn(spawn, skcipher_crypto_instance(inst));
	err = crypto_grab_skcipher(spawn, cipher_name, 0, mask);
	if (err)
		goto err_free_inst;

	alg = crypto_spawn_skcipher_alg(spawn);

	/* The underlying CTR transform must use a 16-byte counter block as its IV. */
	err = -EINVAL;
	if (crypto_skcipher_alg_ivsize(alg) != CTR_RFC3686_BLOCK_SIZE)
		goto err_drop_spawn;

	/* Not a stream cipher? */
	if (alg->base.cra_blocksize != 1)
		goto err_drop_spawn;

	err = -ENAMETOOLONG;
	if (snprintf(inst->alg.base.cra_name, CRYPTO_MAX_ALG_NAME,
		     "rfc3686(%s)", alg->base.cra_name) >= CRYPTO_MAX_ALG_NAME)
		goto err_drop_spawn;
	if (snprintf(inst->alg.base.cra_driver_name, CRYPTO_MAX_ALG_NAME,
		     "rfc3686(%s)", alg->base.cra_driver_name) >=
	    CRYPTO_MAX_ALG_NAME)
		goto err_drop_spawn;

	inst->alg.base.cra_priority = alg->base.cra_priority;
	inst->alg.base.cra_blocksize = 1;
	inst->alg.base.cra_alignmask = alg->base.cra_alignmask;

	inst->alg.base.cra_flags = alg->base.cra_flags & CRYPTO_ALG_ASYNC;

	inst->alg.ivsize = CTR_RFC3686_IV_SIZE;
	inst->alg.chunksize = crypto_skcipher_alg_chunksize(alg);
	inst->alg.min_keysize = crypto_skcipher_alg_min_keysize(alg) +
				CTR_RFC3686_NONCE_SIZE;
	inst->alg.max_keysize = crypto_skcipher_alg_max_keysize(alg) +
				CTR_RFC3686_NONCE_SIZE;

	inst->alg.setkey = crypto_rfc3686_setkey;
	inst->alg.encrypt = crypto_rfc3686_crypt;
	inst->alg.decrypt = crypto_rfc3686_crypt;

	inst->alg.base.cra_ctxsize = sizeof(struct crypto_rfc3686_ctx);

	inst->alg.init = crypto_rfc3686_init_tfm;
	inst->alg.exit = crypto_rfc3686_exit_tfm;

	inst->free = crypto_rfc3686_free;

	err = skcipher_register_instance(tmpl, inst);
	if (err)
		goto err_drop_spawn;

out:
	return err;

err_drop_spawn:
	crypto_drop_skcipher(spawn);
err_free_inst:
	kfree(inst);
	goto out;
}

static struct crypto_template crypto_ctr_tmpls[] = {
	{
		.name = "ctr",
		.create = crypto_ctr_create,
		.module = THIS_MODULE,
	}, {
		.name = "rfc3686",
		.create = crypto_rfc3686_create,
		.module = THIS_MODULE,
	},
};

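/*
 * Illustrative use (a sketch only; error handling elided): once these
 * templates are registered, callers can instantiate them by name
 * through the regular skcipher API, e.g.:
 *
 *	struct crypto_skcipher *tfm;
 *
 *	tfm = crypto_alloc_skcipher("rfc3686(ctr(aes))", 0, 0);
 *	// key: 16-byte AES-128 key followed by 4-byte nonce (20 bytes)
 *	crypto_skcipher_setkey(tfm, key, 20);
 */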
static int __init crypto_ctr_module_init(void)
{
	return crypto_register_templates(crypto_ctr_tmpls,
					 ARRAY_SIZE(crypto_ctr_tmpls));
}

static void __exit crypto_ctr_module_exit(void)
{
	crypto_unregister_templates(crypto_ctr_tmpls,
				    ARRAY_SIZE(crypto_ctr_tmpls));
}

subsys_initcall(crypto_ctr_module_init);
module_exit(crypto_ctr_module_exit);

MODULE_LICENSE("GPL");
MODULE_DESCRIPTION("CTR block cipher mode of operation");
MODULE_ALIAS_CRYPTO("rfc3686");
MODULE_ALIAS_CRYPTO("ctr");