crypto: seqiv - Remove seqniv
crypto/seqiv.c
/*
 * seqiv: Sequence Number IV Generator
 *
 * This generator generates an IV based on a sequence number by xoring it
 * with a salt.  This algorithm is mainly useful for CTR and similar modes.
 *
 * Copyright (c) 2007 Herbert Xu <herbert@gondor.apana.org.au>
 *
 * This program is free software; you can redistribute it and/or modify it
 * under the terms of the GNU General Public License as published by the Free
 * Software Foundation; either version 2 of the License, or (at your option)
 * any later version.
 *
 */
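
/*
 * Illustration (not part of the original source): for an 8-byte IV the
 * generated value is simply the salt XORed with the big-endian sequence
 * number.  With hypothetical values:
 *
 *      salt = 0x0011223344556677
 *      seq  = 5
 *      IV   = salt ^ cpu_to_be64(seq) = 0x0011223344556672
 *
 * For IVs longer than 8 bytes the sequence number occupies the trailing
 * bytes and the rest is zero before the XOR; see seqiv_geniv() below.
 */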

#include <crypto/internal/geniv.h>
#include <crypto/internal/skcipher.h>
#include <crypto/null.h>
#include <crypto/rng.h>
#include <crypto/scatterwalk.h>
#include <linux/err.h>
#include <linux/init.h>
#include <linux/kernel.h>
#include <linux/module.h>
#include <linux/slab.h>
#include <linux/spinlock.h>
#include <linux/string.h>

struct seqiv_ctx {
        spinlock_t lock;
        u8 salt[] __attribute__ ((aligned(__alignof__(u32))));
};

struct seqiv_aead_ctx {
        /* aead_geniv_ctx must be the first element */
        struct aead_geniv_ctx geniv;
        struct crypto_blkcipher *null;
        u8 salt[] __attribute__ ((aligned(__alignof__(u32))));
};

static void seqiv_free(struct crypto_instance *inst);

static void seqiv_complete2(struct skcipher_givcrypt_request *req, int err)
{
        struct ablkcipher_request *subreq = skcipher_givcrypt_reqctx(req);
        struct crypto_ablkcipher *geniv;

        if (err == -EINPROGRESS)
                return;

        if (err)
                goto out;

        geniv = skcipher_givcrypt_reqtfm(req);
        memcpy(req->creq.info, subreq->info, crypto_ablkcipher_ivsize(geniv));

out:
        kfree(subreq->info);
}

static void seqiv_complete(struct crypto_async_request *base, int err)
{
        struct skcipher_givcrypt_request *req = base->data;

        seqiv_complete2(req, err);
        skcipher_givcrypt_complete(req, err);
}

static void seqiv_aead_complete2(struct aead_givcrypt_request *req, int err)
{
        struct aead_request *subreq = aead_givcrypt_reqctx(req);
        struct crypto_aead *geniv;

        if (err == -EINPROGRESS)
                return;

        if (err)
                goto out;

        geniv = aead_givcrypt_reqtfm(req);
        memcpy(req->areq.iv, subreq->iv, crypto_aead_ivsize(geniv));

out:
        kfree(subreq->iv);
}

static void seqiv_aead_complete(struct crypto_async_request *base, int err)
{
        struct aead_givcrypt_request *req = base->data;

        seqiv_aead_complete2(req, err);
        aead_givcrypt_complete(req, err);
}

static void seqiv_aead_encrypt_complete2(struct aead_request *req, int err)
{
        struct aead_request *subreq = aead_request_ctx(req);
        struct crypto_aead *geniv;

        if (err == -EINPROGRESS)
                return;

        if (err)
                goto out;

        geniv = crypto_aead_reqtfm(req);
        memcpy(req->iv, subreq->iv, crypto_aead_ivsize(geniv));

out:
        kzfree(subreq->iv);
}

static void seqiv_aead_encrypt_complete(struct crypto_async_request *base,
                                        int err)
{
        struct aead_request *req = base->data;

        seqiv_aead_encrypt_complete2(req, err);
        aead_request_complete(req, err);
}

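/*
 * Build the IV for the legacy givencrypt paths: the 64-bit sequence
 * number is converted to big-endian and copied into @info (with any
 * leading bytes zeroed when the IV is longer than 8 bytes), and the
 * result is then XORed with the per-tfm salt.
 */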
static void seqiv_geniv(struct seqiv_ctx *ctx, u8 *info, u64 seq,
                        unsigned int ivsize)
{
        unsigned int len = ivsize;

        if (ivsize > sizeof(u64)) {
                memset(info, 0, ivsize - sizeof(u64));
                len = sizeof(u64);
        }
        seq = cpu_to_be64(seq);
        memcpy(info + ivsize - len, &seq, len);
        crypto_xor(info, ctx->salt, ivsize);
}

static int seqiv_givencrypt(struct skcipher_givcrypt_request *req)
{
        struct crypto_ablkcipher *geniv = skcipher_givcrypt_reqtfm(req);
        struct seqiv_ctx *ctx = crypto_ablkcipher_ctx(geniv);
        struct ablkcipher_request *subreq = skcipher_givcrypt_reqctx(req);
        crypto_completion_t compl;
        void *data;
        u8 *info;
        unsigned int ivsize;
        int err;

        ablkcipher_request_set_tfm(subreq, skcipher_geniv_cipher(geniv));

        compl = req->creq.base.complete;
        data = req->creq.base.data;
        info = req->creq.info;

        ivsize = crypto_ablkcipher_ivsize(geniv);

        if (unlikely(!IS_ALIGNED((unsigned long)info,
                                 crypto_ablkcipher_alignmask(geniv) + 1))) {
                info = kmalloc(ivsize, req->creq.base.flags &
                                       CRYPTO_TFM_REQ_MAY_SLEEP ? GFP_KERNEL:
                                                                  GFP_ATOMIC);
                if (!info)
                        return -ENOMEM;

                compl = seqiv_complete;
                data = req;
        }

        ablkcipher_request_set_callback(subreq, req->creq.base.flags, compl,
                                        data);
        ablkcipher_request_set_crypt(subreq, req->creq.src, req->creq.dst,
                                     req->creq.nbytes, info);

        seqiv_geniv(ctx, info, req->seq, ivsize);
        memcpy(req->giv, info, ivsize);

        err = crypto_ablkcipher_encrypt(subreq);
        if (unlikely(info != req->creq.info))
                seqiv_complete2(req, err);
        return err;
}

static int seqiv_aead_givencrypt(struct aead_givcrypt_request *req)
{
        struct crypto_aead *geniv = aead_givcrypt_reqtfm(req);
        struct seqiv_ctx *ctx = crypto_aead_ctx(geniv);
        struct aead_request *areq = &req->areq;
        struct aead_request *subreq = aead_givcrypt_reqctx(req);
        crypto_completion_t compl;
        void *data;
        u8 *info;
        unsigned int ivsize;
        int err;

        aead_request_set_tfm(subreq, aead_geniv_base(geniv));

        compl = areq->base.complete;
        data = areq->base.data;
        info = areq->iv;

        ivsize = crypto_aead_ivsize(geniv);

        if (unlikely(!IS_ALIGNED((unsigned long)info,
                                 crypto_aead_alignmask(geniv) + 1))) {
                info = kmalloc(ivsize, areq->base.flags &
                                       CRYPTO_TFM_REQ_MAY_SLEEP ? GFP_KERNEL:
                                                                  GFP_ATOMIC);
                if (!info)
                        return -ENOMEM;

                compl = seqiv_aead_complete;
                data = req;
        }

        aead_request_set_callback(subreq, areq->base.flags, compl, data);
        aead_request_set_crypt(subreq, areq->src, areq->dst, areq->cryptlen,
                               info);
        aead_request_set_assoc(subreq, areq->assoc, areq->assoclen);

        seqiv_geniv(ctx, info, req->seq, ivsize);
        memcpy(req->giv, info, ivsize);

        err = crypto_aead_encrypt(subreq);
        if (unlikely(info != areq->iv))
                seqiv_aead_complete2(req, err);
        return err;
}

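/*
 * Encrypt path for the new AEAD interface.  Roughly:
 *
 *   1. If src != dst, copy the associated data and plaintext across
 *      using the default null skcipher.
 *   2. XOR the salt into req->iv (which carries the sequence number)
 *      to form the IV and store it in the destination buffer right
 *      after the associated data.
 *   3. Have the inner AEAD encrypt the remaining req->cryptlen - 8
 *      bytes in place, with the stored IV counted as associated data.
 */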
static int seqiv_aead_encrypt(struct aead_request *req)
{
        struct crypto_aead *geniv = crypto_aead_reqtfm(req);
        struct seqiv_aead_ctx *ctx = crypto_aead_ctx(geniv);
        struct aead_request *subreq = aead_request_ctx(req);
        crypto_completion_t compl;
        void *data;
        u8 *info;
        unsigned int ivsize = 8;
        int err;

        if (req->cryptlen < ivsize)
                return -EINVAL;

        aead_request_set_tfm(subreq, ctx->geniv.child);

        compl = req->base.complete;
        data = req->base.data;
        info = req->iv;

        if (req->src != req->dst) {
                struct blkcipher_desc desc = {
                        .tfm = ctx->null,
                };

                err = crypto_blkcipher_encrypt(&desc, req->dst, req->src,
                                               req->assoclen + req->cryptlen);
                if (err)
                        return err;
        }

        if (unlikely(!IS_ALIGNED((unsigned long)info,
                                 crypto_aead_alignmask(geniv) + 1))) {
                info = kmalloc(ivsize, req->base.flags &
                                       CRYPTO_TFM_REQ_MAY_SLEEP ? GFP_KERNEL:
                                                                  GFP_ATOMIC);
                if (!info)
                        return -ENOMEM;

                memcpy(info, req->iv, ivsize);
                compl = seqiv_aead_encrypt_complete;
                data = req;
        }

        aead_request_set_callback(subreq, req->base.flags, compl, data);
        aead_request_set_crypt(subreq, req->dst, req->dst,
                               req->cryptlen - ivsize, info);
        aead_request_set_ad(subreq, req->assoclen + ivsize);

        crypto_xor(info, ctx->salt, ivsize);
        scatterwalk_map_and_copy(info, req->dst, req->assoclen, ivsize, 1);

        err = crypto_aead_encrypt(subreq);
        if (unlikely(info != req->iv))
                seqiv_aead_encrypt_complete2(req, err);
        return err;
}

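/*
 * Decrypt path: the IV produced at encryption time sits right after
 * the associated data in the source buffer, so it is copied back into
 * req->iv and the remainder is handed to the inner AEAD, again with
 * the IV counted as part of the associated data.
 */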
static int seqiv_aead_decrypt(struct aead_request *req)
{
        struct crypto_aead *geniv = crypto_aead_reqtfm(req);
        struct seqiv_aead_ctx *ctx = crypto_aead_ctx(geniv);
        struct aead_request *subreq = aead_request_ctx(req);
        crypto_completion_t compl;
        void *data;
        unsigned int ivsize = 8;

        if (req->cryptlen < ivsize + crypto_aead_authsize(geniv))
                return -EINVAL;

        aead_request_set_tfm(subreq, ctx->geniv.child);

        compl = req->base.complete;
        data = req->base.data;

        aead_request_set_callback(subreq, req->base.flags, compl, data);
        aead_request_set_crypt(subreq, req->src, req->dst,
                               req->cryptlen - ivsize, req->iv);
        aead_request_set_ad(subreq, req->assoclen + ivsize);

        scatterwalk_map_and_copy(req->iv, req->src, req->assoclen, ivsize, 0);

        return crypto_aead_decrypt(subreq);
}

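/*
 * Legacy ablkcipher geniv init: seed the salt from the default RNG and
 * install seqiv_givencrypt before chaining to skcipher_geniv_init().
 */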
static int seqiv_init(struct crypto_tfm *tfm)
{
        struct crypto_ablkcipher *geniv = __crypto_ablkcipher_cast(tfm);
        struct seqiv_ctx *ctx = crypto_ablkcipher_ctx(geniv);
        int err;

        spin_lock_init(&ctx->lock);

        tfm->crt_ablkcipher.reqsize = sizeof(struct ablkcipher_request);

        err = 0;
        if (!crypto_get_default_rng()) {
                crypto_ablkcipher_crt(geniv)->givencrypt = seqiv_givencrypt;
                err = crypto_rng_get_bytes(crypto_default_rng, ctx->salt,
                                           crypto_ablkcipher_ivsize(geniv));
                crypto_put_default_rng();
        }

        return err ?: skcipher_geniv_init(tfm);
}

static int seqiv_old_aead_init(struct crypto_tfm *tfm)
{
        struct crypto_aead *geniv = __crypto_aead_cast(tfm);
        struct seqiv_ctx *ctx = crypto_aead_ctx(geniv);
        int err;

        spin_lock_init(&ctx->lock);

        crypto_aead_set_reqsize(__crypto_aead_cast(tfm),
                                sizeof(struct aead_request));
        err = 0;
        if (!crypto_get_default_rng()) {
                geniv->givencrypt = seqiv_aead_givencrypt;
                err = crypto_rng_get_bytes(crypto_default_rng, ctx->salt,
                                           crypto_aead_ivsize(geniv));
                crypto_put_default_rng();
        }

        return err ?: aead_geniv_init(tfm);
}

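/*
 * Common init for the new AEAD interface: seed the salt from the
 * default RNG, grab the default null skcipher (used for the src to dst
 * copy in seqiv_aead_encrypt()) and set up the underlying geniv child.
 */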
static int seqiv_aead_init_common(struct crypto_aead *geniv,
                                  unsigned int reqsize)
{
        struct seqiv_aead_ctx *ctx = crypto_aead_ctx(geniv);
        int err;

        spin_lock_init(&ctx->geniv.lock);

        crypto_aead_set_reqsize(geniv, sizeof(struct aead_request));

        err = crypto_get_default_rng();
        if (err)
                goto out;

        err = crypto_rng_get_bytes(crypto_default_rng, ctx->salt,
                                   crypto_aead_ivsize(geniv));
        crypto_put_default_rng();
        if (err)
                goto out;

        ctx->null = crypto_get_default_null_skcipher();
        err = PTR_ERR(ctx->null);
        if (IS_ERR(ctx->null))
                goto out;

        err = aead_geniv_init(crypto_aead_tfm(geniv));
        if (err)
                goto drop_null;

        ctx->geniv.child = geniv->child;
        geniv->child = geniv;

out:
        return err;

drop_null:
        crypto_put_default_null_skcipher();
        goto out;
}

static int seqiv_aead_init(struct crypto_aead *tfm)
{
        return seqiv_aead_init_common(tfm, sizeof(struct aead_request));
}

static void seqiv_aead_exit(struct crypto_aead *tfm)
{
        struct seqiv_aead_ctx *ctx = crypto_aead_ctx(tfm);

        crypto_free_aead(ctx->geniv.child);
        crypto_put_default_null_skcipher();
}

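/*
 * Instantiate seqiv for an ablkcipher algorithm via the skcipher geniv
 * machinery; the IV must be at least as large as the 64-bit sequence
 * number.
 */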
static int seqiv_ablkcipher_create(struct crypto_template *tmpl,
                                   struct rtattr **tb)
{
        struct crypto_instance *inst;
        int err;

        inst = skcipher_geniv_alloc(tmpl, tb, 0, 0);

        if (IS_ERR(inst))
                return PTR_ERR(inst);

        err = -EINVAL;
        if (inst->alg.cra_ablkcipher.ivsize < sizeof(u64))
                goto free_inst;

        inst->alg.cra_init = seqiv_init;
        inst->alg.cra_exit = skcipher_geniv_exit;

        inst->alg.cra_ctxsize += inst->alg.cra_ablkcipher.ivsize;
        inst->alg.cra_ctxsize += sizeof(struct seqiv_ctx);

        inst->alg.cra_alignmask |= __alignof__(u32) - 1;

        err = crypto_register_instance(tmpl, inst);
        if (err)
                goto free_inst;

out:
        return err;

free_inst:
        skcipher_geniv_free(inst);
        goto out;
}

static int seqiv_old_aead_create(struct crypto_template *tmpl,
                                 struct aead_instance *aead)
{
        struct crypto_instance *inst = aead_crypto_instance(aead);
        int err = -EINVAL;

        if (inst->alg.cra_aead.ivsize < sizeof(u64))
                goto free_inst;

        inst->alg.cra_init = seqiv_old_aead_init;
        inst->alg.cra_exit = aead_geniv_exit;

        inst->alg.cra_ctxsize = inst->alg.cra_aead.ivsize;
        inst->alg.cra_ctxsize += sizeof(struct seqiv_ctx);

        err = crypto_register_instance(tmpl, inst);
        if (err)
                goto free_inst;

out:
        return err;

free_inst:
        aead_geniv_free(aead);
        goto out;
}

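/*
 * Instantiate seqiv on top of an AEAD.  Instances built on the old
 * AEAD interface are handed off to seqiv_old_aead_create(); for the
 * new interface the IV must be exactly 8 bytes and the encrypt/decrypt
 * handlers above are installed.
 */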
static int seqiv_aead_create(struct crypto_template *tmpl, struct rtattr **tb)
{
        struct aead_instance *inst;
        struct crypto_aead_spawn *spawn;
        struct aead_alg *alg;
        int err;

        inst = aead_geniv_alloc(tmpl, tb, 0, 0);

        if (IS_ERR(inst))
                return PTR_ERR(inst);

        inst->alg.base.cra_alignmask |= __alignof__(u32) - 1;

        if (inst->alg.base.cra_aead.encrypt)
                return seqiv_old_aead_create(tmpl, inst);

        spawn = aead_instance_ctx(inst);
        alg = crypto_spawn_aead_alg(spawn);

        if (alg->base.cra_aead.encrypt)
                goto done;

        err = -EINVAL;
        if (inst->alg.ivsize != sizeof(u64))
                goto free_inst;

        inst->alg.encrypt = seqiv_aead_encrypt;
        inst->alg.decrypt = seqiv_aead_decrypt;

        inst->alg.init = seqiv_aead_init;
        inst->alg.exit = seqiv_aead_exit;

        inst->alg.base.cra_ctxsize = sizeof(struct seqiv_aead_ctx);
        inst->alg.base.cra_ctxsize += inst->alg.ivsize;

done:
        err = aead_register_instance(tmpl, inst);
        if (err)
                goto free_inst;

out:
        return err;

free_inst:
        aead_geniv_free(inst);
        goto out;
}

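/*
 * Template entry point: dispatch on the requested algorithm type,
 * i.e. ablkcipher/skcipher vs. AEAD.
 */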
static int seqiv_create(struct crypto_template *tmpl, struct rtattr **tb)
{
        struct crypto_attr_type *algt;
        int err;

        algt = crypto_get_attr_type(tb);
        if (IS_ERR(algt))
                return PTR_ERR(algt);

        if ((algt->type ^ CRYPTO_ALG_TYPE_AEAD) & CRYPTO_ALG_TYPE_MASK)
                err = seqiv_ablkcipher_create(tmpl, tb);
        else
                err = seqiv_aead_create(tmpl, tb);

        return err;
}

static void seqiv_free(struct crypto_instance *inst)
{
        if ((inst->alg.cra_flags ^ CRYPTO_ALG_TYPE_AEAD) & CRYPTO_ALG_TYPE_MASK)
                skcipher_geniv_free(inst);
        else
                aead_geniv_free(aead_instance(inst));
}

static struct crypto_template seqiv_tmpl = {
        .name = "seqiv",
        .create = seqiv_create,
        .free = seqiv_free,
        .module = THIS_MODULE,
};

static int __init seqiv_module_init(void)
{
        return crypto_register_template(&seqiv_tmpl);
}

static void __exit seqiv_module_exit(void)
{
        crypto_unregister_template(&seqiv_tmpl);
}

module_init(seqiv_module_init);
module_exit(seqiv_module_exit);

MODULE_LICENSE("GPL");
MODULE_DESCRIPTION("Sequence Number IV Generator");
MODULE_ALIAS_CRYPTO("seqiv");