/*
 * AEAD: Authenticated Encryption with Associated Data
 *
 * This file provides API support for AEAD algorithms.
 *
 * Copyright (c) 2007 Herbert Xu <herbert@gondor.apana.org.au>
 *
 * This program is free software; you can redistribute it and/or modify it
 * under the terms of the GNU General Public License as published by the Free
 * Software Foundation; either version 2 of the License, or (at your option)
 * any later version.
 *
 */

#include <crypto/internal/aead.h>
#include <crypto/scatterwalk.h>
#include <linux/err.h>
#include <linux/init.h>
#include <linux/kernel.h>
#include <linux/module.h>
#include <linux/rtnetlink.h>
#include <linux/sched.h>
#include <linux/slab.h>
#include <linux/seq_file.h>
#include <linux/cryptouser.h>
#include <net/netlink.h>

#include "internal.h"

static int aead_null_givencrypt(struct aead_givcrypt_request *req);
static int aead_null_givdecrypt(struct aead_givcrypt_request *req);

static int setkey_unaligned(struct crypto_aead *tfm, const u8 *key,
			    unsigned int keylen)
{
	unsigned long alignmask = crypto_aead_alignmask(tfm);
	int ret;
	u8 *buffer, *alignbuffer;
	unsigned long absize;

	absize = keylen + alignmask;
	buffer = kmalloc(absize, GFP_ATOMIC);
	if (!buffer)
		return -ENOMEM;

	alignbuffer = (u8 *)ALIGN((unsigned long)buffer, alignmask + 1);
	memcpy(alignbuffer, key, keylen);
	ret = tfm->setkey(tfm, alignbuffer, keylen);
	memset(alignbuffer, 0, keylen);
	kfree(buffer);
	return ret;
}

int crypto_aead_setkey(struct crypto_aead *tfm,
		       const u8 *key, unsigned int keylen)
{
	unsigned long alignmask = crypto_aead_alignmask(tfm);

	tfm = tfm->child;

	if ((unsigned long)key & alignmask)
		return setkey_unaligned(tfm, key, keylen);

	return tfm->setkey(tfm, key, keylen);
}
EXPORT_SYMBOL_GPL(crypto_aead_setkey);

int crypto_aead_setauthsize(struct crypto_aead *tfm, unsigned int authsize)
{
	int err;

	if (authsize > crypto_aead_maxauthsize(tfm))
		return -EINVAL;

	if (tfm->setauthsize) {
		err = tfm->setauthsize(tfm->child, authsize);
		if (err)
			return err;
	}

	tfm->child->authsize = authsize;
	tfm->authsize = authsize;
	return 0;
}
EXPORT_SYMBOL_GPL(crypto_aead_setauthsize);

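/*
 * Example (editorial sketch, not part of the original file): a typical
 * caller keys an AEAD transform and then selects the authentication tag
 * size.  The algorithm name, key length and tag size below are
 * illustrative assumptions only.
 */
#if 0
static int example_aead_setup(const u8 *key, unsigned int keylen)
{
	struct crypto_aead *tfm;
	int err;

	tfm = crypto_alloc_aead("gcm(aes)", 0, 0);
	if (IS_ERR(tfm))
		return PTR_ERR(tfm);

	err = crypto_aead_setkey(tfm, key, keylen);
	if (!err)
		err = crypto_aead_setauthsize(tfm, 16);

	crypto_free_aead(tfm);
	return err;
}
#endif
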
struct aead_old_request {
	struct scatterlist srcbuf[2];
	struct scatterlist dstbuf[2];
	struct aead_request subreq;
};

unsigned int crypto_aead_reqsize(struct crypto_aead *tfm)
{
	return tfm->reqsize + sizeof(struct aead_old_request);
}
EXPORT_SYMBOL_GPL(crypto_aead_reqsize);

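/*
 * Editorial note: old_crypt() bridges requests issued through the new
 * AEAD request layout to algorithms that still implement the old
 * interface.  Requests already in the old format are passed straight
 * through.  Otherwise it fast-forwards the source and destination
 * scatterlists past the associated data (plus the cryptoff offset),
 * builds a subrequest in the extra request context reserved by
 * crypto_aead_reqsize(), and hands that subrequest to the old-style
 * encrypt/decrypt callback.
 */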
static int old_crypt(struct aead_request *req,
		     int (*crypt)(struct aead_request *req))
{
	struct aead_old_request *nreq = aead_request_ctx(req);
	struct crypto_aead *aead = crypto_aead_reqtfm(req);
	struct scatterlist *src, *dst;

	if (req->old)
		return crypt(req);

	src = scatterwalk_ffwd(nreq->srcbuf, req->src,
			       req->assoclen + req->cryptoff);
	dst = scatterwalk_ffwd(nreq->dstbuf, req->dst,
			       req->assoclen + req->cryptoff);

	aead_request_set_tfm(&nreq->subreq, aead);
	aead_request_set_callback(&nreq->subreq, aead_request_flags(req),
				  req->base.complete, req->base.data);
	aead_request_set_crypt(&nreq->subreq, src, dst, req->cryptlen,
			       req->iv);
	aead_request_set_assoc(&nreq->subreq, req->src, req->assoclen);

	return crypt(&nreq->subreq);
}

static int old_encrypt(struct aead_request *req)
{
	struct crypto_aead *aead = crypto_aead_reqtfm(req);
	struct old_aead_alg *alg = crypto_old_aead_alg(aead);

	return old_crypt(req, alg->encrypt);
}

static int old_decrypt(struct aead_request *req)
{
	struct crypto_aead *aead = crypto_aead_reqtfm(req);
	struct old_aead_alg *alg = crypto_old_aead_alg(aead);

	return old_crypt(req, alg->decrypt);
}

static int no_givcrypt(struct aead_givcrypt_request *req)
{
	return -ENOSYS;
}

static int crypto_old_aead_init_tfm(struct crypto_tfm *tfm)
{
	struct old_aead_alg *alg = &tfm->__crt_alg->cra_aead;
	struct crypto_aead *crt = __crypto_aead_cast(tfm);

	if (max(alg->maxauthsize, alg->ivsize) > PAGE_SIZE / 8)
		return -EINVAL;

	crt->setkey = alg->setkey;
	crt->setauthsize = alg->setauthsize;
	crt->encrypt = old_encrypt;
	crt->decrypt = old_decrypt;
	if (alg->ivsize) {
		crt->givencrypt = alg->givencrypt ?: no_givcrypt;
		crt->givdecrypt = alg->givdecrypt ?: no_givcrypt;
	} else {
		crt->givencrypt = aead_null_givencrypt;
		crt->givdecrypt = aead_null_givdecrypt;
	}
	crt->child = __crypto_aead_cast(tfm);
	crt->authsize = alg->maxauthsize;

	return 0;
}

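/*
 * Editorial note: crypto_aead_init_tfm() serves both generations of the
 * AEAD interface.  Algorithms that still populate the old
 * cra_aead/old_aead_alg encrypt callback are routed through
 * crypto_old_aead_init_tfm(); algorithms converted to struct aead_alg
 * have their callbacks copied into the transform directly.
 */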
static int crypto_aead_init_tfm(struct crypto_tfm *tfm)
{
	struct crypto_aead *aead = __crypto_aead_cast(tfm);
	struct aead_alg *alg = crypto_aead_alg(aead);

	if (crypto_old_aead_alg(aead)->encrypt)
		return crypto_old_aead_init_tfm(tfm);

	aead->setkey = alg->setkey;
	aead->setauthsize = alg->setauthsize;
	aead->encrypt = alg->encrypt;
	aead->decrypt = alg->decrypt;
	aead->child = __crypto_aead_cast(tfm);
	aead->authsize = alg->maxauthsize;

	return 0;
}

#ifdef CONFIG_NET
static int crypto_old_aead_report(struct sk_buff *skb, struct crypto_alg *alg)
{
	struct crypto_report_aead raead;
	struct old_aead_alg *aead = &alg->cra_aead;

	strncpy(raead.type, "aead", sizeof(raead.type));
	strncpy(raead.geniv, aead->geniv ?: "<built-in>", sizeof(raead.geniv));

	raead.blocksize = alg->cra_blocksize;
	raead.maxauthsize = aead->maxauthsize;
	raead.ivsize = aead->ivsize;

	if (nla_put(skb, CRYPTOCFGA_REPORT_AEAD,
		    sizeof(struct crypto_report_aead), &raead))
		goto nla_put_failure;
	return 0;

nla_put_failure:
	return -EMSGSIZE;
}
#else
static int crypto_old_aead_report(struct sk_buff *skb, struct crypto_alg *alg)
{
	return -ENOSYS;
}
#endif

static void crypto_old_aead_show(struct seq_file *m, struct crypto_alg *alg)
	__attribute__ ((unused));
static void crypto_old_aead_show(struct seq_file *m, struct crypto_alg *alg)
{
	struct old_aead_alg *aead = &alg->cra_aead;

	seq_printf(m, "type         : aead\n");
	seq_printf(m, "async        : %s\n", alg->cra_flags & CRYPTO_ALG_ASYNC ?
					     "yes" : "no");
	seq_printf(m, "blocksize    : %u\n", alg->cra_blocksize);
	seq_printf(m, "ivsize       : %u\n", aead->ivsize);
	seq_printf(m, "maxauthsize  : %u\n", aead->maxauthsize);
	seq_printf(m, "geniv        : %s\n", aead->geniv ?: "<built-in>");
}

const struct crypto_type crypto_aead_type = {
	.extsize = crypto_alg_extsize,
	.init_tfm = crypto_aead_init_tfm,
#ifdef CONFIG_PROC_FS
	.show = crypto_old_aead_show,
#endif
	.report = crypto_old_aead_report,
	.lookup = crypto_lookup_aead,
	.maskclear = ~(CRYPTO_ALG_TYPE_MASK | CRYPTO_ALG_GENIV),
	.maskset = CRYPTO_ALG_TYPE_MASK,
	.type = CRYPTO_ALG_TYPE_AEAD,
	.tfmsize = offsetof(struct crypto_aead, base),
};
EXPORT_SYMBOL_GPL(crypto_aead_type);

#ifdef CONFIG_NET
static int crypto_aead_report(struct sk_buff *skb, struct crypto_alg *alg)
{
	struct crypto_report_aead raead;
	struct aead_alg *aead = container_of(alg, struct aead_alg, base);

	strncpy(raead.type, "aead", sizeof(raead.type));
	strncpy(raead.geniv, "<none>", sizeof(raead.geniv));

	raead.blocksize = alg->cra_blocksize;
	raead.maxauthsize = aead->maxauthsize;
	raead.ivsize = aead->ivsize;

	if (nla_put(skb, CRYPTOCFGA_REPORT_AEAD,
		    sizeof(struct crypto_report_aead), &raead))
		goto nla_put_failure;
	return 0;

nla_put_failure:
	return -EMSGSIZE;
}
#else
static int crypto_aead_report(struct sk_buff *skb, struct crypto_alg *alg)
{
	return -ENOSYS;
}
#endif

static void crypto_aead_show(struct seq_file *m, struct crypto_alg *alg)
	__attribute__ ((unused));
static void crypto_aead_show(struct seq_file *m, struct crypto_alg *alg)
{
	struct aead_alg *aead = container_of(alg, struct aead_alg, base);

	seq_printf(m, "type         : aead\n");
	seq_printf(m, "async        : %s\n", alg->cra_flags & CRYPTO_ALG_ASYNC ?
					     "yes" : "no");
	seq_printf(m, "blocksize    : %u\n", alg->cra_blocksize);
	seq_printf(m, "ivsize       : %u\n", aead->ivsize);
	seq_printf(m, "maxauthsize  : %u\n", aead->maxauthsize);
	seq_printf(m, "geniv        : <none>\n");
}

static const struct crypto_type crypto_new_aead_type = {
	.extsize = crypto_alg_extsize,
	.init_tfm = crypto_aead_init_tfm,
#ifdef CONFIG_PROC_FS
	.show = crypto_aead_show,
#endif
	.report = crypto_aead_report,
	.maskclear = ~CRYPTO_ALG_TYPE_MASK,
	.maskset = CRYPTO_ALG_TYPE_MASK,
	.type = CRYPTO_ALG_TYPE_AEAD,
	.tfmsize = offsetof(struct crypto_aead, base),
};

static int aead_null_givencrypt(struct aead_givcrypt_request *req)
{
	return crypto_aead_encrypt(&req->areq);
}

static int aead_null_givdecrypt(struct aead_givcrypt_request *req)
{
	return crypto_aead_decrypt(&req->areq);
}

#ifdef CONFIG_NET
static int crypto_nivaead_report(struct sk_buff *skb, struct crypto_alg *alg)
{
	struct crypto_report_aead raead;
	struct old_aead_alg *aead = &alg->cra_aead;

	strncpy(raead.type, "nivaead", sizeof(raead.type));
	strncpy(raead.geniv, aead->geniv, sizeof(raead.geniv));

	raead.blocksize = alg->cra_blocksize;
	raead.maxauthsize = aead->maxauthsize;
	raead.ivsize = aead->ivsize;

	if (nla_put(skb, CRYPTOCFGA_REPORT_AEAD,
		    sizeof(struct crypto_report_aead), &raead))
		goto nla_put_failure;
	return 0;

nla_put_failure:
	return -EMSGSIZE;
}
#else
static int crypto_nivaead_report(struct sk_buff *skb, struct crypto_alg *alg)
{
	return -ENOSYS;
}
#endif


static void crypto_nivaead_show(struct seq_file *m, struct crypto_alg *alg)
	__attribute__ ((unused));
static void crypto_nivaead_show(struct seq_file *m, struct crypto_alg *alg)
{
	struct old_aead_alg *aead = &alg->cra_aead;

	seq_printf(m, "type         : nivaead\n");
	seq_printf(m, "async        : %s\n", alg->cra_flags & CRYPTO_ALG_ASYNC ?
					     "yes" : "no");
	seq_printf(m, "blocksize    : %u\n", alg->cra_blocksize);
	seq_printf(m, "ivsize       : %u\n", aead->ivsize);
	seq_printf(m, "maxauthsize  : %u\n", aead->maxauthsize);
	seq_printf(m, "geniv        : %s\n", aead->geniv);
}

const struct crypto_type crypto_nivaead_type = {
	.extsize = crypto_alg_extsize,
	.init_tfm = crypto_aead_init_tfm,
#ifdef CONFIG_PROC_FS
	.show = crypto_nivaead_show,
#endif
	.report = crypto_nivaead_report,
	.maskclear = ~(CRYPTO_ALG_TYPE_MASK | CRYPTO_ALG_GENIV),
	.maskset = CRYPTO_ALG_TYPE_MASK | CRYPTO_ALG_GENIV,
	.type = CRYPTO_ALG_TYPE_AEAD,
	.tfmsize = offsetof(struct crypto_aead, base),
};
EXPORT_SYMBOL_GPL(crypto_nivaead_type);

static int crypto_grab_nivaead(struct crypto_aead_spawn *spawn,
			       const char *name, u32 type, u32 mask)
{
	spawn->base.frontend = &crypto_nivaead_type;
	return crypto_grab_spawn(&spawn->base, name, type, mask);
}

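/*
 * Editorial note: aead_geniv_alloc() builds the template instance that
 * wraps an IV generator around a nivaead algorithm.  When called with
 * CRYPTO_ALG_GENIV set in the mask it constructs the algorithm's
 * default IV generator, reusing the underlying algorithm's name;
 * otherwise the instance is named "<template>(<algorithm>)".
 */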
struct aead_instance *aead_geniv_alloc(struct crypto_template *tmpl,
				       struct rtattr **tb, u32 type, u32 mask)
{
	const char *name;
	struct crypto_aead_spawn *spawn;
	struct crypto_attr_type *algt;
	struct aead_instance *inst;
	struct aead_alg *alg;
	unsigned int ivsize;
	unsigned int maxauthsize;
	int err;

	algt = crypto_get_attr_type(tb);
	if (IS_ERR(algt))
		return ERR_CAST(algt);

	if ((algt->type ^ (CRYPTO_ALG_TYPE_AEAD | CRYPTO_ALG_GENIV)) &
	    algt->mask)
		return ERR_PTR(-EINVAL);

	name = crypto_attr_alg_name(tb[1]);
	if (IS_ERR(name))
		return ERR_CAST(name);

	inst = kzalloc(sizeof(*inst) + sizeof(*spawn), GFP_KERNEL);
	if (!inst)
		return ERR_PTR(-ENOMEM);

	spawn = aead_instance_ctx(inst);

	/* Ignore async algorithms if necessary. */
	mask |= crypto_requires_sync(algt->type, algt->mask);

	crypto_set_aead_spawn(spawn, aead_crypto_instance(inst));
	err = crypto_grab_nivaead(spawn, name, type, mask);
	if (err)
		goto err_free_inst;

	alg = crypto_spawn_aead_alg(spawn);

	ivsize = crypto_aead_alg_ivsize(alg);
	maxauthsize = crypto_aead_alg_maxauthsize(alg);

	err = -EINVAL;
	if (!ivsize)
		goto err_drop_alg;

	/*
	 * This is only true if we're constructing an algorithm with its
	 * default IV generator.  For the default generator we elide the
	 * template name and double-check the IV generator.
	 */
	if (algt->mask & CRYPTO_ALG_GENIV) {
		if (!alg->base.cra_aead.encrypt)
			goto err_drop_alg;
		if (strcmp(tmpl->name, alg->base.cra_aead.geniv))
			goto err_drop_alg;

		memcpy(inst->alg.base.cra_name, alg->base.cra_name,
		       CRYPTO_MAX_ALG_NAME);
		memcpy(inst->alg.base.cra_driver_name,
		       alg->base.cra_driver_name, CRYPTO_MAX_ALG_NAME);

		inst->alg.base.cra_flags = CRYPTO_ALG_TYPE_AEAD |
					   CRYPTO_ALG_GENIV;
		inst->alg.base.cra_flags |= alg->base.cra_flags &
					    CRYPTO_ALG_ASYNC;
		inst->alg.base.cra_priority = alg->base.cra_priority;
		inst->alg.base.cra_blocksize = alg->base.cra_blocksize;
		inst->alg.base.cra_alignmask = alg->base.cra_alignmask;
		inst->alg.base.cra_type = &crypto_aead_type;

		inst->alg.base.cra_aead.ivsize = ivsize;
		inst->alg.base.cra_aead.maxauthsize = maxauthsize;

		inst->alg.base.cra_aead.setkey = alg->base.cra_aead.setkey;
		inst->alg.base.cra_aead.setauthsize =
			alg->base.cra_aead.setauthsize;
		inst->alg.base.cra_aead.encrypt = alg->base.cra_aead.encrypt;
		inst->alg.base.cra_aead.decrypt = alg->base.cra_aead.decrypt;

		goto out;
	}

	err = -ENAMETOOLONG;
	if (snprintf(inst->alg.base.cra_name, CRYPTO_MAX_ALG_NAME,
		     "%s(%s)", tmpl->name, alg->base.cra_name) >=
	    CRYPTO_MAX_ALG_NAME)
		goto err_drop_alg;
	if (snprintf(inst->alg.base.cra_driver_name, CRYPTO_MAX_ALG_NAME,
		     "%s(%s)", tmpl->name, alg->base.cra_driver_name) >=
	    CRYPTO_MAX_ALG_NAME)
		goto err_drop_alg;

	inst->alg.base.cra_flags = alg->base.cra_flags & CRYPTO_ALG_ASYNC;
	inst->alg.base.cra_priority = alg->base.cra_priority;
	inst->alg.base.cra_blocksize = alg->base.cra_blocksize;
	inst->alg.base.cra_alignmask = alg->base.cra_alignmask;

	inst->alg.ivsize = ivsize;
	inst->alg.maxauthsize = maxauthsize;

out:
	return inst;

err_drop_alg:
	crypto_drop_aead(spawn);
err_free_inst:
	kfree(inst);
	inst = ERR_PTR(err);
	goto out;
}
EXPORT_SYMBOL_GPL(aead_geniv_alloc);

void aead_geniv_free(struct aead_instance *inst)
{
	crypto_drop_aead(aead_instance_ctx(inst));
	kfree(inst);
}
EXPORT_SYMBOL_GPL(aead_geniv_free);

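/*
 * Example (editorial sketch, not part of the original file): a geniv
 * template's ->create() callback might be built on aead_geniv_alloc()
 * and aead_register_instance().  The function name and the omitted
 * per-template setup are hypothetical.
 */
#if 0
static int example_geniv_create(struct crypto_template *tmpl,
				struct rtattr **tb)
{
	struct aead_instance *inst;
	int err;

	inst = aead_geniv_alloc(tmpl, tb, 0, 0);
	if (IS_ERR(inst))
		return PTR_ERR(inst);

	/* A real template would hook up its own IV-generating
	 * encrypt/decrypt paths and context size here. */

	err = aead_register_instance(tmpl, inst);
	if (err)
		aead_geniv_free(inst);

	return err;
}
#endif
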
int aead_geniv_init(struct crypto_tfm *tfm)
{
	struct crypto_instance *inst = (void *)tfm->__crt_alg;
	struct crypto_aead *child;
	struct crypto_aead *aead;

	aead = __crypto_aead_cast(tfm);

	child = crypto_spawn_aead(crypto_instance_ctx(inst));
	if (IS_ERR(child))
		return PTR_ERR(child);

	aead->child = child;
	aead->reqsize += crypto_aead_reqsize(child);

	return 0;
}
EXPORT_SYMBOL_GPL(aead_geniv_init);

void aead_geniv_exit(struct crypto_tfm *tfm)
{
	crypto_free_aead(__crypto_aead_cast(tfm)->child);
}
EXPORT_SYMBOL_GPL(aead_geniv_exit);

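/*
 * Editorial note: crypto_nivaead_default() instantiates the default IV
 * generator for an algorithm that only provides a nivaead
 * implementation.  It looks up the template named by cra_aead.geniv,
 * instantiates it against this algorithm, and then returns -EAGAIN so
 * that the caller repeats the lookup and finds the newly registered
 * instance.
 */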
static int crypto_nivaead_default(struct crypto_alg *alg, u32 type, u32 mask)
{
	struct rtattr *tb[3];
	struct {
		struct rtattr attr;
		struct crypto_attr_type data;
	} ptype;
	struct {
		struct rtattr attr;
		struct crypto_attr_alg data;
	} palg;
	struct crypto_template *tmpl;
	struct crypto_instance *inst;
	struct crypto_alg *larval;
	const char *geniv;
	int err;

	larval = crypto_larval_lookup(alg->cra_driver_name,
				      CRYPTO_ALG_TYPE_AEAD | CRYPTO_ALG_GENIV,
				      CRYPTO_ALG_TYPE_MASK | CRYPTO_ALG_GENIV);
	err = PTR_ERR(larval);
	if (IS_ERR(larval))
		goto out;

	err = -EAGAIN;
	if (!crypto_is_larval(larval))
		goto drop_larval;

	ptype.attr.rta_len = sizeof(ptype);
	ptype.attr.rta_type = CRYPTOA_TYPE;
	ptype.data.type = type | CRYPTO_ALG_GENIV;
	/* GENIV tells the template that we're making a default geniv. */
	ptype.data.mask = mask | CRYPTO_ALG_GENIV;
	tb[0] = &ptype.attr;

	palg.attr.rta_len = sizeof(palg);
	palg.attr.rta_type = CRYPTOA_ALG;
	/* Must use the exact name to locate ourselves. */
	memcpy(palg.data.name, alg->cra_driver_name, CRYPTO_MAX_ALG_NAME);
	tb[1] = &palg.attr;

	tb[2] = NULL;

	geniv = alg->cra_aead.geniv;

	tmpl = crypto_lookup_template(geniv);
	err = -ENOENT;
	if (!tmpl)
		goto kill_larval;

	if (tmpl->create) {
		err = tmpl->create(tmpl, tb);
		if (err)
			goto put_tmpl;
		goto ok;
	}

	inst = tmpl->alloc(tb);
	err = PTR_ERR(inst);
	if (IS_ERR(inst))
		goto put_tmpl;

	err = crypto_register_instance(tmpl, inst);
	if (err) {
		tmpl->free(inst);
		goto put_tmpl;
	}

ok:
	/* Redo the lookup to use the instance we just registered. */
	err = -EAGAIN;

put_tmpl:
	crypto_tmpl_put(tmpl);
kill_larval:
	crypto_larval_kill(larval);
drop_larval:
	crypto_mod_put(larval);
out:
	crypto_mod_put(alg);
	return err;
}

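/*
 * Editorial note: crypto_lookup_aead() resolves a name to a full AEAD
 * implementation.  Algorithms that already use crypto_aead_type, or
 * old-style AEADs with no IV generator requirement (ivsize == 0), are
 * returned as-is.  Otherwise the lookup is repeated (also accepting
 * not-yet-tested algorithms) in case a geniv-wrapped instance already
 * exists; failing that, the default IV generator is instantiated
 * through crypto_nivaead_default().
 */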
struct crypto_alg *crypto_lookup_aead(const char *name, u32 type, u32 mask)
{
	struct crypto_alg *alg;

	alg = crypto_alg_mod_lookup(name, type, mask);
	if (IS_ERR(alg))
		return alg;

	if (alg->cra_type == &crypto_aead_type)
		return alg;

	if (!alg->cra_aead.ivsize)
		return alg;

	crypto_mod_put(alg);
	alg = crypto_alg_mod_lookup(name, type | CRYPTO_ALG_TESTED,
				    mask & ~CRYPTO_ALG_TESTED);
	if (IS_ERR(alg))
		return alg;

	if (alg->cra_type == &crypto_aead_type) {
		if (~alg->cra_flags & (type ^ ~mask) & CRYPTO_ALG_TESTED) {
			crypto_mod_put(alg);
			alg = ERR_PTR(-ENOENT);
		}
		return alg;
	}

	BUG_ON(!alg->cra_aead.ivsize);

	return ERR_PTR(crypto_nivaead_default(alg, type, mask));
}
EXPORT_SYMBOL_GPL(crypto_lookup_aead);

int crypto_grab_aead(struct crypto_aead_spawn *spawn, const char *name,
		     u32 type, u32 mask)
{
	spawn->base.frontend = &crypto_aead_type;
	return crypto_grab_spawn(&spawn->base, name, type, mask);
}
EXPORT_SYMBOL_GPL(crypto_grab_aead);

struct crypto_aead *crypto_alloc_aead(const char *alg_name, u32 type, u32 mask)
{
	return crypto_alloc_tfm(alg_name, &crypto_aead_type, type, mask);
}
EXPORT_SYMBOL_GPL(crypto_alloc_aead);

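/*
 * Example (editorial sketch, not part of the original file): one-shot
 * encryption through the request API as used elsewhere in this file.
 * Scatterlist setup, IV generation and asynchronous completion handling
 * are deliberately elided; the function name is hypothetical.
 */
#if 0
static int example_aead_encrypt_once(struct crypto_aead *tfm,
				     struct scatterlist *src,
				     struct scatterlist *dst,
				     struct scatterlist *assoc,
				     unsigned int cryptlen,
				     unsigned int assoclen, u8 *iv)
{
	struct aead_request *req;
	int err;

	req = aead_request_alloc(tfm, GFP_KERNEL);
	if (!req)
		return -ENOMEM;

	/* No completion callback here: a real caller of an asynchronous
	 * transform must handle -EINPROGRESS/-EBUSY. */
	aead_request_set_callback(req, 0, NULL, NULL);
	aead_request_set_crypt(req, src, dst, cryptlen, iv);
	aead_request_set_assoc(req, assoc, assoclen);

	err = crypto_aead_encrypt(req);

	aead_request_free(req);
	return err;
}
#endif
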
static int aead_prepare_alg(struct aead_alg *alg)
{
	struct crypto_alg *base = &alg->base;

	if (max(alg->maxauthsize, alg->ivsize) > PAGE_SIZE / 8)
		return -EINVAL;

	base->cra_type = &crypto_new_aead_type;
	base->cra_flags &= ~CRYPTO_ALG_TYPE_MASK;
	base->cra_flags |= CRYPTO_ALG_TYPE_AEAD;

	return 0;
}

int crypto_register_aead(struct aead_alg *alg)
{
	struct crypto_alg *base = &alg->base;
	int err;

	err = aead_prepare_alg(alg);
	if (err)
		return err;

	return crypto_register_alg(base);
}
EXPORT_SYMBOL_GPL(crypto_register_aead);

int crypto_unregister_aead(struct aead_alg *alg)
{
	return crypto_unregister_alg(&alg->base);
}
EXPORT_SYMBOL_GPL(crypto_unregister_aead);

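/*
 * Example (editorial sketch, not part of the original file): a driver
 * converted to the new interface fills in a struct aead_alg and
 * registers it with crypto_register_aead().  All names, sizes and the
 * stub callbacks below are hypothetical placeholders.
 */
#if 0
static int example_setkey(struct crypto_aead *tfm, const u8 *key,
			  unsigned int keylen)
{
	return keylen == 16 ? 0 : -EINVAL;
}

static int example_crypt(struct aead_request *req)
{
	return -ENOSYS;
}

static struct aead_alg example_aead_alg = {
	.setkey		= example_setkey,
	.encrypt	= example_crypt,
	.decrypt	= example_crypt,
	.ivsize		= 12,
	.maxauthsize	= 16,
	.base		= {
		.cra_name		= "example-mode(example-cipher)",
		.cra_driver_name	= "example-aead-driver",
		.cra_priority		= 100,
		.cra_blocksize		= 1,
		.cra_module		= THIS_MODULE,
	},
};

static int __init example_register(void)
{
	return crypto_register_aead(&example_aead_alg);
}
#endif
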
int aead_register_instance(struct crypto_template *tmpl,
			   struct aead_instance *inst)
{
	int err;

	err = aead_prepare_alg(&inst->alg);
	if (err)
		return err;

	return crypto_register_instance(tmpl, aead_crypto_instance(inst));
}
EXPORT_SYMBOL_GPL(aead_register_instance);

MODULE_LICENSE("GPL");
MODULE_DESCRIPTION("Authenticated Encryption with Associated Data (AEAD)");