/*
 * AEAD: Authenticated Encryption with Associated Data
 *
 * This file provides API support for AEAD algorithms.
 *
 * Copyright (c) 2007 Herbert Xu <herbert@gondor.apana.org.au>
 *
 * This program is free software; you can redistribute it and/or modify it
 * under the terms of the GNU General Public License as published by the Free
 * Software Foundation; either version 2 of the License, or (at your option)
 * any later version.
 *
 */

#include <crypto/internal/geniv.h>
#include <crypto/internal/rng.h>
#include <crypto/null.h>
#include <crypto/scatterwalk.h>
#include <linux/err.h>
#include <linux/init.h>
#include <linux/kernel.h>
#include <linux/module.h>
#include <linux/rtnetlink.h>
#include <linux/sched.h>
#include <linux/slab.h>
#include <linux/seq_file.h>
#include <linux/cryptouser.h>
#include <net/netlink.h>

#include "internal.h"

struct compat_request_ctx {
        struct scatterlist src[2];
        struct scatterlist dst[2];
        struct scatterlist ivbuf[2];
        struct scatterlist *ivsg;
        struct aead_givcrypt_request subreq;
};

static int aead_null_givencrypt(struct aead_givcrypt_request *req);
static int aead_null_givdecrypt(struct aead_givcrypt_request *req);

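/*
 * Copy the key into a buffer aligned to the transform's alignment mask
 * before handing it to ->setkey().  The key bytes are wiped from the
 * bounce buffer before it is freed.
 */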
static int setkey_unaligned(struct crypto_aead *tfm, const u8 *key,
                            unsigned int keylen)
{
        unsigned long alignmask = crypto_aead_alignmask(tfm);
        int ret;
        u8 *buffer, *alignbuffer;
        unsigned long absize;

        absize = keylen + alignmask;
        buffer = kmalloc(absize, GFP_ATOMIC);
        if (!buffer)
                return -ENOMEM;

        alignbuffer = (u8 *)ALIGN((unsigned long)buffer, alignmask + 1);
        memcpy(alignbuffer, key, keylen);
        ret = tfm->setkey(tfm, alignbuffer, keylen);
        memset(alignbuffer, 0, keylen);
        kfree(buffer);
        return ret;
}

int crypto_aead_setkey(struct crypto_aead *tfm,
                       const u8 *key, unsigned int keylen)
{
        unsigned long alignmask = crypto_aead_alignmask(tfm);

        tfm = tfm->child;

        if ((unsigned long)key & alignmask)
                return setkey_unaligned(tfm, key, keylen);

        return tfm->setkey(tfm, key, keylen);
}
EXPORT_SYMBOL_GPL(crypto_aead_setkey);

int crypto_aead_setauthsize(struct crypto_aead *tfm, unsigned int authsize)
{
        int err;

        if (authsize > crypto_aead_maxauthsize(tfm))
                return -EINVAL;

        if (tfm->setauthsize) {
                err = tfm->setauthsize(tfm->child, authsize);
                if (err)
                        return err;
        }

        tfm->child->authsize = authsize;
        tfm->authsize = authsize;
        return 0;
}
EXPORT_SYMBOL_GPL(crypto_aead_setauthsize);

struct aead_old_request {
        struct scatterlist srcbuf[2];
        struct scatterlist dstbuf[2];
        struct aead_request subreq;
};

unsigned int crypto_aead_reqsize(struct crypto_aead *tfm)
{
        return tfm->reqsize + sizeof(struct aead_old_request);
}
EXPORT_SYMBOL_GPL(crypto_aead_reqsize);

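/*
 * Translate a request made through the new AEAD interface (a single
 * scatterlist with the associated data in front) into the old interface,
 * which takes separate assoc/src/dst lists.  Requests already in the old
 * layout (req->old) are passed straight through; otherwise
 * scatterwalk_ffwd() skips past the associated data before the old entry
 * point is invoked.
 */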
static int old_crypt(struct aead_request *req,
                     int (*crypt)(struct aead_request *req))
{
        struct aead_old_request *nreq = aead_request_ctx(req);
        struct crypto_aead *aead = crypto_aead_reqtfm(req);
        struct scatterlist *src, *dst;

        if (req->old)
                return crypt(req);

        src = scatterwalk_ffwd(nreq->srcbuf, req->src, req->assoclen);
        dst = req->src == req->dst ?
              src : scatterwalk_ffwd(nreq->dstbuf, req->dst, req->assoclen);

        aead_request_set_tfm(&nreq->subreq, aead);
        aead_request_set_callback(&nreq->subreq, aead_request_flags(req),
                                  req->base.complete, req->base.data);
        aead_request_set_crypt(&nreq->subreq, src, dst, req->cryptlen,
                               req->iv);
        aead_request_set_assoc(&nreq->subreq, req->src, req->assoclen);

        return crypt(&nreq->subreq);
}

static int old_encrypt(struct aead_request *req)
{
        struct crypto_aead *aead = crypto_aead_reqtfm(req);
        struct old_aead_alg *alg = crypto_old_aead_alg(aead);

        return old_crypt(req, alg->encrypt);
}

static int old_decrypt(struct aead_request *req)
{
        struct crypto_aead *aead = crypto_aead_reqtfm(req);
        struct old_aead_alg *alg = crypto_old_aead_alg(aead);

        return old_crypt(req, alg->decrypt);
}

static int no_givcrypt(struct aead_givcrypt_request *req)
{
        return -ENOSYS;
}

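/*
 * Initialise a transform backed by an old-style AEAD algorithm: wire the
 * old setkey/setauthsize hooks and the old_encrypt/old_decrypt wrappers
 * into the crypto_aead, and fall back to stub givcrypt handlers when the
 * algorithm does not provide its own.
 */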
static int crypto_old_aead_init_tfm(struct crypto_tfm *tfm)
{
        struct old_aead_alg *alg = &tfm->__crt_alg->cra_aead;
        struct crypto_aead *crt = __crypto_aead_cast(tfm);

        if (max(alg->maxauthsize, alg->ivsize) > PAGE_SIZE / 8)
                return -EINVAL;

        crt->setkey = alg->setkey;
        crt->setauthsize = alg->setauthsize;
        crt->encrypt = old_encrypt;
        crt->decrypt = old_decrypt;
        if (alg->ivsize) {
                crt->givencrypt = alg->givencrypt ?: no_givcrypt;
                crt->givdecrypt = alg->givdecrypt ?: no_givcrypt;
        } else {
                crt->givencrypt = aead_null_givencrypt;
                crt->givdecrypt = aead_null_givdecrypt;
        }
        crt->child = __crypto_aead_cast(tfm);
        crt->authsize = alg->maxauthsize;

        return 0;
}

static void crypto_aead_exit_tfm(struct crypto_tfm *tfm)
{
        struct crypto_aead *aead = __crypto_aead_cast(tfm);
        struct aead_alg *alg = crypto_aead_alg(aead);

        alg->exit(aead);
}

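/*
 * Common tfm initialisation.  Algorithms still implemented against the old
 * interface (crypto_old_aead_alg()->encrypt is set) are routed through
 * crypto_old_aead_init_tfm(); new-style algorithms get their ops copied
 * straight from struct aead_alg.
 */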
static int crypto_aead_init_tfm(struct crypto_tfm *tfm)
{
        struct crypto_aead *aead = __crypto_aead_cast(tfm);
        struct aead_alg *alg = crypto_aead_alg(aead);

        if (crypto_old_aead_alg(aead)->encrypt)
                return crypto_old_aead_init_tfm(tfm);

        aead->setkey = alg->setkey;
        aead->setauthsize = alg->setauthsize;
        aead->encrypt = alg->encrypt;
        aead->decrypt = alg->decrypt;
        aead->child = __crypto_aead_cast(tfm);
        aead->authsize = alg->maxauthsize;

        if (alg->exit)
                aead->base.exit = crypto_aead_exit_tfm;

        if (alg->init)
                return alg->init(aead);

        return 0;
}

#ifdef CONFIG_NET
static int crypto_old_aead_report(struct sk_buff *skb, struct crypto_alg *alg)
{
        struct crypto_report_aead raead;
        struct old_aead_alg *aead = &alg->cra_aead;

        strncpy(raead.type, "aead", sizeof(raead.type));
        strncpy(raead.geniv, aead->geniv ?: "<built-in>", sizeof(raead.geniv));

        raead.blocksize = alg->cra_blocksize;
        raead.maxauthsize = aead->maxauthsize;
        raead.ivsize = aead->ivsize;

        if (nla_put(skb, CRYPTOCFGA_REPORT_AEAD,
                    sizeof(struct crypto_report_aead), &raead))
                goto nla_put_failure;
        return 0;

nla_put_failure:
        return -EMSGSIZE;
}
#else
static int crypto_old_aead_report(struct sk_buff *skb, struct crypto_alg *alg)
{
        return -ENOSYS;
}
#endif

static void crypto_old_aead_show(struct seq_file *m, struct crypto_alg *alg)
        __attribute__ ((unused));
static void crypto_old_aead_show(struct seq_file *m, struct crypto_alg *alg)
{
        struct old_aead_alg *aead = &alg->cra_aead;

        seq_printf(m, "type         : aead\n");
        seq_printf(m, "async        : %s\n", alg->cra_flags & CRYPTO_ALG_ASYNC ?
                                             "yes" : "no");
        seq_printf(m, "blocksize    : %u\n", alg->cra_blocksize);
        seq_printf(m, "ivsize       : %u\n", aead->ivsize);
        seq_printf(m, "maxauthsize  : %u\n", aead->maxauthsize);
        seq_printf(m, "geniv        : %s\n", aead->geniv ?: "<built-in>");
}

const struct crypto_type crypto_aead_type = {
        .extsize = crypto_alg_extsize,
        .init_tfm = crypto_aead_init_tfm,
#ifdef CONFIG_PROC_FS
        .show = crypto_old_aead_show,
#endif
        .report = crypto_old_aead_report,
        .lookup = crypto_lookup_aead,
        .maskclear = ~(CRYPTO_ALG_TYPE_MASK | CRYPTO_ALG_GENIV),
        .maskset = CRYPTO_ALG_TYPE_MASK,
        .type = CRYPTO_ALG_TYPE_AEAD,
        .tfmsize = offsetof(struct crypto_aead, base),
};
EXPORT_SYMBOL_GPL(crypto_aead_type);

#ifdef CONFIG_NET
static int crypto_aead_report(struct sk_buff *skb, struct crypto_alg *alg)
{
        struct crypto_report_aead raead;
        struct aead_alg *aead = container_of(alg, struct aead_alg, base);

        strncpy(raead.type, "aead", sizeof(raead.type));
        strncpy(raead.geniv, "<none>", sizeof(raead.geniv));

        raead.blocksize = alg->cra_blocksize;
        raead.maxauthsize = aead->maxauthsize;
        raead.ivsize = aead->ivsize;

        if (nla_put(skb, CRYPTOCFGA_REPORT_AEAD,
                    sizeof(struct crypto_report_aead), &raead))
                goto nla_put_failure;
        return 0;

nla_put_failure:
        return -EMSGSIZE;
}
#else
static int crypto_aead_report(struct sk_buff *skb, struct crypto_alg *alg)
{
        return -ENOSYS;
}
#endif

static void crypto_aead_show(struct seq_file *m, struct crypto_alg *alg)
        __attribute__ ((unused));
static void crypto_aead_show(struct seq_file *m, struct crypto_alg *alg)
{
        struct aead_alg *aead = container_of(alg, struct aead_alg, base);

        seq_printf(m, "type         : aead\n");
        seq_printf(m, "async        : %s\n", alg->cra_flags & CRYPTO_ALG_ASYNC ?
                                             "yes" : "no");
        seq_printf(m, "blocksize    : %u\n", alg->cra_blocksize);
        seq_printf(m, "ivsize       : %u\n", aead->ivsize);
        seq_printf(m, "maxauthsize  : %u\n", aead->maxauthsize);
        seq_printf(m, "geniv        : <none>\n");
}

static void crypto_aead_free_instance(struct crypto_instance *inst)
{
        struct aead_instance *aead = aead_instance(inst);

        if (!aead->free) {
                inst->tmpl->free(inst);
                return;
        }

        aead->free(aead);
}

static const struct crypto_type crypto_new_aead_type = {
        .extsize = crypto_alg_extsize,
        .init_tfm = crypto_aead_init_tfm,
        .free = crypto_aead_free_instance,
#ifdef CONFIG_PROC_FS
        .show = crypto_aead_show,
#endif
        .report = crypto_aead_report,
        .maskclear = ~CRYPTO_ALG_TYPE_MASK,
        .maskset = CRYPTO_ALG_TYPE_MASK,
        .type = CRYPTO_ALG_TYPE_AEAD,
        .tfmsize = offsetof(struct crypto_aead, base),
};

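/*
 * For old-style algorithms without an IV (ivsize == 0) the givcrypt entry
 * points simply forward to plain encrypt/decrypt; there is no IV to
 * generate.
 */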
static int aead_null_givencrypt(struct aead_givcrypt_request *req)
{
        return crypto_aead_encrypt(&req->areq);
}

static int aead_null_givdecrypt(struct aead_givcrypt_request *req)
{
        return crypto_aead_decrypt(&req->areq);
}

#ifdef CONFIG_NET
static int crypto_nivaead_report(struct sk_buff *skb, struct crypto_alg *alg)
{
        struct crypto_report_aead raead;
        struct old_aead_alg *aead = &alg->cra_aead;

        strncpy(raead.type, "nivaead", sizeof(raead.type));
        strncpy(raead.geniv, aead->geniv, sizeof(raead.geniv));

        raead.blocksize = alg->cra_blocksize;
        raead.maxauthsize = aead->maxauthsize;
        raead.ivsize = aead->ivsize;

        if (nla_put(skb, CRYPTOCFGA_REPORT_AEAD,
                    sizeof(struct crypto_report_aead), &raead))
                goto nla_put_failure;
        return 0;

nla_put_failure:
        return -EMSGSIZE;
}
#else
static int crypto_nivaead_report(struct sk_buff *skb, struct crypto_alg *alg)
{
        return -ENOSYS;
}
#endif

static void crypto_nivaead_show(struct seq_file *m, struct crypto_alg *alg)
        __attribute__ ((unused));
static void crypto_nivaead_show(struct seq_file *m, struct crypto_alg *alg)
{
        struct old_aead_alg *aead = &alg->cra_aead;

        seq_printf(m, "type         : nivaead\n");
        seq_printf(m, "async        : %s\n", alg->cra_flags & CRYPTO_ALG_ASYNC ?
                                             "yes" : "no");
        seq_printf(m, "blocksize    : %u\n", alg->cra_blocksize);
        seq_printf(m, "ivsize       : %u\n", aead->ivsize);
        seq_printf(m, "maxauthsize  : %u\n", aead->maxauthsize);
        seq_printf(m, "geniv        : %s\n", aead->geniv);
}

const struct crypto_type crypto_nivaead_type = {
        .extsize = crypto_alg_extsize,
        .init_tfm = crypto_aead_init_tfm,
#ifdef CONFIG_PROC_FS
        .show = crypto_nivaead_show,
#endif
        .report = crypto_nivaead_report,
        .maskclear = ~(CRYPTO_ALG_TYPE_MASK | CRYPTO_ALG_GENIV),
        .maskset = CRYPTO_ALG_TYPE_MASK | CRYPTO_ALG_GENIV,
        .type = CRYPTO_ALG_TYPE_AEAD,
        .tfmsize = offsetof(struct crypto_aead, base),
};
EXPORT_SYMBOL_GPL(crypto_nivaead_type);

static int crypto_grab_nivaead(struct crypto_aead_spawn *spawn,
                               const char *name, u32 type, u32 mask)
{
        spawn->base.frontend = &crypto_nivaead_type;
        return crypto_grab_spawn(&spawn->base, name, type, mask);
}

static int aead_geniv_setkey(struct crypto_aead *tfm,
                             const u8 *key, unsigned int keylen)
{
        struct aead_geniv_ctx *ctx = crypto_aead_ctx(tfm);

        return crypto_aead_setkey(ctx->child, key, keylen);
}

static int aead_geniv_setauthsize(struct crypto_aead *tfm,
                                  unsigned int authsize)
{
        struct aead_geniv_ctx *ctx = crypto_aead_ctx(tfm);

        return crypto_aead_setauthsize(ctx->child, authsize);
}

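/*
 * Completion handling for compat_encrypt() when the generated IV had to be
 * bounced through a kmalloc'ed buffer (destination IV page in highmem):
 * copy the IV into the destination scatterlist and free the bounce buffer.
 */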
static void compat_encrypt_complete2(struct aead_request *req, int err)
{
        struct compat_request_ctx *rctx = aead_request_ctx(req);
        struct aead_givcrypt_request *subreq = &rctx->subreq;
        struct crypto_aead *geniv;

        if (err == -EINPROGRESS)
                return;

        if (err)
                goto out;

        geniv = crypto_aead_reqtfm(req);
        scatterwalk_map_and_copy(subreq->giv, rctx->ivsg, 0,
                                 crypto_aead_ivsize(geniv), 1);

out:
        kzfree(subreq->giv);
}

static void compat_encrypt_complete(struct crypto_async_request *base, int err)
{
        struct aead_request *req = base->data;

        compat_encrypt_complete2(req, err);
        aead_request_complete(req, err);
}

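/*
 * Run a new-interface encryption request on top of an old-style IV
 * generator.  The IV slot sits at the front of the ciphertext in the
 * destination list; if that page is in highmem the IV is generated into a
 * temporary buffer and copied back on completion.
 */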
static int compat_encrypt(struct aead_request *req)
{
        struct crypto_aead *geniv = crypto_aead_reqtfm(req);
        struct aead_geniv_ctx *ctx = crypto_aead_ctx(geniv);
        struct compat_request_ctx *rctx = aead_request_ctx(req);
        struct aead_givcrypt_request *subreq = &rctx->subreq;
        unsigned int ivsize = crypto_aead_ivsize(geniv);
        struct scatterlist *src, *dst;
        crypto_completion_t compl;
        void *data;
        u8 *info;
        __be64 seq;
        int err;

        if (req->cryptlen < ivsize)
                return -EINVAL;

        compl = req->base.complete;
        data = req->base.data;

        rctx->ivsg = scatterwalk_ffwd(rctx->ivbuf, req->dst, req->assoclen);
        info = PageHighMem(sg_page(rctx->ivsg)) ? NULL : sg_virt(rctx->ivsg);

        if (!info) {
                info = kmalloc(ivsize, req->base.flags &
                                       CRYPTO_TFM_REQ_MAY_SLEEP ? GFP_KERNEL:
                                                                  GFP_ATOMIC);
                if (!info)
                        return -ENOMEM;

                compl = compat_encrypt_complete;
                data = req;
        }

        memcpy(&seq, req->iv + ivsize - sizeof(seq), sizeof(seq));

        src = scatterwalk_ffwd(rctx->src, req->src, req->assoclen + ivsize);
        dst = req->src == req->dst ?
              src : scatterwalk_ffwd(rctx->dst, rctx->ivsg, ivsize);

        aead_givcrypt_set_tfm(subreq, ctx->child);
        aead_givcrypt_set_callback(subreq, req->base.flags, compl, data);
        aead_givcrypt_set_crypt(subreq, src, dst,
                                req->cryptlen - ivsize, req->iv);
        aead_givcrypt_set_assoc(subreq, req->src, req->assoclen);
        aead_givcrypt_set_giv(subreq, info, be64_to_cpu(seq));

        err = crypto_aead_givencrypt(subreq);
        if (unlikely(PageHighMem(sg_page(rctx->ivsg))))
                compat_encrypt_complete2(req, err);
        return err;
}

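/*
 * Decryption on the compatibility path: read the explicit IV out of the
 * source buffer, then hand the rest of the request to the child transform
 * via the old separate-assoc interface.
 */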
static int compat_decrypt(struct aead_request *req)
{
        struct crypto_aead *geniv = crypto_aead_reqtfm(req);
        struct aead_geniv_ctx *ctx = crypto_aead_ctx(geniv);
        struct compat_request_ctx *rctx = aead_request_ctx(req);
        struct aead_request *subreq = &rctx->subreq.areq;
        unsigned int ivsize = crypto_aead_ivsize(geniv);
        struct scatterlist *src, *dst;
        crypto_completion_t compl;
        void *data;

        if (req->cryptlen < ivsize)
                return -EINVAL;

        aead_request_set_tfm(subreq, ctx->child);

        compl = req->base.complete;
        data = req->base.data;

        src = scatterwalk_ffwd(rctx->src, req->src, req->assoclen + ivsize);
        dst = req->src == req->dst ?
              src : scatterwalk_ffwd(rctx->dst, req->dst,
                                     req->assoclen + ivsize);

        aead_request_set_callback(subreq, req->base.flags, compl, data);
        aead_request_set_crypt(subreq, src, dst,
                               req->cryptlen - ivsize, req->iv);
        aead_request_set_assoc(subreq, req->src, req->assoclen);

        scatterwalk_map_and_copy(req->iv, req->src, req->assoclen, ivsize, 0);

        return crypto_aead_decrypt(subreq);
}

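/*
 * On the first encryption, switch the transform's encrypt hook from
 * compat_encrypt_first() to compat_encrypt() under the context lock, then
 * process the request.
 */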
static int compat_encrypt_first(struct aead_request *req)
{
        struct crypto_aead *geniv = crypto_aead_reqtfm(req);
        struct aead_geniv_ctx *ctx = crypto_aead_ctx(geniv);
        int err = 0;

        spin_lock_bh(&ctx->lock);
        if (geniv->encrypt != compat_encrypt_first)
                goto unlock;

        geniv->encrypt = compat_encrypt;

unlock:
        spin_unlock_bh(&ctx->lock);

        if (err)
                return err;

        return compat_encrypt(req);
}

static int aead_geniv_init_compat(struct crypto_tfm *tfm)
{
        struct crypto_aead *geniv = __crypto_aead_cast(tfm);
        struct aead_geniv_ctx *ctx = crypto_aead_ctx(geniv);
        int err;

        spin_lock_init(&ctx->lock);

        crypto_aead_set_reqsize(geniv, sizeof(struct compat_request_ctx));

        err = aead_geniv_init(tfm);

        ctx->child = geniv->child;
        geniv->child = geniv;

        return err;
}

static void aead_geniv_exit_compat(struct crypto_tfm *tfm)
{
        struct crypto_aead *geniv = __crypto_aead_cast(tfm);
        struct aead_geniv_ctx *ctx = crypto_aead_ctx(geniv);

        crypto_free_aead(ctx->child);
}

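/*
 * Build a geniv template instance around an inner AEAD algorithm.  When
 * CRYPTO_ALG_GENIV is set we are constructing the algorithm's default IV
 * generator, so the inner algorithm's names and old-style ops are reused
 * verbatim; otherwise a "<template>(<alg>)" instance is created that goes
 * through the compat encrypt/decrypt wrappers above.
 */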
struct aead_instance *aead_geniv_alloc(struct crypto_template *tmpl,
                                       struct rtattr **tb, u32 type, u32 mask)
{
        const char *name;
        struct crypto_aead_spawn *spawn;
        struct crypto_attr_type *algt;
        struct aead_instance *inst;
        struct aead_alg *alg;
        unsigned int ivsize;
        unsigned int maxauthsize;
        int err;

        algt = crypto_get_attr_type(tb);
        if (IS_ERR(algt))
                return ERR_CAST(algt);

        if ((algt->type ^ (CRYPTO_ALG_TYPE_AEAD | CRYPTO_ALG_GENIV)) &
            algt->mask & ~CRYPTO_ALG_AEAD_NEW)
                return ERR_PTR(-EINVAL);

        name = crypto_attr_alg_name(tb[1]);
        if (IS_ERR(name))
                return ERR_CAST(name);

        inst = kzalloc(sizeof(*inst) + sizeof(*spawn), GFP_KERNEL);
        if (!inst)
                return ERR_PTR(-ENOMEM);

        spawn = aead_instance_ctx(inst);

        /* Ignore async algorithms if necessary. */
        mask |= crypto_requires_sync(algt->type, algt->mask);

        crypto_set_aead_spawn(spawn, aead_crypto_instance(inst));
        err = (algt->mask & CRYPTO_ALG_GENIV) ?
              crypto_grab_nivaead(spawn, name, type, mask) :
              crypto_grab_aead(spawn, name, type, mask);
        if (err)
                goto err_free_inst;

        alg = crypto_spawn_aead_alg(spawn);

        ivsize = crypto_aead_alg_ivsize(alg);
        maxauthsize = crypto_aead_alg_maxauthsize(alg);

        err = -EINVAL;
        if (ivsize < sizeof(u64))
                goto err_drop_alg;

        /*
         * This is only true if we're constructing an algorithm with its
         * default IV generator.  For the default generator we elide the
         * template name and double-check the IV generator.
         */
        if (algt->mask & CRYPTO_ALG_GENIV) {
                if (!alg->base.cra_aead.encrypt)
                        goto err_drop_alg;
                if (strcmp(tmpl->name, alg->base.cra_aead.geniv))
                        goto err_drop_alg;

                memcpy(inst->alg.base.cra_name, alg->base.cra_name,
                       CRYPTO_MAX_ALG_NAME);
                memcpy(inst->alg.base.cra_driver_name,
                       alg->base.cra_driver_name, CRYPTO_MAX_ALG_NAME);

                inst->alg.base.cra_flags = CRYPTO_ALG_TYPE_AEAD |
                                           CRYPTO_ALG_GENIV;
                inst->alg.base.cra_flags |= alg->base.cra_flags &
                                            CRYPTO_ALG_ASYNC;
                inst->alg.base.cra_priority = alg->base.cra_priority;
                inst->alg.base.cra_blocksize = alg->base.cra_blocksize;
                inst->alg.base.cra_alignmask = alg->base.cra_alignmask;
                inst->alg.base.cra_type = &crypto_aead_type;

                inst->alg.base.cra_aead.ivsize = ivsize;
                inst->alg.base.cra_aead.maxauthsize = maxauthsize;

                inst->alg.base.cra_aead.setkey = alg->base.cra_aead.setkey;
                inst->alg.base.cra_aead.setauthsize =
                        alg->base.cra_aead.setauthsize;
                inst->alg.base.cra_aead.encrypt = alg->base.cra_aead.encrypt;
                inst->alg.base.cra_aead.decrypt = alg->base.cra_aead.decrypt;

                goto out;
        }

        err = -ENAMETOOLONG;
        if (snprintf(inst->alg.base.cra_name, CRYPTO_MAX_ALG_NAME,
                     "%s(%s)", tmpl->name, alg->base.cra_name) >=
            CRYPTO_MAX_ALG_NAME)
                goto err_drop_alg;
        if (snprintf(inst->alg.base.cra_driver_name, CRYPTO_MAX_ALG_NAME,
                     "%s(%s)", tmpl->name, alg->base.cra_driver_name) >=
            CRYPTO_MAX_ALG_NAME)
                goto err_drop_alg;

        inst->alg.base.cra_flags = alg->base.cra_flags &
                                   (CRYPTO_ALG_ASYNC | CRYPTO_ALG_AEAD_NEW);
        inst->alg.base.cra_priority = alg->base.cra_priority;
        inst->alg.base.cra_blocksize = alg->base.cra_blocksize;
        inst->alg.base.cra_alignmask = alg->base.cra_alignmask;
        inst->alg.base.cra_ctxsize = sizeof(struct aead_geniv_ctx);

        inst->alg.setkey = aead_geniv_setkey;
        inst->alg.setauthsize = aead_geniv_setauthsize;

        inst->alg.ivsize = ivsize;
        inst->alg.maxauthsize = maxauthsize;

        inst->alg.encrypt = compat_encrypt_first;
        inst->alg.decrypt = compat_decrypt;

        inst->alg.base.cra_init = aead_geniv_init_compat;
        inst->alg.base.cra_exit = aead_geniv_exit_compat;

out:
        return inst;

err_drop_alg:
        crypto_drop_aead(spawn);
err_free_inst:
        kfree(inst);
        inst = ERR_PTR(err);
        goto out;
}
EXPORT_SYMBOL_GPL(aead_geniv_alloc);

void aead_geniv_free(struct aead_instance *inst)
{
        crypto_drop_aead(aead_instance_ctx(inst));
        kfree(inst);
}
EXPORT_SYMBOL_GPL(aead_geniv_free);

int aead_geniv_init(struct crypto_tfm *tfm)
{
        struct crypto_instance *inst = (void *)tfm->__crt_alg;
        struct crypto_aead *child;
        struct crypto_aead *aead;

        aead = __crypto_aead_cast(tfm);

        child = crypto_spawn_aead(crypto_instance_ctx(inst));
        if (IS_ERR(child))
                return PTR_ERR(child);

        aead->child = child;
        aead->reqsize += crypto_aead_reqsize(child);

        return 0;
}
EXPORT_SYMBOL_GPL(aead_geniv_init);

void aead_geniv_exit(struct crypto_tfm *tfm)
{
        crypto_free_aead(__crypto_aead_cast(tfm)->child);
}
EXPORT_SYMBOL_GPL(aead_geniv_exit);

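/*
 * Initialise a new-style geniv transform: seed the per-tfm salt from the
 * default RNG, take a reference on the default null skcipher, instantiate
 * the inner AEAD and size the request context to hold the child's request.
 */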
int aead_init_geniv(struct crypto_aead *aead)
{
        struct aead_geniv_ctx *ctx = crypto_aead_ctx(aead);
        struct aead_instance *inst = aead_alg_instance(aead);
        struct crypto_aead *child;
        int err;

        spin_lock_init(&ctx->lock);

        err = crypto_get_default_rng();
        if (err)
                goto out;

        err = crypto_rng_get_bytes(crypto_default_rng, ctx->salt,
                                   crypto_aead_ivsize(aead));
        crypto_put_default_rng();
        if (err)
                goto out;

        ctx->null = crypto_get_default_null_skcipher();
        err = PTR_ERR(ctx->null);
        if (IS_ERR(ctx->null))
                goto out;

        child = crypto_spawn_aead(aead_instance_ctx(inst));
        err = PTR_ERR(child);
        if (IS_ERR(child))
                goto drop_null;

        ctx->child = child;
        crypto_aead_set_reqsize(aead, crypto_aead_reqsize(child) +
                                      sizeof(struct aead_request));

        err = 0;

out:
        return err;

drop_null:
        crypto_put_default_null_skcipher();
        goto out;
}
EXPORT_SYMBOL_GPL(aead_init_geniv);

void aead_exit_geniv(struct crypto_aead *tfm)
{
        struct aead_geniv_ctx *ctx = crypto_aead_ctx(tfm);

        crypto_free_aead(ctx->child);
        crypto_put_default_null_skcipher();
}
EXPORT_SYMBOL_GPL(aead_exit_geniv);

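/*
 * Instantiate the default IV generator for an old-style AEAD algorithm
 * that declares one via cra_aead.geniv.  A larval stands in for the final
 * algorithm while the geniv template is looked up and registered;
 * returning -EAGAIN then lets the lookup be redone so it picks up the
 * newly registered instance.
 */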
static int crypto_nivaead_default(struct crypto_alg *alg, u32 type, u32 mask)
{
        struct rtattr *tb[3];
        struct {
                struct rtattr attr;
                struct crypto_attr_type data;
        } ptype;
        struct {
                struct rtattr attr;
                struct crypto_attr_alg data;
        } palg;
        struct crypto_template *tmpl;
        struct crypto_instance *inst;
        struct crypto_alg *larval;
        const char *geniv;
        int err;

        larval = crypto_larval_lookup(alg->cra_driver_name,
                                      CRYPTO_ALG_TYPE_AEAD | CRYPTO_ALG_GENIV,
                                      CRYPTO_ALG_TYPE_MASK | CRYPTO_ALG_GENIV);
        err = PTR_ERR(larval);
        if (IS_ERR(larval))
                goto out;

        err = -EAGAIN;
        if (!crypto_is_larval(larval))
                goto drop_larval;

        ptype.attr.rta_len = sizeof(ptype);
        ptype.attr.rta_type = CRYPTOA_TYPE;
        ptype.data.type = type | CRYPTO_ALG_GENIV;
        /* GENIV tells the template that we're making a default geniv. */
        ptype.data.mask = mask | CRYPTO_ALG_GENIV;
        tb[0] = &ptype.attr;

        palg.attr.rta_len = sizeof(palg);
        palg.attr.rta_type = CRYPTOA_ALG;
        /* Must use the exact name to locate ourselves. */
        memcpy(palg.data.name, alg->cra_driver_name, CRYPTO_MAX_ALG_NAME);
        tb[1] = &palg.attr;

        tb[2] = NULL;

        geniv = alg->cra_aead.geniv;

        tmpl = crypto_lookup_template(geniv);
        err = -ENOENT;
        if (!tmpl)
                goto kill_larval;

        if (tmpl->create) {
                err = tmpl->create(tmpl, tb);
                if (err)
                        goto put_tmpl;
                goto ok;
        }

        inst = tmpl->alloc(tb);
        err = PTR_ERR(inst);
        if (IS_ERR(inst))
                goto put_tmpl;

        err = crypto_register_instance(tmpl, inst);
        if (err) {
                tmpl->free(inst);
                goto put_tmpl;
        }

ok:
        /* Redo the lookup to use the instance we just registered. */
        err = -EAGAIN;

put_tmpl:
        crypto_tmpl_put(tmpl);
kill_larval:
        crypto_larval_kill(larval);
drop_larval:
        crypto_mod_put(larval);
out:
        crypto_mod_put(alg);
        return err;
}

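/*
 * Look up an AEAD algorithm by name.  New-style algorithms and IV-less
 * old-style ones are returned directly; an old-style algorithm with an IV
 * triggers construction of its default geniv wrapper via
 * crypto_nivaead_default().
 */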
struct crypto_alg *crypto_lookup_aead(const char *name, u32 type, u32 mask)
{
        struct crypto_alg *alg;

        alg = crypto_alg_mod_lookup(name, type, mask);
        if (IS_ERR(alg))
                return alg;

        if (alg->cra_type == &crypto_aead_type)
                return alg;

        if (!alg->cra_aead.ivsize)
                return alg;

        crypto_mod_put(alg);
        alg = crypto_alg_mod_lookup(name, type | CRYPTO_ALG_TESTED,
                                    mask & ~CRYPTO_ALG_TESTED);
        if (IS_ERR(alg))
                return alg;

        if (alg->cra_type == &crypto_aead_type) {
                if (~alg->cra_flags & (type ^ ~mask) & CRYPTO_ALG_TESTED) {
                        crypto_mod_put(alg);
                        alg = ERR_PTR(-ENOENT);
                }
                return alg;
        }

        BUG_ON(!alg->cra_aead.ivsize);

        return ERR_PTR(crypto_nivaead_default(alg, type, mask));
}
EXPORT_SYMBOL_GPL(crypto_lookup_aead);

int crypto_grab_aead(struct crypto_aead_spawn *spawn, const char *name,
                     u32 type, u32 mask)
{
        spawn->base.frontend = &crypto_aead_type;
        return crypto_grab_spawn(&spawn->base, name, type, mask);
}
EXPORT_SYMBOL_GPL(crypto_grab_aead);

struct crypto_aead *crypto_alloc_aead(const char *alg_name, u32 type, u32 mask)
{
        return crypto_alloc_tfm(alg_name, &crypto_aead_type, type, mask);
}
EXPORT_SYMBOL_GPL(crypto_alloc_aead);

static int aead_prepare_alg(struct aead_alg *alg)
{
        struct crypto_alg *base = &alg->base;

        if (max(alg->maxauthsize, alg->ivsize) > PAGE_SIZE / 8)
                return -EINVAL;

        base->cra_type = &crypto_new_aead_type;
        base->cra_flags &= ~CRYPTO_ALG_TYPE_MASK;
        base->cra_flags |= CRYPTO_ALG_TYPE_AEAD;

        return 0;
}

int crypto_register_aead(struct aead_alg *alg)
{
        struct crypto_alg *base = &alg->base;
        int err;

        err = aead_prepare_alg(alg);
        if (err)
                return err;

        return crypto_register_alg(base);
}
EXPORT_SYMBOL_GPL(crypto_register_aead);

void crypto_unregister_aead(struct aead_alg *alg)
{
        crypto_unregister_alg(&alg->base);
}
EXPORT_SYMBOL_GPL(crypto_unregister_aead);

int crypto_register_aeads(struct aead_alg *algs, int count)
{
        int i, ret;

        for (i = 0; i < count; i++) {
                ret = crypto_register_aead(&algs[i]);
                if (ret)
                        goto err;
        }

        return 0;

err:
        for (--i; i >= 0; --i)
                crypto_unregister_aead(&algs[i]);

        return ret;
}
EXPORT_SYMBOL_GPL(crypto_register_aeads);

void crypto_unregister_aeads(struct aead_alg *algs, int count)
{
        int i;

        for (i = count - 1; i >= 0; --i)
                crypto_unregister_aead(&algs[i]);
}
EXPORT_SYMBOL_GPL(crypto_unregister_aeads);

int aead_register_instance(struct crypto_template *tmpl,
                           struct aead_instance *inst)
{
        int err;

        err = aead_prepare_alg(&inst->alg);
        if (err)
                return err;

        return crypto_register_instance(tmpl, aead_crypto_instance(inst));
}
EXPORT_SYMBOL_GPL(aead_register_instance);

MODULE_LICENSE("GPL");
MODULE_DESCRIPTION("Authenticated Encryption with Associated Data (AEAD)");