/*
 * Software async crypto daemon.
 *
 * Copyright (c) 2006 Herbert Xu <herbert@gondor.apana.org.au>
 *
 * Added AEAD support to cryptd.
 *    Authors: Tadeusz Struk (tadeusz.struk@intel.com)
 *             Adrian Hoban <adrian.hoban@intel.com>
 *             Gabriele Paoloni <gabriele.paoloni@intel.com>
 *             Aidan O'Mahony (aidan.o.mahony@intel.com)
 *    Copyright (c) 2010, Intel Corporation.
 *
 * This program is free software; you can redistribute it and/or modify it
 * under the terms of the GNU General Public License as published by the Free
 * Software Foundation; either version 2 of the License, or (at your option)
 * any later version.
 *
 */

#include <crypto/algapi.h>
#include <crypto/internal/hash.h>
#include <crypto/internal/aead.h>
#include <crypto/cryptd.h>
#include <crypto/crypto_wq.h>
#include <linux/atomic.h>
#include <linux/err.h>
#include <linux/init.h>
#include <linux/kernel.h>
#include <linux/list.h>
#include <linux/module.h>
#include <linux/scatterlist.h>
#include <linux/sched.h>
#include <linux/slab.h>

#define CRYPTD_MAX_CPU_QLEN 1000

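/*
 * Requests are queued per CPU: each CPU owns a crypto_queue plus the
 * work_struct that drains it, so submitters never contend across CPUs and
 * the deferred work runs on the CPU the request was submitted from.
 */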
struct cryptd_cpu_queue {
	struct crypto_queue queue;
	struct work_struct work;
};

struct cryptd_queue {
	struct cryptd_cpu_queue __percpu *cpu_queue;
};

struct cryptd_instance_ctx {
	struct crypto_spawn spawn;
	struct cryptd_queue *queue;
};

struct hashd_instance_ctx {
	struct crypto_shash_spawn spawn;
	struct cryptd_queue *queue;
};

struct aead_instance_ctx {
	struct crypto_aead_spawn aead_spawn;
	struct cryptd_queue *queue;
};

struct cryptd_blkcipher_ctx {
	atomic_t refcnt;
	struct crypto_blkcipher *child;
};

struct cryptd_blkcipher_request_ctx {
	crypto_completion_t complete;
};

struct cryptd_hash_ctx {
	atomic_t refcnt;
	struct crypto_shash *child;
};

struct cryptd_hash_request_ctx {
	crypto_completion_t complete;
	struct shash_desc desc;
};

struct cryptd_aead_ctx {
	atomic_t refcnt;
	struct crypto_aead *child;
};

struct cryptd_aead_request_ctx {
	crypto_completion_t complete;
};

static void cryptd_queue_worker(struct work_struct *work);

static int cryptd_init_queue(struct cryptd_queue *queue,
			     unsigned int max_cpu_qlen)
{
	int cpu;
	struct cryptd_cpu_queue *cpu_queue;

	queue->cpu_queue = alloc_percpu(struct cryptd_cpu_queue);
	if (!queue->cpu_queue)
		return -ENOMEM;
	for_each_possible_cpu(cpu) {
		cpu_queue = per_cpu_ptr(queue->cpu_queue, cpu);
		crypto_init_queue(&cpu_queue->queue, max_cpu_qlen);
		INIT_WORK(&cpu_queue->work, cryptd_queue_worker);
	}
	return 0;
}

static void cryptd_fini_queue(struct cryptd_queue *queue)
{
	int cpu;
	struct cryptd_cpu_queue *cpu_queue;

	for_each_possible_cpu(cpu) {
		cpu_queue = per_cpu_ptr(queue->cpu_queue, cpu);
		BUG_ON(cpu_queue->queue.qlen);
	}
	free_percpu(queue->cpu_queue);
}

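/*
 * Queue a request on the submitting CPU's queue and kick that CPU's worker.
 * Every cryptd context begins with an atomic_t refcnt; once that refcnt is
 * live, each accepted request takes an extra reference so the cryptd
 * transform stays around until its deferred completion has run.  A request
 * without CRYPTO_TFM_REQ_MAY_BACKLOG that hits a full queue gets -EBUSY
 * back immediately and takes no reference.
 */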
static int cryptd_enqueue_request(struct cryptd_queue *queue,
				  struct crypto_async_request *request)
{
	int cpu, err;
	struct cryptd_cpu_queue *cpu_queue;
	atomic_t *refcnt;
	bool may_backlog;

	cpu = get_cpu();
	cpu_queue = this_cpu_ptr(queue->cpu_queue);
	err = crypto_enqueue_request(&cpu_queue->queue, request);

	refcnt = crypto_tfm_ctx(request->tfm);
	may_backlog = request->flags & CRYPTO_TFM_REQ_MAY_BACKLOG;

	if (err == -EBUSY && !may_backlog)
		goto out_put_cpu;

	queue_work_on(cpu, kcrypto_wq, &cpu_queue->work);

	if (!atomic_read(refcnt))
		goto out_put_cpu;

	atomic_inc(refcnt);

out_put_cpu:
	put_cpu();

	return err;
}

/*
 * Called in workqueue context: do one unit of real crypto work (via
 * req->complete) and reschedule itself if there is more work to do.
 */
static void cryptd_queue_worker(struct work_struct *work)
{
	struct cryptd_cpu_queue *cpu_queue;
	struct crypto_async_request *req, *backlog;

	cpu_queue = container_of(work, struct cryptd_cpu_queue, work);
	/*
	 * Only handle one request at a time to avoid hogging the crypto
	 * workqueue.  preempt_disable/enable is used to prevent being
	 * preempted by cryptd_enqueue_request().  local_bh_disable/enable is
	 * used to prevent cryptd_enqueue_request() from being accessed from
	 * software interrupts.
	 */
	local_bh_disable();
	preempt_disable();
	backlog = crypto_get_backlog(&cpu_queue->queue);
	req = crypto_dequeue_request(&cpu_queue->queue);
	preempt_enable();
	local_bh_enable();

	if (!req)
		return;

	if (backlog)
		backlog->complete(backlog, -EINPROGRESS);
	req->complete(req, 0);

	if (cpu_queue->queue.qlen)
		queue_work(kcrypto_wq, &cpu_queue->work);
}

static inline struct cryptd_queue *cryptd_get_queue(struct crypto_tfm *tfm)
{
	struct crypto_instance *inst = crypto_tfm_alg_instance(tfm);
	struct cryptd_instance_ctx *ictx = crypto_instance_ctx(inst);
	return ictx->queue;
}

static inline void cryptd_check_internal(struct rtattr **tb, u32 *type,
					 u32 *mask)
{
	struct crypto_attr_type *algt;

	algt = crypto_get_attr_type(tb);
	if (IS_ERR(algt))
		return;

	*type |= algt->type & CRYPTO_ALG_INTERNAL;
	*mask |= algt->mask & CRYPTO_ALG_INTERNAL;
}

static int cryptd_blkcipher_setkey(struct crypto_ablkcipher *parent,
				   const u8 *key, unsigned int keylen)
{
	struct cryptd_blkcipher_ctx *ctx = crypto_ablkcipher_ctx(parent);
	struct crypto_blkcipher *child = ctx->child;
	int err;

	crypto_blkcipher_clear_flags(child, CRYPTO_TFM_REQ_MASK);
	crypto_blkcipher_set_flags(child, crypto_ablkcipher_get_flags(parent) &
					  CRYPTO_TFM_REQ_MASK);
	err = crypto_blkcipher_setkey(child, key, keylen);
	crypto_ablkcipher_set_flags(parent, crypto_blkcipher_get_flags(child) &
					    CRYPTO_TFM_RES_MASK);
	return err;
}

static void cryptd_blkcipher_crypt(struct ablkcipher_request *req,
				   struct crypto_blkcipher *child,
				   int err,
				   int (*crypt)(struct blkcipher_desc *desc,
						struct scatterlist *dst,
						struct scatterlist *src,
						unsigned int len))
{
	struct cryptd_blkcipher_request_ctx *rctx;
	struct cryptd_blkcipher_ctx *ctx;
	struct crypto_ablkcipher *tfm;
	struct blkcipher_desc desc;
	int refcnt;

	rctx = ablkcipher_request_ctx(req);

	if (unlikely(err == -EINPROGRESS))
		goto out;

	desc.tfm = child;
	desc.info = req->info;
	desc.flags = CRYPTO_TFM_REQ_MAY_SLEEP;

	err = crypt(&desc, req->dst, req->src, req->nbytes);

	req->base.complete = rctx->complete;

out:
	tfm = crypto_ablkcipher_reqtfm(req);
	ctx = crypto_ablkcipher_ctx(tfm);
	refcnt = atomic_read(&ctx->refcnt);

	local_bh_disable();
	rctx->complete(&req->base, err);
	local_bh_enable();

	if (err != -EINPROGRESS && refcnt && atomic_dec_and_test(&ctx->refcnt))
		crypto_free_ablkcipher(tfm);
}

static void cryptd_blkcipher_encrypt(struct crypto_async_request *req, int err)
{
	struct cryptd_blkcipher_ctx *ctx = crypto_tfm_ctx(req->tfm);
	struct crypto_blkcipher *child = ctx->child;

	cryptd_blkcipher_crypt(ablkcipher_request_cast(req), child, err,
			       crypto_blkcipher_crt(child)->encrypt);
}

static void cryptd_blkcipher_decrypt(struct crypto_async_request *req, int err)
{
	struct cryptd_blkcipher_ctx *ctx = crypto_tfm_ctx(req->tfm);
	struct crypto_blkcipher *child = ctx->child;

	cryptd_blkcipher_crypt(ablkcipher_request_cast(req), child, err,
			       crypto_blkcipher_crt(child)->decrypt);
}

static int cryptd_blkcipher_enqueue(struct ablkcipher_request *req,
				    crypto_completion_t compl)
{
	struct cryptd_blkcipher_request_ctx *rctx = ablkcipher_request_ctx(req);
	struct crypto_ablkcipher *tfm = crypto_ablkcipher_reqtfm(req);
	struct cryptd_queue *queue;

	queue = cryptd_get_queue(crypto_ablkcipher_tfm(tfm));
	rctx->complete = req->base.complete;
	req->base.complete = compl;

	return cryptd_enqueue_request(queue, &req->base);
}

static int cryptd_blkcipher_encrypt_enqueue(struct ablkcipher_request *req)
{
	return cryptd_blkcipher_enqueue(req, cryptd_blkcipher_encrypt);
}

static int cryptd_blkcipher_decrypt_enqueue(struct ablkcipher_request *req)
{
	return cryptd_blkcipher_enqueue(req, cryptd_blkcipher_decrypt);
}

static int cryptd_blkcipher_init_tfm(struct crypto_tfm *tfm)
{
	struct crypto_instance *inst = crypto_tfm_alg_instance(tfm);
	struct cryptd_instance_ctx *ictx = crypto_instance_ctx(inst);
	struct crypto_spawn *spawn = &ictx->spawn;
	struct cryptd_blkcipher_ctx *ctx = crypto_tfm_ctx(tfm);
	struct crypto_blkcipher *cipher;

	cipher = crypto_spawn_blkcipher(spawn);
	if (IS_ERR(cipher))
		return PTR_ERR(cipher);

	ctx->child = cipher;
	tfm->crt_ablkcipher.reqsize =
		sizeof(struct cryptd_blkcipher_request_ctx);
	return 0;
}

static void cryptd_blkcipher_exit_tfm(struct crypto_tfm *tfm)
{
	struct cryptd_blkcipher_ctx *ctx = crypto_tfm_ctx(tfm);

	crypto_free_blkcipher(ctx->child);
}

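/*
 * Shared instance setup: the wrapped algorithm keeps its cra_name while its
 * driver name becomes "cryptd(<driver-name>)", and the instance is given a
 * priority 50 above the wrapped algorithm so the async version is preferred
 * when a caller accepts asynchronous implementations.
 */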
static int cryptd_init_instance(struct crypto_instance *inst,
				struct crypto_alg *alg)
{
	if (snprintf(inst->alg.cra_driver_name, CRYPTO_MAX_ALG_NAME,
		     "cryptd(%s)",
		     alg->cra_driver_name) >= CRYPTO_MAX_ALG_NAME)
		return -ENAMETOOLONG;

	memcpy(inst->alg.cra_name, alg->cra_name, CRYPTO_MAX_ALG_NAME);

	inst->alg.cra_priority = alg->cra_priority + 50;
	inst->alg.cra_blocksize = alg->cra_blocksize;
	inst->alg.cra_alignmask = alg->cra_alignmask;

	return 0;
}

static void *cryptd_alloc_instance(struct crypto_alg *alg, unsigned int head,
				   unsigned int tail)
{
	char *p;
	struct crypto_instance *inst;
	int err;

	p = kzalloc(head + sizeof(*inst) + tail, GFP_KERNEL);
	if (!p)
		return ERR_PTR(-ENOMEM);

	inst = (void *)(p + head);

	err = cryptd_init_instance(inst, alg);
	if (err)
		goto out_free_inst;

out:
	return p;

out_free_inst:
	kfree(p);
	p = ERR_PTR(err);
	goto out;
}

static int cryptd_create_blkcipher(struct crypto_template *tmpl,
				   struct rtattr **tb,
				   struct cryptd_queue *queue)
{
	struct cryptd_instance_ctx *ctx;
	struct crypto_instance *inst;
	struct crypto_alg *alg;
	u32 type = CRYPTO_ALG_TYPE_BLKCIPHER;
	u32 mask = CRYPTO_ALG_TYPE_MASK;
	int err;

	cryptd_check_internal(tb, &type, &mask);

	alg = crypto_get_attr_alg(tb, type, mask);
	if (IS_ERR(alg))
		return PTR_ERR(alg);

	inst = cryptd_alloc_instance(alg, 0, sizeof(*ctx));
	err = PTR_ERR(inst);
	if (IS_ERR(inst))
		goto out_put_alg;

	ctx = crypto_instance_ctx(inst);
	ctx->queue = queue;

	err = crypto_init_spawn(&ctx->spawn, alg, inst,
				CRYPTO_ALG_TYPE_MASK | CRYPTO_ALG_ASYNC);
	if (err)
		goto out_free_inst;

	type = CRYPTO_ALG_TYPE_ABLKCIPHER | CRYPTO_ALG_ASYNC;
	if (alg->cra_flags & CRYPTO_ALG_INTERNAL)
		type |= CRYPTO_ALG_INTERNAL;
	inst->alg.cra_flags = type;
	inst->alg.cra_type = &crypto_ablkcipher_type;

	inst->alg.cra_ablkcipher.ivsize = alg->cra_blkcipher.ivsize;
	inst->alg.cra_ablkcipher.min_keysize = alg->cra_blkcipher.min_keysize;
	inst->alg.cra_ablkcipher.max_keysize = alg->cra_blkcipher.max_keysize;

	inst->alg.cra_ablkcipher.geniv = alg->cra_blkcipher.geniv;

	inst->alg.cra_ctxsize = sizeof(struct cryptd_blkcipher_ctx);

	inst->alg.cra_init = cryptd_blkcipher_init_tfm;
	inst->alg.cra_exit = cryptd_blkcipher_exit_tfm;

	inst->alg.cra_ablkcipher.setkey = cryptd_blkcipher_setkey;
	inst->alg.cra_ablkcipher.encrypt = cryptd_blkcipher_encrypt_enqueue;
	inst->alg.cra_ablkcipher.decrypt = cryptd_blkcipher_decrypt_enqueue;

	err = crypto_register_instance(tmpl, inst);
	if (err) {
		crypto_drop_spawn(&ctx->spawn);
out_free_inst:
		kfree(inst);
	}

out_put_alg:
	crypto_mod_put(alg);
	return err;
}

static int cryptd_hash_init_tfm(struct crypto_tfm *tfm)
{
	struct crypto_instance *inst = crypto_tfm_alg_instance(tfm);
	struct hashd_instance_ctx *ictx = crypto_instance_ctx(inst);
	struct crypto_shash_spawn *spawn = &ictx->spawn;
	struct cryptd_hash_ctx *ctx = crypto_tfm_ctx(tfm);
	struct crypto_shash *hash;

	hash = crypto_spawn_shash(spawn);
	if (IS_ERR(hash))
		return PTR_ERR(hash);

	ctx->child = hash;
	crypto_ahash_set_reqsize(__crypto_ahash_cast(tfm),
				 sizeof(struct cryptd_hash_request_ctx) +
				 crypto_shash_descsize(hash));
	return 0;
}

static void cryptd_hash_exit_tfm(struct crypto_tfm *tfm)
{
	struct cryptd_hash_ctx *ctx = crypto_tfm_ctx(tfm);

	crypto_free_shash(ctx->child);
}

static int cryptd_hash_setkey(struct crypto_ahash *parent,
			      const u8 *key, unsigned int keylen)
{
	struct cryptd_hash_ctx *ctx = crypto_ahash_ctx(parent);
	struct crypto_shash *child = ctx->child;
	int err;

	crypto_shash_clear_flags(child, CRYPTO_TFM_REQ_MASK);
	crypto_shash_set_flags(child, crypto_ahash_get_flags(parent) &
				      CRYPTO_TFM_REQ_MASK);
	err = crypto_shash_setkey(child, key, keylen);
	crypto_ahash_set_flags(parent, crypto_shash_get_flags(child) &
				       CRYPTO_TFM_RES_MASK);
	return err;
}

static int cryptd_hash_enqueue(struct ahash_request *req,
			       crypto_completion_t compl)
{
	struct cryptd_hash_request_ctx *rctx = ahash_request_ctx(req);
	struct crypto_ahash *tfm = crypto_ahash_reqtfm(req);
	struct cryptd_queue *queue =
		cryptd_get_queue(crypto_ahash_tfm(tfm));

	rctx->complete = req->base.complete;
	req->base.complete = compl;

	return cryptd_enqueue_request(queue, &req->base);
}

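/*
 * Common completion path for the deferred hash operations below: invoke the
 * caller's original completion with bottom halves disabled, then drop the
 * reference taken at enqueue time, freeing the cryptd transform once its
 * last user is gone.
 */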
static void cryptd_hash_complete(struct ahash_request *req, int err)
{
	struct crypto_ahash *tfm = crypto_ahash_reqtfm(req);
	struct cryptd_hash_ctx *ctx = crypto_ahash_ctx(tfm);
	struct cryptd_hash_request_ctx *rctx = ahash_request_ctx(req);
	int refcnt = atomic_read(&ctx->refcnt);

	local_bh_disable();
	rctx->complete(&req->base, err);
	local_bh_enable();

	if (err != -EINPROGRESS && refcnt && atomic_dec_and_test(&ctx->refcnt))
		crypto_free_ahash(tfm);
}

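/*
 * Each handler below runs from the cryptd worker: it performs the matching
 * synchronous shash operation on the child transform and then finishes the
 * request through cryptd_hash_complete().
 */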
static void cryptd_hash_init(struct crypto_async_request *req_async, int err)
{
	struct cryptd_hash_ctx *ctx = crypto_tfm_ctx(req_async->tfm);
	struct crypto_shash *child = ctx->child;
	struct ahash_request *req = ahash_request_cast(req_async);
	struct cryptd_hash_request_ctx *rctx = ahash_request_ctx(req);
	struct shash_desc *desc = &rctx->desc;

	if (unlikely(err == -EINPROGRESS))
		goto out;

	desc->tfm = child;
	desc->flags = CRYPTO_TFM_REQ_MAY_SLEEP;

	err = crypto_shash_init(desc);

	req->base.complete = rctx->complete;

out:
	cryptd_hash_complete(req, err);
}

static int cryptd_hash_init_enqueue(struct ahash_request *req)
{
	return cryptd_hash_enqueue(req, cryptd_hash_init);
}

static void cryptd_hash_update(struct crypto_async_request *req_async, int err)
{
	struct ahash_request *req = ahash_request_cast(req_async);
	struct cryptd_hash_request_ctx *rctx;

	rctx = ahash_request_ctx(req);

	if (unlikely(err == -EINPROGRESS))
		goto out;

	err = shash_ahash_update(req, &rctx->desc);

	req->base.complete = rctx->complete;

out:
	cryptd_hash_complete(req, err);
}

static int cryptd_hash_update_enqueue(struct ahash_request *req)
{
	return cryptd_hash_enqueue(req, cryptd_hash_update);
}

static void cryptd_hash_final(struct crypto_async_request *req_async, int err)
{
	struct ahash_request *req = ahash_request_cast(req_async);
	struct cryptd_hash_request_ctx *rctx = ahash_request_ctx(req);

	if (unlikely(err == -EINPROGRESS))
		goto out;

	err = crypto_shash_final(&rctx->desc, req->result);

	req->base.complete = rctx->complete;

out:
	cryptd_hash_complete(req, err);
}

static int cryptd_hash_final_enqueue(struct ahash_request *req)
{
	return cryptd_hash_enqueue(req, cryptd_hash_final);
}

static void cryptd_hash_finup(struct crypto_async_request *req_async, int err)
{
	struct ahash_request *req = ahash_request_cast(req_async);
	struct cryptd_hash_request_ctx *rctx = ahash_request_ctx(req);

	if (unlikely(err == -EINPROGRESS))
		goto out;

	err = shash_ahash_finup(req, &rctx->desc);

	req->base.complete = rctx->complete;

out:
	cryptd_hash_complete(req, err);
}

static int cryptd_hash_finup_enqueue(struct ahash_request *req)
{
	return cryptd_hash_enqueue(req, cryptd_hash_finup);
}

static void cryptd_hash_digest(struct crypto_async_request *req_async, int err)
{
	struct cryptd_hash_ctx *ctx = crypto_tfm_ctx(req_async->tfm);
	struct crypto_shash *child = ctx->child;
	struct ahash_request *req = ahash_request_cast(req_async);
	struct cryptd_hash_request_ctx *rctx = ahash_request_ctx(req);
	struct shash_desc *desc = &rctx->desc;

	if (unlikely(err == -EINPROGRESS))
		goto out;

	desc->tfm = child;
	desc->flags = CRYPTO_TFM_REQ_MAY_SLEEP;

	err = shash_ahash_digest(req, desc);

	req->base.complete = rctx->complete;

out:
	cryptd_hash_complete(req, err);
}

static int cryptd_hash_digest_enqueue(struct ahash_request *req)
{
	return cryptd_hash_enqueue(req, cryptd_hash_digest);
}

static int cryptd_hash_export(struct ahash_request *req, void *out)
{
	struct cryptd_hash_request_ctx *rctx = ahash_request_ctx(req);

	return crypto_shash_export(&rctx->desc, out);
}

static int cryptd_hash_import(struct ahash_request *req, const void *in)
{
	struct crypto_ahash *tfm = crypto_ahash_reqtfm(req);
	struct cryptd_hash_ctx *ctx = crypto_ahash_ctx(tfm);
	struct shash_desc *desc = cryptd_shash_desc(req);

	desc->tfm = ctx->child;
	desc->flags = req->base.flags;

	return crypto_shash_import(desc, in);
}

static int cryptd_create_hash(struct crypto_template *tmpl, struct rtattr **tb,
			      struct cryptd_queue *queue)
{
	struct hashd_instance_ctx *ctx;
	struct ahash_instance *inst;
	struct shash_alg *salg;
	struct crypto_alg *alg;
	u32 type = 0;
	u32 mask = 0;
	int err;

	cryptd_check_internal(tb, &type, &mask);

	salg = shash_attr_alg(tb[1], type, mask);
	if (IS_ERR(salg))
		return PTR_ERR(salg);

	alg = &salg->base;
	inst = cryptd_alloc_instance(alg, ahash_instance_headroom(),
				     sizeof(*ctx));
	err = PTR_ERR(inst);
	if (IS_ERR(inst))
		goto out_put_alg;

	ctx = ahash_instance_ctx(inst);
	ctx->queue = queue;

	err = crypto_init_shash_spawn(&ctx->spawn, salg,
				      ahash_crypto_instance(inst));
	if (err)
		goto out_free_inst;

	type = CRYPTO_ALG_ASYNC;
	if (alg->cra_flags & CRYPTO_ALG_INTERNAL)
		type |= CRYPTO_ALG_INTERNAL;
	inst->alg.halg.base.cra_flags = type;

	inst->alg.halg.digestsize = salg->digestsize;
	inst->alg.halg.statesize = salg->statesize;
	inst->alg.halg.base.cra_ctxsize = sizeof(struct cryptd_hash_ctx);

	inst->alg.halg.base.cra_init = cryptd_hash_init_tfm;
	inst->alg.halg.base.cra_exit = cryptd_hash_exit_tfm;

	inst->alg.init = cryptd_hash_init_enqueue;
	inst->alg.update = cryptd_hash_update_enqueue;
	inst->alg.final = cryptd_hash_final_enqueue;
	inst->alg.finup = cryptd_hash_finup_enqueue;
	inst->alg.export = cryptd_hash_export;
	inst->alg.import = cryptd_hash_import;
	inst->alg.setkey = cryptd_hash_setkey;
	inst->alg.digest = cryptd_hash_digest_enqueue;

	err = ahash_register_instance(tmpl, inst);
	if (err) {
		crypto_drop_shash(&ctx->spawn);
out_free_inst:
		kfree(inst);
	}

out_put_alg:
	crypto_mod_put(alg);
	return err;
}

static int cryptd_aead_setkey(struct crypto_aead *parent,
			      const u8 *key, unsigned int keylen)
{
	struct cryptd_aead_ctx *ctx = crypto_aead_ctx(parent);
	struct crypto_aead *child = ctx->child;

	return crypto_aead_setkey(child, key, keylen);
}

static int cryptd_aead_setauthsize(struct crypto_aead *parent,
				   unsigned int authsize)
{
	struct cryptd_aead_ctx *ctx = crypto_aead_ctx(parent);
	struct crypto_aead *child = ctx->child;

	return crypto_aead_setauthsize(child, authsize);
}

static void cryptd_aead_crypt(struct aead_request *req,
			      struct crypto_aead *child,
			      int err,
			      int (*crypt)(struct aead_request *req))
{
	struct cryptd_aead_request_ctx *rctx;
	struct cryptd_aead_ctx *ctx;
	crypto_completion_t compl;
	struct crypto_aead *tfm;
	int refcnt;

	rctx = aead_request_ctx(req);
	compl = rctx->complete;

	tfm = crypto_aead_reqtfm(req);

	if (unlikely(err == -EINPROGRESS))
		goto out;
	aead_request_set_tfm(req, child);
	err = crypt(req);

out:
	ctx = crypto_aead_ctx(tfm);
	refcnt = atomic_read(&ctx->refcnt);

	local_bh_disable();
	compl(&req->base, err);
	local_bh_enable();

	if (err != -EINPROGRESS && refcnt && atomic_dec_and_test(&ctx->refcnt))
		crypto_free_aead(tfm);
}

static void cryptd_aead_encrypt(struct crypto_async_request *areq, int err)
{
	struct cryptd_aead_ctx *ctx = crypto_tfm_ctx(areq->tfm);
	struct crypto_aead *child = ctx->child;
	struct aead_request *req;

	req = container_of(areq, struct aead_request, base);
	cryptd_aead_crypt(req, child, err, crypto_aead_alg(child)->encrypt);
}

static void cryptd_aead_decrypt(struct crypto_async_request *areq, int err)
{
	struct cryptd_aead_ctx *ctx = crypto_tfm_ctx(areq->tfm);
	struct crypto_aead *child = ctx->child;
	struct aead_request *req;

	req = container_of(areq, struct aead_request, base);
	cryptd_aead_crypt(req, child, err, crypto_aead_alg(child)->decrypt);
}

static int cryptd_aead_enqueue(struct aead_request *req,
			       crypto_completion_t compl)
{
	struct cryptd_aead_request_ctx *rctx = aead_request_ctx(req);
	struct crypto_aead *tfm = crypto_aead_reqtfm(req);
	struct cryptd_queue *queue = cryptd_get_queue(crypto_aead_tfm(tfm));

	rctx->complete = req->base.complete;
	req->base.complete = compl;
	return cryptd_enqueue_request(queue, &req->base);
}

static int cryptd_aead_encrypt_enqueue(struct aead_request *req)
{
	return cryptd_aead_enqueue(req, cryptd_aead_encrypt);
}

static int cryptd_aead_decrypt_enqueue(struct aead_request *req)
{
	return cryptd_aead_enqueue(req, cryptd_aead_decrypt);
}

static int cryptd_aead_init_tfm(struct crypto_aead *tfm)
{
	struct aead_instance *inst = aead_alg_instance(tfm);
	struct aead_instance_ctx *ictx = aead_instance_ctx(inst);
	struct crypto_aead_spawn *spawn = &ictx->aead_spawn;
	struct cryptd_aead_ctx *ctx = crypto_aead_ctx(tfm);
	struct crypto_aead *cipher;

	cipher = crypto_spawn_aead(spawn);
	if (IS_ERR(cipher))
		return PTR_ERR(cipher);

	ctx->child = cipher;
	crypto_aead_set_reqsize(
		tfm, max((unsigned)sizeof(struct cryptd_aead_request_ctx),
			 crypto_aead_reqsize(cipher)));
	return 0;
}

static void cryptd_aead_exit_tfm(struct crypto_aead *tfm)
{
	struct cryptd_aead_ctx *ctx = crypto_aead_ctx(tfm);

	crypto_free_aead(ctx->child);
}

static int cryptd_create_aead(struct crypto_template *tmpl,
			      struct rtattr **tb,
			      struct cryptd_queue *queue)
{
	struct aead_instance_ctx *ctx;
	struct aead_instance *inst;
	struct aead_alg *alg;
	const char *name;
	u32 type = 0;
	u32 mask = CRYPTO_ALG_ASYNC;
	int err;

	cryptd_check_internal(tb, &type, &mask);

	name = crypto_attr_alg_name(tb[1]);
	if (IS_ERR(name))
		return PTR_ERR(name);

	inst = kzalloc(sizeof(*inst) + sizeof(*ctx), GFP_KERNEL);
	if (!inst)
		return -ENOMEM;

	ctx = aead_instance_ctx(inst);
	ctx->queue = queue;

	crypto_set_aead_spawn(&ctx->aead_spawn, aead_crypto_instance(inst));
	err = crypto_grab_aead(&ctx->aead_spawn, name, type, mask);
	if (err)
		goto out_free_inst;

	alg = crypto_spawn_aead_alg(&ctx->aead_spawn);
	err = cryptd_init_instance(aead_crypto_instance(inst), &alg->base);
	if (err)
		goto out_drop_aead;

	inst->alg.base.cra_flags = CRYPTO_ALG_ASYNC |
				   (alg->base.cra_flags & CRYPTO_ALG_INTERNAL);
	inst->alg.base.cra_ctxsize = sizeof(struct cryptd_aead_ctx);

	inst->alg.ivsize = crypto_aead_alg_ivsize(alg);
	inst->alg.maxauthsize = crypto_aead_alg_maxauthsize(alg);

	inst->alg.init = cryptd_aead_init_tfm;
	inst->alg.exit = cryptd_aead_exit_tfm;
	inst->alg.setkey = cryptd_aead_setkey;
	inst->alg.setauthsize = cryptd_aead_setauthsize;
	inst->alg.encrypt = cryptd_aead_encrypt_enqueue;
	inst->alg.decrypt = cryptd_aead_decrypt_enqueue;

	err = aead_register_instance(tmpl, inst);
	if (err) {
out_drop_aead:
		crypto_drop_aead(&ctx->aead_spawn);
out_free_inst:
		kfree(inst);
	}
	return err;
}

static struct cryptd_queue queue;

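/*
 * Template entry point.  Instantiation of "cryptd(<alg>)" is dispatched on
 * the wrapped algorithm's type; for example (names purely illustrative),
 * "cryptd(cbc(aes))" would be built by cryptd_create_blkcipher() and
 * "cryptd(sha1)" by cryptd_create_hash().
 */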
static int cryptd_create(struct crypto_template *tmpl, struct rtattr **tb)
{
	struct crypto_attr_type *algt;

	algt = crypto_get_attr_type(tb);
	if (IS_ERR(algt))
		return PTR_ERR(algt);

	switch (algt->type & algt->mask & CRYPTO_ALG_TYPE_MASK) {
	case CRYPTO_ALG_TYPE_BLKCIPHER:
		return cryptd_create_blkcipher(tmpl, tb, &queue);
	case CRYPTO_ALG_TYPE_DIGEST:
		return cryptd_create_hash(tmpl, tb, &queue);
	case CRYPTO_ALG_TYPE_AEAD:
		return cryptd_create_aead(tmpl, tb, &queue);
	}

	return -EINVAL;
}

static void cryptd_free(struct crypto_instance *inst)
{
	struct cryptd_instance_ctx *ctx = crypto_instance_ctx(inst);
	struct hashd_instance_ctx *hctx = crypto_instance_ctx(inst);
	struct aead_instance_ctx *aead_ctx = crypto_instance_ctx(inst);

	switch (inst->alg.cra_flags & CRYPTO_ALG_TYPE_MASK) {
	case CRYPTO_ALG_TYPE_AHASH:
		crypto_drop_shash(&hctx->spawn);
		kfree(ahash_instance(inst));
		return;
	case CRYPTO_ALG_TYPE_AEAD:
		crypto_drop_aead(&aead_ctx->aead_spawn);
		kfree(aead_instance(inst));
		return;
	default:
		crypto_drop_spawn(&ctx->spawn);
		kfree(inst);
	}
}

static struct crypto_template cryptd_tmpl = {
	.name = "cryptd",
	.create = cryptd_create,
	.free = cryptd_free,
	.module = THIS_MODULE,
};

struct cryptd_ablkcipher *cryptd_alloc_ablkcipher(const char *alg_name,
						  u32 type, u32 mask)
{
	char cryptd_alg_name[CRYPTO_MAX_ALG_NAME];
	struct cryptd_blkcipher_ctx *ctx;
	struct crypto_tfm *tfm;

	if (snprintf(cryptd_alg_name, CRYPTO_MAX_ALG_NAME,
		     "cryptd(%s)", alg_name) >= CRYPTO_MAX_ALG_NAME)
		return ERR_PTR(-EINVAL);
	type = crypto_skcipher_type(type);
	mask &= ~CRYPTO_ALG_TYPE_MASK;
	mask |= (CRYPTO_ALG_GENIV | CRYPTO_ALG_TYPE_BLKCIPHER_MASK);
	tfm = crypto_alloc_base(cryptd_alg_name, type, mask);
	if (IS_ERR(tfm))
		return ERR_CAST(tfm);
	if (tfm->__crt_alg->cra_module != THIS_MODULE) {
		crypto_free_tfm(tfm);
		return ERR_PTR(-EINVAL);
	}

	ctx = crypto_tfm_ctx(tfm);
	atomic_set(&ctx->refcnt, 1);

	return __cryptd_ablkcipher_cast(__crypto_ablkcipher_cast(tfm));
}
EXPORT_SYMBOL_GPL(cryptd_alloc_ablkcipher);

struct crypto_blkcipher *cryptd_ablkcipher_child(struct cryptd_ablkcipher *tfm)
{
	struct cryptd_blkcipher_ctx *ctx = crypto_ablkcipher_ctx(&tfm->base);
	return ctx->child;
}
EXPORT_SYMBOL_GPL(cryptd_ablkcipher_child);

bool cryptd_ablkcipher_queued(struct cryptd_ablkcipher *tfm)
{
	struct cryptd_blkcipher_ctx *ctx = crypto_ablkcipher_ctx(&tfm->base);

	return atomic_read(&ctx->refcnt) - 1;
}
EXPORT_SYMBOL_GPL(cryptd_ablkcipher_queued);

void cryptd_free_ablkcipher(struct cryptd_ablkcipher *tfm)
{
	struct cryptd_blkcipher_ctx *ctx = crypto_ablkcipher_ctx(&tfm->base);

	if (atomic_dec_and_test(&ctx->refcnt))
		crypto_free_ablkcipher(&tfm->base);
}
EXPORT_SYMBOL_GPL(cryptd_free_ablkcipher);

struct cryptd_ahash *cryptd_alloc_ahash(const char *alg_name,
					u32 type, u32 mask)
{
	char cryptd_alg_name[CRYPTO_MAX_ALG_NAME];
	struct cryptd_hash_ctx *ctx;
	struct crypto_ahash *tfm;

	if (snprintf(cryptd_alg_name, CRYPTO_MAX_ALG_NAME,
		     "cryptd(%s)", alg_name) >= CRYPTO_MAX_ALG_NAME)
		return ERR_PTR(-EINVAL);
	tfm = crypto_alloc_ahash(cryptd_alg_name, type, mask);
	if (IS_ERR(tfm))
		return ERR_CAST(tfm);
	if (tfm->base.__crt_alg->cra_module != THIS_MODULE) {
		crypto_free_ahash(tfm);
		return ERR_PTR(-EINVAL);
	}

	ctx = crypto_ahash_ctx(tfm);
	atomic_set(&ctx->refcnt, 1);

	return __cryptd_ahash_cast(tfm);
}
EXPORT_SYMBOL_GPL(cryptd_alloc_ahash);
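
/*
 * Illustrative usage (not part of this file): a caller wanting the
 * workqueue-backed wrapper around a hash could do roughly
 *
 *	struct cryptd_ahash *tfm = cryptd_alloc_ahash("sha1", 0, 0);
 *
 *	if (IS_ERR(tfm))
 *		return PTR_ERR(tfm);
 *	...
 *	cryptd_free_ahash(tfm);
 *
 * "sha1" is only an example name; any hash known to the crypto API can be
 * wrapped the same way.
 */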

struct crypto_shash *cryptd_ahash_child(struct cryptd_ahash *tfm)
{
	struct cryptd_hash_ctx *ctx = crypto_ahash_ctx(&tfm->base);

	return ctx->child;
}
EXPORT_SYMBOL_GPL(cryptd_ahash_child);

struct shash_desc *cryptd_shash_desc(struct ahash_request *req)
{
	struct cryptd_hash_request_ctx *rctx = ahash_request_ctx(req);
	return &rctx->desc;
}
EXPORT_SYMBOL_GPL(cryptd_shash_desc);

bool cryptd_ahash_queued(struct cryptd_ahash *tfm)
{
	struct cryptd_hash_ctx *ctx = crypto_ahash_ctx(&tfm->base);

	return atomic_read(&ctx->refcnt) - 1;
}
EXPORT_SYMBOL_GPL(cryptd_ahash_queued);

void cryptd_free_ahash(struct cryptd_ahash *tfm)
{
	struct cryptd_hash_ctx *ctx = crypto_ahash_ctx(&tfm->base);

	if (atomic_dec_and_test(&ctx->refcnt))
		crypto_free_ahash(&tfm->base);
}
EXPORT_SYMBOL_GPL(cryptd_free_ahash);

struct cryptd_aead *cryptd_alloc_aead(const char *alg_name,
				      u32 type, u32 mask)
{
	char cryptd_alg_name[CRYPTO_MAX_ALG_NAME];
	struct cryptd_aead_ctx *ctx;
	struct crypto_aead *tfm;

	if (snprintf(cryptd_alg_name, CRYPTO_MAX_ALG_NAME,
		     "cryptd(%s)", alg_name) >= CRYPTO_MAX_ALG_NAME)
		return ERR_PTR(-EINVAL);
	tfm = crypto_alloc_aead(cryptd_alg_name, type, mask);
	if (IS_ERR(tfm))
		return ERR_CAST(tfm);
	if (tfm->base.__crt_alg->cra_module != THIS_MODULE) {
		crypto_free_aead(tfm);
		return ERR_PTR(-EINVAL);
	}

	ctx = crypto_aead_ctx(tfm);
	atomic_set(&ctx->refcnt, 1);

	return __cryptd_aead_cast(tfm);
}
EXPORT_SYMBOL_GPL(cryptd_alloc_aead);
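
/*
 * As with the hash helpers above, cryptd_alloc_aead() pins the transform
 * with an initial refcnt of 1 and each queued request takes a temporary
 * extra reference, so cryptd_aead_queued() below reports whether deferred
 * requests are still outstanding; cryptd_free_aead() drops the caller's
 * reference.
 */
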
1067
1068struct crypto_aead *cryptd_aead_child(struct cryptd_aead *tfm)
1069{
1070 struct cryptd_aead_ctx *ctx;
1071 ctx = crypto_aead_ctx(&tfm->base);
1072 return ctx->child;
1073}
1074EXPORT_SYMBOL_GPL(cryptd_aead_child);
1075
81760ea6
HX
1076bool cryptd_aead_queued(struct cryptd_aead *tfm)
1077{
1078 struct cryptd_aead_ctx *ctx = crypto_aead_ctx(&tfm->base);
1079
1080 return atomic_read(&ctx->refcnt) - 1;
1081}
1082EXPORT_SYMBOL_GPL(cryptd_aead_queued);
1083
298c926c
AH
1084void cryptd_free_aead(struct cryptd_aead *tfm)
1085{
81760ea6
HX
1086 struct cryptd_aead_ctx *ctx = crypto_aead_ctx(&tfm->base);
1087
1088 if (atomic_dec_and_test(&ctx->refcnt))
1089 crypto_free_aead(&tfm->base);
298c926c
AH
1090}
1091EXPORT_SYMBOL_GPL(cryptd_free_aead);
1092
static int __init cryptd_init(void)
{
	int err;

	err = cryptd_init_queue(&queue, CRYPTD_MAX_CPU_QLEN);
	if (err)
		return err;

	err = crypto_register_template(&cryptd_tmpl);
	if (err)
		cryptd_fini_queue(&queue);

	return err;
}

static void __exit cryptd_exit(void)
{
	cryptd_fini_queue(&queue);
	crypto_unregister_template(&cryptd_tmpl);
}

subsys_initcall(cryptd_init);
module_exit(cryptd_exit);

MODULE_LICENSE("GPL");
MODULE_DESCRIPTION("Software async crypto daemon");
MODULE_ALIAS_CRYPTO("cryptd");