// SPDX-License-Identifier: GPL-2.0-or-later
/*
 * Scatterlist Cryptographic API.
 *
 * Copyright (c) 2002 James Morris <jmorris@intercode.com.au>
 * Copyright (c) 2002 David S. Miller (davem@redhat.com)
 * Copyright (c) 2005 Herbert Xu <herbert@gondor.apana.org.au>
 *
 * Portions derived from Cryptoapi, by Alexander Kjeldaas <astor@fast.no>
 * and Nettle, by Niels Möller.
 */

#include <linux/err.h>
#include <linux/errno.h>
#include <linux/jump_label.h>
#include <linux/kernel.h>
#include <linux/kmod.h>
#include <linux/module.h>
#include <linux/param.h>
#include <linux/sched/signal.h>
#include <linux/slab.h>
#include <linux/string.h>
#include <linux/completion.h>
#include "internal.h"

LIST_HEAD(crypto_alg_list);
EXPORT_SYMBOL_GPL(crypto_alg_list);
DECLARE_RWSEM(crypto_alg_sem);
EXPORT_SYMBOL_GPL(crypto_alg_sem);

BLOCKING_NOTIFIER_HEAD(crypto_chain);
EXPORT_SYMBOL_GPL(crypto_chain);

DEFINE_STATIC_KEY_FALSE(crypto_boot_test_finished);
EXPORT_SYMBOL_GPL(crypto_boot_test_finished);

static struct crypto_alg *crypto_larval_wait(struct crypto_alg *alg);

struct crypto_alg *crypto_mod_get(struct crypto_alg *alg)
{
	return try_module_get(alg->cra_module) ? crypto_alg_get(alg) : NULL;
}
EXPORT_SYMBOL_GPL(crypto_mod_get);

void crypto_mod_put(struct crypto_alg *alg)
{
	struct module *module = alg->cra_module;

	crypto_alg_put(alg);
	module_put(module);
}
EXPORT_SYMBOL_GPL(crypto_mod_put);

static struct crypto_alg *__crypto_alg_lookup(const char *name, u32 type,
					      u32 mask)
{
	struct crypto_alg *q, *alg = NULL;
	int best = -2;

	list_for_each_entry(q, &crypto_alg_list, cra_list) {
		int exact, fuzzy;

		if (crypto_is_moribund(q))
			continue;

		if ((q->cra_flags ^ type) & mask)
			continue;

		if (crypto_is_larval(q) &&
		    !crypto_is_test_larval((struct crypto_larval *)q) &&
		    ((struct crypto_larval *)q)->mask != mask)
			continue;

		exact = !strcmp(q->cra_driver_name, name);
		fuzzy = !strcmp(q->cra_name, name);
		if (!exact && !(fuzzy && q->cra_priority > best))
			continue;

		if (unlikely(!crypto_mod_get(q)))
			continue;

		best = q->cra_priority;
		if (alg)
			crypto_mod_put(alg);
		alg = q;

		if (exact)
			break;
	}

	return alg;
}

static void crypto_larval_destroy(struct crypto_alg *alg)
{
	struct crypto_larval *larval = (void *)alg;

	BUG_ON(!crypto_is_larval(alg));
	if (!IS_ERR_OR_NULL(larval->adult))
		crypto_mod_put(larval->adult);
	kfree(larval);
}

struct crypto_larval *crypto_larval_alloc(const char *name, u32 type, u32 mask)
{
	struct crypto_larval *larval;

	larval = kzalloc(sizeof(*larval), GFP_KERNEL);
	if (!larval)
		return ERR_PTR(-ENOMEM);

	larval->mask = mask;
	larval->alg.cra_flags = CRYPTO_ALG_LARVAL | type;
	larval->alg.cra_priority = -1;
	larval->alg.cra_destroy = crypto_larval_destroy;

	strlcpy(larval->alg.cra_name, name, CRYPTO_MAX_ALG_NAME);
	init_completion(&larval->completion);

	return larval;
}
EXPORT_SYMBOL_GPL(crypto_larval_alloc);

static struct crypto_alg *crypto_larval_add(const char *name, u32 type,
					    u32 mask)
{
	struct crypto_alg *alg;
	struct crypto_larval *larval;

	larval = crypto_larval_alloc(name, type, mask);
	if (IS_ERR(larval))
		return ERR_CAST(larval);

	refcount_set(&larval->alg.cra_refcnt, 2);

	down_write(&crypto_alg_sem);
	alg = __crypto_alg_lookup(name, type, mask);
	if (!alg) {
		alg = &larval->alg;
		list_add(&alg->cra_list, &crypto_alg_list);
	}
	up_write(&crypto_alg_sem);

	if (alg != &larval->alg) {
		kfree(larval);
		if (crypto_is_larval(alg))
			alg = crypto_larval_wait(alg);
	}

	return alg;
}

void crypto_larval_kill(struct crypto_alg *alg)
{
	struct crypto_larval *larval = (void *)alg;

	down_write(&crypto_alg_sem);
	list_del(&alg->cra_list);
	up_write(&crypto_alg_sem);
	complete_all(&larval->completion);
	crypto_alg_put(alg);
}
EXPORT_SYMBOL_GPL(crypto_larval_kill);

void crypto_wait_for_test(struct crypto_larval *larval)
{
	int err;

	err = crypto_probing_notify(CRYPTO_MSG_ALG_REGISTER, larval->adult);
	if (WARN_ON_ONCE(err != NOTIFY_STOP))
		goto out;

	err = wait_for_completion_killable(&larval->completion);
	WARN_ON(err);
	if (!err)
		crypto_notify(CRYPTO_MSG_ALG_LOADED, larval);

out:
	crypto_larval_kill(&larval->alg);
}
EXPORT_SYMBOL_GPL(crypto_wait_for_test);

static void crypto_start_test(struct crypto_larval *larval)
{
	if (!crypto_is_test_larval(larval))
		return;

	if (larval->test_started)
		return;

	down_write(&crypto_alg_sem);
	if (larval->test_started) {
		up_write(&crypto_alg_sem);
		return;
	}

	larval->test_started = true;
	up_write(&crypto_alg_sem);

	crypto_wait_for_test(larval);
}

static struct crypto_alg *crypto_larval_wait(struct crypto_alg *alg)
{
	struct crypto_larval *larval = (void *)alg;
	long timeout;

	if (!static_branch_likely(&crypto_boot_test_finished))
		crypto_start_test(larval);

	timeout = wait_for_completion_killable_timeout(
		&larval->completion, 60 * HZ);

	alg = larval->adult;
	if (timeout < 0)
		alg = ERR_PTR(-EINTR);
	else if (!timeout)
		alg = ERR_PTR(-ETIMEDOUT);
	else if (!alg)
		alg = ERR_PTR(-ENOENT);
	else if (IS_ERR(alg))
		;
	else if (crypto_is_test_larval(larval) &&
		 !(alg->cra_flags & CRYPTO_ALG_TESTED))
		alg = ERR_PTR(-EAGAIN);
	else if (alg->cra_flags & CRYPTO_ALG_FIPS_INTERNAL)
		alg = ERR_PTR(-EAGAIN);
	else if (!crypto_mod_get(alg))
		alg = ERR_PTR(-EAGAIN);
	crypto_mod_put(&larval->alg);

	return alg;
}

static struct crypto_alg *crypto_alg_lookup(const char *name, u32 type,
					    u32 mask)
{
	const u32 fips = CRYPTO_ALG_FIPS_INTERNAL;
	struct crypto_alg *alg;
	u32 test = 0;

	if (!((type | mask) & CRYPTO_ALG_TESTED))
		test |= CRYPTO_ALG_TESTED;

	down_read(&crypto_alg_sem);
	alg = __crypto_alg_lookup(name, (type | test) & ~fips,
				  (mask | test) & ~fips);
	if (alg) {
		if (((type | mask) ^ fips) & fips)
			mask |= fips;
		mask &= fips;

		if (!crypto_is_larval(alg) &&
		    ((type ^ alg->cra_flags) & mask)) {
			/* Algorithm is disallowed in FIPS mode. */
			crypto_mod_put(alg);
			alg = ERR_PTR(-ENOENT);
		}
	} else if (test) {
		alg = __crypto_alg_lookup(name, type, mask);
		if (alg && !crypto_is_larval(alg)) {
			/* Test failed */
			crypto_mod_put(alg);
			alg = ERR_PTR(-ELIBBAD);
		}
	}
	up_read(&crypto_alg_sem);

	return alg;
}

static struct crypto_alg *crypto_larval_lookup(const char *name, u32 type,
					       u32 mask)
{
	struct crypto_alg *alg;

	if (!name)
		return ERR_PTR(-ENOENT);

	type &= ~(CRYPTO_ALG_LARVAL | CRYPTO_ALG_DEAD);
	mask &= ~(CRYPTO_ALG_LARVAL | CRYPTO_ALG_DEAD);

	alg = crypto_alg_lookup(name, type, mask);
	if (!alg && !(mask & CRYPTO_NOLOAD)) {
		request_module("crypto-%s", name);

		if (!((type ^ CRYPTO_ALG_NEED_FALLBACK) & mask &
		      CRYPTO_ALG_NEED_FALLBACK))
			request_module("crypto-%s-all", name);

		alg = crypto_alg_lookup(name, type, mask);
	}

	if (!IS_ERR_OR_NULL(alg) && crypto_is_larval(alg))
		alg = crypto_larval_wait(alg);
	else if (!alg)
		alg = crypto_larval_add(name, type, mask);

	return alg;
}

int crypto_probing_notify(unsigned long val, void *v)
{
	int ok;

	ok = blocking_notifier_call_chain(&crypto_chain, val, v);
	if (ok == NOTIFY_DONE) {
		request_module("cryptomgr");
		ok = blocking_notifier_call_chain(&crypto_chain, val, v);
	}

	return ok;
}
EXPORT_SYMBOL_GPL(crypto_probing_notify);

struct crypto_alg *crypto_alg_mod_lookup(const char *name, u32 type, u32 mask)
{
	struct crypto_alg *alg;
	struct crypto_alg *larval;
	int ok;

	/*
	 * If the internal flag is set for a cipher, require a caller to
	 * invoke the cipher with the internal flag to use that cipher.
	 * Also, if a caller wants to allocate a cipher that may or may
	 * not be an internal cipher, use type | CRYPTO_ALG_INTERNAL and
	 * !(mask & CRYPTO_ALG_INTERNAL).
	 */
	if (!((type | mask) & CRYPTO_ALG_INTERNAL))
		mask |= CRYPTO_ALG_INTERNAL;

	larval = crypto_larval_lookup(name, type, mask);
	if (IS_ERR(larval) || !crypto_is_larval(larval))
		return larval;

	ok = crypto_probing_notify(CRYPTO_MSG_ALG_REQUEST, larval);

	if (ok == NOTIFY_STOP)
		alg = crypto_larval_wait(larval);
	else {
		crypto_mod_put(larval);
		alg = ERR_PTR(-ENOENT);
	}
	crypto_larval_kill(larval);
	return alg;
}
EXPORT_SYMBOL_GPL(crypto_alg_mod_lookup);
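
/*
 * Illustrative sketch (not used by the crypto core): the CRYPTO_ALG_INTERNAL
 * rule documented in crypto_alg_mod_lookup() means a plain lookup never
 * matches an implementation flagged as internal, while a caller that
 * explicitly wants one must set the flag in both the type and the mask.
 * The algorithm names below are examples only.
 */
static void __maybe_unused crypto_alg_mod_lookup_example(void)
{
	struct crypto_alg *alg;

	/* Ordinary lookup: internal-only implementations are filtered out. */
	alg = crypto_alg_mod_lookup("gcm(aes)", 0, 0);
	if (!IS_ERR(alg))
		crypto_mod_put(alg);

	/* Explicit request for an internal implementation (hypothetical name). */
	alg = crypto_alg_mod_lookup("__example-gcm-aes", CRYPTO_ALG_INTERNAL,
				    CRYPTO_ALG_INTERNAL);
	if (!IS_ERR(alg))
		crypto_mod_put(alg);
}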

static int crypto_init_ops(struct crypto_tfm *tfm, u32 type, u32 mask)
{
	const struct crypto_type *type_obj = tfm->__crt_alg->cra_type;

	if (type_obj)
		return type_obj->init(tfm, type, mask);
	return 0;
}

static void crypto_exit_ops(struct crypto_tfm *tfm)
{
	const struct crypto_type *type = tfm->__crt_alg->cra_type;

	if (type && tfm->exit)
		tfm->exit(tfm);
}

static unsigned int crypto_ctxsize(struct crypto_alg *alg, u32 type, u32 mask)
{
	const struct crypto_type *type_obj = alg->cra_type;
	unsigned int len;

	len = alg->cra_alignmask & ~(crypto_tfm_ctx_alignment() - 1);
	if (type_obj)
		return len + type_obj->ctxsize(alg, type, mask);

	switch (alg->cra_flags & CRYPTO_ALG_TYPE_MASK) {
	default:
		BUG();

	case CRYPTO_ALG_TYPE_CIPHER:
		len += crypto_cipher_ctxsize(alg);
		break;

	case CRYPTO_ALG_TYPE_COMPRESS:
		len += crypto_compress_ctxsize(alg);
		break;
	}

	return len;
}

void crypto_shoot_alg(struct crypto_alg *alg)
{
	down_write(&crypto_alg_sem);
	alg->cra_flags |= CRYPTO_ALG_DYING;
	up_write(&crypto_alg_sem);
}
EXPORT_SYMBOL_GPL(crypto_shoot_alg);

struct crypto_tfm *__crypto_alloc_tfm(struct crypto_alg *alg, u32 type,
				      u32 mask)
{
	struct crypto_tfm *tfm = NULL;
	unsigned int tfm_size;
	int err = -ENOMEM;

	tfm_size = sizeof(*tfm) + crypto_ctxsize(alg, type, mask);
	tfm = kzalloc(tfm_size, GFP_KERNEL);
	if (tfm == NULL)
		goto out_err;

	tfm->__crt_alg = alg;

	err = crypto_init_ops(tfm, type, mask);
	if (err)
		goto out_free_tfm;

	if (!tfm->exit && alg->cra_init && (err = alg->cra_init(tfm)))
		goto cra_init_failed;

	goto out;

cra_init_failed:
	crypto_exit_ops(tfm);
out_free_tfm:
	if (err == -EAGAIN)
		crypto_shoot_alg(alg);
	kfree(tfm);
out_err:
	tfm = ERR_PTR(err);
out:
	return tfm;
}
EXPORT_SYMBOL_GPL(__crypto_alloc_tfm);

/*
 *	crypto_alloc_base - Locate algorithm and allocate transform
 *	@alg_name: Name of algorithm
 *	@type: Type of algorithm
 *	@mask: Mask for type comparison
 *
 *	This function should not be used by new algorithm types.
 *	Please use crypto_alloc_tfm instead.
 *
 *	crypto_alloc_base() will first attempt to locate an already loaded
 *	algorithm. If that fails and the kernel supports dynamically loadable
 *	modules, it will then attempt to load a module of the same name or
 *	alias. If that fails it will send a query to any loaded crypto manager
 *	to construct an algorithm on the fly. A refcount is grabbed on the
 *	algorithm which is then associated with the new transform.
 *
 *	The returned transform is of a non-determinate type. Most people
 *	should use one of the more specific allocation functions such as
 *	crypto_alloc_skcipher().
 *
 *	In case of error the return value is an error pointer.
 */
struct crypto_tfm *crypto_alloc_base(const char *alg_name, u32 type, u32 mask)
{
	struct crypto_tfm *tfm;
	int err;

	for (;;) {
		struct crypto_alg *alg;

		alg = crypto_alg_mod_lookup(alg_name, type, mask);
		if (IS_ERR(alg)) {
			err = PTR_ERR(alg);
			goto err;
		}

		tfm = __crypto_alloc_tfm(alg, type, mask);
		if (!IS_ERR(tfm))
			return tfm;

		crypto_mod_put(alg);
		err = PTR_ERR(tfm);

err:
		if (err != -EAGAIN)
			break;
		if (fatal_signal_pending(current)) {
			err = -EINTR;
			break;
		}
	}

	return ERR_PTR(err);
}
EXPORT_SYMBOL_GPL(crypto_alloc_base);
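
/*
 * Minimal usage sketch (illustrative, not called by the core): old-style
 * single-block cipher and compression transforms still reach this path
 * (see crypto_ctxsize() above), normally via wrappers such as
 * crypto_alloc_cipher().  The call below mirrors what such a wrapper does;
 * the algorithm name is an example.  The result is either a live transform
 * or an error pointer, never NULL, and is released with crypto_destroy_tfm().
 */
static int __maybe_unused crypto_alloc_base_example(void)
{
	struct crypto_tfm *tfm;

	tfm = crypto_alloc_base("aes", CRYPTO_ALG_TYPE_CIPHER,
				CRYPTO_ALG_TYPE_MASK);
	if (IS_ERR(tfm))
		return PTR_ERR(tfm);

	pr_debug("using %s\n", crypto_tfm_alg_driver_name(tfm));

	crypto_destroy_tfm(tfm, tfm);
	return 0;
}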

void *crypto_create_tfm_node(struct crypto_alg *alg,
			     const struct crypto_type *frontend,
			     int node)
{
	char *mem;
	struct crypto_tfm *tfm = NULL;
	unsigned int tfmsize;
	unsigned int total;
	int err = -ENOMEM;

	tfmsize = frontend->tfmsize;
	total = tfmsize + sizeof(*tfm) + frontend->extsize(alg);

	mem = kzalloc_node(total, GFP_KERNEL, node);
	if (mem == NULL)
		goto out_err;

	tfm = (struct crypto_tfm *)(mem + tfmsize);
	tfm->__crt_alg = alg;
	tfm->node = node;

	err = frontend->init_tfm(tfm);
	if (err)
		goto out_free_tfm;

	if (!tfm->exit && alg->cra_init && (err = alg->cra_init(tfm)))
		goto cra_init_failed;

	goto out;

cra_init_failed:
	crypto_exit_ops(tfm);
out_free_tfm:
	if (err == -EAGAIN)
		crypto_shoot_alg(alg);
	kfree(mem);
out_err:
	mem = ERR_PTR(err);
out:
	return mem;
}
EXPORT_SYMBOL_GPL(crypto_create_tfm_node);

struct crypto_alg *crypto_find_alg(const char *alg_name,
				   const struct crypto_type *frontend,
				   u32 type, u32 mask)
{
	if (frontend) {
		type &= frontend->maskclear;
		mask &= frontend->maskclear;
		type |= frontend->type;
		mask |= frontend->maskset;
	}

	return crypto_alg_mod_lookup(alg_name, type, mask);
}
EXPORT_SYMBOL_GPL(crypto_find_alg);

/*
 *	crypto_alloc_tfm_node - Locate algorithm and allocate transform
 *	@alg_name: Name of algorithm
 *	@frontend: Frontend algorithm type
 *	@type: Type of algorithm
 *	@mask: Mask for type comparison
 *	@node: NUMA node in which users desire to put requests, if node is
 *		NUMA_NO_NODE, it means users have no special requirement.
 *
 *	crypto_alloc_tfm() will first attempt to locate an already loaded
 *	algorithm. If that fails and the kernel supports dynamically loadable
 *	modules, it will then attempt to load a module of the same name or
 *	alias. If that fails it will send a query to any loaded crypto manager
 *	to construct an algorithm on the fly. A refcount is grabbed on the
 *	algorithm which is then associated with the new transform.
 *
 *	The returned transform is of a non-determinate type. Most people
 *	should use one of the more specific allocation functions such as
 *	crypto_alloc_skcipher().
 *
 *	In case of error the return value is an error pointer.
 */

void *crypto_alloc_tfm_node(const char *alg_name,
			    const struct crypto_type *frontend, u32 type, u32 mask,
			    int node)
{
	void *tfm;
	int err;

	for (;;) {
		struct crypto_alg *alg;

		alg = crypto_find_alg(alg_name, frontend, type, mask);
		if (IS_ERR(alg)) {
			err = PTR_ERR(alg);
			goto err;
		}

		tfm = crypto_create_tfm_node(alg, frontend, node);
		if (!IS_ERR(tfm))
			return tfm;

		crypto_mod_put(alg);
		err = PTR_ERR(tfm);

err:
		if (err != -EAGAIN)
			break;
		if (fatal_signal_pending(current)) {
			err = -EINTR;
			break;
		}
	}

	return ERR_PTR(err);
}
EXPORT_SYMBOL_GPL(crypto_alloc_tfm_node);

/*
 *	crypto_destroy_tfm - Free crypto transform
 *	@mem: Start of tfm slab
 *	@tfm: Transform to free
 *
 *	This function frees up the transform and any associated resources,
 *	then drops the refcount on the associated algorithm.
 */
void crypto_destroy_tfm(void *mem, struct crypto_tfm *tfm)
{
	struct crypto_alg *alg;

	if (IS_ERR_OR_NULL(mem))
		return;

	alg = tfm->__crt_alg;

	if (!tfm->exit && alg->cra_exit)
		alg->cra_exit(tfm);
	crypto_exit_ops(tfm);
	crypto_mod_put(alg);
	kfree_sensitive(mem);
}
EXPORT_SYMBOL_GPL(crypto_destroy_tfm);

int crypto_has_alg(const char *name, u32 type, u32 mask)
{
	int ret = 0;
	struct crypto_alg *alg = crypto_alg_mod_lookup(name, type, mask);

	if (!IS_ERR(alg)) {
		crypto_mod_put(alg);
		ret = 1;
	}

	return ret;
}
EXPORT_SYMBOL_GPL(crypto_has_alg);
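
/*
 * Illustrative sketch (not used by the core): crypto_has_alg() only probes
 * for availability, taking and dropping the algorithm reference internally;
 * a caller still has to allocate a transform before using the algorithm.
 * The name below is an example.
 */
static bool __maybe_unused crypto_has_alg_example(void)
{
	return crypto_has_alg("gcm(aes)", 0, 0) != 0;
}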

void crypto_req_done(struct crypto_async_request *req, int err)
{
	struct crypto_wait *wait = req->data;

	if (err == -EINPROGRESS)
		return;

	wait->err = err;
	complete(&wait->completion);
}
EXPORT_SYMBOL_GPL(crypto_req_done);
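
/*
 * Illustrative sketch of how crypto_req_done() pairs with crypto_wait_req()
 * (both declared in <linux/crypto.h>) to drive an asynchronous request to
 * completion synchronously.  The skcipher calls are only an example caller
 * and live in <crypto/skcipher.h>:
 *
 *	DECLARE_CRYPTO_WAIT(wait);
 *
 *	skcipher_request_set_callback(req, CRYPTO_TFM_REQ_MAY_BACKLOG |
 *					   CRYPTO_TFM_REQ_MAY_SLEEP,
 *				      crypto_req_done, &wait);
 *	err = crypto_wait_req(crypto_skcipher_encrypt(req), &wait);
 *
 * crypto_wait_req() turns -EINPROGRESS/-EBUSY into a sleep until
 * crypto_req_done() fires, then returns the final request status.
 */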

MODULE_DESCRIPTION("Cryptographic core API");
MODULE_LICENSE("GPL");