// SPDX-License-Identifier: GPL-2.0-or-later
/*
 * Scatterlist Cryptographic API.
 *
 * Copyright (c) 2002 James Morris <jmorris@intercode.com.au>
 * Copyright (c) 2002 David S. Miller (davem@redhat.com)
 * Copyright (c) 2005 Herbert Xu <herbert@gondor.apana.org.au>
 *
 * Portions derived from Cryptoapi, by Alexander Kjeldaas <astor@fast.no>
 * and Nettle, by Niels Möller.
 */

#include <linux/err.h>
#include <linux/errno.h>
#include <linux/jump_label.h>
#include <linux/kernel.h>
#include <linux/kmod.h>
#include <linux/module.h>
#include <linux/param.h>
#include <linux/sched/signal.h>
#include <linux/slab.h>
#include <linux/string.h>
#include <linux/completion.h>
#include "internal.h"

LIST_HEAD(crypto_alg_list);
EXPORT_SYMBOL_GPL(crypto_alg_list);
DECLARE_RWSEM(crypto_alg_sem);
EXPORT_SYMBOL_GPL(crypto_alg_sem);

BLOCKING_NOTIFIER_HEAD(crypto_chain);
EXPORT_SYMBOL_GPL(crypto_chain);

#if IS_BUILTIN(CONFIG_CRYPTO_ALGAPI) && \
    !IS_ENABLED(CONFIG_CRYPTO_MANAGER_DISABLE_TESTS)
DEFINE_STATIC_KEY_FALSE(__crypto_boot_test_finished);
#endif

static struct crypto_alg *crypto_larval_wait(struct crypto_alg *alg);
static struct crypto_alg *crypto_alg_lookup(const char *name, u32 type,
					    u32 mask);

struct crypto_alg *crypto_mod_get(struct crypto_alg *alg)
{
	return try_module_get(alg->cra_module) ? crypto_alg_get(alg) : NULL;
}
EXPORT_SYMBOL_GPL(crypto_mod_get);

void crypto_mod_put(struct crypto_alg *alg)
{
	struct module *module = alg->cra_module;

	crypto_alg_put(alg);
	module_put(module);
}
EXPORT_SYMBOL_GPL(crypto_mod_put);

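/*
 * Find the best match for @name on crypto_alg_list.  An exact match on
 * ->cra_driver_name (e.g. "aes-generic") wins immediately; otherwise the
 * highest-priority live entry whose ->cra_name (e.g. "aes") matches is
 * chosen.  A module/refcount reference is taken on the winner via
 * crypto_mod_get().  Callers hold crypto_alg_sem.
 */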
static struct crypto_alg *__crypto_alg_lookup(const char *name, u32 type,
					      u32 mask)
{
	struct crypto_alg *q, *alg = NULL;
	int best = -2;

	list_for_each_entry(q, &crypto_alg_list, cra_list) {
		int exact, fuzzy;

		if (crypto_is_moribund(q))
			continue;

		if ((q->cra_flags ^ type) & mask)
			continue;

		exact = !strcmp(q->cra_driver_name, name);
		fuzzy = !strcmp(q->cra_name, name);
		if (!exact && !(fuzzy && q->cra_priority > best))
			continue;

		if (unlikely(!crypto_mod_get(q)))
			continue;

		best = q->cra_priority;
		if (alg)
			crypto_mod_put(alg);
		alg = q;

		if (exact)
			break;
	}

	return alg;
}

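/*
 * A larval is a temporary stand-in registered on crypto_alg_list while the
 * real ("adult") algorithm is still being instantiated or tested.  Lookups
 * that race with creation block on larval->completion in
 * crypto_larval_wait(); once the adult is ready (or creation fails) the
 * larval is completed, unlinked and freed through crypto_larval_destroy().
 */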
static void crypto_larval_destroy(struct crypto_alg *alg)
{
	struct crypto_larval *larval = (void *)alg;

	BUG_ON(!crypto_is_larval(alg));
	if (!IS_ERR_OR_NULL(larval->adult))
		crypto_mod_put(larval->adult);
	kfree(larval);
}

struct crypto_larval *crypto_larval_alloc(const char *name, u32 type, u32 mask)
{
	struct crypto_larval *larval;

	larval = kzalloc(sizeof(*larval), GFP_KERNEL);
	if (!larval)
		return ERR_PTR(-ENOMEM);

	type &= ~CRYPTO_ALG_TYPE_MASK | (mask ?: CRYPTO_ALG_TYPE_MASK);

	larval->mask = mask;
	larval->alg.cra_flags = CRYPTO_ALG_LARVAL | type;
	larval->alg.cra_priority = -1;
	larval->alg.cra_destroy = crypto_larval_destroy;

	strscpy(larval->alg.cra_name, name, CRYPTO_MAX_ALG_NAME);
	init_completion(&larval->completion);

	return larval;
}
EXPORT_SYMBOL_GPL(crypto_larval_alloc);

static struct crypto_alg *crypto_larval_add(const char *name, u32 type,
					    u32 mask)
{
	struct crypto_alg *alg;
	struct crypto_larval *larval;

	larval = crypto_larval_alloc(name, type, mask);
	if (IS_ERR(larval))
		return ERR_CAST(larval);

	refcount_set(&larval->alg.cra_refcnt, 2);

	down_write(&crypto_alg_sem);
	alg = __crypto_alg_lookup(name, type, mask);
	if (!alg) {
		alg = &larval->alg;
		list_add(&alg->cra_list, &crypto_alg_list);
	}
	up_write(&crypto_alg_sem);

	if (alg != &larval->alg) {
		kfree(larval);
		if (crypto_is_larval(alg))
			alg = crypto_larval_wait(alg);
	}

	return alg;
}

static void crypto_larval_kill(struct crypto_larval *larval)
{
	bool unlinked;

	down_write(&crypto_alg_sem);
	unlinked = list_empty(&larval->alg.cra_list);
	if (!unlinked)
		list_del_init(&larval->alg.cra_list);
	up_write(&crypto_alg_sem);

	if (unlinked)
		return;

	complete_all(&larval->completion);
	crypto_alg_put(&larval->alg);
}

void crypto_schedule_test(struct crypto_larval *larval)
{
	int err;

	err = crypto_probing_notify(CRYPTO_MSG_ALG_REGISTER, larval->adult);
	WARN_ON_ONCE(err != NOTIFY_STOP);
}
EXPORT_SYMBOL_GPL(crypto_schedule_test);

static void crypto_start_test(struct crypto_larval *larval)
{
	if (!crypto_is_test_larval(larval))
		return;

	if (larval->test_started)
		return;

	down_write(&crypto_alg_sem);
	if (larval->test_started) {
		up_write(&crypto_alg_sem);
		return;
	}

	larval->test_started = true;
	up_write(&crypto_alg_sem);

	crypto_schedule_test(larval);
}

static struct crypto_alg *crypto_larval_wait(struct crypto_alg *alg)
{
	struct crypto_larval *larval;
	long time_left;

again:
	larval = container_of(alg, struct crypto_larval, alg);

	if (!crypto_boot_test_finished())
		crypto_start_test(larval);

	time_left = wait_for_completion_killable_timeout(
		&larval->completion, 60 * HZ);

	alg = larval->adult;
	if (time_left < 0)
		alg = ERR_PTR(-EINTR);
	else if (!time_left) {
		if (crypto_is_test_larval(larval))
			crypto_larval_kill(larval);
		alg = ERR_PTR(-ETIMEDOUT);
	} else if (!alg) {
		u32 type;
		u32 mask;

		alg = &larval->alg;
		type = alg->cra_flags & ~(CRYPTO_ALG_LARVAL | CRYPTO_ALG_DEAD);
		mask = larval->mask;
		alg = crypto_alg_lookup(alg->cra_name, type, mask) ?:
		      ERR_PTR(-EAGAIN);
	} else if (IS_ERR(alg))
		;
	else if (crypto_is_test_larval(larval) &&
		 !(alg->cra_flags & CRYPTO_ALG_TESTED))
		alg = ERR_PTR(-EAGAIN);
	else if (alg->cra_flags & CRYPTO_ALG_FIPS_INTERNAL)
		alg = ERR_PTR(-EAGAIN);
	else if (!crypto_mod_get(alg))
		alg = ERR_PTR(-EAGAIN);
	crypto_mod_put(&larval->alg);

	if (!IS_ERR(alg) && crypto_is_larval(alg))
		goto again;

	return alg;
}

static struct crypto_alg *crypto_alg_lookup(const char *name, u32 type,
					    u32 mask)
{
	const u32 fips = CRYPTO_ALG_FIPS_INTERNAL;
	struct crypto_alg *alg;
	u32 test = 0;

	if (!((type | mask) & CRYPTO_ALG_TESTED))
		test |= CRYPTO_ALG_TESTED;

	down_read(&crypto_alg_sem);
	alg = __crypto_alg_lookup(name, (type | test) & ~fips,
				  (mask | test) & ~fips);
	if (alg) {
		if (((type | mask) ^ fips) & fips)
			mask |= fips;
		mask &= fips;

		if (!crypto_is_larval(alg) &&
		    ((type ^ alg->cra_flags) & mask)) {
			/* Algorithm is disallowed in FIPS mode. */
			crypto_mod_put(alg);
			alg = ERR_PTR(-ENOENT);
		}
	} else if (test) {
		alg = __crypto_alg_lookup(name, type, mask);
		if (alg && !crypto_is_larval(alg)) {
			/* Test failed */
			crypto_mod_put(alg);
			alg = ERR_PTR(-ELIBBAD);
		}
	}
	up_read(&crypto_alg_sem);

	return alg;
}

static struct crypto_alg *crypto_larval_lookup(const char *name, u32 type,
					       u32 mask)
{
	struct crypto_alg *alg;

	if (!name)
		return ERR_PTR(-ENOENT);

	type &= ~(CRYPTO_ALG_LARVAL | CRYPTO_ALG_DEAD);
	mask &= ~(CRYPTO_ALG_LARVAL | CRYPTO_ALG_DEAD);

	alg = crypto_alg_lookup(name, type, mask);
	if (!alg && !(mask & CRYPTO_NOLOAD)) {
		request_module("crypto-%s", name);

		if (!((type ^ CRYPTO_ALG_NEED_FALLBACK) & mask &
		      CRYPTO_ALG_NEED_FALLBACK))
			request_module("crypto-%s-all", name);

		alg = crypto_alg_lookup(name, type, mask);
	}

	if (!IS_ERR_OR_NULL(alg) && crypto_is_larval(alg))
		alg = crypto_larval_wait(alg);
	else if (alg)
		;
	else if (!(mask & CRYPTO_ALG_TESTED))
		alg = crypto_larval_add(name, type, mask);
	else
		alg = ERR_PTR(-ENOENT);

	return alg;
}

int crypto_probing_notify(unsigned long val, void *v)
{
	int ok;

	ok = blocking_notifier_call_chain(&crypto_chain, val, v);
	if (ok == NOTIFY_DONE) {
		request_module("cryptomgr");
		ok = blocking_notifier_call_chain(&crypto_chain, val, v);
	}

	return ok;
}
EXPORT_SYMBOL_GPL(crypto_probing_notify);

struct crypto_alg *crypto_alg_mod_lookup(const char *name, u32 type, u32 mask)
{
	struct crypto_alg *alg;
	struct crypto_alg *larval;
	int ok;

	/*
	 * If the internal flag is set for a cipher, require a caller to
	 * invoke the cipher with the internal flag to use that cipher.
	 * Also, if a caller wants to allocate a cipher that may or may
	 * not be an internal cipher, use type | CRYPTO_ALG_INTERNAL and
	 * !(mask & CRYPTO_ALG_INTERNAL).
	 */
	if (!((type | mask) & CRYPTO_ALG_INTERNAL))
		mask |= CRYPTO_ALG_INTERNAL;

	larval = crypto_larval_lookup(name, type, mask);
	if (IS_ERR(larval) || !crypto_is_larval(larval))
		return larval;

	ok = crypto_probing_notify(CRYPTO_MSG_ALG_REQUEST, larval);

	if (ok == NOTIFY_STOP)
		alg = crypto_larval_wait(larval);
	else {
		crypto_mod_put(larval);
		alg = ERR_PTR(-ENOENT);
	}
	crypto_larval_kill(container_of(larval, struct crypto_larval, alg));
	return alg;
}
EXPORT_SYMBOL_GPL(crypto_alg_mod_lookup);

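/*
 * Illustrative sketch (editorial, not part of the original source): how the
 * CRYPTO_ALG_INTERNAL convention documented above looks from a caller's
 * point of view, here via crypto_has_alg().  "sha256" is only an example
 * name.
 */
#if 0
static void crypto_internal_flag_example(void)
{
	int has_public, has_any, has_internal;

	/* Default: implementations marked CRYPTO_ALG_INTERNAL never match. */
	has_public = crypto_has_alg("sha256", 0, 0);

	/* Match either an internal or a normal implementation. */
	has_any = crypto_has_alg("sha256", CRYPTO_ALG_INTERNAL, 0);

	/* Match only implementations marked CRYPTO_ALG_INTERNAL. */
	has_internal = crypto_has_alg("sha256", CRYPTO_ALG_INTERNAL,
				      CRYPTO_ALG_INTERNAL);

	pr_info("sha256: public %d, any %d, internal-only %d\n",
		has_public, has_any, has_internal);
}
#endif
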
static void crypto_exit_ops(struct crypto_tfm *tfm)
{
	const struct crypto_type *type = tfm->__crt_alg->cra_type;

	if (type && tfm->exit)
		tfm->exit(tfm);
}

static unsigned int crypto_ctxsize(struct crypto_alg *alg, u32 type, u32 mask)
{
	const struct crypto_type *type_obj = alg->cra_type;
	unsigned int len;

	len = alg->cra_alignmask & ~(crypto_tfm_ctx_alignment() - 1);
	if (type_obj)
		return len + type_obj->ctxsize(alg, type, mask);

	switch (alg->cra_flags & CRYPTO_ALG_TYPE_MASK) {
	default:
		BUG();

	case CRYPTO_ALG_TYPE_CIPHER:
		len += crypto_cipher_ctxsize(alg);
		break;

	case CRYPTO_ALG_TYPE_COMPRESS:
		len += crypto_compress_ctxsize(alg);
		break;
	}

	return len;
}

void crypto_shoot_alg(struct crypto_alg *alg)
{
	down_write(&crypto_alg_sem);
	alg->cra_flags |= CRYPTO_ALG_DYING;
	up_write(&crypto_alg_sem);
}
EXPORT_SYMBOL_GPL(crypto_shoot_alg);

struct crypto_tfm *__crypto_alloc_tfmgfp(struct crypto_alg *alg, u32 type,
					 u32 mask, gfp_t gfp)
{
	struct crypto_tfm *tfm;
	unsigned int tfm_size;
	int err = -ENOMEM;

	tfm_size = sizeof(*tfm) + crypto_ctxsize(alg, type, mask);
	tfm = kzalloc(tfm_size, gfp);
	if (tfm == NULL)
		goto out_err;

	tfm->__crt_alg = alg;
	refcount_set(&tfm->refcnt, 1);

	if (!tfm->exit && alg->cra_init && (err = alg->cra_init(tfm)))
		goto cra_init_failed;

	goto out;

cra_init_failed:
	crypto_exit_ops(tfm);
	if (err == -EAGAIN)
		crypto_shoot_alg(alg);
	kfree(tfm);
out_err:
	tfm = ERR_PTR(err);
out:
	return tfm;
}
EXPORT_SYMBOL_GPL(__crypto_alloc_tfmgfp);

struct crypto_tfm *__crypto_alloc_tfm(struct crypto_alg *alg, u32 type,
				      u32 mask)
{
	return __crypto_alloc_tfmgfp(alg, type, mask, GFP_KERNEL);
}
EXPORT_SYMBOL_GPL(__crypto_alloc_tfm);

/*
 * crypto_alloc_base - Locate algorithm and allocate transform
 * @alg_name: Name of algorithm
 * @type: Type of algorithm
 * @mask: Mask for type comparison
 *
 * This function should not be used by new algorithm types.
 * Please use crypto_alloc_tfm instead.
 *
 * crypto_alloc_base() will first attempt to locate an already loaded
 * algorithm. If that fails and the kernel supports dynamically loadable
 * modules, it will then attempt to load a module of the same name or
 * alias. If that fails it will send a query to any loaded crypto manager
 * to construct an algorithm on the fly. A refcount is grabbed on the
 * algorithm which is then associated with the new transform.
 *
 * The returned transform is of a non-determinate type. Most people
 * should use one of the more specific allocation functions such as
 * crypto_alloc_skcipher().
 *
 * In case of error the return value is an error pointer.
 */
struct crypto_tfm *crypto_alloc_base(const char *alg_name, u32 type, u32 mask)
{
	struct crypto_tfm *tfm;
	int err;

	for (;;) {
		struct crypto_alg *alg;

		alg = crypto_alg_mod_lookup(alg_name, type, mask);
		if (IS_ERR(alg)) {
			err = PTR_ERR(alg);
			goto err;
		}

		tfm = __crypto_alloc_tfm(alg, type, mask);
		if (!IS_ERR(tfm))
			return tfm;

		crypto_mod_put(alg);
		err = PTR_ERR(tfm);

err:
		if (err != -EAGAIN)
			break;
		if (fatal_signal_pending(current)) {
			err = -EINTR;
			break;
		}
	}

	return ERR_PTR(err);
}
EXPORT_SYMBOL_GPL(crypto_alloc_base);
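
/*
 * Illustrative sketch (editorial, not part of the original source): the
 * allocate / IS_ERR-check / free pattern for this legacy interface.  The
 * algorithm name "sha256" is only an example; new code should prefer a
 * type-specific allocator such as crypto_alloc_skcipher() or
 * crypto_alloc_shash().
 */
#if 0
static int crypto_alloc_base_example(void)
{
	struct crypto_tfm *tfm;

	tfm = crypto_alloc_base("sha256", 0, 0);
	if (IS_ERR(tfm))
		return PTR_ERR(tfm);

	/* ... drive the transform through a type-specific wrapper ... */

	/* Normally done via a crypto_free_*() helper. */
	crypto_destroy_tfm(tfm, tfm);
	return 0;
}
#endif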

static void *crypto_alloc_tfmmem(struct crypto_alg *alg,
				 const struct crypto_type *frontend, int node,
				 gfp_t gfp)
{
	struct crypto_tfm *tfm;
	unsigned int tfmsize;
	unsigned int total;
	char *mem;

	tfmsize = frontend->tfmsize;
	total = tfmsize + sizeof(*tfm) + frontend->extsize(alg);

	mem = kzalloc_node(total, gfp, node);
	if (mem == NULL)
		return ERR_PTR(-ENOMEM);

	tfm = (struct crypto_tfm *)(mem + tfmsize);
	tfm->__crt_alg = alg;
	tfm->node = node;
	refcount_set(&tfm->refcnt, 1);

	return mem;
}

void *crypto_create_tfm_node(struct crypto_alg *alg,
			     const struct crypto_type *frontend,
			     int node)
{
	struct crypto_tfm *tfm;
	char *mem;
	int err;

	mem = crypto_alloc_tfmmem(alg, frontend, node, GFP_KERNEL);
	if (IS_ERR(mem))
		goto out;

	tfm = (struct crypto_tfm *)(mem + frontend->tfmsize);

	err = frontend->init_tfm(tfm);
	if (err)
		goto out_free_tfm;

	if (!tfm->exit && alg->cra_init && (err = alg->cra_init(tfm)))
		goto cra_init_failed;

	goto out;

cra_init_failed:
	crypto_exit_ops(tfm);
out_free_tfm:
	if (err == -EAGAIN)
		crypto_shoot_alg(alg);
	kfree(mem);
	mem = ERR_PTR(err);
out:
	return mem;
}
EXPORT_SYMBOL_GPL(crypto_create_tfm_node);

void *crypto_clone_tfm(const struct crypto_type *frontend,
		       struct crypto_tfm *otfm)
{
	struct crypto_alg *alg = otfm->__crt_alg;
	struct crypto_tfm *tfm;
	char *mem;

	mem = ERR_PTR(-ESTALE);
	if (unlikely(!crypto_mod_get(alg)))
		goto out;

	mem = crypto_alloc_tfmmem(alg, frontend, otfm->node, GFP_ATOMIC);
	if (IS_ERR(mem)) {
		crypto_mod_put(alg);
		goto out;
	}

	tfm = (struct crypto_tfm *)(mem + frontend->tfmsize);
	tfm->crt_flags = otfm->crt_flags;
	tfm->exit = otfm->exit;

out:
	return mem;
}
EXPORT_SYMBOL_GPL(crypto_clone_tfm);

struct crypto_alg *crypto_find_alg(const char *alg_name,
				   const struct crypto_type *frontend,
				   u32 type, u32 mask)
{
	if (frontend) {
		type &= frontend->maskclear;
		mask &= frontend->maskclear;
		type |= frontend->type;
		mask |= frontend->maskset;
	}

	return crypto_alg_mod_lookup(alg_name, type, mask);
}
EXPORT_SYMBOL_GPL(crypto_find_alg);

/*
 * crypto_alloc_tfm_node - Locate algorithm and allocate transform
 * @alg_name: Name of algorithm
 * @frontend: Frontend algorithm type
 * @type: Type of algorithm
 * @mask: Mask for type comparison
 * @node: NUMA node on which to place requests; if node is NUMA_NO_NODE,
 *	  users have no special requirement.
 *
 * crypto_alloc_tfm_node() will first attempt to locate an already loaded
 * algorithm. If that fails and the kernel supports dynamically loadable
 * modules, it will then attempt to load a module of the same name or
 * alias. If that fails it will send a query to any loaded crypto manager
 * to construct an algorithm on the fly. A refcount is grabbed on the
 * algorithm which is then associated with the new transform.
 *
 * The returned transform is of a non-determinate type. Most people
 * should use one of the more specific allocation functions such as
 * crypto_alloc_skcipher().
 *
 * In case of error the return value is an error pointer.
 */
void *crypto_alloc_tfm_node(const char *alg_name,
		       const struct crypto_type *frontend, u32 type, u32 mask,
		       int node)
{
	void *tfm;
	int err;

	for (;;) {
		struct crypto_alg *alg;

		alg = crypto_find_alg(alg_name, frontend, type, mask);
		if (IS_ERR(alg)) {
			err = PTR_ERR(alg);
			goto err;
		}

		tfm = crypto_create_tfm_node(alg, frontend, node);
		if (!IS_ERR(tfm))
			return tfm;

		crypto_mod_put(alg);
		err = PTR_ERR(tfm);

err:
		if (err != -EAGAIN)
			break;
		if (fatal_signal_pending(current)) {
			err = -EINTR;
			break;
		}
	}

	return ERR_PTR(err);
}
EXPORT_SYMBOL_GPL(crypto_alloc_tfm_node);
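
/*
 * Illustrative sketch (editorial, not part of the original source): callers
 * normally reach crypto_alloc_tfm_node() through a type-specific wrapper
 * that supplies its own struct crypto_type frontend, for example
 * crypto_alloc_skcipher().  "cbc(aes)" is only an example name.
 */
#if 0
static int skcipher_alloc_example(void)
{
	struct crypto_skcipher *tfm;

	/* Internally passes the skcipher frontend as @frontend above. */
	tfm = crypto_alloc_skcipher("cbc(aes)", 0, 0);
	if (IS_ERR(tfm))
		return PTR_ERR(tfm);

	crypto_free_skcipher(tfm);
	return 0;
}
#endif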

/*
 * crypto_destroy_tfm - Free crypto transform
 * @mem: Start of tfm slab
 * @tfm: Transform to free
 *
 * This function frees up the transform and any associated resources,
 * then drops the refcount on the associated algorithm.
 */
void crypto_destroy_tfm(void *mem, struct crypto_tfm *tfm)
{
	struct crypto_alg *alg;

	if (IS_ERR_OR_NULL(mem))
		return;

	if (!refcount_dec_and_test(&tfm->refcnt))
		return;
	alg = tfm->__crt_alg;

	if (!tfm->exit && alg->cra_exit)
		alg->cra_exit(tfm);
	crypto_exit_ops(tfm);
	crypto_mod_put(alg);
	kfree_sensitive(mem);
}
EXPORT_SYMBOL_GPL(crypto_destroy_tfm);

int crypto_has_alg(const char *name, u32 type, u32 mask)
{
	int ret = 0;
	struct crypto_alg *alg = crypto_alg_mod_lookup(name, type, mask);

	if (!IS_ERR(alg)) {
		crypto_mod_put(alg);
		ret = 1;
	}

	return ret;
}
EXPORT_SYMBOL_GPL(crypto_has_alg);

void crypto_req_done(void *data, int err)
{
	struct crypto_wait *wait = data;

	if (err == -EINPROGRESS)
		return;

	wait->err = err;
	complete(&wait->completion);
}
EXPORT_SYMBOL_GPL(crypto_req_done);
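
/*
 * Illustrative sketch (editorial, not part of the original source): the
 * usual pairing of crypto_req_done() with struct crypto_wait to drive an
 * asynchronous request synchronously.  Request setup is elided; req is
 * assumed to be a fully prepared skcipher request.
 */
#if 0
	DECLARE_CRYPTO_WAIT(wait);
	struct skcipher_request *req;	/* prepared elsewhere */
	int err;

	skcipher_request_set_callback(req,
				      CRYPTO_TFM_REQ_MAY_BACKLOG |
				      CRYPTO_TFM_REQ_MAY_SLEEP,
				      crypto_req_done, &wait);

	/*
	 * crypto_wait_req() sleeps on wait.completion when the operation
	 * returns -EINPROGRESS or -EBUSY, then reports the value that
	 * crypto_req_done() stored in wait.err.
	 */
	err = crypto_wait_req(crypto_skcipher_encrypt(req), &wait);
#endif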

MODULE_DESCRIPTION("Cryptographic core API");
MODULE_LICENSE("GPL");