// SPDX-License-Identifier: GPL-2.0-or-later
/*
 * Cryptographic API for algorithms (i.e., low-level API).
 *
 * Copyright (c) 2006 Herbert Xu <herbert@gondor.apana.org.au>
 */

#include <crypto/algapi.h>
#include <linux/err.h>
#include <linux/errno.h>
#include <linux/fips.h>
#include <linux/init.h>
#include <linux/kernel.h>
#include <linux/list.h>
#include <linux/module.h>
#include <linux/rtnetlink.h>
#include <linux/slab.h>
#include <linux/string.h>
#include <linux/workqueue.h>

#include "internal.h"

static LIST_HEAD(crypto_template_list);

static inline void crypto_check_module_sig(struct module *mod)
{
	if (fips_enabled && mod && !module_sig_ok(mod))
		panic("Module %s signature verification failed in FIPS mode\n",
		      module_name(mod));
}

static int crypto_check_alg(struct crypto_alg *alg)
{
	crypto_check_module_sig(alg->cra_module);

	if (!alg->cra_name[0] || !alg->cra_driver_name[0])
		return -EINVAL;

	if (alg->cra_alignmask & (alg->cra_alignmask + 1))
		return -EINVAL;
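
	/*
	 * The check above requires cra_alignmask to be one less than a
	 * power of two (a contiguous low mask): e.g. 0x0, 0x3 and 0x7
	 * pass, while 0x5 has non-contiguous bits, so (0x5 & 0x6) != 0
	 * and it is rejected.
	 */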

	/* General maximums for all algs. */
	if (alg->cra_alignmask > MAX_ALGAPI_ALIGNMASK)
		return -EINVAL;

	if (alg->cra_blocksize > MAX_ALGAPI_BLOCKSIZE)
		return -EINVAL;

	/* Lower maximums for specific alg types. */
	if (!alg->cra_type && (alg->cra_flags & CRYPTO_ALG_TYPE_MASK) ==
			       CRYPTO_ALG_TYPE_CIPHER) {
		if (alg->cra_alignmask > MAX_CIPHER_ALIGNMASK)
			return -EINVAL;

		if (alg->cra_blocksize > MAX_CIPHER_BLOCKSIZE)
			return -EINVAL;
	}

	if (alg->cra_priority < 0)
		return -EINVAL;

	refcount_set(&alg->cra_refcnt, 1);

	return 0;
}

static void crypto_free_instance(struct crypto_instance *inst)
{
	inst->alg.cra_type->free(inst);
}

static void crypto_destroy_instance_workfn(struct work_struct *w)
{
	struct crypto_template *tmpl = container_of(w, struct crypto_template,
						    free_work);
	struct crypto_instance *inst;
	struct hlist_node *n;
	HLIST_HEAD(list);

	down_write(&crypto_alg_sem);
	hlist_for_each_entry_safe(inst, n, &tmpl->dead, list) {
		if (refcount_read(&inst->alg.cra_refcnt) != -1)
			continue;
		hlist_del(&inst->list);
		hlist_add_head(&inst->list, &list);
	}
	up_write(&crypto_alg_sem);

	hlist_for_each_entry_safe(inst, n, &list, list)
		crypto_free_instance(inst);
}

static void crypto_destroy_instance(struct crypto_alg *alg)
{
	struct crypto_instance *inst = container_of(alg,
						    struct crypto_instance,
						    alg);
	struct crypto_template *tmpl = inst->tmpl;

	refcount_set(&alg->cra_refcnt, -1);
	schedule_work(&tmpl->free_work);
}

/*
 * This function adds a spawn to the list secondary_spawns which
 * will be used at the end of crypto_remove_spawns to unregister
 * instances, unless the spawn happens to be one that is depended
 * on by the new algorithm (nalg in crypto_remove_spawns).
 *
 * This function is also responsible for resurrecting any algorithms
 * in the dependency chain of nalg by unsetting n->dead.
 */
static struct list_head *crypto_more_spawns(struct crypto_alg *alg,
					    struct list_head *stack,
					    struct list_head *top,
					    struct list_head *secondary_spawns)
{
	struct crypto_spawn *spawn, *n;

	spawn = list_first_entry_or_null(stack, struct crypto_spawn, list);
	if (!spawn)
		return NULL;

	n = list_prev_entry(spawn, list);
	list_move(&spawn->list, secondary_spawns);

	if (list_is_last(&n->list, stack))
		return top;

	n = list_next_entry(n, list);
	if (!spawn->dead)
		n->dead = false;

	return &n->inst->alg.cra_users;
}

static void crypto_remove_instance(struct crypto_instance *inst,
				   struct list_head *list)
{
	struct crypto_template *tmpl = inst->tmpl;

	if (crypto_is_dead(&inst->alg))
		return;

	inst->alg.cra_flags |= CRYPTO_ALG_DEAD;

	if (!tmpl)
		return;

	list_del_init(&inst->alg.cra_list);
	hlist_del(&inst->list);
	hlist_add_head(&inst->list, &tmpl->dead);

	BUG_ON(!list_empty(&inst->alg.cra_users));

	crypto_alg_put(&inst->alg);
}

/*
 * Given an algorithm alg, remove all algorithms that depend on it
 * through spawns.  If nalg is not null, then exempt any algorithms
 * that are depended on by nalg.  This is useful when nalg itself
 * uses those algorithms.
 */
void crypto_remove_spawns(struct crypto_alg *alg, struct list_head *list,
			  struct crypto_alg *nalg)
{
	u32 new_type = (nalg ?: alg)->cra_flags;
	struct crypto_spawn *spawn, *n;
	LIST_HEAD(secondary_spawns);
	struct list_head *spawns;
	LIST_HEAD(stack);
	LIST_HEAD(top);

	spawns = &alg->cra_users;
	list_for_each_entry_safe(spawn, n, spawns, list) {
		if ((spawn->alg->cra_flags ^ new_type) & spawn->mask)
			continue;

		list_move(&spawn->list, &top);
	}

	/*
	 * Perform a depth-first walk starting from alg through
	 * the cra_users tree.  The list stack records the path
	 * from alg to the current spawn.
	 */
	spawns = &top;
	do {
		while (!list_empty(spawns)) {
			struct crypto_instance *inst;

			spawn = list_first_entry(spawns, struct crypto_spawn,
						 list);
			inst = spawn->inst;

			list_move(&spawn->list, &stack);
			spawn->dead = !spawn->registered || &inst->alg != nalg;

			if (!spawn->registered)
				break;

			BUG_ON(&inst->alg == alg);

			if (&inst->alg == nalg)
				break;

			spawns = &inst->alg.cra_users;

			/*
			 * We may encounter an unregistered instance here, since
			 * an instance's spawns are set up prior to the instance
			 * being registered.  An unregistered instance will have
			 * NULL ->cra_users.next, since ->cra_users isn't
			 * properly initialized until registration.  But an
			 * unregistered instance cannot have any users, so treat
			 * it the same as ->cra_users being empty.  Note that
			 * this can happen even when spawn->registered is true,
			 * since the instance may have failed registration.
			 */
			if (spawns->next == NULL)
				break;
		}
	} while ((spawns = crypto_more_spawns(alg, &stack, &top,
					      &secondary_spawns)));

	/*
	 * Remove all instances that are marked as dead.  Also
	 * complete the resurrection of the others by moving them
	 * back to the cra_users list.
	 */
	list_for_each_entry_safe(spawn, n, &secondary_spawns, list) {
		if (!spawn->dead)
			list_move(&spawn->list, &spawn->alg->cra_users);
		else if (spawn->registered)
			crypto_remove_instance(spawn->inst, list);
	}
}
EXPORT_SYMBOL_GPL(crypto_remove_spawns);
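
/*
 * A concrete (hypothetical) scenario for the walk above: if
 * "sha256-generic" is being removed, crypto_remove_spawns() follows
 * cra_users to instances such as "hmac(sha256)", and from there to
 * users of those instances such as "authenc(hmac(sha256),cbc(aes))",
 * queueing the whole dependency subtree for unregistration.  If nalg
 * is a newly registered replacement (say an accelerated "sha256"),
 * any spawn that nalg itself depends on is resurrected instead of
 * being torn down.
 */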

static void crypto_alg_finish_registration(struct crypto_alg *alg,
					   struct list_head *algs_to_put)
{
	struct crypto_alg *q;

	list_for_each_entry(q, &crypto_alg_list, cra_list) {
		if (q == alg)
			continue;

		if (crypto_is_moribund(q))
			continue;

		if (crypto_is_larval(q))
			continue;

		if (strcmp(alg->cra_name, q->cra_name))
			continue;

		if (strcmp(alg->cra_driver_name, q->cra_driver_name) &&
		    q->cra_priority > alg->cra_priority)
			continue;

		crypto_remove_spawns(q, algs_to_put, alg);
	}

	crypto_notify(CRYPTO_MSG_ALG_LOADED, alg);
}

static struct crypto_larval *crypto_alloc_test_larval(struct crypto_alg *alg)
{
	struct crypto_larval *larval;

	if (!IS_ENABLED(CONFIG_CRYPTO_SELFTESTS) ||
	    (alg->cra_flags & CRYPTO_ALG_INTERNAL))
		return NULL; /* No self-test needed */

	larval = crypto_larval_alloc(alg->cra_name,
				     alg->cra_flags | CRYPTO_ALG_TESTED, 0);
	if (IS_ERR(larval))
		return larval;

	larval->adult = crypto_mod_get(alg);
	if (!larval->adult) {
		kfree(larval);
		return ERR_PTR(-ENOENT);
	}

	refcount_set(&larval->alg.cra_refcnt, 1);
	memcpy(larval->alg.cra_driver_name, alg->cra_driver_name,
	       CRYPTO_MAX_ALG_NAME);
	larval->alg.cra_priority = alg->cra_priority;

	return larval;
}

static struct crypto_larval *
__crypto_register_alg(struct crypto_alg *alg, struct list_head *algs_to_put)
{
	struct crypto_alg *q;
	struct crypto_larval *larval;
	int ret = -EAGAIN;

	if (crypto_is_dead(alg))
		goto err;

	INIT_LIST_HEAD(&alg->cra_users);

	ret = -EEXIST;

	list_for_each_entry(q, &crypto_alg_list, cra_list) {
		if (q == alg)
			goto err;

		if (crypto_is_moribund(q))
			continue;

		if (crypto_is_larval(q)) {
			if (!strcmp(alg->cra_driver_name, q->cra_driver_name))
				goto err;
			continue;
		}

		if (!strcmp(q->cra_driver_name, alg->cra_name) ||
		    !strcmp(q->cra_driver_name, alg->cra_driver_name) ||
		    !strcmp(q->cra_name, alg->cra_driver_name))
			goto err;
	}

	larval = crypto_alloc_test_larval(alg);
	if (IS_ERR(larval))
		goto out;

	list_add(&alg->cra_list, &crypto_alg_list);

	if (larval) {
		/* No one can be looking up this algorithm yet. */
		alg->cra_flags &= ~CRYPTO_ALG_TESTED;
		list_add(&larval->alg.cra_list, &crypto_alg_list);
	} else {
		alg->cra_flags |= CRYPTO_ALG_TESTED;
		crypto_alg_finish_registration(alg, algs_to_put);
	}

out:
	return larval;

err:
	larval = ERR_PTR(ret);
	goto out;
}

void crypto_alg_tested(const char *name, int err)
{
	struct crypto_larval *test;
	struct crypto_alg *alg;
	struct crypto_alg *q;
	LIST_HEAD(list);

	down_write(&crypto_alg_sem);
	list_for_each_entry(q, &crypto_alg_list, cra_list) {
		if (crypto_is_moribund(q) || !crypto_is_larval(q))
			continue;

		test = (struct crypto_larval *)q;

		if (!strcmp(q->cra_driver_name, name))
			goto found;
	}

	pr_err("alg: Unexpected test result for %s: %d\n", name, err);
	up_write(&crypto_alg_sem);
	return;

found:
	q->cra_flags |= CRYPTO_ALG_DEAD;
	alg = test->adult;

	if (crypto_is_dead(alg))
		goto complete;

	if (err == -ECANCELED)
		alg->cra_flags |= CRYPTO_ALG_FIPS_INTERNAL;
	else if (err)
		goto complete;
	else
		alg->cra_flags &= ~CRYPTO_ALG_FIPS_INTERNAL;

	alg->cra_flags |= CRYPTO_ALG_TESTED;

	crypto_alg_finish_registration(alg, &list);

complete:
	list_del_init(&test->alg.cra_list);
	complete_all(&test->completion);

	up_write(&crypto_alg_sem);

	crypto_alg_put(&test->alg);
	crypto_remove_final(&list);
}
EXPORT_SYMBOL_GPL(crypto_alg_tested);
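
/*
 * Informational summary of the flow above: registration creates a
 * "test larval" alongside the algorithm, the self-test runs, and the
 * crypto manager reports the verdict here.  On success the larval is
 * killed off, the algorithm gains CRYPTO_ALG_TESTED and lower-priority
 * same-name algorithms are removed; on failure the algorithm simply
 * never becomes visible to lookups.
 */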

void crypto_remove_final(struct list_head *list)
{
	struct crypto_alg *alg;
	struct crypto_alg *n;

	list_for_each_entry_safe(alg, n, list, cra_list) {
		list_del_init(&alg->cra_list);
		crypto_alg_put(alg);
	}
}
EXPORT_SYMBOL_GPL(crypto_remove_final);

static void crypto_free_alg(struct crypto_alg *alg)
{
	unsigned int algsize = alg->cra_type->algsize;
	u8 *p = (u8 *)alg - algsize;

	crypto_destroy_alg(alg);
	kfree(p);
}

int crypto_register_alg(struct crypto_alg *alg)
{
	struct crypto_larval *larval;
	bool test_started = false;
	LIST_HEAD(algs_to_put);
	int err;

	alg->cra_flags &= ~CRYPTO_ALG_DEAD;
	err = crypto_check_alg(alg);
	if (err)
		return err;

	if (alg->cra_flags & CRYPTO_ALG_DUP_FIRST &&
	    !WARN_ON_ONCE(alg->cra_destroy)) {
		unsigned int algsize = alg->cra_type->algsize;
		u8 *p = (u8 *)alg - algsize;

		p = kmemdup(p, algsize + sizeof(*alg), GFP_KERNEL);
		if (!p)
			return -ENOMEM;

		alg = (void *)(p + algsize);
		alg->cra_destroy = crypto_free_alg;
	}

	down_write(&crypto_alg_sem);
	larval = __crypto_register_alg(alg, &algs_to_put);
	if (!IS_ERR_OR_NULL(larval)) {
		test_started = crypto_boot_test_finished();
		larval->test_started = test_started;
	}
	up_write(&crypto_alg_sem);

	if (IS_ERR(larval)) {
		crypto_alg_put(alg);
		return PTR_ERR(larval);
	}

	if (test_started)
		crypto_schedule_test(larval);

	crypto_remove_final(&algs_to_put);

	return 0;
}
EXPORT_SYMBOL_GPL(crypto_register_alg);
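
/*
 * Usage sketch (illustrative only; the names below are hypothetical
 * and this block is not compiled):
 *
 *	static struct crypto_alg example_alg = {
 *		.cra_name	 = "example",
 *		.cra_driver_name = "example-generic",
 *		.cra_priority	 = 100,
 *		.cra_blocksize	 = 16,
 *		.cra_module	 = THIS_MODULE,
 *	};
 *
 *	static int __init example_init(void)
 *	{
 *		return crypto_register_alg(&example_alg);
 *	}
 *
 *	static void __exit example_exit(void)
 *	{
 *		crypto_unregister_alg(&example_alg);
 *	}
 *
 * Most drivers go through the type-specific wrappers instead, e.g.
 * crypto_register_skcipher() or crypto_register_shash(), which funnel
 * into this function.
 */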

static int crypto_remove_alg(struct crypto_alg *alg, struct list_head *list)
{
	if (unlikely(list_empty(&alg->cra_list)))
		return -ENOENT;

	alg->cra_flags |= CRYPTO_ALG_DEAD;

	list_del_init(&alg->cra_list);
	crypto_remove_spawns(alg, list, NULL);

	return 0;
}

void crypto_unregister_alg(struct crypto_alg *alg)
{
	LIST_HEAD(list);
	int ret;

	down_write(&crypto_alg_sem);
	ret = crypto_remove_alg(alg, &list);
	up_write(&crypto_alg_sem);

	if (WARN(ret, "Algorithm %s is not registered", alg->cra_driver_name))
		return;

	WARN_ON(!alg->cra_destroy && refcount_read(&alg->cra_refcnt) != 1);

	list_add(&alg->cra_list, &list);
	crypto_remove_final(&list);
}
EXPORT_SYMBOL_GPL(crypto_unregister_alg);

int crypto_register_algs(struct crypto_alg *algs, int count)
{
	int i, ret;

	for (i = 0; i < count; i++) {
		ret = crypto_register_alg(&algs[i]);
		if (ret)
			goto err;
	}

	return 0;

err:
	for (--i; i >= 0; --i)
		crypto_unregister_alg(&algs[i]);

	return ret;
}
EXPORT_SYMBOL_GPL(crypto_register_algs);

void crypto_unregister_algs(struct crypto_alg *algs, int count)
{
	int i;

	for (i = 0; i < count; i++)
		crypto_unregister_alg(&algs[i]);
}
EXPORT_SYMBOL_GPL(crypto_unregister_algs);

int crypto_register_template(struct crypto_template *tmpl)
{
	struct crypto_template *q;
	int err = -EEXIST;

	INIT_WORK(&tmpl->free_work, crypto_destroy_instance_workfn);

	down_write(&crypto_alg_sem);

	crypto_check_module_sig(tmpl->module);

	list_for_each_entry(q, &crypto_template_list, list) {
		if (q == tmpl)
			goto out;
	}

	list_add(&tmpl->list, &crypto_template_list);
	err = 0;
out:
	up_write(&crypto_alg_sem);
	return err;
}
EXPORT_SYMBOL_GPL(crypto_register_template);

int crypto_register_templates(struct crypto_template *tmpls, int count)
{
	int i, err;

	for (i = 0; i < count; i++) {
		err = crypto_register_template(&tmpls[i]);
		if (err)
			goto out;
	}
	return 0;

out:
	for (--i; i >= 0; --i)
		crypto_unregister_template(&tmpls[i]);
	return err;
}
EXPORT_SYMBOL_GPL(crypto_register_templates);

void crypto_unregister_template(struct crypto_template *tmpl)
{
	struct crypto_instance *inst;
	struct hlist_node *n;
	struct hlist_head *list;
	LIST_HEAD(users);

	down_write(&crypto_alg_sem);

	BUG_ON(list_empty(&tmpl->list));
	list_del_init(&tmpl->list);

	list = &tmpl->instances;
	hlist_for_each_entry(inst, list, list) {
		int err = crypto_remove_alg(&inst->alg, &users);

		BUG_ON(err);
	}

	up_write(&crypto_alg_sem);

	hlist_for_each_entry_safe(inst, n, list, list) {
		BUG_ON(refcount_read(&inst->alg.cra_refcnt) != 1);
		crypto_free_instance(inst);
	}
	crypto_remove_final(&users);

	flush_work(&tmpl->free_work);
}
EXPORT_SYMBOL_GPL(crypto_unregister_template);

void crypto_unregister_templates(struct crypto_template *tmpls, int count)
{
	int i;

	for (i = count - 1; i >= 0; --i)
		crypto_unregister_template(&tmpls[i]);
}
EXPORT_SYMBOL_GPL(crypto_unregister_templates);

static struct crypto_template *__crypto_lookup_template(const char *name)
{
	struct crypto_template *q, *tmpl = NULL;

	down_read(&crypto_alg_sem);
	list_for_each_entry(q, &crypto_template_list, list) {
		if (strcmp(q->name, name))
			continue;
		if (unlikely(!crypto_tmpl_get(q)))
			continue;

		tmpl = q;
		break;
	}
	up_read(&crypto_alg_sem);

	return tmpl;
}

struct crypto_template *crypto_lookup_template(const char *name)
{
	return try_then_request_module(__crypto_lookup_template(name),
				       "crypto-%s", name);
}
EXPORT_SYMBOL_GPL(crypto_lookup_template);
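
/*
 * Note on the "crypto-%s" pattern above: if the template isn't already
 * registered, try_then_request_module() asks userspace to load a module
 * aliased "crypto-<name>" and then retries the lookup.  For example, a
 * lookup of "gcm" can trigger the equivalent of "modprobe crypto-gcm",
 * which template modules satisfy via MODULE_ALIAS_CRYPTO("gcm").
 */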

int crypto_register_instance(struct crypto_template *tmpl,
			     struct crypto_instance *inst)
{
	struct crypto_larval *larval;
	struct crypto_spawn *spawn;
	u32 fips_internal = 0;
	LIST_HEAD(algs_to_put);
	int err;

	err = crypto_check_alg(&inst->alg);
	if (err)
		return err;

	inst->alg.cra_module = tmpl->module;
	inst->alg.cra_flags |= CRYPTO_ALG_INSTANCE;
	inst->alg.cra_destroy = crypto_destroy_instance;

	down_write(&crypto_alg_sem);

	larval = ERR_PTR(-EAGAIN);
	for (spawn = inst->spawns; spawn;) {
		struct crypto_spawn *next;

		if (spawn->dead)
			goto unlock;

		next = spawn->next;
		spawn->inst = inst;
		spawn->registered = true;

		fips_internal |= spawn->alg->cra_flags;

		crypto_mod_put(spawn->alg);

		spawn = next;
	}

	inst->alg.cra_flags |= (fips_internal & CRYPTO_ALG_FIPS_INTERNAL);

	larval = __crypto_register_alg(&inst->alg, &algs_to_put);
	if (IS_ERR(larval))
		goto unlock;
	else if (larval)
		larval->test_started = true;

	hlist_add_head(&inst->list, &tmpl->instances);
	inst->tmpl = tmpl;

unlock:
	up_write(&crypto_alg_sem);

	if (IS_ERR(larval))
		return PTR_ERR(larval);

	if (larval)
		crypto_schedule_test(larval);

	crypto_remove_final(&algs_to_put);

	return 0;
}
EXPORT_SYMBOL_GPL(crypto_register_instance);

void crypto_unregister_instance(struct crypto_instance *inst)
{
	LIST_HEAD(list);

	down_write(&crypto_alg_sem);

	crypto_remove_spawns(&inst->alg, &list, NULL);
	crypto_remove_instance(inst, &list);

	up_write(&crypto_alg_sem);

	crypto_remove_final(&list);
}
EXPORT_SYMBOL_GPL(crypto_unregister_instance);

int crypto_grab_spawn(struct crypto_spawn *spawn, struct crypto_instance *inst,
		      const char *name, u32 type, u32 mask)
{
	struct crypto_alg *alg;
	int err = -EAGAIN;

	if (WARN_ON_ONCE(inst == NULL))
		return -EINVAL;

	/* Allow the result of crypto_attr_alg_name() to be passed directly */
	if (IS_ERR(name))
		return PTR_ERR(name);

	alg = crypto_find_alg(name, spawn->frontend,
			      type | CRYPTO_ALG_FIPS_INTERNAL, mask);
	if (IS_ERR(alg))
		return PTR_ERR(alg);

	down_write(&crypto_alg_sem);
	if (!crypto_is_moribund(alg)) {
		list_add(&spawn->list, &alg->cra_users);
		spawn->alg = alg;
		spawn->mask = mask;
		spawn->next = inst->spawns;
		inst->spawns = spawn;
		inst->alg.cra_flags |=
			(alg->cra_flags & CRYPTO_ALG_INHERITED_FLAGS);
		err = 0;
	}
	up_write(&crypto_alg_sem);

	if (err)
		crypto_mod_put(alg);

	return err;
}
EXPORT_SYMBOL_GPL(crypto_grab_spawn);
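
/*
 * Usage sketch (illustrative; "ctx" is a hypothetical instance
 * context): a template's ->create() callback typically embeds a
 * type-specific spawn in its instance and grabs the inner algorithm
 * by name, e.g.:
 *
 *	err = crypto_grab_skcipher(&ctx->spawn,
 *				   skcipher_crypto_instance(inst),
 *				   crypto_attr_alg_name(tb[1]), 0, mask);
 *
 * The type-specific helpers (crypto_grab_skcipher(), crypto_grab_ahash()
 * and friends) all funnel into crypto_grab_spawn() above, and the spawn
 * is released again with crypto_drop_spawn() on the instance free path.
 */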

void crypto_drop_spawn(struct crypto_spawn *spawn)
{
	if (!spawn->alg) /* not yet initialized? */
		return;

	down_write(&crypto_alg_sem);
	if (!spawn->dead)
		list_del(&spawn->list);
	up_write(&crypto_alg_sem);

	if (!spawn->registered)
		crypto_mod_put(spawn->alg);
}
EXPORT_SYMBOL_GPL(crypto_drop_spawn);

static struct crypto_alg *crypto_spawn_alg(struct crypto_spawn *spawn)
{
	struct crypto_alg *alg = ERR_PTR(-EAGAIN);
	struct crypto_alg *target;
	bool shoot = false;

	down_read(&crypto_alg_sem);
	if (!spawn->dead) {
		alg = spawn->alg;
		if (!crypto_mod_get(alg)) {
			target = crypto_alg_get(alg);
			shoot = true;
			alg = ERR_PTR(-EAGAIN);
		}
	}
	up_read(&crypto_alg_sem);

	if (shoot) {
		crypto_shoot_alg(target);
		crypto_alg_put(target);
	}

	return alg;
}

struct crypto_tfm *crypto_spawn_tfm(struct crypto_spawn *spawn, u32 type,
				    u32 mask)
{
	struct crypto_alg *alg;
	struct crypto_tfm *tfm;

	alg = crypto_spawn_alg(spawn);
	if (IS_ERR(alg))
		return ERR_CAST(alg);

	tfm = ERR_PTR(-EINVAL);
	if (unlikely((alg->cra_flags ^ type) & mask))
		goto out_put_alg;

	tfm = __crypto_alloc_tfm(alg, type, mask);
	if (IS_ERR(tfm))
		goto out_put_alg;

	return tfm;

out_put_alg:
	crypto_mod_put(alg);
	return tfm;
}
EXPORT_SYMBOL_GPL(crypto_spawn_tfm);

void *crypto_spawn_tfm2(struct crypto_spawn *spawn)
{
	struct crypto_alg *alg;
	struct crypto_tfm *tfm;

	alg = crypto_spawn_alg(spawn);
	if (IS_ERR(alg))
		return ERR_CAST(alg);

	tfm = crypto_create_tfm(alg, spawn->frontend);
	if (IS_ERR(tfm))
		goto out_put_alg;

	return crypto_tfm_ctx(tfm);

out_put_alg:
	crypto_mod_put(alg);
	return tfm;
}
EXPORT_SYMBOL_GPL(crypto_spawn_tfm2);

int crypto_register_notifier(struct notifier_block *nb)
{
	return blocking_notifier_chain_register(&crypto_chain, nb);
}
EXPORT_SYMBOL_GPL(crypto_register_notifier);

int crypto_unregister_notifier(struct notifier_block *nb)
{
	return blocking_notifier_chain_unregister(&crypto_chain, nb);
}
EXPORT_SYMBOL_GPL(crypto_unregister_notifier);

struct crypto_attr_type *crypto_get_attr_type(struct rtattr **tb)
{
	struct rtattr *rta = tb[0];
	struct crypto_attr_type *algt;

	if (!rta)
		return ERR_PTR(-ENOENT);
	if (RTA_PAYLOAD(rta) < sizeof(*algt))
		return ERR_PTR(-EINVAL);
	if (rta->rta_type != CRYPTOA_TYPE)
		return ERR_PTR(-EINVAL);

	algt = RTA_DATA(rta);

	return algt;
}
EXPORT_SYMBOL_GPL(crypto_get_attr_type);

/**
 * crypto_check_attr_type() - check algorithm type and compute inherited mask
 * @tb: the template parameters
 * @type: the algorithm type the template would be instantiated as
 * @mask_ret: (output) the mask that should be passed to crypto_grab_*()
 *	      to restrict the flags of any inner algorithms
 *
 * Validate that the algorithm type the user requested is compatible with the
 * one the template would actually be instantiated as.  E.g., if the user is
 * doing crypto_alloc_shash("cbc(aes)", ...), this would return an error because
 * the "cbc" template creates an "skcipher" algorithm, not an "shash" algorithm.
 *
 * Also compute the mask to use to restrict the flags of any inner algorithms.
 *
 * Return: 0 on success; -errno on failure
 */
int crypto_check_attr_type(struct rtattr **tb, u32 type, u32 *mask_ret)
{
	struct crypto_attr_type *algt;

	algt = crypto_get_attr_type(tb);
	if (IS_ERR(algt))
		return PTR_ERR(algt);

	if ((algt->type ^ type) & algt->mask)
		return -EINVAL;

	*mask_ret = crypto_algt_inherited_mask(algt);
	return 0;
}
EXPORT_SYMBOL_GPL(crypto_check_attr_type);

const char *crypto_attr_alg_name(struct rtattr *rta)
{
	struct crypto_attr_alg *alga;

	if (!rta)
		return ERR_PTR(-ENOENT);
	if (RTA_PAYLOAD(rta) < sizeof(*alga))
		return ERR_PTR(-EINVAL);
	if (rta->rta_type != CRYPTOA_ALG)
		return ERR_PTR(-EINVAL);

	alga = RTA_DATA(rta);
	alga->name[CRYPTO_MAX_ALG_NAME - 1] = 0;

	return alga->name;
}
EXPORT_SYMBOL_GPL(crypto_attr_alg_name);

int __crypto_inst_setname(struct crypto_instance *inst, const char *name,
			  const char *driver, struct crypto_alg *alg)
{
	if (snprintf(inst->alg.cra_name, CRYPTO_MAX_ALG_NAME, "%s(%s)", name,
		     alg->cra_name) >= CRYPTO_MAX_ALG_NAME)
		return -ENAMETOOLONG;

	if (snprintf(inst->alg.cra_driver_name, CRYPTO_MAX_ALG_NAME, "%s(%s)",
		     driver, alg->cra_driver_name) >= CRYPTO_MAX_ALG_NAME)
		return -ENAMETOOLONG;

	return 0;
}
EXPORT_SYMBOL_GPL(__crypto_inst_setname);
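
/*
 * For example, instantiating the "hmac" template around the
 * "sha256-generic" driver yields cra_name "hmac(sha256)" and
 * cra_driver_name "hmac(sha256-generic)".  Both strings must fit in
 * CRYPTO_MAX_ALG_NAME, hence the -ENAMETOOLONG checks above.
 */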

void crypto_init_queue(struct crypto_queue *queue, unsigned int max_qlen)
{
	INIT_LIST_HEAD(&queue->list);
	queue->backlog = &queue->list;
	queue->qlen = 0;
	queue->max_qlen = max_qlen;
}
EXPORT_SYMBOL_GPL(crypto_init_queue);

int crypto_enqueue_request(struct crypto_queue *queue,
			   struct crypto_async_request *request)
{
	int err = -EINPROGRESS;

	if (unlikely(queue->qlen >= queue->max_qlen)) {
		if (!(request->flags & CRYPTO_TFM_REQ_MAY_BACKLOG)) {
			err = -ENOSPC;
			goto out;
		}
		err = -EBUSY;
		if (queue->backlog == &queue->list)
			queue->backlog = &request->list;
	}

	queue->qlen++;
	list_add_tail(&request->list, &queue->list);

out:
	return err;
}
EXPORT_SYMBOL_GPL(crypto_enqueue_request);

void crypto_enqueue_request_head(struct crypto_queue *queue,
				 struct crypto_async_request *request)
{
	if (unlikely(queue->qlen >= queue->max_qlen))
		queue->backlog = queue->backlog->prev;

	queue->qlen++;
	list_add(&request->list, &queue->list);
}
EXPORT_SYMBOL_GPL(crypto_enqueue_request_head);

struct crypto_async_request *crypto_dequeue_request(struct crypto_queue *queue)
{
	struct list_head *request;

	if (unlikely(!queue->qlen))
		return NULL;

	queue->qlen--;

	if (queue->backlog != &queue->list)
		queue->backlog = queue->backlog->next;

	request = queue->list.next;
	list_del_init(request);

	return list_entry(request, struct crypto_async_request, list);
}
EXPORT_SYMBOL_GPL(crypto_dequeue_request);
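
/*
 * Usage sketch (illustrative; names like "eng" are hypothetical and
 * this block is not compiled).  A driver typically serializes
 * submissions onto one crypto_queue under its own lock and drains it
 * from a worker:
 *
 *	crypto_init_queue(&eng->queue, 32);
 *
 *	// Submission path: returns -EINPROGRESS, or -EBUSY if the
 *	// request was backlogged (CRYPTO_TFM_REQ_MAY_BACKLOG set),
 *	// or -ENOSPC if it was dropped.
 *	spin_lock_bh(&eng->lock);
 *	ret = crypto_enqueue_request(&eng->queue, &req->base);
 *	spin_unlock_bh(&eng->lock);
 *
 *	// Worker path: pick up the oldest request, notifying any
 *	// backlogged submitter that its request is now in progress.
 *	spin_lock_bh(&eng->lock);
 *	backlog = crypto_get_backlog(&eng->queue);
 *	async_req = crypto_dequeue_request(&eng->queue);
 *	spin_unlock_bh(&eng->lock);
 *	if (backlog)
 *		crypto_request_complete(backlog, -EINPROGRESS);
 *	// ...process async_req, then call
 *	// crypto_request_complete(async_req, err).
 */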

static inline void crypto_inc_byte(u8 *a, unsigned int size)
{
	u8 *b = (a + size);
	u8 c;

	for (; size; size--) {
		c = *--b + 1;
		*b = c;
		if (c)
			break;
	}
}

void crypto_inc(u8 *a, unsigned int size)
{
	__be32 *b = (__be32 *)(a + size);
	u32 c;

	if (IS_ENABLED(CONFIG_HAVE_EFFICIENT_UNALIGNED_ACCESS) ||
	    IS_ALIGNED((unsigned long)b, __alignof__(*b)))
		for (; size >= 4; size -= 4) {
			c = be32_to_cpu(*--b) + 1;
			*b = cpu_to_be32(c);
			if (c)
				return;
		}

	crypto_inc_byte(a, size);
}
EXPORT_SYMBOL_GPL(crypto_inc);
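
/*
 * Worked example: crypto_inc() treats the buffer as one big-endian
 * integer, so a 4-byte counter 00 00 00 ff becomes 00 00 01 00, and a
 * block of all ff bytes wraps to all zeroes.  This is the increment
 * used for CTR-mode IVs, where the counter occupies the trailing
 * bytes of the block.
 */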

unsigned int crypto_alg_extsize(struct crypto_alg *alg)
{
	return alg->cra_ctxsize +
	       (alg->cra_alignmask & ~(crypto_tfm_ctx_alignment() - 1));
}
EXPORT_SYMBOL_GPL(crypto_alg_extsize);

int crypto_type_has_alg(const char *name, const struct crypto_type *frontend,
			u32 type, u32 mask)
{
	int ret = 0;
	struct crypto_alg *alg = crypto_find_alg(name, frontend, type, mask);

	if (!IS_ERR(alg)) {
		crypto_mod_put(alg);
		ret = 1;
	}

	return ret;
}
EXPORT_SYMBOL_GPL(crypto_type_has_alg);

static void __init crypto_start_tests(void)
{
	if (!IS_BUILTIN(CONFIG_CRYPTO_ALGAPI))
		return;

	if (!IS_ENABLED(CONFIG_CRYPTO_SELFTESTS))
		return;

	set_crypto_boot_test_finished();

	for (;;) {
		struct crypto_larval *larval = NULL;
		struct crypto_alg *q;

		down_write(&crypto_alg_sem);

		list_for_each_entry(q, &crypto_alg_list, cra_list) {
			struct crypto_larval *l;

			if (!crypto_is_larval(q))
				continue;

			l = (void *)q;

			if (!crypto_is_test_larval(l))
				continue;

			if (l->test_started)
				continue;

			l->test_started = true;
			larval = l;
			break;
		}

		up_write(&crypto_alg_sem);

		if (!larval)
			break;

		crypto_schedule_test(larval);
	}
}

static int __init crypto_algapi_init(void)
{
	crypto_start_tests();
	return 0;
}

static void __exit crypto_algapi_exit(void)
{
}

/*
 * We run this at late_initcall so that all the built-in algorithms
 * have had a chance to register themselves first.
 */
late_initcall(crypto_algapi_init);
module_exit(crypto_algapi_exit);

MODULE_LICENSE("GPL");
MODULE_DESCRIPTION("Cryptographic algorithms API");
MODULE_SOFTDEP("pre: cryptomgr");