// SPDX-License-Identifier: GPL-2.0-or-later
/*
 * Cryptographic API for algorithms (i.e., low-level API).
 *
 * Copyright (c) 2006 Herbert Xu <herbert@gondor.apana.org.au>
 */

#include <crypto/algapi.h>
#include <linux/err.h>
#include <linux/errno.h>
#include <linux/fips.h>
#include <linux/init.h>
#include <linux/kernel.h>
#include <linux/list.h>
#include <linux/module.h>
#include <linux/rtnetlink.h>
#include <linux/slab.h>
#include <linux/string.h>

#include "internal.h"

static LIST_HEAD(crypto_template_list);

static inline void crypto_check_module_sig(struct module *mod)
{
	if (fips_enabled && mod && !module_sig_ok(mod))
		panic("Module %s signature verification failed in FIPS mode\n",
		      module_name(mod));
}

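/*
 * Basic sanity checks applied to every algorithm before registration:
 * both names must be set, the alignmask must be one less than a power
 * of two and within the supported maximum, the blocksize must be
 * within bounds and the priority must not be negative.  Also
 * initialises the algorithm's reference count.
 */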
static int crypto_check_alg(struct crypto_alg *alg)
{
	crypto_check_module_sig(alg->cra_module);

	if (!alg->cra_name[0] || !alg->cra_driver_name[0])
		return -EINVAL;

	if (alg->cra_alignmask & (alg->cra_alignmask + 1))
		return -EINVAL;

	/* General maximums for all algs. */
	if (alg->cra_alignmask > MAX_ALGAPI_ALIGNMASK)
		return -EINVAL;

	if (alg->cra_blocksize > MAX_ALGAPI_BLOCKSIZE)
		return -EINVAL;

	/* Lower maximums for specific alg types. */
	if (!alg->cra_type && (alg->cra_flags & CRYPTO_ALG_TYPE_MASK) ==
			       CRYPTO_ALG_TYPE_CIPHER) {
		if (alg->cra_alignmask > MAX_CIPHER_ALIGNMASK)
			return -EINVAL;

		if (alg->cra_blocksize > MAX_CIPHER_BLOCKSIZE)
			return -EINVAL;
	}

	if (alg->cra_priority < 0)
		return -EINVAL;

	refcount_set(&alg->cra_refcnt, 1);

	return 0;
}

static void crypto_free_instance(struct crypto_instance *inst)
{
	if (!inst->alg.cra_type->free) {
		inst->tmpl->free(inst);
		return;
	}

	inst->alg.cra_type->free(inst);
}

static void crypto_destroy_instance(struct crypto_alg *alg)
{
	struct crypto_instance *inst = (void *)alg;
	struct crypto_template *tmpl = inst->tmpl;

	crypto_free_instance(inst);
	crypto_tmpl_put(tmpl);
}

/*
 * This function adds a spawn to the list secondary_spawns which
 * will be used at the end of crypto_remove_spawns to unregister
 * instances, unless the spawn happens to be one that is depended
 * on by the new algorithm (nalg in crypto_remove_spawns).
 *
 * This function is also responsible for resurrecting any algorithms
 * in the dependency chain of nalg by unsetting n->dead.
 */
static struct list_head *crypto_more_spawns(struct crypto_alg *alg,
					    struct list_head *stack,
					    struct list_head *top,
					    struct list_head *secondary_spawns)
{
	struct crypto_spawn *spawn, *n;

	spawn = list_first_entry_or_null(stack, struct crypto_spawn, list);
	if (!spawn)
		return NULL;

	n = list_prev_entry(spawn, list);
	list_move(&spawn->list, secondary_spawns);

	if (list_is_last(&n->list, stack))
		return top;

	n = list_next_entry(n, list);
	if (!spawn->dead)
		n->dead = false;

	return &n->inst->alg.cra_users;
}

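/*
 * Mark an instance as dead, unhash it from its template and move its
 * algorithm onto @list so that crypto_remove_final() can drop the
 * final reference.
 */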
static void crypto_remove_instance(struct crypto_instance *inst,
				   struct list_head *list)
{
	struct crypto_template *tmpl = inst->tmpl;

	if (crypto_is_dead(&inst->alg))
		return;

	inst->alg.cra_flags |= CRYPTO_ALG_DEAD;
	if (hlist_unhashed(&inst->list))
		return;

	if (!tmpl || !crypto_tmpl_get(tmpl))
		return;

	list_move(&inst->alg.cra_list, list);
	hlist_del(&inst->list);
	inst->alg.cra_destroy = crypto_destroy_instance;

	BUG_ON(!list_empty(&inst->alg.cra_users));
}

/*
 * Given an algorithm alg, remove all algorithms that depend on it
 * through spawns.  If nalg is not null, then exempt any algorithms
 * that are depended on by nalg.  This is useful when nalg itself
 * depends on alg.
 */
void crypto_remove_spawns(struct crypto_alg *alg, struct list_head *list,
			  struct crypto_alg *nalg)
{
	u32 new_type = (nalg ?: alg)->cra_flags;
	struct crypto_spawn *spawn, *n;
	LIST_HEAD(secondary_spawns);
	struct list_head *spawns;
	LIST_HEAD(stack);
	LIST_HEAD(top);

	spawns = &alg->cra_users;
	list_for_each_entry_safe(spawn, n, spawns, list) {
		if ((spawn->alg->cra_flags ^ new_type) & spawn->mask)
			continue;

		list_move(&spawn->list, &top);
	}

	/*
	 * Perform a depth-first walk starting from alg through
	 * the cra_users tree.  The list stack records the path
	 * from alg to the current spawn.
	 */
	spawns = &top;
	do {
		while (!list_empty(spawns)) {
			struct crypto_instance *inst;

			spawn = list_first_entry(spawns, struct crypto_spawn,
						 list);
			inst = spawn->inst;

			BUG_ON(&inst->alg == alg);

			list_move(&spawn->list, &stack);

			if (&inst->alg == nalg)
				break;

			spawn->dead = true;
			spawns = &inst->alg.cra_users;

			/*
			 * We may encounter an unregistered instance here, since
			 * an instance's spawns are set up prior to the instance
			 * being registered.  An unregistered instance will have
			 * NULL ->cra_users.next, since ->cra_users isn't
			 * properly initialized until registration.  But an
			 * unregistered instance cannot have any users, so treat
			 * it the same as ->cra_users being empty.
			 */
			if (spawns->next == NULL)
				break;
		}
	} while ((spawns = crypto_more_spawns(alg, &stack, &top,
					      &secondary_spawns)));

	/*
	 * Remove all instances that are marked as dead.  Also
	 * complete the resurrection of the others by moving them
	 * back to the cra_users list.
	 */
	list_for_each_entry_safe(spawn, n, &secondary_spawns, list) {
		if (!spawn->dead)
			list_move(&spawn->list, &spawn->alg->cra_users);
		else
			crypto_remove_instance(spawn->inst, list);
	}
}
EXPORT_SYMBOL_GPL(crypto_remove_spawns);

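/*
 * Add @alg to crypto_alg_list together with a temporary larval that is
 * resolved once the self-tests have run.  Must be called with
 * crypto_alg_sem held for writing.  Returns the larval on success or an
 * ERR_PTR() on failure.
 */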
static struct crypto_larval *__crypto_register_alg(struct crypto_alg *alg)
{
	struct crypto_alg *q;
	struct crypto_larval *larval;
	int ret = -EAGAIN;

	if (crypto_is_dead(alg))
		goto err;

	INIT_LIST_HEAD(&alg->cra_users);

	/* No cheating! */
	alg->cra_flags &= ~CRYPTO_ALG_TESTED;

	ret = -EEXIST;

	list_for_each_entry(q, &crypto_alg_list, cra_list) {
		if (q == alg)
			goto err;

		if (crypto_is_moribund(q))
			continue;

		if (crypto_is_larval(q)) {
			if (!strcmp(alg->cra_driver_name, q->cra_driver_name))
				goto err;
			continue;
		}

		if (!strcmp(q->cra_driver_name, alg->cra_name) ||
		    !strcmp(q->cra_name, alg->cra_driver_name))
			goto err;
	}

	larval = crypto_larval_alloc(alg->cra_name,
				     alg->cra_flags | CRYPTO_ALG_TESTED, 0);
	if (IS_ERR(larval))
		goto out;

	ret = -ENOENT;
	larval->adult = crypto_mod_get(alg);
	if (!larval->adult)
		goto free_larval;

	refcount_set(&larval->alg.cra_refcnt, 1);
	memcpy(larval->alg.cra_driver_name, alg->cra_driver_name,
	       CRYPTO_MAX_ALG_NAME);
	larval->alg.cra_priority = alg->cra_priority;

	list_add(&alg->cra_list, &crypto_alg_list);
	list_add(&larval->alg.cra_list, &crypto_alg_list);

	crypto_stats_init(alg);

out:
	return larval;

free_larval:
	kfree(larval);
err:
	larval = ERR_PTR(ret);
	goto out;
}

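/*
 * Called when the self-test for the larval registered under @name has
 * finished with result @err.  The larval is marked dead and, on
 * success, its adult algorithm is flagged as tested and any compatible
 * larval waiters are satisfied.
 */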
void crypto_alg_tested(const char *name, int err)
{
	struct crypto_larval *test;
	struct crypto_alg *alg;
	struct crypto_alg *q;
	LIST_HEAD(list);
	bool best;

	down_write(&crypto_alg_sem);
	list_for_each_entry(q, &crypto_alg_list, cra_list) {
		if (crypto_is_moribund(q) || !crypto_is_larval(q))
			continue;

		test = (struct crypto_larval *)q;

		if (!strcmp(q->cra_driver_name, name))
			goto found;
	}

	pr_err("alg: Unexpected test result for %s: %d\n", name, err);
	goto unlock;

found:
	q->cra_flags |= CRYPTO_ALG_DEAD;
	alg = test->adult;
	if (err || list_empty(&alg->cra_list))
		goto complete;

	alg->cra_flags |= CRYPTO_ALG_TESTED;

	/* Only satisfy larval waiters if we are the best. */
	best = true;
	list_for_each_entry(q, &crypto_alg_list, cra_list) {
		if (crypto_is_moribund(q) || !crypto_is_larval(q))
			continue;

		if (strcmp(alg->cra_name, q->cra_name))
			continue;

		if (q->cra_priority > alg->cra_priority) {
			best = false;
			break;
		}
	}

	list_for_each_entry(q, &crypto_alg_list, cra_list) {
		if (q == alg)
			continue;

		if (crypto_is_moribund(q))
			continue;

		if (crypto_is_larval(q)) {
			struct crypto_larval *larval = (void *)q;

			/*
			 * Check to see if either our generic name or
			 * specific name can satisfy the name requested
			 * by the larval entry q.
			 */
			if (strcmp(alg->cra_name, q->cra_name) &&
			    strcmp(alg->cra_driver_name, q->cra_name))
				continue;

			if (larval->adult)
				continue;
			if ((q->cra_flags ^ alg->cra_flags) & larval->mask)
				continue;

			if (best && crypto_mod_get(alg))
				larval->adult = alg;
			else
				larval->adult = ERR_PTR(-EAGAIN);

			continue;
		}

		if (strcmp(alg->cra_name, q->cra_name))
			continue;

		if (strcmp(alg->cra_driver_name, q->cra_driver_name) &&
		    q->cra_priority > alg->cra_priority)
			continue;

		crypto_remove_spawns(q, &list, alg);
	}

complete:
	complete_all(&test->completion);

unlock:
	up_write(&crypto_alg_sem);

	crypto_remove_final(&list);
}
EXPORT_SYMBOL_GPL(crypto_alg_tested);

void crypto_remove_final(struct list_head *list)
{
	struct crypto_alg *alg;
	struct crypto_alg *n;

	list_for_each_entry_safe(alg, n, list, cra_list) {
		list_del_init(&alg->cra_list);
		crypto_alg_put(alg);
	}
}
EXPORT_SYMBOL_GPL(crypto_remove_final);

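/*
 * Ask the crypto manager, via the notifier chain, to test the larval's
 * adult algorithm, wait for the verdict and then kill the larval.
 */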
static void crypto_wait_for_test(struct crypto_larval *larval)
{
	int err;

	err = crypto_probing_notify(CRYPTO_MSG_ALG_REGISTER, larval->adult);
	if (err != NOTIFY_STOP) {
		if (WARN_ON(err != NOTIFY_DONE))
			goto out;
		crypto_alg_tested(larval->alg.cra_driver_name, 0);
	}

	err = wait_for_completion_killable(&larval->completion);
	WARN_ON(err);
	if (!err)
		crypto_probing_notify(CRYPTO_MSG_ALG_LOADED, larval);

out:
	crypto_larval_kill(&larval->alg);
}

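/*
 * Register a new algorithm and wait for its self-tests to complete
 * before returning.
 */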
int crypto_register_alg(struct crypto_alg *alg)
{
	struct crypto_larval *larval;
	int err;

	alg->cra_flags &= ~CRYPTO_ALG_DEAD;
	err = crypto_check_alg(alg);
	if (err)
		return err;

	down_write(&crypto_alg_sem);
	larval = __crypto_register_alg(alg);
	up_write(&crypto_alg_sem);

	if (IS_ERR(larval))
		return PTR_ERR(larval);

	crypto_wait_for_test(larval);
	return 0;
}
EXPORT_SYMBOL_GPL(crypto_register_alg);

static int crypto_remove_alg(struct crypto_alg *alg, struct list_head *list)
{
	if (unlikely(list_empty(&alg->cra_list)))
		return -ENOENT;

	alg->cra_flags |= CRYPTO_ALG_DEAD;

	list_del_init(&alg->cra_list);
	crypto_remove_spawns(alg, list, NULL);

	return 0;
}

void crypto_unregister_alg(struct crypto_alg *alg)
{
	int ret;
	LIST_HEAD(list);

	down_write(&crypto_alg_sem);
	ret = crypto_remove_alg(alg, &list);
	up_write(&crypto_alg_sem);

	if (WARN(ret, "Algorithm %s is not registered", alg->cra_driver_name))
		return;

	BUG_ON(refcount_read(&alg->cra_refcnt) != 1);
	if (alg->cra_destroy)
		alg->cra_destroy(alg);

	crypto_remove_final(&list);
}
EXPORT_SYMBOL_GPL(crypto_unregister_alg);

int crypto_register_algs(struct crypto_alg *algs, int count)
{
	int i, ret;

	for (i = 0; i < count; i++) {
		ret = crypto_register_alg(&algs[i]);
		if (ret)
			goto err;
	}

	return 0;

err:
	for (--i; i >= 0; --i)
		crypto_unregister_alg(&algs[i]);

	return ret;
}
EXPORT_SYMBOL_GPL(crypto_register_algs);

void crypto_unregister_algs(struct crypto_alg *algs, int count)
{
	int i;

	for (i = 0; i < count; i++)
		crypto_unregister_alg(&algs[i]);
}
EXPORT_SYMBOL_GPL(crypto_unregister_algs);

int crypto_register_template(struct crypto_template *tmpl)
{
	struct crypto_template *q;
	int err = -EEXIST;

	down_write(&crypto_alg_sem);

	crypto_check_module_sig(tmpl->module);

	list_for_each_entry(q, &crypto_template_list, list) {
		if (q == tmpl)
			goto out;
	}

	list_add(&tmpl->list, &crypto_template_list);
	err = 0;
out:
	up_write(&crypto_alg_sem);
	return err;
}
EXPORT_SYMBOL_GPL(crypto_register_template);

int crypto_register_templates(struct crypto_template *tmpls, int count)
{
	int i, err;

	for (i = 0; i < count; i++) {
		err = crypto_register_template(&tmpls[i]);
		if (err)
			goto out;
	}
	return 0;

out:
	for (--i; i >= 0; --i)
		crypto_unregister_template(&tmpls[i]);
	return err;
}
EXPORT_SYMBOL_GPL(crypto_register_templates);

void crypto_unregister_template(struct crypto_template *tmpl)
{
	struct crypto_instance *inst;
	struct hlist_node *n;
	struct hlist_head *list;
	LIST_HEAD(users);

	down_write(&crypto_alg_sem);

	BUG_ON(list_empty(&tmpl->list));
	list_del_init(&tmpl->list);

	list = &tmpl->instances;
	hlist_for_each_entry(inst, list, list) {
		int err = crypto_remove_alg(&inst->alg, &users);

		BUG_ON(err);
	}

	up_write(&crypto_alg_sem);

	hlist_for_each_entry_safe(inst, n, list, list) {
		BUG_ON(refcount_read(&inst->alg.cra_refcnt) != 1);
		crypto_free_instance(inst);
	}
	crypto_remove_final(&users);
}
EXPORT_SYMBOL_GPL(crypto_unregister_template);

void crypto_unregister_templates(struct crypto_template *tmpls, int count)
{
	int i;

	for (i = count - 1; i >= 0; --i)
		crypto_unregister_template(&tmpls[i]);
}
EXPORT_SYMBOL_GPL(crypto_unregister_templates);

static struct crypto_template *__crypto_lookup_template(const char *name)
{
	struct crypto_template *q, *tmpl = NULL;

	down_read(&crypto_alg_sem);
	list_for_each_entry(q, &crypto_template_list, list) {
		if (strcmp(q->name, name))
			continue;
		if (unlikely(!crypto_tmpl_get(q)))
			continue;

		tmpl = q;
		break;
	}
	up_read(&crypto_alg_sem);

	return tmpl;
}

struct crypto_template *crypto_lookup_template(const char *name)
{
	return try_then_request_module(__crypto_lookup_template(name),
				       "crypto-%s", name);
}
EXPORT_SYMBOL_GPL(crypto_lookup_template);

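/*
 * Register an instance created from @tmpl.  The instance is hooked up
 * to its template and then goes through the same larval/testing path
 * as a standalone algorithm.
 */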
int crypto_register_instance(struct crypto_template *tmpl,
			     struct crypto_instance *inst)
{
	struct crypto_larval *larval;
	int err;

	err = crypto_check_alg(&inst->alg);
	if (err)
		return err;

	inst->alg.cra_module = tmpl->module;
	inst->alg.cra_flags |= CRYPTO_ALG_INSTANCE;

	down_write(&crypto_alg_sem);

	larval = __crypto_register_alg(&inst->alg);
	if (IS_ERR(larval))
		goto unlock;

	hlist_add_head(&inst->list, &tmpl->instances);
	inst->tmpl = tmpl;

unlock:
	up_write(&crypto_alg_sem);

	err = PTR_ERR(larval);
	if (IS_ERR(larval))
		goto err;

	crypto_wait_for_test(larval);
	err = 0;

err:
	return err;
}
EXPORT_SYMBOL_GPL(crypto_register_instance);

void crypto_unregister_instance(struct crypto_instance *inst)
{
	LIST_HEAD(list);

	down_write(&crypto_alg_sem);

	crypto_remove_spawns(&inst->alg, &list, NULL);
	crypto_remove_instance(inst, &list);

	up_write(&crypto_alg_sem);

	crypto_remove_final(&list);
}
EXPORT_SYMBOL_GPL(crypto_unregister_instance);

int crypto_init_spawn(struct crypto_spawn *spawn, struct crypto_alg *alg,
		      struct crypto_instance *inst, u32 mask)
{
	int err = -EAGAIN;

	if (WARN_ON_ONCE(inst == NULL))
		return -EINVAL;

	spawn->inst = inst;
	spawn->mask = mask;

	down_write(&crypto_alg_sem);
	if (!crypto_is_moribund(alg)) {
		list_add(&spawn->list, &alg->cra_users);
		spawn->alg = alg;
		err = 0;
	}
	up_write(&crypto_alg_sem);

	return err;
}
EXPORT_SYMBOL_GPL(crypto_init_spawn);

int crypto_init_spawn2(struct crypto_spawn *spawn, struct crypto_alg *alg,
		       struct crypto_instance *inst,
		       const struct crypto_type *frontend)
{
	int err = -EINVAL;

	if ((alg->cra_flags ^ frontend->type) & frontend->maskset)
		goto out;

	spawn->frontend = frontend;
	err = crypto_init_spawn(spawn, alg, inst, frontend->maskset);

out:
	return err;
}
EXPORT_SYMBOL_GPL(crypto_init_spawn2);

int crypto_grab_spawn(struct crypto_spawn *spawn, const char *name,
		      u32 type, u32 mask)
{
	struct crypto_alg *alg;
	int err;

	alg = crypto_find_alg(name, spawn->frontend, type, mask);
	if (IS_ERR(alg))
		return PTR_ERR(alg);

	err = crypto_init_spawn(spawn, alg, spawn->inst, mask);
	crypto_mod_put(alg);
	return err;
}
EXPORT_SYMBOL_GPL(crypto_grab_spawn);

void crypto_drop_spawn(struct crypto_spawn *spawn)
{
	down_write(&crypto_alg_sem);
	if (!spawn->dead)
		list_del(&spawn->list);
	up_write(&crypto_alg_sem);
}
EXPORT_SYMBOL_GPL(crypto_drop_spawn);

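/*
 * Resolve @spawn to its backing algorithm under crypto_alg_sem,
 * returning ERR_PTR(-EAGAIN) if the algorithm can no longer be used.
 */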
static struct crypto_alg *crypto_spawn_alg(struct crypto_spawn *spawn)
{
	struct crypto_alg *alg;

	down_read(&crypto_alg_sem);
	alg = spawn->alg;
	if (!spawn->dead && !crypto_mod_get(alg)) {
		alg->cra_flags |= CRYPTO_ALG_DYING;
		alg = NULL;
	}
	up_read(&crypto_alg_sem);

	return alg ?: ERR_PTR(-EAGAIN);
}

struct crypto_tfm *crypto_spawn_tfm(struct crypto_spawn *spawn, u32 type,
				    u32 mask)
{
	struct crypto_alg *alg;
	struct crypto_tfm *tfm;

	alg = crypto_spawn_alg(spawn);
	if (IS_ERR(alg))
		return ERR_CAST(alg);

	tfm = ERR_PTR(-EINVAL);
	if (unlikely((alg->cra_flags ^ type) & mask))
		goto out_put_alg;

	tfm = __crypto_alloc_tfm(alg, type, mask);
	if (IS_ERR(tfm))
		goto out_put_alg;

	return tfm;

out_put_alg:
	crypto_mod_put(alg);
	return tfm;
}
EXPORT_SYMBOL_GPL(crypto_spawn_tfm);

void *crypto_spawn_tfm2(struct crypto_spawn *spawn)
{
	struct crypto_alg *alg;
	struct crypto_tfm *tfm;

	alg = crypto_spawn_alg(spawn);
	if (IS_ERR(alg))
		return ERR_CAST(alg);

	tfm = crypto_create_tfm(alg, spawn->frontend);
	if (IS_ERR(tfm))
		goto out_put_alg;

	return tfm;

out_put_alg:
	crypto_mod_put(alg);
	return tfm;
}
EXPORT_SYMBOL_GPL(crypto_spawn_tfm2);

int crypto_register_notifier(struct notifier_block *nb)
{
	return blocking_notifier_chain_register(&crypto_chain, nb);
}
EXPORT_SYMBOL_GPL(crypto_register_notifier);

int crypto_unregister_notifier(struct notifier_block *nb)
{
	return blocking_notifier_chain_unregister(&crypto_chain, nb);
}
EXPORT_SYMBOL_GPL(crypto_unregister_notifier);

struct crypto_attr_type *crypto_get_attr_type(struct rtattr **tb)
{
	struct rtattr *rta = tb[0];
	struct crypto_attr_type *algt;

	if (!rta)
		return ERR_PTR(-ENOENT);
	if (RTA_PAYLOAD(rta) < sizeof(*algt))
		return ERR_PTR(-EINVAL);
	if (rta->rta_type != CRYPTOA_TYPE)
		return ERR_PTR(-EINVAL);

	algt = RTA_DATA(rta);

	return algt;
}
EXPORT_SYMBOL_GPL(crypto_get_attr_type);

int crypto_check_attr_type(struct rtattr **tb, u32 type)
{
	struct crypto_attr_type *algt;

	algt = crypto_get_attr_type(tb);
	if (IS_ERR(algt))
		return PTR_ERR(algt);

	if ((algt->type ^ type) & algt->mask)
		return -EINVAL;

	return 0;
}
EXPORT_SYMBOL_GPL(crypto_check_attr_type);

const char *crypto_attr_alg_name(struct rtattr *rta)
{
	struct crypto_attr_alg *alga;

	if (!rta)
		return ERR_PTR(-ENOENT);
	if (RTA_PAYLOAD(rta) < sizeof(*alga))
		return ERR_PTR(-EINVAL);
	if (rta->rta_type != CRYPTOA_ALG)
		return ERR_PTR(-EINVAL);

	alga = RTA_DATA(rta);
	alga->name[CRYPTO_MAX_ALG_NAME - 1] = 0;

	return alga->name;
}
EXPORT_SYMBOL_GPL(crypto_attr_alg_name);

struct crypto_alg *crypto_attr_alg2(struct rtattr *rta,
				    const struct crypto_type *frontend,
				    u32 type, u32 mask)
{
	const char *name;

	name = crypto_attr_alg_name(rta);
	if (IS_ERR(name))
		return ERR_CAST(name);

	return crypto_find_alg(name, frontend, type, mask);
}
EXPORT_SYMBOL_GPL(crypto_attr_alg2);

int crypto_attr_u32(struct rtattr *rta, u32 *num)
{
	struct crypto_attr_u32 *nu32;

	if (!rta)
		return -ENOENT;
	if (RTA_PAYLOAD(rta) < sizeof(*nu32))
		return -EINVAL;
	if (rta->rta_type != CRYPTOA_U32)
		return -EINVAL;

	nu32 = RTA_DATA(rta);
	*num = nu32->num;

	return 0;
}
EXPORT_SYMBOL_GPL(crypto_attr_u32);

int crypto_inst_setname(struct crypto_instance *inst, const char *name,
			struct crypto_alg *alg)
{
	if (snprintf(inst->alg.cra_name, CRYPTO_MAX_ALG_NAME, "%s(%s)", name,
		     alg->cra_name) >= CRYPTO_MAX_ALG_NAME)
		return -ENAMETOOLONG;

	if (snprintf(inst->alg.cra_driver_name, CRYPTO_MAX_ALG_NAME, "%s(%s)",
		     name, alg->cra_driver_name) >= CRYPTO_MAX_ALG_NAME)
		return -ENAMETOOLONG;

	return 0;
}
EXPORT_SYMBOL_GPL(crypto_inst_setname);

void *crypto_alloc_instance(const char *name, struct crypto_alg *alg,
			    unsigned int head)
{
	struct crypto_instance *inst;
	char *p;
	int err;

	p = kzalloc(head + sizeof(*inst) + sizeof(struct crypto_spawn),
		    GFP_KERNEL);
	if (!p)
		return ERR_PTR(-ENOMEM);

	inst = (void *)(p + head);

	err = crypto_inst_setname(inst, name, alg);
	if (err)
		goto err_free_inst;

	return p;

err_free_inst:
	kfree(p);
	return ERR_PTR(err);
}
EXPORT_SYMBOL_GPL(crypto_alloc_instance);

void crypto_init_queue(struct crypto_queue *queue, unsigned int max_qlen)
{
	INIT_LIST_HEAD(&queue->list);
	queue->backlog = &queue->list;
	queue->qlen = 0;
	queue->max_qlen = max_qlen;
}
EXPORT_SYMBOL_GPL(crypto_init_queue);

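/*
 * Add @request to @queue.  Returns -EINPROGRESS on success, -EBUSY if
 * the queue is full and the request was placed on the backlog, or
 * -ENOSPC if the queue is full and the request may not be backlogged.
 */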
int crypto_enqueue_request(struct crypto_queue *queue,
			   struct crypto_async_request *request)
{
	int err = -EINPROGRESS;

	if (unlikely(queue->qlen >= queue->max_qlen)) {
		if (!(request->flags & CRYPTO_TFM_REQ_MAY_BACKLOG)) {
			err = -ENOSPC;
			goto out;
		}
		err = -EBUSY;
		if (queue->backlog == &queue->list)
			queue->backlog = &request->list;
	}

	queue->qlen++;
	list_add_tail(&request->list, &queue->list);

out:
	return err;
}
EXPORT_SYMBOL_GPL(crypto_enqueue_request);

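/*
 * Remove and return the oldest request on @queue, or NULL if the queue
 * is empty.
 */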
struct crypto_async_request *crypto_dequeue_request(struct crypto_queue *queue)
{
	struct list_head *request;

	if (unlikely(!queue->qlen))
		return NULL;

	queue->qlen--;

	if (queue->backlog != &queue->list)
		queue->backlog = queue->backlog->next;

	request = queue->list.next;
	list_del(request);

	return list_entry(request, struct crypto_async_request, list);
}
EXPORT_SYMBOL_GPL(crypto_dequeue_request);

static inline void crypto_inc_byte(u8 *a, unsigned int size)
{
	u8 *b = (a + size);
	u8 c;

	for (; size; size--) {
		c = *--b + 1;
		*b = c;
		if (c)
			break;
	}
}

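/*
 * Increment the big-endian counter held in the @size bytes at @a,
 * using 32-bit steps where alignment (or cheap unaligned access)
 * allows and falling back to byte-wise carries otherwise.
 */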
void crypto_inc(u8 *a, unsigned int size)
{
	__be32 *b = (__be32 *)(a + size);
	u32 c;

	if (IS_ENABLED(CONFIG_HAVE_EFFICIENT_UNALIGNED_ACCESS) ||
	    IS_ALIGNED((unsigned long)b, __alignof__(*b)))
		for (; size >= 4; size -= 4) {
			c = be32_to_cpu(*--b) + 1;
			*b = cpu_to_be32(c);
			if (likely(c))
				return;
		}

	crypto_inc_byte(a, size);
}
EXPORT_SYMBOL_GPL(crypto_inc);

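/*
 * XOR @len bytes of @src1 and @src2 into @dst, using the widest loads
 * and stores that the relative alignment of the three buffers allows.
 */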
void __crypto_xor(u8 *dst, const u8 *src1, const u8 *src2, unsigned int len)
{
	int relalign = 0;

	if (!IS_ENABLED(CONFIG_HAVE_EFFICIENT_UNALIGNED_ACCESS)) {
		int size = sizeof(unsigned long);
		int d = (((unsigned long)dst ^ (unsigned long)src1) |
			 ((unsigned long)dst ^ (unsigned long)src2)) &
			(size - 1);

		relalign = d ? 1 << __ffs(d) : size;

		/*
		 * If we care about alignment, process as many bytes as
		 * needed to advance dst and src to values whose alignments
		 * equal their relative alignment. This will allow us to
		 * process the remainder of the input using optimal strides.
		 */
		while (((unsigned long)dst & (relalign - 1)) && len > 0) {
			*dst++ = *src1++ ^ *src2++;
			len--;
		}
	}

	while (IS_ENABLED(CONFIG_64BIT) && len >= 8 && !(relalign & 7)) {
		*(u64 *)dst = *(u64 *)src1 ^ *(u64 *)src2;
		dst += 8;
		src1 += 8;
		src2 += 8;
		len -= 8;
	}

	while (len >= 4 && !(relalign & 3)) {
		*(u32 *)dst = *(u32 *)src1 ^ *(u32 *)src2;
		dst += 4;
		src1 += 4;
		src2 += 4;
		len -= 4;
	}

	while (len >= 2 && !(relalign & 1)) {
		*(u16 *)dst = *(u16 *)src1 ^ *(u16 *)src2;
		dst += 2;
		src1 += 2;
		src2 += 2;
		len -= 2;
	}

	while (len--)
		*dst++ = *src1++ ^ *src2++;
}
EXPORT_SYMBOL_GPL(__crypto_xor);

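/*
 * Default transform context size: the algorithm's context size plus
 * any extra bytes needed to satisfy its alignment mask.
 */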
unsigned int crypto_alg_extsize(struct crypto_alg *alg)
{
	return alg->cra_ctxsize +
	       (alg->cra_alignmask & ~(crypto_tfm_ctx_alignment() - 1));
}
EXPORT_SYMBOL_GPL(crypto_alg_extsize);

int crypto_type_has_alg(const char *name, const struct crypto_type *frontend,
			u32 type, u32 mask)
{
	int ret = 0;
	struct crypto_alg *alg = crypto_find_alg(name, frontend, type, mask);

	if (!IS_ERR(alg)) {
		crypto_mod_put(alg);
		ret = 1;
	}

	return ret;
}
EXPORT_SYMBOL_GPL(crypto_type_has_alg);

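/*
 * Per-algorithm statistics helpers, compiled in only when
 * CONFIG_CRYPTO_STATS is enabled.  The completion helpers below update
 * the relevant counters and drop the algorithm reference taken by
 * crypto_stats_get().
 */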
#ifdef CONFIG_CRYPTO_STATS
void crypto_stats_init(struct crypto_alg *alg)
{
	memset(&alg->stats, 0, sizeof(alg->stats));
}
EXPORT_SYMBOL_GPL(crypto_stats_init);

void crypto_stats_get(struct crypto_alg *alg)
{
	crypto_alg_get(alg);
}
EXPORT_SYMBOL_GPL(crypto_stats_get);

void crypto_stats_aead_encrypt(unsigned int cryptlen, struct crypto_alg *alg,
			       int ret)
{
	if (ret && ret != -EINPROGRESS && ret != -EBUSY) {
		atomic64_inc(&alg->stats.aead.err_cnt);
	} else {
		atomic64_inc(&alg->stats.aead.encrypt_cnt);
		atomic64_add(cryptlen, &alg->stats.aead.encrypt_tlen);
	}
	crypto_alg_put(alg);
}
EXPORT_SYMBOL_GPL(crypto_stats_aead_encrypt);

void crypto_stats_aead_decrypt(unsigned int cryptlen, struct crypto_alg *alg,
			       int ret)
{
	if (ret && ret != -EINPROGRESS && ret != -EBUSY) {
		atomic64_inc(&alg->stats.aead.err_cnt);
	} else {
		atomic64_inc(&alg->stats.aead.decrypt_cnt);
		atomic64_add(cryptlen, &alg->stats.aead.decrypt_tlen);
	}
	crypto_alg_put(alg);
}
EXPORT_SYMBOL_GPL(crypto_stats_aead_decrypt);

void crypto_stats_akcipher_encrypt(unsigned int src_len, int ret,
				   struct crypto_alg *alg)
{
	if (ret && ret != -EINPROGRESS && ret != -EBUSY) {
		atomic64_inc(&alg->stats.akcipher.err_cnt);
	} else {
		atomic64_inc(&alg->stats.akcipher.encrypt_cnt);
		atomic64_add(src_len, &alg->stats.akcipher.encrypt_tlen);
	}
	crypto_alg_put(alg);
}
EXPORT_SYMBOL_GPL(crypto_stats_akcipher_encrypt);

void crypto_stats_akcipher_decrypt(unsigned int src_len, int ret,
				   struct crypto_alg *alg)
{
	if (ret && ret != -EINPROGRESS && ret != -EBUSY) {
		atomic64_inc(&alg->stats.akcipher.err_cnt);
	} else {
		atomic64_inc(&alg->stats.akcipher.decrypt_cnt);
		atomic64_add(src_len, &alg->stats.akcipher.decrypt_tlen);
	}
	crypto_alg_put(alg);
}
EXPORT_SYMBOL_GPL(crypto_stats_akcipher_decrypt);

void crypto_stats_akcipher_sign(int ret, struct crypto_alg *alg)
{
	if (ret && ret != -EINPROGRESS && ret != -EBUSY)
		atomic64_inc(&alg->stats.akcipher.err_cnt);
	else
		atomic64_inc(&alg->stats.akcipher.sign_cnt);
	crypto_alg_put(alg);
}
EXPORT_SYMBOL_GPL(crypto_stats_akcipher_sign);

void crypto_stats_akcipher_verify(int ret, struct crypto_alg *alg)
{
	if (ret && ret != -EINPROGRESS && ret != -EBUSY)
		atomic64_inc(&alg->stats.akcipher.err_cnt);
	else
		atomic64_inc(&alg->stats.akcipher.verify_cnt);
	crypto_alg_put(alg);
}
EXPORT_SYMBOL_GPL(crypto_stats_akcipher_verify);

void crypto_stats_compress(unsigned int slen, int ret, struct crypto_alg *alg)
{
	if (ret && ret != -EINPROGRESS && ret != -EBUSY) {
		atomic64_inc(&alg->stats.compress.err_cnt);
	} else {
		atomic64_inc(&alg->stats.compress.compress_cnt);
		atomic64_add(slen, &alg->stats.compress.compress_tlen);
	}
	crypto_alg_put(alg);
}
EXPORT_SYMBOL_GPL(crypto_stats_compress);

void crypto_stats_decompress(unsigned int slen, int ret, struct crypto_alg *alg)
{
	if (ret && ret != -EINPROGRESS && ret != -EBUSY) {
		atomic64_inc(&alg->stats.compress.err_cnt);
	} else {
		atomic64_inc(&alg->stats.compress.decompress_cnt);
		atomic64_add(slen, &alg->stats.compress.decompress_tlen);
	}
	crypto_alg_put(alg);
}
EXPORT_SYMBOL_GPL(crypto_stats_decompress);

void crypto_stats_ahash_update(unsigned int nbytes, int ret,
			       struct crypto_alg *alg)
{
	if (ret && ret != -EINPROGRESS && ret != -EBUSY)
		atomic64_inc(&alg->stats.hash.err_cnt);
	else
		atomic64_add(nbytes, &alg->stats.hash.hash_tlen);
	crypto_alg_put(alg);
}
EXPORT_SYMBOL_GPL(crypto_stats_ahash_update);

void crypto_stats_ahash_final(unsigned int nbytes, int ret,
			      struct crypto_alg *alg)
{
	if (ret && ret != -EINPROGRESS && ret != -EBUSY) {
		atomic64_inc(&alg->stats.hash.err_cnt);
	} else {
		atomic64_inc(&alg->stats.hash.hash_cnt);
		atomic64_add(nbytes, &alg->stats.hash.hash_tlen);
	}
	crypto_alg_put(alg);
}
EXPORT_SYMBOL_GPL(crypto_stats_ahash_final);

void crypto_stats_kpp_set_secret(struct crypto_alg *alg, int ret)
{
	if (ret)
		atomic64_inc(&alg->stats.kpp.err_cnt);
	else
		atomic64_inc(&alg->stats.kpp.setsecret_cnt);
	crypto_alg_put(alg);
}
EXPORT_SYMBOL_GPL(crypto_stats_kpp_set_secret);

void crypto_stats_kpp_generate_public_key(struct crypto_alg *alg, int ret)
{
	if (ret)
		atomic64_inc(&alg->stats.kpp.err_cnt);
	else
		atomic64_inc(&alg->stats.kpp.generate_public_key_cnt);
	crypto_alg_put(alg);
}
EXPORT_SYMBOL_GPL(crypto_stats_kpp_generate_public_key);

void crypto_stats_kpp_compute_shared_secret(struct crypto_alg *alg, int ret)
{
	if (ret)
		atomic64_inc(&alg->stats.kpp.err_cnt);
	else
		atomic64_inc(&alg->stats.kpp.compute_shared_secret_cnt);
	crypto_alg_put(alg);
}
EXPORT_SYMBOL_GPL(crypto_stats_kpp_compute_shared_secret);

void crypto_stats_rng_seed(struct crypto_alg *alg, int ret)
{
	if (ret && ret != -EINPROGRESS && ret != -EBUSY)
		atomic64_inc(&alg->stats.rng.err_cnt);
	else
		atomic64_inc(&alg->stats.rng.seed_cnt);
	crypto_alg_put(alg);
}
EXPORT_SYMBOL_GPL(crypto_stats_rng_seed);

void crypto_stats_rng_generate(struct crypto_alg *alg, unsigned int dlen,
			       int ret)
{
	if (ret && ret != -EINPROGRESS && ret != -EBUSY) {
		atomic64_inc(&alg->stats.rng.err_cnt);
	} else {
		atomic64_inc(&alg->stats.rng.generate_cnt);
		atomic64_add(dlen, &alg->stats.rng.generate_tlen);
	}
	crypto_alg_put(alg);
}
EXPORT_SYMBOL_GPL(crypto_stats_rng_generate);

void crypto_stats_skcipher_encrypt(unsigned int cryptlen, int ret,
				   struct crypto_alg *alg)
{
	if (ret && ret != -EINPROGRESS && ret != -EBUSY) {
		atomic64_inc(&alg->stats.cipher.err_cnt);
	} else {
		atomic64_inc(&alg->stats.cipher.encrypt_cnt);
		atomic64_add(cryptlen, &alg->stats.cipher.encrypt_tlen);
	}
	crypto_alg_put(alg);
}
EXPORT_SYMBOL_GPL(crypto_stats_skcipher_encrypt);

void crypto_stats_skcipher_decrypt(unsigned int cryptlen, int ret,
				   struct crypto_alg *alg)
{
	if (ret && ret != -EINPROGRESS && ret != -EBUSY) {
		atomic64_inc(&alg->stats.cipher.err_cnt);
	} else {
		atomic64_inc(&alg->stats.cipher.decrypt_cnt);
		atomic64_add(cryptlen, &alg->stats.cipher.decrypt_tlen);
	}
	crypto_alg_put(alg);
}
EXPORT_SYMBOL_GPL(crypto_stats_skcipher_decrypt);
#endif

static int __init crypto_algapi_init(void)
{
	crypto_init_proc();
	return 0;
}

static void __exit crypto_algapi_exit(void)
{
	crypto_exit_proc();
}

module_init(crypto_algapi_init);
module_exit(crypto_algapi_exit);

MODULE_LICENSE("GPL");
MODULE_DESCRIPTION("Cryptographic algorithms API");