crypto/shash.c — synchronous cryptographic hash (shash) infrastructure.
(Linux kernel source viewed through a gitweb blame page; the commit-hash
and author annotations interleaved below are blame metadata, not code.)
2874c5fd 1// SPDX-License-Identifier: GPL-2.0-or-later
7b5a080b
HX
2/*
3 * Synchronous Cryptographic Hash operations.
4 *
5 * Copyright (c) 2008 Herbert Xu <herbert@gondor.apana.org.au>
7b5a080b
HX
6 */
7
3b2f6df0 8#include <crypto/scatterwalk.h>
7b5a080b
HX
9#include <crypto/internal/hash.h>
10#include <linux/err.h>
11#include <linux/kernel.h>
12#include <linux/module.h>
13#include <linux/slab.h>
14#include <linux/seq_file.h>
f4d663ce
SK
15#include <linux/cryptouser.h>
16#include <net/netlink.h>
d8c34b94 17#include <linux/compiler.h>
7b5a080b 18
3b2f6df0
HX
19#include "internal.h"
20
3f683d61
HX
21static const struct crypto_type crypto_shash_type;
22
af3ff804
EB
/*
 * Fallback ->setkey() installed for unkeyed algorithms: any attempt to
 * set a key fails with -ENOSYS.  Exported so other code can install it
 * as a default.
 */
int shash_no_setkey(struct crypto_shash *tfm, const u8 *key,
		    unsigned int keylen)
{
	return -ENOSYS;
}
EXPORT_SYMBOL_GPL(shash_no_setkey);
57cfe44b 29
7b5a080b
HX
/*
 * Set a key whose buffer does not satisfy the algorithm's alignmask:
 * copy it into a freshly allocated, suitably aligned bounce buffer,
 * key the transform from that copy, then zero and free the copy.
 */
static int shash_setkey_unaligned(struct crypto_shash *tfm, const u8 *key,
				  unsigned int keylen)
{
	struct shash_alg *shash = crypto_shash_alg(tfm);
	unsigned long alignmask = crypto_shash_alignmask(tfm);
	unsigned long absize;
	u8 *buffer, *alignbuffer;
	int err;

	/* Worst-case slack needed to realign inside the allocation. */
	absize = keylen + (alignmask & ~(crypto_tfm_ctx_alignment() - 1));
	buffer = kmalloc(absize, GFP_ATOMIC);
	if (!buffer)
		return -ENOMEM;

	alignbuffer = (u8 *)ALIGN((unsigned long)buffer, alignmask + 1);
	memcpy(alignbuffer, key, keylen);
	err = shash->setkey(tfm, alignbuffer, keylen);
	kzfree(buffer);		/* key material: zeroed before freeing */
	return err;
}
50
ba7d7433
EB
/* Mark @tfm unusable until a key is supplied, if @alg requires one. */
static void shash_set_needkey(struct crypto_shash *tfm, struct shash_alg *alg)
{
	if (crypto_shash_alg_needs_key(alg))
		crypto_shash_set_flags(tfm, CRYPTO_TFM_NEED_KEY);
}
56
7b5a080b
HX
/**
 * crypto_shash_setkey - set the key for a synchronous hash transform
 * @tfm: shash transform to key
 * @key: key buffer (need not satisfy the algorithm's alignmask)
 * @keylen: length of @key in bytes
 *
 * Misaligned keys are bounced through an aligned copy.  On failure the
 * NEED_KEY flag is re-asserted (for keyed algorithms) so the transform
 * cannot be used with a half-set key; on success it is cleared.
 */
int crypto_shash_setkey(struct crypto_shash *tfm, const u8 *key,
			unsigned int keylen)
{
	struct shash_alg *shash = crypto_shash_alg(tfm);
	unsigned long alignmask = crypto_shash_alignmask(tfm);
	int err;

	if ((unsigned long)key & alignmask)
		err = shash_setkey_unaligned(tfm, key, keylen);
	else
		err = shash->setkey(tfm, key, keylen);

	if (unlikely(err)) {
		shash_set_needkey(tfm, shash);
		return err;
	}

	crypto_shash_clear_flags(tfm, CRYPTO_TFM_NEED_KEY);
	return 0;
}
EXPORT_SYMBOL_GPL(crypto_shash_setkey);
78
7b5a080b
HX
/*
 * Hash update for a data pointer violating the alignmask: copy the
 * leading misaligned chunk into an aligned on-stack bounce buffer and
 * hash it, then hash the now-aligned remainder directly from @data.
 */
static int shash_update_unaligned(struct shash_desc *desc, const u8 *data,
				  unsigned int len)
{
	struct crypto_shash *tfm = desc->tfm;
	struct shash_alg *shash = crypto_shash_alg(tfm);
	unsigned long alignmask = crypto_shash_alignmask(tfm);
	/* Bytes needed to bring @data up to the next aligned address. */
	unsigned int unaligned_len = alignmask + 1 -
				     ((unsigned long)data & alignmask);
	/*
	 * We cannot count on __aligned() working for large values:
	 * https://patchwork.kernel.org/patch/9507697/
	 */
	u8 ubuf[MAX_ALGAPI_ALIGNMASK * 2];
	u8 *buf = PTR_ALIGN(&ubuf[0], alignmask + 1);
	int err;

	/* Guard against an alignmask larger than the bounce buffer. */
	if (WARN_ON(buf + unaligned_len > ubuf + sizeof(ubuf)))
		return -EINVAL;

	/* The whole input may be shorter than the alignment gap. */
	if (unaligned_len > len)
		unaligned_len = len;

	memcpy(buf, data, unaligned_len);
	err = shash->update(desc, buf, unaligned_len);
	memset(buf, 0, unaligned_len);	/* may hold sensitive data */

	return err ?:
	       shash->update(desc, data + unaligned_len, len - unaligned_len);
}
108
109int crypto_shash_update(struct shash_desc *desc, const u8 *data,
110 unsigned int len)
111{
112 struct crypto_shash *tfm = desc->tfm;
113 struct shash_alg *shash = crypto_shash_alg(tfm);
114 unsigned long alignmask = crypto_shash_alignmask(tfm);
115
116 if ((unsigned long)data & alignmask)
117 return shash_update_unaligned(desc, data, len);
118
119 return shash->update(desc, data, len);
120}
121EXPORT_SYMBOL_GPL(crypto_shash_update);
122
/*
 * Produce the final digest when @out violates the alignmask: finalise
 * into an aligned on-stack buffer, copy the digest to @out, then wipe
 * the on-stack copy.
 */
static int shash_final_unaligned(struct shash_desc *desc, u8 *out)
{
	struct crypto_shash *tfm = desc->tfm;
	unsigned long alignmask = crypto_shash_alignmask(tfm);
	struct shash_alg *shash = crypto_shash_alg(tfm);
	unsigned int ds = crypto_shash_digestsize(tfm);
	/*
	 * We cannot count on __aligned() working for large values:
	 * https://patchwork.kernel.org/patch/9507697/
	 */
	u8 ubuf[MAX_ALGAPI_ALIGNMASK + HASH_MAX_DIGESTSIZE];
	u8 *buf = PTR_ALIGN(&ubuf[0], alignmask + 1);
	int err;

	/* Guard against a digest that would overrun the bounce buffer. */
	if (WARN_ON(buf + ds > ubuf + sizeof(ubuf)))
		return -EINVAL;

	err = shash->final(desc, buf);
	if (err)
		goto out;

	memcpy(out, buf, ds);

out:
	memset(buf, 0, ds);	/* digest copy may be sensitive */
	return err;
}
150
151int crypto_shash_final(struct shash_desc *desc, u8 *out)
152{
153 struct crypto_shash *tfm = desc->tfm;
154 struct shash_alg *shash = crypto_shash_alg(tfm);
155 unsigned long alignmask = crypto_shash_alignmask(tfm);
156
157 if ((unsigned long)out & alignmask)
158 return shash_final_unaligned(desc, out);
159
160 return shash->final(desc, out);
161}
162EXPORT_SYMBOL_GPL(crypto_shash_final);
163
164static int shash_finup_unaligned(struct shash_desc *desc, const u8 *data,
165 unsigned int len, u8 *out)
166{
167 return crypto_shash_update(desc, data, len) ?:
168 crypto_shash_final(desc, out);
169}
170
171int crypto_shash_finup(struct shash_desc *desc, const u8 *data,
172 unsigned int len, u8 *out)
173{
174 struct crypto_shash *tfm = desc->tfm;
175 struct shash_alg *shash = crypto_shash_alg(tfm);
176 unsigned long alignmask = crypto_shash_alignmask(tfm);
177
8267adab 178 if (((unsigned long)data | (unsigned long)out) & alignmask)
7b5a080b
HX
179 return shash_finup_unaligned(desc, data, len, out);
180
181 return shash->finup(desc, data, len, out);
182}
183EXPORT_SYMBOL_GPL(crypto_shash_finup);
184
185static int shash_digest_unaligned(struct shash_desc *desc, const u8 *data,
186 unsigned int len, u8 *out)
187{
188 return crypto_shash_init(desc) ?:
f88ad8de 189 crypto_shash_finup(desc, data, len, out);
7b5a080b
HX
190}
191
/**
 * crypto_shash_digest - init/update/final in one call
 * @desc: operation descriptor
 * @data: input bytes
 * @len: number of input bytes
 * @out: digest buffer
 *
 * Returns -ENOKEY if the algorithm requires a key that has not been set.
 */
int crypto_shash_digest(struct shash_desc *desc, const u8 *data,
			unsigned int len, u8 *out)
{
	struct crypto_shash *tfm = desc->tfm;
	struct shash_alg *shash = crypto_shash_alg(tfm);
	unsigned long alignmask = crypto_shash_alignmask(tfm);

	if (crypto_shash_get_flags(tfm) & CRYPTO_TFM_NEED_KEY)
		return -ENOKEY;

	/* Either pointer being misaligned forces the generic path. */
	if (((unsigned long)data | (unsigned long)out) & alignmask)
		return shash_digest_unaligned(desc, data, len, out);

	return shash->digest(desc, data, len, out);
}
EXPORT_SYMBOL_GPL(crypto_shash_digest);
208
f592682f 209static int shash_default_export(struct shash_desc *desc, void *out)
dec8b786 210{
f592682f
HX
211 memcpy(out, shash_desc_ctx(desc), crypto_shash_descsize(desc->tfm));
212 return 0;
99d27e1c 213}
dec8b786 214
f592682f 215static int shash_default_import(struct shash_desc *desc, const void *in)
99d27e1c 216{
f592682f
HX
217 memcpy(shash_desc_ctx(desc), in, crypto_shash_descsize(desc->tfm));
218 return 0;
dec8b786 219}
dec8b786 220
3b2f6df0
HX
221static int shash_async_setkey(struct crypto_ahash *tfm, const u8 *key,
222 unsigned int keylen)
223{
224 struct crypto_shash **ctx = crypto_ahash_ctx(tfm);
225
226 return crypto_shash_setkey(*ctx, key, keylen);
227}
228
229static int shash_async_init(struct ahash_request *req)
230{
231 struct crypto_shash **ctx = crypto_ahash_ctx(crypto_ahash_reqtfm(req));
232 struct shash_desc *desc = ahash_request_ctx(req);
233
234 desc->tfm = *ctx;
3b2f6df0
HX
235
236 return crypto_shash_init(desc);
237}
238
/*
 * Feed an ahash request's scatterlist data into a shash descriptor,
 * one mapped chunk at a time via the hash-walk API.
 *
 * A negative value from crypto_shash_update() is handed straight to
 * crypto_hash_walk_done(), terminating the walk with that error.
 */
int shash_ahash_update(struct ahash_request *req, struct shash_desc *desc)
{
	struct crypto_hash_walk walk;
	int nbytes;

	for (nbytes = crypto_hash_walk_first(req, &walk); nbytes > 0;
	     nbytes = crypto_hash_walk_done(&walk, nbytes))
		nbytes = crypto_shash_update(desc, walk.data, nbytes);

	return nbytes;
}
EXPORT_SYMBOL_GPL(shash_ahash_update);
251
/* ahash->shash bridge for the update step. */
static int shash_async_update(struct ahash_request *req)
{
	struct shash_desc *desc = ahash_request_ctx(req);

	return shash_ahash_update(req, desc);
}
3b2f6df0
HX
256
257static int shash_async_final(struct ahash_request *req)
258{
259 return crypto_shash_final(ahash_request_ctx(req), req->result);
260}
261
66f6ce5e
HX
/*
 * finup over a scatterlist: update on every chunk except the last,
 * which is hashed with finup so the digest lands in req->result in the
 * same pass.  An empty request degenerates to a plain final.
 */
int shash_ahash_finup(struct ahash_request *req, struct shash_desc *desc)
{
	struct crypto_hash_walk walk;
	int nbytes;

	nbytes = crypto_hash_walk_first(req, &walk);
	if (!nbytes)
		return crypto_shash_final(desc, req->result);

	do {
		nbytes = crypto_hash_walk_last(&walk) ?
			 crypto_shash_finup(desc, walk.data, nbytes,
					    req->result) :
			 crypto_shash_update(desc, walk.data, nbytes);
		/* Errors from above propagate through walk_done(). */
		nbytes = crypto_hash_walk_done(&walk, nbytes);
	} while (nbytes > 0);

	return nbytes;
}
EXPORT_SYMBOL_GPL(shash_ahash_finup);
282
283static int shash_async_finup(struct ahash_request *req)
284{
285 struct crypto_shash **ctx = crypto_ahash_ctx(crypto_ahash_reqtfm(req));
286 struct shash_desc *desc = ahash_request_ctx(req);
287
288 desc->tfm = *ctx;
66f6ce5e
HX
289
290 return shash_ahash_finup(req, desc);
291}
292
/*
 * One-shot digest of a scatterlist request.  If the entire input lies
 * within the first scatterlist entry and a single page mapping, hash it
 * directly through a kmap_atomic() fast path; otherwise fall back to
 * init + shash_ahash_finup() over the walk.
 */
int shash_ahash_digest(struct ahash_request *req, struct shash_desc *desc)
{
	unsigned int nbytes = req->nbytes;
	struct scatterlist *sg;
	unsigned int offset;
	int err;

	/*
	 * Comma expressions: sg/offset are only loaded (and only valid)
	 * when nbytes is non-zero.
	 */
	if (nbytes &&
	    (sg = req->src, offset = sg->offset,
	     nbytes <= min(sg->length, ((unsigned int)(PAGE_SIZE)) - offset))) {
		void *data;

		data = kmap_atomic(sg_page(sg));
		err = crypto_shash_digest(desc, data + offset, nbytes,
					  req->result);
		kunmap_atomic(data);
	} else
		err = crypto_shash_init(desc) ?:
		      shash_ahash_finup(req, desc);

	return err;
}
EXPORT_SYMBOL_GPL(shash_ahash_digest);
3b2f6df0 316
7eddf95e
HX
317static int shash_async_digest(struct ahash_request *req)
318{
319 struct crypto_shash **ctx = crypto_ahash_ctx(crypto_ahash_reqtfm(req));
320 struct shash_desc *desc = ahash_request_ctx(req);
3b2f6df0 321
7eddf95e 322 desc->tfm = *ctx;
3b2f6df0 323
7eddf95e 324 return shash_ahash_digest(req, desc);
3b2f6df0
HX
325}
326
66f6ce5e
HX
/* ahash->shash bridge for exporting the partial hash state. */
static int shash_async_export(struct ahash_request *req, void *out)
{
	struct shash_desc *desc = ahash_request_ctx(req);

	return crypto_shash_export(desc, out);
}
331
332static int shash_async_import(struct ahash_request *req, const void *in)
333{
90246e79
HX
334 struct crypto_shash **ctx = crypto_ahash_ctx(crypto_ahash_reqtfm(req));
335 struct shash_desc *desc = ahash_request_ctx(req);
336
337 desc->tfm = *ctx;
90246e79
HX
338
339 return crypto_shash_import(desc, in);
66f6ce5e
HX
340}
341
3b2f6df0
HX
/* Teardown for the async wrapper: free the shash created at init. */
static void crypto_exit_shash_ops_async(struct crypto_tfm *tfm)
{
	struct crypto_shash **ctxp = crypto_tfm_ctx(tfm);

	crypto_free_shash(*ctxp);
}
348
/*
 * Expose a shash algorithm through the ahash (asynchronous) interface:
 * allocate the underlying shash transform, stash it in the ahash
 * context, and route every ahash entry point to the shash_async_*
 * wrappers above.
 */
int crypto_init_shash_ops_async(struct crypto_tfm *tfm)
{
	struct crypto_alg *calg = tfm->__crt_alg;
	struct shash_alg *alg = __crypto_shash_alg(calg);
	struct crypto_ahash *crt = __crypto_ahash_cast(tfm);
	struct crypto_shash **ctx = crypto_tfm_ctx(tfm);
	struct crypto_shash *shash;

	if (!crypto_mod_get(calg))
		return -EAGAIN;

	shash = crypto_create_tfm(calg, &crypto_shash_type);
	if (IS_ERR(shash)) {
		crypto_mod_put(calg);	/* drop the reference taken above */
		return PTR_ERR(shash);
	}

	*ctx = shash;
	tfm->exit = crypto_exit_shash_ops_async;

	crt->init = shash_async_init;
	crt->update = shash_async_update;
	crt->final = shash_async_final;
	crt->finup = shash_async_finup;
	crt->digest = shash_async_digest;
	/* Only keyed algorithms get a setkey hook on the ahash side. */
	if (crypto_shash_alg_has_setkey(alg))
		crt->setkey = shash_async_setkey;

	/* Propagate the NEED_KEY state to the ahash wrapper. */
	crypto_ahash_set_flags(crt, crypto_shash_get_flags(shash) &
				    CRYPTO_TFM_NEED_KEY);

	crt->export = shash_async_export;
	crt->import = shash_async_import;

	/* Request context holds the shash descriptor plus its state. */
	crt->reqsize = sizeof(struct shash_desc) + crypto_shash_descsize(shash);

	return 0;
}
387
fbce6be5
HX
388static void crypto_shash_exit_tfm(struct crypto_tfm *tfm)
389{
390 struct crypto_shash *hash = __crypto_shash_cast(tfm);
391 struct shash_alg *alg = crypto_shash_alg(hash);
392
393 alg->exit_tfm(hash);
394}
395
/* Per-transform initialisation for the shash frontend. */
static int crypto_shash_init_tfm(struct crypto_tfm *tfm)
{
	struct crypto_shash *hash = __crypto_shash_cast(tfm);
	struct shash_alg *alg = crypto_shash_alg(hash);
	int err;

	hash->descsize = alg->descsize;

	/* Keyed algorithms start out in the NEED_KEY state. */
	shash_set_needkey(hash, alg);

	if (alg->exit_tfm)
		tfm->exit = crypto_shash_exit_tfm;

	if (!alg->init_tfm)
		return 0;

	err = alg->init_tfm(hash);
	if (err)
		return err;

	/* ->init_tfm() may have increased the descsize. */
	if (WARN_ON_ONCE(hash->descsize > HASH_MAX_DESCSIZE)) {
		/* Undo init_tfm before failing the allocation. */
		if (alg->exit_tfm)
			alg->exit_tfm(hash);
		return -EINVAL;
	}

	return 0;
}
425
#ifdef CONFIG_NET
/* Report algorithm details to userspace via the crypto netlink API. */
static int crypto_shash_report(struct sk_buff *skb, struct crypto_alg *alg)
{
	struct crypto_report_hash rhash;
	struct shash_alg *salg = __crypto_shash_alg(alg);

	/* Zero first: the whole struct is copied out to userspace. */
	memset(&rhash, 0, sizeof(rhash));

	strscpy(rhash.type, "shash", sizeof(rhash.type));

	rhash.blocksize = alg->cra_blocksize;
	rhash.digestsize = salg->digestsize;

	return nla_put(skb, CRYPTOCFGA_REPORT_HASH, sizeof(rhash), &rhash);
}
#else
/* Netlink reporting unavailable without CONFIG_NET. */
static int crypto_shash_report(struct sk_buff *skb, struct crypto_alg *alg)
{
	return -ENOSYS;
}
#endif
f4d663ce 447
static void crypto_shash_show(struct seq_file *m, struct crypto_alg *alg)
	__maybe_unused;
/* /proc/crypto line formatter for shash algorithms. */
static void crypto_shash_show(struct seq_file *m, struct crypto_alg *alg)
{
	struct shash_alg *salg = __crypto_shash_alg(alg);

	seq_printf(m, "type : shash\n");
	seq_printf(m, "blocksize : %u\n", alg->cra_blocksize);
	seq_printf(m, "digestsize : %u\n", salg->digestsize);
}
458
/* Frontend glue hooking shash algorithms into the crypto core. */
static const struct crypto_type crypto_shash_type = {
	.extsize = crypto_alg_extsize,
	.init_tfm = crypto_shash_init_tfm,
#ifdef CONFIG_PROC_FS
	.show = crypto_shash_show,	/* /proc/crypto output */
#endif
	.report = crypto_shash_report,	/* netlink (crypto_user) report */
	.maskclear = ~CRYPTO_ALG_TYPE_MASK,
	.maskset = CRYPTO_ALG_TYPE_MASK,
	.type = CRYPTO_ALG_TYPE_SHASH,
	.tfmsize = offsetof(struct crypto_shash, base),
};
471
/**
 * crypto_alloc_shash - allocate a synchronous hash transform
 * @alg_name: algorithm name (e.g. "sha256")
 * @type: algorithm type flags
 * @mask: algorithm type mask
 *
 * Returns the transform or an ERR_PTR() on failure.
 */
struct crypto_shash *crypto_alloc_shash(const char *alg_name, u32 type,
					u32 mask)
{
	return crypto_alloc_tfm(alg_name, &crypto_shash_type, type, mask);
}
EXPORT_SYMBOL_GPL(crypto_alloc_shash);
478
/*
 * Validate and fill in the common parts of a shash algorithm before
 * registration: bounds-check the advertised sizes, require export and
 * import to come as a pair, and install generic fallbacks for the
 * optional callbacks.
 */
static int shash_prepare_alg(struct shash_alg *alg)
{
	struct crypto_alg *base = &alg->base;

	if (alg->digestsize > HASH_MAX_DIGESTSIZE ||
	    alg->descsize > HASH_MAX_DESCSIZE ||
	    alg->statesize > HASH_MAX_STATESIZE)
		return -EINVAL;

	/* export/import only make sense together. */
	if ((alg->export && !alg->import) || (alg->import && !alg->export))
		return -EINVAL;

	base->cra_type = &crypto_shash_type;
	base->cra_flags &= ~CRYPTO_ALG_TYPE_MASK;
	base->cra_flags |= CRYPTO_ALG_TYPE_SHASH;

	if (!alg->finup)
		alg->finup = shash_finup_unaligned;
	if (!alg->digest)
		alg->digest = shash_digest_unaligned;
	if (!alg->export) {
		/* Defaults copy the descriptor context verbatim. */
		alg->export = shash_default_export;
		alg->import = shash_default_import;
		alg->statesize = alg->descsize;
	}
	if (!alg->setkey)
		alg->setkey = shash_no_setkey;

	return 0;
}
509
510int crypto_register_shash(struct shash_alg *alg)
511{
512 struct crypto_alg *base = &alg->base;
513 int err;
514
515 err = shash_prepare_alg(alg);
516 if (err)
517 return err;
7b5a080b
HX
518
519 return crypto_register_alg(base);
520}
521EXPORT_SYMBOL_GPL(crypto_register_shash);
522
523int crypto_unregister_shash(struct shash_alg *alg)
524{
525 return crypto_unregister_alg(&alg->base);
526}
527EXPORT_SYMBOL_GPL(crypto_unregister_shash);
528
50fc3e8d
JK
529int crypto_register_shashes(struct shash_alg *algs, int count)
530{
531 int i, ret;
532
533 for (i = 0; i < count; i++) {
534 ret = crypto_register_shash(&algs[i]);
535 if (ret)
536 goto err;
537 }
538
539 return 0;
540
541err:
542 for (--i; i >= 0; --i)
543 crypto_unregister_shash(&algs[i]);
544
545 return ret;
546}
547EXPORT_SYMBOL_GPL(crypto_register_shashes);
548
549int crypto_unregister_shashes(struct shash_alg *algs, int count)
550{
551 int i, ret;
552
553 for (i = count - 1; i >= 0; --i) {
554 ret = crypto_unregister_shash(&algs[i]);
555 if (ret)
556 pr_err("Failed to unregister %s %s: %d\n",
557 algs[i].base.cra_driver_name,
558 algs[i].base.cra_name, ret);
559 }
560
561 return 0;
562}
563EXPORT_SYMBOL_GPL(crypto_unregister_shashes);
564
619a6ebd
HX
565int shash_register_instance(struct crypto_template *tmpl,
566 struct shash_instance *inst)
567{
568 int err;
569
570 err = shash_prepare_alg(&inst->alg);
571 if (err)
572 return err;
573
574 return crypto_register_instance(tmpl, shash_crypto_instance(inst));
575}
576EXPORT_SYMBOL_GPL(shash_register_instance);
577
/* Free a shash instance: drop its spawn, then the instance memory. */
void shash_free_instance(struct crypto_instance *inst)
{
	crypto_drop_spawn(crypto_instance_ctx(inst));
	kfree(shash_instance(inst));
}
EXPORT_SYMBOL_GPL(shash_free_instance);
584
94296999
HX
585int crypto_init_shash_spawn(struct crypto_shash_spawn *spawn,
586 struct shash_alg *alg,
587 struct crypto_instance *inst)
588{
589 return crypto_init_spawn2(&spawn->base, &alg->base, inst,
590 &crypto_shash_type);
591}
592EXPORT_SYMBOL_GPL(crypto_init_shash_spawn);
593
7d6f5640
HX
594struct shash_alg *shash_attr_alg(struct rtattr *rta, u32 type, u32 mask)
595{
596 struct crypto_alg *alg;
597
598 alg = crypto_attr_alg2(rta, &crypto_shash_type, type, mask);
599 return IS_ERR(alg) ? ERR_CAST(alg) :
600 container_of(alg, struct shash_alg, base);
601}
602EXPORT_SYMBOL_GPL(shash_attr_alg);
603
7b5a080b
HX
604MODULE_LICENSE("GPL");
605MODULE_DESCRIPTION("Synchronous cryptographic hash type");