/*
 * Synchronous Cryptographic Hash operations.
 *
 * Copyright (c) 2008 Herbert Xu <herbert@gondor.apana.org.au>
 *
 * This program is free software; you can redistribute it and/or modify it
 * under the terms of the GNU General Public License as published by the Free
 * Software Foundation; either version 2 of the License, or (at your option)
 * any later version.
 *
 */

#include <crypto/scatterwalk.h>
#include <crypto/internal/hash.h>
#include <linux/err.h>
#include <linux/kernel.h>
#include <linux/module.h>
#include <linux/slab.h>
#include <linux/seq_file.h>

#include "internal.h"

static const struct crypto_type crypto_shash_type;
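
/*
 * Helper for crypto_shash_setkey(): when the caller's key buffer does not
 * satisfy the algorithm's alignment mask, copy the key into a suitably
 * aligned heap buffer, set the key from that copy, then wipe and free it.
 */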
static int shash_setkey_unaligned(struct crypto_shash *tfm, const u8 *key,
				  unsigned int keylen)
{
	struct shash_alg *shash = crypto_shash_alg(tfm);
	unsigned long alignmask = crypto_shash_alignmask(tfm);
	unsigned long absize;
	u8 *buffer, *alignbuffer;
	int err;

	absize = keylen + (alignmask & ~(CRYPTO_MINALIGN - 1));
	buffer = kmalloc(absize, GFP_KERNEL);
	if (!buffer)
		return -ENOMEM;

	alignbuffer = (u8 *)ALIGN((unsigned long)buffer, alignmask + 1);
	memcpy(alignbuffer, key, keylen);
	err = shash->setkey(tfm, alignbuffer, keylen);
	memset(alignbuffer, 0, keylen);
	kfree(buffer);
	return err;
}

int crypto_shash_setkey(struct crypto_shash *tfm, const u8 *key,
			unsigned int keylen)
{
	struct shash_alg *shash = crypto_shash_alg(tfm);
	unsigned long alignmask = crypto_shash_alignmask(tfm);

	if (!shash->setkey)
		return -ENOSYS;

	if ((unsigned long)key & alignmask)
		return shash_setkey_unaligned(tfm, key, keylen);

	return shash->setkey(tfm, key, keylen);
}
EXPORT_SYMBOL_GPL(crypto_shash_setkey);

static inline unsigned int shash_align_buffer_size(unsigned len,
						   unsigned long mask)
{
	return len + (mask & ~(__alignof__(u8 __attribute__ ((aligned))) - 1));
}
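
/*
 * Handle an update with a misaligned data pointer: hash the first few bytes
 * out of an aligned on-stack copy so that the remainder of the buffer becomes
 * aligned, then feed the rest to the algorithm directly.
 */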
static int shash_update_unaligned(struct shash_desc *desc, const u8 *data,
				  unsigned int len)
{
	struct crypto_shash *tfm = desc->tfm;
	struct shash_alg *shash = crypto_shash_alg(tfm);
	unsigned long alignmask = crypto_shash_alignmask(tfm);
	unsigned int unaligned_len = alignmask + 1 -
				     ((unsigned long)data & alignmask);
	u8 buf[shash_align_buffer_size(unaligned_len, alignmask)]
		__attribute__ ((aligned));

	if (unaligned_len > len)
		unaligned_len = len;

	memcpy(buf, data, unaligned_len);

	return shash->update(desc, buf, unaligned_len) ?:
	       shash->update(desc, data + unaligned_len, len - unaligned_len);
}

int crypto_shash_update(struct shash_desc *desc, const u8 *data,
			unsigned int len)
{
	struct crypto_shash *tfm = desc->tfm;
	struct shash_alg *shash = crypto_shash_alg(tfm);
	unsigned long alignmask = crypto_shash_alignmask(tfm);

	if ((unsigned long)data & alignmask)
		return shash_update_unaligned(desc, data, len);

	return shash->update(desc, data, len);
}
EXPORT_SYMBOL_GPL(crypto_shash_update);

static int shash_final_unaligned(struct shash_desc *desc, u8 *out)
{
	struct crypto_shash *tfm = desc->tfm;
	unsigned long alignmask = crypto_shash_alignmask(tfm);
	struct shash_alg *shash = crypto_shash_alg(tfm);
	unsigned int ds = crypto_shash_digestsize(tfm);
	u8 buf[shash_align_buffer_size(ds, alignmask)]
		__attribute__ ((aligned));
	int err;

	err = shash->final(desc, buf);
	memcpy(out, buf, ds);
	return err;
}

int crypto_shash_final(struct shash_desc *desc, u8 *out)
{
	struct crypto_shash *tfm = desc->tfm;
	struct shash_alg *shash = crypto_shash_alg(tfm);
	unsigned long alignmask = crypto_shash_alignmask(tfm);

	if ((unsigned long)out & alignmask)
		return shash_final_unaligned(desc, out);

	return shash->final(desc, out);
}
EXPORT_SYMBOL_GPL(crypto_shash_final);

static int shash_finup_unaligned(struct shash_desc *desc, const u8 *data,
				 unsigned int len, u8 *out)
{
	return crypto_shash_update(desc, data, len) ?:
	       crypto_shash_final(desc, out);
}

int crypto_shash_finup(struct shash_desc *desc, const u8 *data,
		       unsigned int len, u8 *out)
{
	struct crypto_shash *tfm = desc->tfm;
	struct shash_alg *shash = crypto_shash_alg(tfm);
	unsigned long alignmask = crypto_shash_alignmask(tfm);

	if (((unsigned long)data | (unsigned long)out) & alignmask ||
	    !shash->finup)
		return shash_finup_unaligned(desc, data, len, out);

	return shash->finup(desc, data, len, out);
}
EXPORT_SYMBOL_GPL(crypto_shash_finup);

static int shash_digest_unaligned(struct shash_desc *desc, const u8 *data,
				  unsigned int len, u8 *out)
{
	return crypto_shash_init(desc) ?:
	       crypto_shash_finup(desc, data, len, out);
}

int crypto_shash_digest(struct shash_desc *desc, const u8 *data,
			unsigned int len, u8 *out)
{
	struct crypto_shash *tfm = desc->tfm;
	struct shash_alg *shash = crypto_shash_alg(tfm);
	unsigned long alignmask = crypto_shash_alignmask(tfm);

	if (((unsigned long)data | (unsigned long)out) & alignmask ||
	    !shash->digest)
		return shash_digest_unaligned(desc, data, len, out);

	return shash->digest(desc, data, len, out);
}
EXPORT_SYMBOL_GPL(crypto_shash_digest);
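
/*
 * Restore a previously saved partial hash state: copy it into the descriptor
 * context and, if the algorithm provides a reinit hook, let it fix up any
 * derived state.
 */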
int crypto_shash_import(struct shash_desc *desc, const u8 *in)
{
	struct crypto_shash *tfm = desc->tfm;
	struct shash_alg *alg = crypto_shash_alg(tfm);

	memcpy(shash_desc_ctx(desc), in, crypto_shash_descsize(tfm));

	if (alg->reinit)
		return alg->reinit(desc);

	return 0;
}
EXPORT_SYMBOL_GPL(crypto_shash_import);
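
/*
 * The shash_async_* helpers below expose a synchronous hash through the
 * asynchronous ahash interface.  The ahash transform context holds a pointer
 * to the underlying crypto_shash, and the request context holds the
 * shash_desc used for the actual computation.
 */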

static int shash_async_setkey(struct crypto_ahash *tfm, const u8 *key,
			      unsigned int keylen)
{
	struct crypto_shash **ctx = crypto_ahash_ctx(tfm);

	return crypto_shash_setkey(*ctx, key, keylen);
}

static int shash_async_init(struct ahash_request *req)
{
	struct crypto_shash **ctx = crypto_ahash_ctx(crypto_ahash_reqtfm(req));
	struct shash_desc *desc = ahash_request_ctx(req);

	desc->tfm = *ctx;
	desc->flags = req->base.flags;

	return crypto_shash_init(desc);
}

static int shash_async_update(struct ahash_request *req)
{
	struct shash_desc *desc = ahash_request_ctx(req);
	struct crypto_hash_walk walk;
	int nbytes;

	for (nbytes = crypto_hash_walk_first(req, &walk); nbytes > 0;
	     nbytes = crypto_hash_walk_done(&walk, nbytes))
		nbytes = crypto_shash_update(desc, walk.data, nbytes);

	return nbytes;
}

static int shash_async_final(struct ahash_request *req)
{
	return crypto_shash_final(ahash_request_ctx(req), req->result);
}
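
/*
 * Digest fast path: if the request data fits within a single page, map that
 * page and hash it with one crypto_shash_digest() call; otherwise fall back
 * to init/update/final over the scatterlist walk.
 */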
static int shash_async_digest(struct ahash_request *req)
{
	struct scatterlist *sg = req->src;
	unsigned int offset = sg->offset;
	unsigned int nbytes = req->nbytes;
	int err;

	if (nbytes < min(sg->length, ((unsigned int)(PAGE_SIZE)) - offset)) {
		struct crypto_shash **ctx =
			crypto_ahash_ctx(crypto_ahash_reqtfm(req));
		struct shash_desc *desc = ahash_request_ctx(req);
		void *data;

		desc->tfm = *ctx;
		desc->flags = req->base.flags;

		data = crypto_kmap(sg_page(sg), 0);
		err = crypto_shash_digest(desc, data + offset, nbytes,
					  req->result);
		crypto_kunmap(data, 0);
		crypto_yield(desc->flags);
		goto out;
	}

	err = shash_async_init(req);
	if (err)
		goto out;

	err = shash_async_update(req);
	if (err)
		goto out;

	err = shash_async_final(req);

out:
	return err;
}

static void crypto_exit_shash_ops_async(struct crypto_tfm *tfm)
{
	struct crypto_shash **ctx = crypto_tfm_ctx(tfm);

	crypto_free_shash(*ctx);
}

static int crypto_init_shash_ops_async(struct crypto_tfm *tfm)
{
	struct crypto_alg *calg = tfm->__crt_alg;
	struct shash_alg *alg = __crypto_shash_alg(calg);
	struct ahash_tfm *crt = &tfm->crt_ahash;
	struct crypto_shash **ctx = crypto_tfm_ctx(tfm);
	struct crypto_shash *shash;

	if (!crypto_mod_get(calg))
		return -EAGAIN;

	shash = crypto_create_tfm(calg, &crypto_shash_type);
	if (IS_ERR(shash)) {
		crypto_mod_put(calg);
		return PTR_ERR(shash);
	}

	*ctx = shash;
	tfm->exit = crypto_exit_shash_ops_async;

	crt->init = shash_async_init;
	crt->update = shash_async_update;
	crt->final = shash_async_final;
	crt->digest = shash_async_digest;
	crt->setkey = shash_async_setkey;

	crt->digestsize = alg->digestsize;
	crt->reqsize = sizeof(struct shash_desc) + crypto_shash_descsize(shash);

	return 0;
}
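
/*
 * The shash_compat_* helpers below implement the legacy crypto_hash
 * (hash_desc/scatterlist) interface on top of a synchronous hash.  Here the
 * transform context embeds the shash_desc directly.
 */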

static int shash_compat_setkey(struct crypto_hash *tfm, const u8 *key,
			       unsigned int keylen)
{
	struct shash_desc *desc = crypto_hash_ctx(tfm);

	return crypto_shash_setkey(desc->tfm, key, keylen);
}

static int shash_compat_init(struct hash_desc *hdesc)
{
	struct shash_desc *desc = crypto_hash_ctx(hdesc->tfm);

	desc->flags = hdesc->flags;

	return crypto_shash_init(desc);
}

static int shash_compat_update(struct hash_desc *hdesc, struct scatterlist *sg,
			       unsigned int len)
{
	struct shash_desc *desc = crypto_hash_ctx(hdesc->tfm);
	struct crypto_hash_walk walk;
	int nbytes;

	for (nbytes = crypto_hash_walk_first_compat(hdesc, &walk, sg, len);
	     nbytes > 0; nbytes = crypto_hash_walk_done(&walk, nbytes))
		nbytes = crypto_shash_update(desc, walk.data, nbytes);

	return nbytes;
}

static int shash_compat_final(struct hash_desc *hdesc, u8 *out)
{
	return crypto_shash_final(crypto_hash_ctx(hdesc->tfm), out);
}

static int shash_compat_digest(struct hash_desc *hdesc, struct scatterlist *sg,
			       unsigned int nbytes, u8 *out)
{
	unsigned int offset = sg->offset;
	int err;

	if (nbytes < min(sg->length, ((unsigned int)(PAGE_SIZE)) - offset)) {
		struct shash_desc *desc = crypto_hash_ctx(hdesc->tfm);
		void *data;

		desc->flags = hdesc->flags;

		data = crypto_kmap(sg_page(sg), 0);
		err = crypto_shash_digest(desc, data + offset, nbytes, out);
		crypto_kunmap(data, 0);
		crypto_yield(desc->flags);
		goto out;
	}

	err = shash_compat_init(hdesc);
	if (err)
		goto out;

	err = shash_compat_update(hdesc, sg, nbytes);
	if (err)
		goto out;

	err = shash_compat_final(hdesc, out);

out:
	return err;
}

static void crypto_exit_shash_ops_compat(struct crypto_tfm *tfm)
{
	struct shash_desc *desc = crypto_tfm_ctx(tfm);

	crypto_free_shash(desc->tfm);
}

static int crypto_init_shash_ops_compat(struct crypto_tfm *tfm)
{
	struct hash_tfm *crt = &tfm->crt_hash;
	struct crypto_alg *calg = tfm->__crt_alg;
	struct shash_alg *alg = __crypto_shash_alg(calg);
	struct shash_desc *desc = crypto_tfm_ctx(tfm);
	struct crypto_shash *shash;

	if (!crypto_mod_get(calg))
		return -EAGAIN;

	shash = crypto_create_tfm(calg, &crypto_shash_type);
	if (IS_ERR(shash)) {
		crypto_mod_put(calg);
		return PTR_ERR(shash);
	}

	desc->tfm = shash;
	tfm->exit = crypto_exit_shash_ops_compat;

	crt->init = shash_compat_init;
	crt->update = shash_compat_update;
	crt->final = shash_compat_final;
	crt->digest = shash_compat_digest;
	crt->setkey = shash_compat_setkey;

	crt->digestsize = alg->digestsize;

	return 0;
}
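
/*
 * Select the appropriate wrapper for a user of this algorithm: legacy
 * CRYPTO_ALG_TYPE_HASH users get the compat ops, CRYPTO_ALG_TYPE_AHASH
 * users get the async ops.
 */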
static int crypto_init_shash_ops(struct crypto_tfm *tfm, u32 type, u32 mask)
{
	switch (mask & CRYPTO_ALG_TYPE_MASK) {
	case CRYPTO_ALG_TYPE_HASH_MASK:
		return crypto_init_shash_ops_compat(tfm);
	case CRYPTO_ALG_TYPE_AHASH_MASK:
		return crypto_init_shash_ops_async(tfm);
	}

	return -EINVAL;
}

static unsigned int crypto_shash_ctxsize(struct crypto_alg *alg, u32 type,
					 u32 mask)
{
	struct shash_alg *salg = __crypto_shash_alg(alg);

	switch (mask & CRYPTO_ALG_TYPE_MASK) {
	case CRYPTO_ALG_TYPE_HASH_MASK:
		return sizeof(struct shash_desc) + salg->descsize;
	case CRYPTO_ALG_TYPE_AHASH_MASK:
		return sizeof(struct crypto_shash *);
	}

	return 0;
}

static int crypto_shash_init_tfm(struct crypto_tfm *tfm,
				 const struct crypto_type *frontend)
{
	return 0;
}

static unsigned int crypto_shash_extsize(struct crypto_alg *alg,
					 const struct crypto_type *frontend)
{
	return alg->cra_ctxsize;
}

static void crypto_shash_show(struct seq_file *m, struct crypto_alg *alg)
	__attribute__ ((unused));
static void crypto_shash_show(struct seq_file *m, struct crypto_alg *alg)
{
	struct shash_alg *salg = __crypto_shash_alg(alg);

	seq_printf(m, "type         : shash\n");
	seq_printf(m, "blocksize    : %u\n", alg->cra_blocksize);
	seq_printf(m, "digestsize   : %u\n", salg->digestsize);
	seq_printf(m, "descsize     : %u\n", salg->descsize);
}

static const struct crypto_type crypto_shash_type = {
	.ctxsize = crypto_shash_ctxsize,
	.extsize = crypto_shash_extsize,
	.init = crypto_init_shash_ops,
	.init_tfm = crypto_shash_init_tfm,
#ifdef CONFIG_PROC_FS
	.show = crypto_shash_show,
#endif
	.maskclear = ~CRYPTO_ALG_TYPE_MASK,
	.maskset = CRYPTO_ALG_TYPE_MASK,
	.type = CRYPTO_ALG_TYPE_SHASH,
	.tfmsize = offsetof(struct crypto_shash, base),
};
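
/*
 * Typical use of the shash API from other kernel code (illustrative sketch
 * only, not part of the original file; the algorithm name and the data/out
 * buffers are placeholders):
 *
 *	struct crypto_shash *tfm = crypto_alloc_shash("sha1", 0, 0);
 *	struct shash_desc *desc;
 *	int err;
 *
 *	if (IS_ERR(tfm))
 *		return PTR_ERR(tfm);
 *
 *	desc = kmalloc(sizeof(*desc) + crypto_shash_descsize(tfm), GFP_KERNEL);
 *	if (!desc) {
 *		crypto_free_shash(tfm);
 *		return -ENOMEM;
 *	}
 *
 *	desc->tfm = tfm;
 *	desc->flags = 0;
 *	err = crypto_shash_digest(desc, data, len, out);
 *
 *	kfree(desc);
 *	crypto_free_shash(tfm);
 */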

struct crypto_shash *crypto_alloc_shash(const char *alg_name, u32 type,
					u32 mask)
{
	return crypto_alloc_tfm(alg_name, &crypto_shash_type, type, mask);
}
EXPORT_SYMBOL_GPL(crypto_alloc_shash);
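
/*
 * Common preparation for standalone shash algorithms and template instances:
 * sanity-check the digest and descriptor sizes (both capped at PAGE_SIZE / 8)
 * and stamp the algorithm with the shash type.
 */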
static int shash_prepare_alg(struct shash_alg *alg)
{
	struct crypto_alg *base = &alg->base;

	if (alg->digestsize > PAGE_SIZE / 8 ||
	    alg->descsize > PAGE_SIZE / 8)
		return -EINVAL;

	base->cra_type = &crypto_shash_type;
	base->cra_flags &= ~CRYPTO_ALG_TYPE_MASK;
	base->cra_flags |= CRYPTO_ALG_TYPE_SHASH;
	return 0;
}

int crypto_register_shash(struct shash_alg *alg)
{
	struct crypto_alg *base = &alg->base;
	int err;

	err = shash_prepare_alg(alg);
	if (err)
		return err;

	return crypto_register_alg(base);
}
EXPORT_SYMBOL_GPL(crypto_register_shash);

int crypto_unregister_shash(struct shash_alg *alg)
{
	return crypto_unregister_alg(&alg->base);
}
EXPORT_SYMBOL_GPL(crypto_unregister_shash);
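
/*
 * Instance helpers for templates that build shash algorithms on top of other
 * algorithms: instance registration and freeing, spawn initialisation, and
 * attribute lookup against the shash frontend type.
 */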

int shash_register_instance(struct crypto_template *tmpl,
			    struct shash_instance *inst)
{
	int err;

	err = shash_prepare_alg(&inst->alg);
	if (err)
		return err;

	return crypto_register_instance(tmpl, shash_crypto_instance(inst));
}
EXPORT_SYMBOL_GPL(shash_register_instance);

void shash_free_instance(struct crypto_instance *inst)
{
	crypto_drop_spawn(crypto_instance_ctx(inst));
	kfree(shash_instance(inst));
}
EXPORT_SYMBOL_GPL(shash_free_instance);

int crypto_init_shash_spawn(struct crypto_shash_spawn *spawn,
			    struct shash_alg *alg,
			    struct crypto_instance *inst)
{
	return crypto_init_spawn2(&spawn->base, &alg->base, inst,
				  &crypto_shash_type);
}
EXPORT_SYMBOL_GPL(crypto_init_shash_spawn);

struct shash_alg *shash_attr_alg(struct rtattr *rta, u32 type, u32 mask)
{
	struct crypto_alg *alg;

	alg = crypto_attr_alg2(rta, &crypto_shash_type, type, mask);
	return IS_ERR(alg) ? ERR_CAST(alg) :
	       container_of(alg, struct shash_alg, base);
}
EXPORT_SYMBOL_GPL(shash_attr_alg);

MODULE_LICENSE("GPL");
MODULE_DESCRIPTION("Synchronous cryptographic hash type");