/*
 * Synchronous Cryptographic Hash operations.
 *
 * Copyright (c) 2008 Herbert Xu <herbert@gondor.apana.org.au>
 *
 * This program is free software; you can redistribute it and/or modify it
 * under the terms of the GNU General Public License as published by the Free
 * Software Foundation; either version 2 of the License, or (at your option)
 * any later version.
 *
 */

#include <crypto/scatterwalk.h>
#include <crypto/internal/hash.h>
#include <linux/err.h>
#include <linux/kernel.h>
#include <linux/module.h>
#include <linux/slab.h>
#include <linux/seq_file.h>

static const struct crypto_type crypto_shash_type;

static inline struct crypto_shash *__crypto_shash_cast(struct crypto_tfm *tfm)
{
	return container_of(tfm, struct crypto_shash, base);
}

#include "internal.h"

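/*
 * The *_unaligned() helpers below cope with callers whose key, data or
 * output buffers violate the algorithm's alignmask: the affected bytes
 * are bounced through a suitably aligned temporary buffer before being
 * handed to the algorithm's own callbacks.
 */
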
static int shash_setkey_unaligned(struct crypto_shash *tfm, const u8 *key,
				  unsigned int keylen)
{
	struct shash_alg *shash = crypto_shash_alg(tfm);
	unsigned long alignmask = crypto_shash_alignmask(tfm);
	unsigned long absize;
	u8 *buffer, *alignbuffer;
	int err;

	absize = keylen + (alignmask & ~(CRYPTO_MINALIGN - 1));
	buffer = kmalloc(absize, GFP_KERNEL);
	if (!buffer)
		return -ENOMEM;

	alignbuffer = (u8 *)ALIGN((unsigned long)buffer, alignmask + 1);
	memcpy(alignbuffer, key, keylen);
	err = shash->setkey(tfm, alignbuffer, keylen);
	memset(alignbuffer, 0, keylen);
	kfree(buffer);
	return err;
}

int crypto_shash_setkey(struct crypto_shash *tfm, const u8 *key,
			unsigned int keylen)
{
	struct shash_alg *shash = crypto_shash_alg(tfm);
	unsigned long alignmask = crypto_shash_alignmask(tfm);

	if ((unsigned long)key & alignmask)
		return shash_setkey_unaligned(tfm, key, keylen);

	return shash->setkey(tfm, key, keylen);
}
EXPORT_SYMBOL_GPL(crypto_shash_setkey);

static inline unsigned int shash_align_buffer_size(unsigned len,
						   unsigned long mask)
{
	return len + (mask & ~(__alignof__(u8 __attribute__ ((aligned))) - 1));
}

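/*
 * Only the initial, misaligned portion of the data needs to be copied
 * into an aligned stack buffer; the remainder of the buffer is aligned
 * again and is hashed in place by a second ->update() call.
 */
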
static int shash_update_unaligned(struct shash_desc *desc, const u8 *data,
				  unsigned int len)
{
	struct crypto_shash *tfm = desc->tfm;
	struct shash_alg *shash = crypto_shash_alg(tfm);
	unsigned long alignmask = crypto_shash_alignmask(tfm);
	unsigned int unaligned_len = alignmask + 1 -
				     ((unsigned long)data & alignmask);
	u8 buf[shash_align_buffer_size(unaligned_len, alignmask)]
		__attribute__ ((aligned));

	memcpy(buf, data, unaligned_len);

	return shash->update(desc, buf, unaligned_len) ?:
	       shash->update(desc, data + unaligned_len, len - unaligned_len);
}

int crypto_shash_update(struct shash_desc *desc, const u8 *data,
			unsigned int len)
{
	struct crypto_shash *tfm = desc->tfm;
	struct shash_alg *shash = crypto_shash_alg(tfm);
	unsigned long alignmask = crypto_shash_alignmask(tfm);

	if ((unsigned long)data & alignmask)
		return shash_update_unaligned(desc, data, len);

	return shash->update(desc, data, len);
}
EXPORT_SYMBOL_GPL(crypto_shash_update);

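/*
 * The digest is produced into an aligned stack buffer and then copied
 * out to the caller's misaligned result buffer.
 */
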
static int shash_final_unaligned(struct shash_desc *desc, u8 *out)
{
	struct crypto_shash *tfm = desc->tfm;
	unsigned long alignmask = crypto_shash_alignmask(tfm);
	struct shash_alg *shash = crypto_shash_alg(tfm);
	unsigned int ds = crypto_shash_digestsize(tfm);
	u8 buf[shash_align_buffer_size(ds, alignmask)]
		__attribute__ ((aligned));
	int err;

	err = shash->final(desc, buf);
	memcpy(out, buf, ds);
	return err;
}

int crypto_shash_final(struct shash_desc *desc, u8 *out)
{
	struct crypto_shash *tfm = desc->tfm;
	struct shash_alg *shash = crypto_shash_alg(tfm);
	unsigned long alignmask = crypto_shash_alignmask(tfm);

	if ((unsigned long)out & alignmask)
		return shash_final_unaligned(desc, out);

	return shash->final(desc, out);
}
EXPORT_SYMBOL_GPL(crypto_shash_final);

static int shash_finup_unaligned(struct shash_desc *desc, const u8 *data,
				 unsigned int len, u8 *out)
{
	return crypto_shash_update(desc, data, len) ?:
	       crypto_shash_final(desc, out);
}

int crypto_shash_finup(struct shash_desc *desc, const u8 *data,
		       unsigned int len, u8 *out)
{
	struct crypto_shash *tfm = desc->tfm;
	struct shash_alg *shash = crypto_shash_alg(tfm);
	unsigned long alignmask = crypto_shash_alignmask(tfm);

	if (((unsigned long)data | (unsigned long)out) & alignmask ||
	    !shash->finup)
		return shash_finup_unaligned(desc, data, len, out);

	return shash->finup(desc, data, len, out);
}
EXPORT_SYMBOL_GPL(crypto_shash_finup);

static int shash_digest_unaligned(struct shash_desc *desc, const u8 *data,
				  unsigned int len, u8 *out)
{
	return crypto_shash_init(desc) ?:
	       crypto_shash_update(desc, data, len) ?:
	       crypto_shash_final(desc, out);
}

int crypto_shash_digest(struct shash_desc *desc, const u8 *data,
			unsigned int len, u8 *out)
{
	struct crypto_shash *tfm = desc->tfm;
	struct shash_alg *shash = crypto_shash_alg(tfm);
	unsigned long alignmask = crypto_shash_alignmask(tfm);

	if (((unsigned long)data | (unsigned long)out) & alignmask ||
	    !shash->digest)
		return shash_digest_unaligned(desc, data, len, out);

	return shash->digest(desc, data, len, out);
}
EXPORT_SYMBOL_GPL(crypto_shash_digest);

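/*
 * A minimal sketch of typical synchronous use, assuming a registered
 * algorithm such as "sha1" (error checking omitted for brevity):
 *
 *	struct crypto_shash *tfm = crypto_alloc_shash("sha1", 0, 0);
 *	struct shash_desc *desc = kmalloc(sizeof(*desc) +
 *					  crypto_shash_descsize(tfm),
 *					  GFP_KERNEL);
 *
 *	desc->tfm = tfm;
 *	desc->flags = 0;
 *	crypto_shash_digest(desc, data, len, out);
 *
 *	kfree(desc);
 *	crypto_free_shash(tfm);
 */
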
int crypto_shash_import(struct shash_desc *desc, const u8 *in)
{
	struct crypto_shash *tfm = desc->tfm;
	struct shash_alg *alg = crypto_shash_alg(tfm);

	memcpy(shash_desc_ctx(desc), in, crypto_shash_descsize(tfm));

	if (alg->reinit)
		alg->reinit(desc);

	return 0;
}
EXPORT_SYMBOL_GPL(crypto_shash_import);

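/*
 * The shash_async_*() functions below expose a synchronous shash
 * algorithm through the asynchronous ahash interface: the ahash request
 * context holds a shash_desc, and scatterlist input is walked with
 * crypto_hash_walk so it can be fed to the linear shash calls.
 */
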
static int shash_async_setkey(struct crypto_ahash *tfm, const u8 *key,
			      unsigned int keylen)
{
	struct crypto_shash **ctx = crypto_ahash_ctx(tfm);

	return crypto_shash_setkey(*ctx, key, keylen);
}

static int shash_async_init(struct ahash_request *req)
{
	struct crypto_shash **ctx = crypto_ahash_ctx(crypto_ahash_reqtfm(req));
	struct shash_desc *desc = ahash_request_ctx(req);

	desc->tfm = *ctx;
	desc->flags = req->base.flags;

	return crypto_shash_init(desc);
}

static int shash_async_update(struct ahash_request *req)
{
	struct shash_desc *desc = ahash_request_ctx(req);
	struct crypto_hash_walk walk;
	int nbytes;

	for (nbytes = crypto_hash_walk_first(req, &walk); nbytes > 0;
	     nbytes = crypto_hash_walk_done(&walk, nbytes))
		nbytes = crypto_shash_update(desc, walk.data, nbytes);

	return nbytes;
}

static int shash_async_final(struct ahash_request *req)
{
	return crypto_shash_final(ahash_request_ctx(req), req->result);
}

static int shash_async_digest(struct ahash_request *req)
{
	struct scatterlist *sg = req->src;
	unsigned int offset = sg->offset;
	unsigned int nbytes = req->nbytes;
	int err;

	if (nbytes < min(sg->length, ((unsigned int)(PAGE_SIZE)) - offset)) {
		struct crypto_shash **ctx =
			crypto_ahash_ctx(crypto_ahash_reqtfm(req));
		struct shash_desc *desc = ahash_request_ctx(req);
		void *data;

		desc->tfm = *ctx;
		desc->flags = req->base.flags;

		data = crypto_kmap(sg_page(sg), 0);
		err = crypto_shash_digest(desc, data + offset, nbytes,
					  req->result);
		crypto_kunmap(data, 0);
		crypto_yield(desc->flags);
		goto out;
	}

	err = shash_async_init(req);
	if (err)
		goto out;

	err = shash_async_update(req);
	if (err)
		goto out;

	err = shash_async_final(req);

out:
	return err;
}

static void crypto_exit_shash_ops_async(struct crypto_tfm *tfm)
{
	struct crypto_shash **ctx = crypto_tfm_ctx(tfm);

	crypto_free_shash(*ctx);
}

static int crypto_init_shash_ops_async(struct crypto_tfm *tfm)
{
	struct crypto_alg *calg = tfm->__crt_alg;
	struct shash_alg *alg = __crypto_shash_alg(calg);
	struct ahash_tfm *crt = &tfm->crt_ahash;
	struct crypto_shash **ctx = crypto_tfm_ctx(tfm);
	struct crypto_shash *shash;

	if (!crypto_mod_get(calg))
		return -EAGAIN;

	shash = __crypto_shash_cast(crypto_create_tfm(
		calg, &crypto_shash_type));
	if (IS_ERR(shash)) {
		crypto_mod_put(calg);
		return PTR_ERR(shash);
	}

	*ctx = shash;
	tfm->exit = crypto_exit_shash_ops_async;

	crt->init = shash_async_init;
	crt->update = shash_async_update;
	crt->final = shash_async_final;
	crt->digest = shash_async_digest;
	crt->setkey = shash_async_setkey;

	crt->digestsize = alg->digestsize;
	crt->reqsize = sizeof(struct shash_desc) + crypto_shash_descsize(shash);

	return 0;
}

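/*
 * The shash_compat_*() functions below expose a shash algorithm through
 * the legacy crypto_hash interface; here the transform context itself
 * holds the shash_desc, so one crypto_hash transform maps onto exactly
 * one shash state.
 */
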
static int shash_compat_setkey(struct crypto_hash *tfm, const u8 *key,
			       unsigned int keylen)
{
	struct shash_desc *desc = crypto_hash_ctx(tfm);

	return crypto_shash_setkey(desc->tfm, key, keylen);
}

static int shash_compat_init(struct hash_desc *hdesc)
{
	struct shash_desc *desc = crypto_hash_ctx(hdesc->tfm);

	desc->flags = hdesc->flags;

	return crypto_shash_init(desc);
}

static int shash_compat_update(struct hash_desc *hdesc, struct scatterlist *sg,
			       unsigned int len)
{
	struct shash_desc *desc = crypto_hash_ctx(hdesc->tfm);
	struct crypto_hash_walk walk;
	int nbytes;

	for (nbytes = crypto_hash_walk_first_compat(hdesc, &walk, sg, len);
	     nbytes > 0; nbytes = crypto_hash_walk_done(&walk, nbytes))
		nbytes = crypto_shash_update(desc, walk.data, nbytes);

	return nbytes;
}

static int shash_compat_final(struct hash_desc *hdesc, u8 *out)
{
	return crypto_shash_final(crypto_hash_ctx(hdesc->tfm), out);
}

static int shash_compat_digest(struct hash_desc *hdesc, struct scatterlist *sg,
			       unsigned int nbytes, u8 *out)
{
	unsigned int offset = sg->offset;
	int err;

	if (nbytes < min(sg->length, ((unsigned int)(PAGE_SIZE)) - offset)) {
		struct shash_desc *desc = crypto_hash_ctx(hdesc->tfm);
		void *data;

		desc->flags = hdesc->flags;

		data = crypto_kmap(sg_page(sg), 0);
		err = crypto_shash_digest(desc, data + offset, nbytes, out);
		crypto_kunmap(data, 0);
		crypto_yield(desc->flags);
		goto out;
	}

	err = shash_compat_init(hdesc);
	if (err)
		goto out;

	err = shash_compat_update(hdesc, sg, nbytes);
	if (err)
		goto out;

	err = shash_compat_final(hdesc, out);

out:
	return err;
}

static void crypto_exit_shash_ops_compat(struct crypto_tfm *tfm)
{
	struct shash_desc *desc = crypto_tfm_ctx(tfm);

	crypto_free_shash(desc->tfm);
}

static int crypto_init_shash_ops_compat(struct crypto_tfm *tfm)
{
	struct hash_tfm *crt = &tfm->crt_hash;
	struct crypto_alg *calg = tfm->__crt_alg;
	struct shash_alg *alg = __crypto_shash_alg(calg);
	struct shash_desc *desc = crypto_tfm_ctx(tfm);
	struct crypto_shash *shash;

	shash = __crypto_shash_cast(crypto_create_tfm(
		calg, &crypto_shash_type));
	if (IS_ERR(shash))
		return PTR_ERR(shash);

	desc->tfm = shash;
	tfm->exit = crypto_exit_shash_ops_compat;

	crt->init = shash_compat_init;
	crt->update = shash_compat_update;
	crt->final = shash_compat_final;
	crt->digest = shash_compat_digest;
	crt->setkey = shash_compat_setkey;

	crt->digestsize = alg->digestsize;

	return 0;
}

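/*
 * Dispatch on the type mask requested by the user: legacy hash users
 * get the compat ops, ahash users get the async wrappers.  Native shash
 * users do not come through here; they are set up via
 * crypto_shash_init_tfm() below.
 */
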
static int crypto_init_shash_ops(struct crypto_tfm *tfm, u32 type, u32 mask)
{
	switch (mask & CRYPTO_ALG_TYPE_MASK) {
	case CRYPTO_ALG_TYPE_HASH_MASK:
		return crypto_init_shash_ops_compat(tfm);
	case CRYPTO_ALG_TYPE_AHASH_MASK:
		return crypto_init_shash_ops_async(tfm);
	}

	return -EINVAL;
}

static unsigned int crypto_shash_ctxsize(struct crypto_alg *alg, u32 type,
					 u32 mask)
{
	struct shash_alg *salg = __crypto_shash_alg(alg);

	switch (mask & CRYPTO_ALG_TYPE_MASK) {
	case CRYPTO_ALG_TYPE_HASH_MASK:
		return sizeof(struct shash_desc) + salg->descsize;
	case CRYPTO_ALG_TYPE_AHASH_MASK:
		return sizeof(struct crypto_shash *);
	}

	return 0;
}

static int crypto_shash_init_tfm(struct crypto_tfm *tfm,
				 const struct crypto_type *frontend)
{
	if (frontend->type != CRYPTO_ALG_TYPE_SHASH)
		return -EINVAL;
	return 0;
}

static unsigned int crypto_shash_extsize(struct crypto_alg *alg,
					 const struct crypto_type *frontend)
{
	return alg->cra_ctxsize;
}

static void crypto_shash_show(struct seq_file *m, struct crypto_alg *alg)
	__attribute__ ((unused));
static void crypto_shash_show(struct seq_file *m, struct crypto_alg *alg)
{
	struct shash_alg *salg = __crypto_shash_alg(alg);

	seq_printf(m, "type : shash\n");
	seq_printf(m, "blocksize : %u\n", alg->cra_blocksize);
	seq_printf(m, "digestsize : %u\n", salg->digestsize);
	seq_printf(m, "descsize : %u\n", salg->descsize);
}

static const struct crypto_type crypto_shash_type = {
	.ctxsize = crypto_shash_ctxsize,
	.extsize = crypto_shash_extsize,
	.init = crypto_init_shash_ops,
	.init_tfm = crypto_shash_init_tfm,
#ifdef CONFIG_PROC_FS
	.show = crypto_shash_show,
#endif
	.maskclear = ~CRYPTO_ALG_TYPE_MASK,
	.maskset = CRYPTO_ALG_TYPE_MASK,
	.type = CRYPTO_ALG_TYPE_SHASH,
	.tfmsize = offsetof(struct crypto_shash, base),
};

struct crypto_shash *crypto_alloc_shash(const char *alg_name, u32 type,
					u32 mask)
{
	return __crypto_shash_cast(
		crypto_alloc_tfm(alg_name, &crypto_shash_type, type, mask));
}
EXPORT_SYMBOL_GPL(crypto_alloc_shash);

int crypto_register_shash(struct shash_alg *alg)
{
	struct crypto_alg *base = &alg->base;

	if (alg->digestsize > PAGE_SIZE / 8 ||
	    alg->descsize > PAGE_SIZE / 8)
		return -EINVAL;

	base->cra_type = &crypto_shash_type;
	base->cra_flags &= ~CRYPTO_ALG_TYPE_MASK;
	base->cra_flags |= CRYPTO_ALG_TYPE_SHASH;

	return crypto_register_alg(base);
}
EXPORT_SYMBOL_GPL(crypto_register_shash);

int crypto_unregister_shash(struct shash_alg *alg)
{
	return crypto_unregister_alg(&alg->base);
}
EXPORT_SYMBOL_GPL(crypto_unregister_shash);

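/*
 * A minimal sketch of how a driver might register an algorithm with
 * this interface (names and sizes below are illustrative only):
 *
 *	static struct shash_alg foo_alg = {
 *		.digestsize	= FOO_DIGEST_SIZE,
 *		.descsize	= sizeof(struct foo_state),
 *		.init		= foo_init,
 *		.update		= foo_update,
 *		.final		= foo_final,
 *		.base		= {
 *			.cra_name	= "foo",
 *			.cra_blocksize	= FOO_BLOCK_SIZE,
 *			.cra_module	= THIS_MODULE,
 *		},
 *	};
 *
 *	crypto_register_shash(&foo_alg);
 */
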
MODULE_LICENSE("GPL");
MODULE_DESCRIPTION("Synchronous cryptographic hash type");