// SPDX-License-Identifier: GPL-2.0
/*
 * ESSIV skcipher and aead template for block encryption
 *
 * This template encapsulates the ESSIV IV generation algorithm used by
 * dm-crypt and fscrypt, which converts the initial vector for the skcipher
 * used for block encryption, by encrypting it using the hash of the
 * skcipher key as encryption key. Usually, the input IV is a 64-bit sector
 * number in LE representation zero-padded to the size of the IV, but this
 * is not assumed by this driver.
 *
 * The typical use of this template is to instantiate the skcipher
 * 'essiv(cbc(aes),sha256)', which is the only instantiation used by
 * fscrypt, and the most relevant one for dm-crypt. However, dm-crypt
 * also permits ESSIV to be used in combination with the authenc template,
 * e.g., 'essiv(authenc(hmac(sha256),cbc(aes)),sha256)', in which case
 * we need to instantiate an aead that accepts the same special key format
 * as the authenc template, and deals with the way the encrypted IV is
 * embedded into the AAD area of the aead request. This means the AEAD
 * flavor produced by this template is tightly coupled to the way dm-crypt
 * employs it.
 *
 * Copyright (c) 2019 Linaro, Ltd. <ard.biesheuvel@linaro.org>
 *
 * Heavily based on:
 * adiantum length-preserving encryption mode
 *
 * Copyright 2018 Google LLC
 */

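/*
 * In other words, for every request the inner algorithm sees
 *
 *	IV' = E(H(K), IV)
 *
 * where K is the encryption key of the inner skcipher/aead, H is the
 * shash given as the second template argument (e.g., sha256), and E is
 * the bare block cipher extracted from the inner algorithm's name (e.g.,
 * "aes" for "cbc(aes)").
 */
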
#include <crypto/authenc.h>
#include <crypto/internal/aead.h>
#include <crypto/internal/hash.h>
#include <crypto/internal/skcipher.h>
#include <crypto/scatterwalk.h>
#include <linux/module.h>

#include "internal.h"

struct essiv_instance_ctx {
	union {
		struct crypto_skcipher_spawn skcipher_spawn;
		struct crypto_aead_spawn aead_spawn;
	} u;
	char essiv_cipher_name[CRYPTO_MAX_ALG_NAME];
	char shash_driver_name[CRYPTO_MAX_ALG_NAME];
};

struct essiv_tfm_ctx {
	union {
		struct crypto_skcipher *skcipher;
		struct crypto_aead *aead;
	} u;
	struct crypto_cipher *essiv_cipher;
	struct crypto_shash *hash;
	int ivoffset;
};

struct essiv_aead_request_ctx {
	struct scatterlist sg[4];
	u8 *assoc;
	struct aead_request aead_req;
};

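/*
 * Key handling for the skcipher flavor: forward the key to the inner
 * skcipher unchanged, then derive the ESSIV key by hashing the key with
 * the configured shash and program the resulting digest ("salt") into the
 * bare block cipher used for IV encryption.
 */
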
static int essiv_skcipher_setkey(struct crypto_skcipher *tfm,
				 const u8 *key, unsigned int keylen)
{
	struct essiv_tfm_ctx *tctx = crypto_skcipher_ctx(tfm);
	SHASH_DESC_ON_STACK(desc, tctx->hash);
	u8 salt[HASH_MAX_DIGESTSIZE];
	int err;

	crypto_skcipher_clear_flags(tctx->u.skcipher, CRYPTO_TFM_REQ_MASK);
	crypto_skcipher_set_flags(tctx->u.skcipher,
				  crypto_skcipher_get_flags(tfm) &
				  CRYPTO_TFM_REQ_MASK);
	err = crypto_skcipher_setkey(tctx->u.skcipher, key, keylen);
	crypto_skcipher_set_flags(tfm,
				  crypto_skcipher_get_flags(tctx->u.skcipher) &
				  CRYPTO_TFM_RES_MASK);
	if (err)
		return err;

	desc->tfm = tctx->hash;
	err = crypto_shash_digest(desc, key, keylen, salt);
	if (err)
		return err;

	crypto_cipher_clear_flags(tctx->essiv_cipher, CRYPTO_TFM_REQ_MASK);
	crypto_cipher_set_flags(tctx->essiv_cipher,
				crypto_skcipher_get_flags(tfm) &
				CRYPTO_TFM_REQ_MASK);
	err = crypto_cipher_setkey(tctx->essiv_cipher, salt,
				   crypto_shash_digestsize(tctx->hash));
	crypto_skcipher_set_flags(tfm,
				  crypto_cipher_get_flags(tctx->essiv_cipher) &
				  CRYPTO_TFM_RES_MASK);

	return err;
}

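/*
 * Key handling for the aead flavor: the authenc()-formatted key blob is
 * passed to the inner aead unchanged, while the ESSIV salt is the digest
 * of the encryption key followed by the authentication key, as extracted
 * via crypto_authenc_extractkeys().
 */
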
static int essiv_aead_setkey(struct crypto_aead *tfm, const u8 *key,
			     unsigned int keylen)
{
	struct essiv_tfm_ctx *tctx = crypto_aead_ctx(tfm);
	SHASH_DESC_ON_STACK(desc, tctx->hash);
	struct crypto_authenc_keys keys;
	u8 salt[HASH_MAX_DIGESTSIZE];
	int err;

	crypto_aead_clear_flags(tctx->u.aead, CRYPTO_TFM_REQ_MASK);
	crypto_aead_set_flags(tctx->u.aead, crypto_aead_get_flags(tfm) &
					    CRYPTO_TFM_REQ_MASK);
	err = crypto_aead_setkey(tctx->u.aead, key, keylen);
	crypto_aead_set_flags(tfm, crypto_aead_get_flags(tctx->u.aead) &
				   CRYPTO_TFM_RES_MASK);
	if (err)
		return err;

	if (crypto_authenc_extractkeys(&keys, key, keylen) != 0)
		return -EINVAL;

	desc->tfm = tctx->hash;
	err = crypto_shash_init(desc) ?:
	      crypto_shash_update(desc, keys.enckey, keys.enckeylen) ?:
	      crypto_shash_finup(desc, keys.authkey, keys.authkeylen, salt);
	if (err)
		return err;

	crypto_cipher_clear_flags(tctx->essiv_cipher, CRYPTO_TFM_REQ_MASK);
	crypto_cipher_set_flags(tctx->essiv_cipher, crypto_aead_get_flags(tfm) &
						    CRYPTO_TFM_REQ_MASK);
	err = crypto_cipher_setkey(tctx->essiv_cipher, salt,
				   crypto_shash_digestsize(tctx->hash));
	crypto_aead_set_flags(tfm, crypto_cipher_get_flags(tctx->essiv_cipher) &
				   CRYPTO_TFM_RES_MASK);

	return err;
}

static int essiv_aead_setauthsize(struct crypto_aead *tfm,
				  unsigned int authsize)
{
	struct essiv_tfm_ctx *tctx = crypto_aead_ctx(tfm);

	return crypto_aead_setauthsize(tctx->u.aead, authsize);
}

static void essiv_skcipher_done(struct crypto_async_request *areq, int err)
{
	struct skcipher_request *req = areq->data;

	skcipher_request_complete(req, err);
}

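/*
 * Encryption and decryption share one path: encrypt the caller's IV in
 * place with the ESSIV block cipher, then hand the request to the inner
 * skcipher using the converted IV. The subrequest lives in the outer
 * request context, so no additional allocation is required here.
 */
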
static int essiv_skcipher_crypt(struct skcipher_request *req, bool enc)
{
	struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
	const struct essiv_tfm_ctx *tctx = crypto_skcipher_ctx(tfm);
	struct skcipher_request *subreq = skcipher_request_ctx(req);

	crypto_cipher_encrypt_one(tctx->essiv_cipher, req->iv, req->iv);

	skcipher_request_set_tfm(subreq, tctx->u.skcipher);
	skcipher_request_set_crypt(subreq, req->src, req->dst, req->cryptlen,
				   req->iv);
	skcipher_request_set_callback(subreq, skcipher_request_flags(req),
				      essiv_skcipher_done, req);

	return enc ? crypto_skcipher_encrypt(subreq) :
		     crypto_skcipher_decrypt(subreq);
}

static int essiv_skcipher_encrypt(struct skcipher_request *req)
{
	return essiv_skcipher_crypt(req, true);
}

static int essiv_skcipher_decrypt(struct skcipher_request *req)
{
	return essiv_skcipher_crypt(req, false);
}

static void essiv_aead_done(struct crypto_async_request *areq, int err)
{
	struct aead_request *req = areq->data;
	struct essiv_aead_request_ctx *rctx = aead_request_ctx(req);

	kfree(rctx->assoc);
	aead_request_complete(req, err);
}

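/*
 * The aead flavor mirrors the skcipher path, but the converted IV must
 * also become visible in the AAD: dm-crypt places the IV at the end of
 * the associated data. For in-place requests and for decryption it is
 * patched directly into the destination buffer; for out-of-place
 * encryption a shadow source scatterlist is assembled in which the
 * converted IV replaces the original one.
 */
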
static int essiv_aead_crypt(struct aead_request *req, bool enc)
{
	struct crypto_aead *tfm = crypto_aead_reqtfm(req);
	const struct essiv_tfm_ctx *tctx = crypto_aead_ctx(tfm);
	struct essiv_aead_request_ctx *rctx = aead_request_ctx(req);
	struct aead_request *subreq = &rctx->aead_req;
	struct scatterlist *src = req->src;
	int err;

	crypto_cipher_encrypt_one(tctx->essiv_cipher, req->iv, req->iv);

	/*
	 * dm-crypt embeds the sector number and the IV in the AAD region, so
	 * we have to copy the converted IV into the right scatterlist before
	 * we pass it on.
	 */
	rctx->assoc = NULL;
	if (req->src == req->dst || !enc) {
		scatterwalk_map_and_copy(req->iv, req->dst,
					 req->assoclen - crypto_aead_ivsize(tfm),
					 crypto_aead_ivsize(tfm), 1);
	} else {
		u8 *iv = (u8 *)aead_request_ctx(req) + tctx->ivoffset;
		int ivsize = crypto_aead_ivsize(tfm);
		int ssize = req->assoclen - ivsize;
		struct scatterlist *sg;
		int nents;

		if (ssize < 0)
			return -EINVAL;

		nents = sg_nents_for_len(req->src, ssize);
		if (nents < 0)
			return -EINVAL;

		memcpy(iv, req->iv, ivsize);
		sg_init_table(rctx->sg, 4);

		if (unlikely(nents > 1)) {
			/*
			 * This is a case that rarely occurs in practice, but
			 * for correctness, we have to deal with it nonetheless.
			 */
			rctx->assoc = kmalloc(ssize, GFP_ATOMIC);
			if (!rctx->assoc)
				return -ENOMEM;

			scatterwalk_map_and_copy(rctx->assoc, req->src, 0,
						 ssize, 0);
			sg_set_buf(rctx->sg, rctx->assoc, ssize);
		} else {
			sg_set_page(rctx->sg, sg_page(req->src), ssize,
				    req->src->offset);
		}

		sg_set_buf(rctx->sg + 1, iv, ivsize);
		sg = scatterwalk_ffwd(rctx->sg + 2, req->src, req->assoclen);
		if (sg != rctx->sg + 2)
			sg_chain(rctx->sg, 3, sg);

		src = rctx->sg;
	}

	aead_request_set_tfm(subreq, tctx->u.aead);
	aead_request_set_ad(subreq, req->assoclen);
	aead_request_set_callback(subreq, aead_request_flags(req),
				  essiv_aead_done, req);
	aead_request_set_crypt(subreq, src, req->dst, req->cryptlen, req->iv);

	err = enc ? crypto_aead_encrypt(subreq) :
		    crypto_aead_decrypt(subreq);

	if (rctx->assoc && err != -EINPROGRESS)
		kfree(rctx->assoc);

	return err;
}

static int essiv_aead_encrypt(struct aead_request *req)
{
	return essiv_aead_crypt(req, true);
}

static int essiv_aead_decrypt(struct aead_request *req)
{
	return essiv_aead_crypt(req, false);
}

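/*
 * Common tfm init helper: allocate the bare block cipher used for IV
 * encryption and the shash used to derive its key, using the names that
 * were recorded in the instance context when the template was
 * instantiated.
 */
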
static int essiv_init_tfm(struct essiv_instance_ctx *ictx,
			  struct essiv_tfm_ctx *tctx)
{
	struct crypto_cipher *essiv_cipher;
	struct crypto_shash *hash;
	int err;

	essiv_cipher = crypto_alloc_cipher(ictx->essiv_cipher_name, 0, 0);
	if (IS_ERR(essiv_cipher))
		return PTR_ERR(essiv_cipher);

	hash = crypto_alloc_shash(ictx->shash_driver_name, 0, 0);
	if (IS_ERR(hash)) {
		err = PTR_ERR(hash);
		goto err_free_essiv_cipher;
	}

	tctx->essiv_cipher = essiv_cipher;
	tctx->hash = hash;
	return 0;

err_free_essiv_cipher:
	crypto_free_cipher(essiv_cipher);
	return err;
}

static int essiv_skcipher_init_tfm(struct crypto_skcipher *tfm)
{
	struct skcipher_instance *inst = skcipher_alg_instance(tfm);
	struct essiv_instance_ctx *ictx = skcipher_instance_ctx(inst);
	struct essiv_tfm_ctx *tctx = crypto_skcipher_ctx(tfm);
	struct crypto_skcipher *skcipher;
	int err;

	skcipher = crypto_spawn_skcipher(&ictx->u.skcipher_spawn);
	if (IS_ERR(skcipher))
		return PTR_ERR(skcipher);

	crypto_skcipher_set_reqsize(tfm, sizeof(struct skcipher_request) +
					 crypto_skcipher_reqsize(skcipher));

	err = essiv_init_tfm(ictx, tctx);
	if (err) {
		crypto_free_skcipher(skcipher);
		return err;
	}

	tctx->u.skcipher = skcipher;
	return 0;
}

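/*
 * For the aead flavor the request context is laid out as
 *
 *	essiv_aead_request_ctx | inner aead request + reqctx | IV copy
 *
 * tctx->ivoffset records where the trailing IV copy starts, so that
 * essiv_aead_crypt() can stash the converted IV there when it builds the
 * shadow scatterlist.
 */
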
static int essiv_aead_init_tfm(struct crypto_aead *tfm)
{
	struct aead_instance *inst = aead_alg_instance(tfm);
	struct essiv_instance_ctx *ictx = aead_instance_ctx(inst);
	struct essiv_tfm_ctx *tctx = crypto_aead_ctx(tfm);
	struct crypto_aead *aead;
	unsigned int subreq_size;
	int err;

	BUILD_BUG_ON(offsetofend(struct essiv_aead_request_ctx, aead_req) !=
		     sizeof(struct essiv_aead_request_ctx));

	aead = crypto_spawn_aead(&ictx->u.aead_spawn);
	if (IS_ERR(aead))
		return PTR_ERR(aead);

	subreq_size = FIELD_SIZEOF(struct essiv_aead_request_ctx, aead_req) +
		      crypto_aead_reqsize(aead);

	tctx->ivoffset = offsetof(struct essiv_aead_request_ctx, aead_req) +
			 subreq_size;
	crypto_aead_set_reqsize(tfm, tctx->ivoffset + crypto_aead_ivsize(aead));

	err = essiv_init_tfm(ictx, tctx);
	if (err) {
		crypto_free_aead(aead);
		return err;
	}

	tctx->u.aead = aead;
	return 0;
}

static void essiv_skcipher_exit_tfm(struct crypto_skcipher *tfm)
{
	struct essiv_tfm_ctx *tctx = crypto_skcipher_ctx(tfm);

	crypto_free_skcipher(tctx->u.skcipher);
	crypto_free_cipher(tctx->essiv_cipher);
	crypto_free_shash(tctx->hash);
}

static void essiv_aead_exit_tfm(struct crypto_aead *tfm)
{
	struct essiv_tfm_ctx *tctx = crypto_aead_ctx(tfm);

	crypto_free_aead(tctx->u.aead);
	crypto_free_cipher(tctx->essiv_cipher);
	crypto_free_shash(tctx->hash);
}

static void essiv_skcipher_free_instance(struct skcipher_instance *inst)
{
	struct essiv_instance_ctx *ictx = skcipher_instance_ctx(inst);

	crypto_drop_skcipher(&ictx->u.skcipher_spawn);
	kfree(inst);
}

static void essiv_aead_free_instance(struct aead_instance *inst)
{
	struct essiv_instance_ctx *ictx = aead_instance_ctx(inst);

	crypto_drop_aead(&ictx->u.aead_spawn);
	kfree(inst);
}

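/*
 * Extract the name of the bare block cipher from the inner algorithm's
 * cra_name, e.g. "aes" from "cbc(aes)" or from
 * "authenc(hmac(sha256),cbc(aes))": the name is whatever sits between the
 * last opening parenthesis and the closing parenthesis that follows it.
 */
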
static bool parse_cipher_name(char *essiv_cipher_name, const char *cra_name)
{
	const char *p, *q;
	int len;

	/* find the last opening parens */
	p = strrchr(cra_name, '(');
	if (!p++)
		return false;

	/* find the first closing parens in the tail of the string */
	q = strchr(p, ')');
	if (!q)
		return false;

	len = q - p;
	if (len >= CRYPTO_MAX_ALG_NAME)
		return false;

	memcpy(essiv_cipher_name, p, len);
	essiv_cipher_name[len] = '\0';
	return true;
}

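/*
 * Sanity checks performed at instantiation time: the digest size of the
 * shash must be a valid key size for the ESSIV block cipher, the IV size
 * of the inner algorithm must match that cipher's block size, and keyed
 * hashes (which would require a second key) are rejected.
 */
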
static bool essiv_supported_algorithms(const char *essiv_cipher_name,
				       struct shash_alg *hash_alg,
				       int ivsize)
{
	struct crypto_alg *alg;
	bool ret = false;

	alg = crypto_alg_mod_lookup(essiv_cipher_name,
				    CRYPTO_ALG_TYPE_CIPHER,
				    CRYPTO_ALG_TYPE_MASK);
	if (IS_ERR(alg))
		return false;

	if (hash_alg->digestsize < alg->cra_cipher.cia_min_keysize ||
	    hash_alg->digestsize > alg->cra_cipher.cia_max_keysize)
		goto out;

	if (ivsize != alg->cra_blocksize)
		goto out;

	if (crypto_shash_alg_needs_key(hash_alg))
		goto out;

	ret = true;

out:
	crypto_mod_put(alg);
	return ret;
}

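/*
 * Template instantiation. Depending on the algorithm type requested by
 * the caller, this builds either a skcipher instance (e.g., for
 * "essiv(cbc(aes),sha256)") or an aead instance (for the authenc case),
 * wires up the ops defined above, and inherits block size, alignmask and
 * priority from the inner algorithm.
 */
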
static int essiv_create(struct crypto_template *tmpl, struct rtattr **tb)
{
	struct crypto_attr_type *algt;
	const char *inner_cipher_name;
	const char *shash_name;
	struct skcipher_instance *skcipher_inst = NULL;
	struct aead_instance *aead_inst = NULL;
	struct crypto_instance *inst;
	struct crypto_alg *base, *block_base;
	struct essiv_instance_ctx *ictx;
	struct skcipher_alg *skcipher_alg = NULL;
	struct aead_alg *aead_alg = NULL;
	struct crypto_alg *_hash_alg;
	struct shash_alg *hash_alg;
	int ivsize;
	u32 type;
	int err;

	algt = crypto_get_attr_type(tb);
	if (IS_ERR(algt))
		return PTR_ERR(algt);

	inner_cipher_name = crypto_attr_alg_name(tb[1]);
	if (IS_ERR(inner_cipher_name))
		return PTR_ERR(inner_cipher_name);

	shash_name = crypto_attr_alg_name(tb[2]);
	if (IS_ERR(shash_name))
		return PTR_ERR(shash_name);

	type = algt->type & algt->mask;

	switch (type) {
	case CRYPTO_ALG_TYPE_SKCIPHER:
		skcipher_inst = kzalloc(sizeof(*skcipher_inst) +
					sizeof(*ictx), GFP_KERNEL);
		if (!skcipher_inst)
			return -ENOMEM;
		inst = skcipher_crypto_instance(skcipher_inst);
		base = &skcipher_inst->alg.base;
		ictx = crypto_instance_ctx(inst);

		/* Symmetric cipher, e.g., "cbc(aes)" */
		crypto_set_skcipher_spawn(&ictx->u.skcipher_spawn, inst);
		err = crypto_grab_skcipher(&ictx->u.skcipher_spawn,
					   inner_cipher_name, 0,
					   crypto_requires_sync(algt->type,
								algt->mask));
		if (err)
			goto out_free_inst;
		skcipher_alg = crypto_spawn_skcipher_alg(&ictx->u.skcipher_spawn);
		block_base = &skcipher_alg->base;
		ivsize = crypto_skcipher_alg_ivsize(skcipher_alg);
		break;

	case CRYPTO_ALG_TYPE_AEAD:
		aead_inst = kzalloc(sizeof(*aead_inst) +
				    sizeof(*ictx), GFP_KERNEL);
		if (!aead_inst)
			return -ENOMEM;
		inst = aead_crypto_instance(aead_inst);
		base = &aead_inst->alg.base;
		ictx = crypto_instance_ctx(inst);

		/* AEAD cipher, e.g., "authenc(hmac(sha256),cbc(aes))" */
		crypto_set_aead_spawn(&ictx->u.aead_spawn, inst);
		err = crypto_grab_aead(&ictx->u.aead_spawn,
				       inner_cipher_name, 0,
				       crypto_requires_sync(algt->type,
							    algt->mask));
		if (err)
			goto out_free_inst;
		aead_alg = crypto_spawn_aead_alg(&ictx->u.aead_spawn);
		block_base = &aead_alg->base;
		if (!strstarts(block_base->cra_name, "authenc(")) {
			pr_warn("Only authenc() type AEADs are supported by ESSIV\n");
			err = -EINVAL;
			goto out_drop_skcipher;
		}
		ivsize = aead_alg->ivsize;
		break;

	default:
		return -EINVAL;
	}

	if (!parse_cipher_name(ictx->essiv_cipher_name, block_base->cra_name)) {
		pr_warn("Failed to parse ESSIV cipher name from skcipher cra_name\n");
		err = -EINVAL;
		goto out_drop_skcipher;
	}

	/* Synchronous hash, e.g., "sha256" */
	_hash_alg = crypto_alg_mod_lookup(shash_name,
					  CRYPTO_ALG_TYPE_SHASH,
					  CRYPTO_ALG_TYPE_MASK);
	if (IS_ERR(_hash_alg)) {
		err = PTR_ERR(_hash_alg);
		goto out_drop_skcipher;
	}
	hash_alg = __crypto_shash_alg(_hash_alg);

	/* Check the set of algorithms */
	if (!essiv_supported_algorithms(ictx->essiv_cipher_name, hash_alg,
					ivsize)) {
		pr_warn("Unsupported essiv instantiation: essiv(%s,%s)\n",
			block_base->cra_name, hash_alg->base.cra_name);
		err = -EINVAL;
		goto out_free_hash;
	}

	/* record the driver name so we can instantiate this exact algo later */
	strlcpy(ictx->shash_driver_name, hash_alg->base.cra_driver_name,
		CRYPTO_MAX_ALG_NAME);

	/* Instance fields */

	err = -ENAMETOOLONG;
	if (snprintf(base->cra_name, CRYPTO_MAX_ALG_NAME,
		     "essiv(%s,%s)", block_base->cra_name,
		     hash_alg->base.cra_name) >= CRYPTO_MAX_ALG_NAME)
		goto out_free_hash;
	if (snprintf(base->cra_driver_name, CRYPTO_MAX_ALG_NAME,
		     "essiv(%s,%s)", block_base->cra_driver_name,
		     hash_alg->base.cra_driver_name) >= CRYPTO_MAX_ALG_NAME)
		goto out_free_hash;

	base->cra_flags = block_base->cra_flags & CRYPTO_ALG_ASYNC;
	base->cra_blocksize = block_base->cra_blocksize;
	base->cra_ctxsize = sizeof(struct essiv_tfm_ctx);
	base->cra_alignmask = block_base->cra_alignmask;
	base->cra_priority = block_base->cra_priority;

	if (type == CRYPTO_ALG_TYPE_SKCIPHER) {
		skcipher_inst->alg.setkey = essiv_skcipher_setkey;
		skcipher_inst->alg.encrypt = essiv_skcipher_encrypt;
		skcipher_inst->alg.decrypt = essiv_skcipher_decrypt;
		skcipher_inst->alg.init = essiv_skcipher_init_tfm;
		skcipher_inst->alg.exit = essiv_skcipher_exit_tfm;

		skcipher_inst->alg.min_keysize = crypto_skcipher_alg_min_keysize(skcipher_alg);
		skcipher_inst->alg.max_keysize = crypto_skcipher_alg_max_keysize(skcipher_alg);
		skcipher_inst->alg.ivsize = ivsize;
		skcipher_inst->alg.chunksize = crypto_skcipher_alg_chunksize(skcipher_alg);
		skcipher_inst->alg.walksize = crypto_skcipher_alg_walksize(skcipher_alg);

		skcipher_inst->free = essiv_skcipher_free_instance;

		err = skcipher_register_instance(tmpl, skcipher_inst);
	} else {
		aead_inst->alg.setkey = essiv_aead_setkey;
		aead_inst->alg.setauthsize = essiv_aead_setauthsize;
		aead_inst->alg.encrypt = essiv_aead_encrypt;
		aead_inst->alg.decrypt = essiv_aead_decrypt;
		aead_inst->alg.init = essiv_aead_init_tfm;
		aead_inst->alg.exit = essiv_aead_exit_tfm;

		aead_inst->alg.ivsize = ivsize;
		aead_inst->alg.maxauthsize = crypto_aead_alg_maxauthsize(aead_alg);
		aead_inst->alg.chunksize = crypto_aead_alg_chunksize(aead_alg);

		aead_inst->free = essiv_aead_free_instance;

		err = aead_register_instance(tmpl, aead_inst);
	}

	if (err)
		goto out_free_hash;

	crypto_mod_put(_hash_alg);
	return 0;

out_free_hash:
	crypto_mod_put(_hash_alg);
out_drop_skcipher:
	if (type == CRYPTO_ALG_TYPE_SKCIPHER)
		crypto_drop_skcipher(&ictx->u.skcipher_spawn);
	else
		crypto_drop_aead(&ictx->u.aead_spawn);
out_free_inst:
	kfree(skcipher_inst);
	kfree(aead_inst);
	return err;
}

/* essiv(cipher_name, shash_name) */
static struct crypto_template essiv_tmpl = {
	.name = "essiv",
	.create = essiv_create,
	.module = THIS_MODULE,
};

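/*
 * Illustrative usage (not part of this file): a user such as dm-crypt or
 * fscrypt instantiates the template by name, e.g.
 *
 *	struct crypto_skcipher *tfm =
 *		crypto_alloc_skcipher("essiv(cbc(aes),sha256)", 0, 0);
 *
 * which ends up calling essiv_create() with "cbc(aes)" and "sha256" as
 * the two template arguments.
 */
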
static int __init essiv_module_init(void)
{
	return crypto_register_template(&essiv_tmpl);
}

static void __exit essiv_module_exit(void)
{
	crypto_unregister_template(&essiv_tmpl);
}

subsys_initcall(essiv_module_init);
module_exit(essiv_module_exit);

MODULE_DESCRIPTION("ESSIV skcipher/aead wrapper for block encryption");
MODULE_LICENSE("GPL v2");
MODULE_ALIAS_CRYPTO("essiv");