/*
 * Accelerated GHASH implementation with ARMv8 PMULL instructions.
 *
 * Copyright (C) 2014 - 2018 Linaro Ltd. <ard.biesheuvel@linaro.org>
 *
 * This program is free software; you can redistribute it and/or modify it
 * under the terms of the GNU General Public License version 2 as published
 * by the Free Software Foundation.
 */

#include <asm/neon.h>
#include <asm/simd.h>
#include <asm/unaligned.h>
#include <crypto/aes.h>
#include <crypto/algapi.h>
#include <crypto/b128ops.h>
#include <crypto/gf128mul.h>
#include <crypto/internal/aead.h>
#include <crypto/internal/hash.h>
#include <crypto/internal/simd.h>
#include <crypto/internal/skcipher.h>
#include <crypto/scatterwalk.h>
#include <linux/cpufeature.h>
#include <linux/crypto.h>
#include <linux/module.h>

MODULE_DESCRIPTION("GHASH and AES-GCM using ARMv8 Crypto Extensions");
MODULE_AUTHOR("Ard Biesheuvel <ard.biesheuvel@linaro.org>");
MODULE_LICENSE("GPL v2");
MODULE_ALIAS_CRYPTO("ghash");

#define GHASH_BLOCK_SIZE	16
#define GHASH_DIGEST_SIZE	16
#define GCM_IV_SIZE		12

struct ghash_key {
	u64			h[2];
	u64			h2[2];
	u64			h3[2];
	u64			h4[2];

	be128			k;
};

struct ghash_desc_ctx {
	u64 digest[GHASH_DIGEST_SIZE/sizeof(u64)];
	u8 buf[GHASH_BLOCK_SIZE];
	u32 count;
};

struct gcm_aes_ctx {
	struct crypto_aes_ctx	aes_key;
	struct ghash_key	ghash_key;
};

asmlinkage void pmull_ghash_update_p64(int blocks, u64 dg[], const char *src,
				       struct ghash_key const *k,
				       const char *head);

asmlinkage void pmull_ghash_update_p8(int blocks, u64 dg[], const char *src,
				      struct ghash_key const *k,
				      const char *head);

asmlinkage void pmull_gcm_encrypt(int blocks, u64 dg[], u8 dst[],
				  const u8 src[], struct ghash_key const *k,
				  u8 ctr[], u32 const rk[], int rounds,
				  u8 ks[]);

asmlinkage void pmull_gcm_decrypt(int blocks, u64 dg[], u8 dst[],
				  const u8 src[], struct ghash_key const *k,
				  u8 ctr[], u32 const rk[], int rounds);

asmlinkage void pmull_gcm_encrypt_block(u8 dst[], u8 const src[],
					u32 const rk[], int rounds);

asmlinkage void __aes_arm64_encrypt(u32 *rk, u8 *out, const u8 *in, int rounds);

static int ghash_init(struct shash_desc *desc)
{
	struct ghash_desc_ctx *ctx = shash_desc_ctx(desc);

	*ctx = (struct ghash_desc_ctx){};
	return 0;
}

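/*
 * Process 'blocks' full blocks from 'src': use the SIMD routine (inside a
 * kernel_neon_begin/end section) when the NEON unit may be used in this
 * context, otherwise fall back to the generic gf128mul_lle() implementation.
 * 'head', when non-NULL, points to one buffered block that is consumed first.
 */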
static void ghash_do_update(int blocks, u64 dg[], const char *src,
			    struct ghash_key *key, const char *head,
			    void (*simd_update)(int blocks, u64 dg[],
						const char *src,
						struct ghash_key const *k,
						const char *head))
{
	if (likely(crypto_simd_usable())) {
		kernel_neon_begin();
		simd_update(blocks, dg, src, key, head);
		kernel_neon_end();
	} else {
		be128 dst = { cpu_to_be64(dg[1]), cpu_to_be64(dg[0]) };

		do {
			const u8 *in = src;

			if (head) {
				in = head;
				blocks++;
				head = NULL;
			} else {
				src += GHASH_BLOCK_SIZE;
			}

			crypto_xor((u8 *)&dst, in, GHASH_BLOCK_SIZE);
			gf128mul_lle(&dst, &key->k);
		} while (--blocks);

		dg[0] = be64_to_cpu(dst.b);
		dg[1] = be64_to_cpu(dst.a);
	}
}

/* avoid hogging the CPU for too long */
#define MAX_BLOCKS	(SZ_64K / GHASH_BLOCK_SIZE)

static int __ghash_update(struct shash_desc *desc, const u8 *src,
			  unsigned int len,
			  void (*simd_update)(int blocks, u64 dg[],
					      const char *src,
					      struct ghash_key const *k,
					      const char *head))
{
	struct ghash_desc_ctx *ctx = shash_desc_ctx(desc);
	unsigned int partial = ctx->count % GHASH_BLOCK_SIZE;

	ctx->count += len;

	if ((partial + len) >= GHASH_BLOCK_SIZE) {
		struct ghash_key *key = crypto_shash_ctx(desc->tfm);
		int blocks;

		if (partial) {
			int p = GHASH_BLOCK_SIZE - partial;

			memcpy(ctx->buf + partial, src, p);
			src += p;
			len -= p;
		}

		blocks = len / GHASH_BLOCK_SIZE;
		len %= GHASH_BLOCK_SIZE;

		do {
			int chunk = min(blocks, MAX_BLOCKS);

			ghash_do_update(chunk, ctx->digest, src, key,
					partial ? ctx->buf : NULL,
					simd_update);

			blocks -= chunk;
			src += chunk * GHASH_BLOCK_SIZE;
			partial = 0;
		} while (unlikely(blocks > 0));
	}
	if (len)
		memcpy(ctx->buf + partial, src, len);
	return 0;
}

static int ghash_update_p8(struct shash_desc *desc, const u8 *src,
			   unsigned int len)
{
	return __ghash_update(desc, src, len, pmull_ghash_update_p8);
}

static int ghash_update_p64(struct shash_desc *desc, const u8 *src,
			    unsigned int len)
{
	return __ghash_update(desc, src, len, pmull_ghash_update_p64);
}

static int ghash_final_p8(struct shash_desc *desc, u8 *dst)
{
	struct ghash_desc_ctx *ctx = shash_desc_ctx(desc);
	unsigned int partial = ctx->count % GHASH_BLOCK_SIZE;

	if (partial) {
		struct ghash_key *key = crypto_shash_ctx(desc->tfm);

		memset(ctx->buf + partial, 0, GHASH_BLOCK_SIZE - partial);

		ghash_do_update(1, ctx->digest, ctx->buf, key, NULL,
				pmull_ghash_update_p8);
	}
	put_unaligned_be64(ctx->digest[1], dst);
	put_unaligned_be64(ctx->digest[0], dst + 8);

	*ctx = (struct ghash_desc_ctx){};
	return 0;
}

static int ghash_final_p64(struct shash_desc *desc, u8 *dst)
{
	struct ghash_desc_ctx *ctx = shash_desc_ctx(desc);
	unsigned int partial = ctx->count % GHASH_BLOCK_SIZE;

	if (partial) {
		struct ghash_key *key = crypto_shash_ctx(desc->tfm);

		memset(ctx->buf + partial, 0, GHASH_BLOCK_SIZE - partial);

		ghash_do_update(1, ctx->digest, ctx->buf, key, NULL,
				pmull_ghash_update_p64);
	}
	put_unaligned_be64(ctx->digest[1], dst);
	put_unaligned_be64(ctx->digest[0], dst + 8);

	*ctx = (struct ghash_desc_ctx){};
	return 0;
}

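/*
 * The PMULL code works on a bit-reflected representation of H that has been
 * shifted left by one bit; if shifting out the top bit produces a carry, it
 * is folded back in via the GHASH reduction polynomial (the 0xc2... constant
 * below).
 */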
static void ghash_reflect(u64 h[], const be128 *k)
{
	u64 carry = be64_to_cpu(k->a) & BIT(63) ? 1 : 0;

	h[0] = (be64_to_cpu(k->b) << 1) | carry;
	h[1] = (be64_to_cpu(k->a) << 1) | (be64_to_cpu(k->b) >> 63);

	if (carry)
		h[1] ^= 0xc200000000000000UL;
}

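/*
 * Precompute the reflected forms of H, H^2, H^3 and H^4, which lets the
 * PMULL based GCM code aggregate several blocks per field reduction. The
 * raw key is kept as well, for the non-SIMD fallback.
 */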
static int __ghash_setkey(struct ghash_key *key,
			  const u8 *inkey, unsigned int keylen)
{
	be128 h;

	/* needed for the fallback */
	memcpy(&key->k, inkey, GHASH_BLOCK_SIZE);

	ghash_reflect(key->h, &key->k);

	h = key->k;
	gf128mul_lle(&h, &key->k);
	ghash_reflect(key->h2, &h);

	gf128mul_lle(&h, &key->k);
	ghash_reflect(key->h3, &h);

	gf128mul_lle(&h, &key->k);
	ghash_reflect(key->h4, &h);

	return 0;
}

static int ghash_setkey(struct crypto_shash *tfm,
			const u8 *inkey, unsigned int keylen)
{
	struct ghash_key *key = crypto_shash_ctx(tfm);

	if (keylen != GHASH_BLOCK_SIZE) {
		crypto_shash_set_flags(tfm, CRYPTO_TFM_RES_BAD_KEY_LEN);
		return -EINVAL;
	}

	return __ghash_setkey(key, inkey, keylen);
}

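/*
 * Two variants are registered: "ghash-neon", which only needs the 8-bit
 * polynomial multiply forms of PMULL available with plain ASIMD, and
 * "ghash-ce", which needs the 64-bit PMULL instruction. The higher
 * cra_priority of the latter makes the crypto API prefer it whenever both
 * are available.
 */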
static struct shash_alg ghash_alg[] = {{
	.base.cra_name		= "ghash",
	.base.cra_driver_name	= "ghash-neon",
	.base.cra_priority	= 100,
	.base.cra_blocksize	= GHASH_BLOCK_SIZE,
	.base.cra_ctxsize	= sizeof(struct ghash_key),
	.base.cra_module	= THIS_MODULE,

	.digestsize		= GHASH_DIGEST_SIZE,
	.init			= ghash_init,
	.update			= ghash_update_p8,
	.final			= ghash_final_p8,
	.setkey			= ghash_setkey,
	.descsize		= sizeof(struct ghash_desc_ctx),
}, {
	.base.cra_name		= "ghash",
	.base.cra_driver_name	= "ghash-ce",
	.base.cra_priority	= 200,
	.base.cra_blocksize	= GHASH_BLOCK_SIZE,
	.base.cra_ctxsize	= sizeof(struct ghash_key),
	.base.cra_module	= THIS_MODULE,

	.digestsize		= GHASH_DIGEST_SIZE,
	.init			= ghash_init,
	.update			= ghash_update_p64,
	.final			= ghash_final_p64,
	.setkey			= ghash_setkey,
	.descsize		= sizeof(struct ghash_desc_ctx),
}};

static int num_rounds(struct crypto_aes_ctx *ctx)
{
	/*
	 * # of rounds specified by AES:
	 * 128 bit key		10 rounds
	 * 192 bit key		12 rounds
	 * 256 bit key		14 rounds
	 * => n byte key => 6 + (n/4) rounds
	 */
	return 6 + ctx->key_length / 4;
}

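/*
 * As per the GCM spec, the GHASH key H is derived by encrypting an all-zero
 * block with the AES key: the anonymous (u8[AES_BLOCK_SIZE]){} literal below
 * is that zero block.
 */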
static int gcm_setkey(struct crypto_aead *tfm, const u8 *inkey,
		      unsigned int keylen)
{
	struct gcm_aes_ctx *ctx = crypto_aead_ctx(tfm);
	u8 key[GHASH_BLOCK_SIZE];
	int ret;

	ret = crypto_aes_expand_key(&ctx->aes_key, inkey, keylen);
	if (ret) {
		tfm->base.crt_flags |= CRYPTO_TFM_RES_BAD_KEY_LEN;
		return -EINVAL;
	}

	__aes_arm64_encrypt(ctx->aes_key.key_enc, key, (u8[AES_BLOCK_SIZE]){},
			    num_rounds(&ctx->aes_key));

	return __ghash_setkey(&ctx->ghash_key, key, sizeof(be128));
}

static int gcm_setauthsize(struct crypto_aead *tfm, unsigned int authsize)
{
	switch (authsize) {
	case 4:
	case 8:
	case 12 ... 16:
		break;
	default:
		return -EINVAL;
	}
	return 0;
}

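/*
 * Fold 'count' bytes of associated data into the MAC. Scatterlist entries
 * may start and end in the middle of a GHASH block, so partial blocks are
 * staged in 'buf' (with 'buf_count' tracking its fill level) until a full
 * block is available.
 */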
static void gcm_update_mac(u64 dg[], const u8 *src, int count, u8 buf[],
			   int *buf_count, struct gcm_aes_ctx *ctx)
{
	if (*buf_count > 0) {
		int buf_added = min(count, GHASH_BLOCK_SIZE - *buf_count);

		memcpy(&buf[*buf_count], src, buf_added);

		*buf_count += buf_added;
		src += buf_added;
		count -= buf_added;
	}

	if (count >= GHASH_BLOCK_SIZE || *buf_count == GHASH_BLOCK_SIZE) {
		int blocks = count / GHASH_BLOCK_SIZE;

		ghash_do_update(blocks, dg, src, &ctx->ghash_key,
				*buf_count ? buf : NULL,
				pmull_ghash_update_p64);

		src += blocks * GHASH_BLOCK_SIZE;
		count %= GHASH_BLOCK_SIZE;
		*buf_count = 0;
	}

	if (count > 0) {
		memcpy(buf, src, count);
		*buf_count = count;
	}
}

static void gcm_calculate_auth_mac(struct aead_request *req, u64 dg[])
{
	struct crypto_aead *aead = crypto_aead_reqtfm(req);
	struct gcm_aes_ctx *ctx = crypto_aead_ctx(aead);
	u8 buf[GHASH_BLOCK_SIZE];
	struct scatter_walk walk;
	u32 len = req->assoclen;
	int buf_count = 0;

	scatterwalk_start(&walk, req->src);

	do {
		u32 n = scatterwalk_clamp(&walk, len);
		u8 *p;

		if (!n) {
			scatterwalk_start(&walk, sg_next(walk.sg));
			n = scatterwalk_clamp(&walk, len);
		}
		p = scatterwalk_map(&walk);

		gcm_update_mac(dg, p, n, buf, &buf_count, ctx);
		len -= n;

		scatterwalk_unmap(p);
		scatterwalk_advance(&walk, n);
		scatterwalk_done(&walk, 0, len);
	} while (len);

	if (buf_count) {
		memset(&buf[buf_count], 0, GHASH_BLOCK_SIZE - buf_count);
		ghash_do_update(1, dg, buf, &ctx->ghash_key, NULL,
				pmull_ghash_update_p64);
	}
}

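/*
 * Complete the tag: GHASH the lengths of the associated data and the
 * ciphertext (in bits, as mandated by GCM), then XOR the digest into the
 * encrypted initial counter block that was stashed in 'tag'.
 */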
static void gcm_final(struct aead_request *req, struct gcm_aes_ctx *ctx,
		      u64 dg[], u8 tag[], int cryptlen)
{
	u8 mac[AES_BLOCK_SIZE];
	be128 lengths;

	lengths.a = cpu_to_be64(req->assoclen * 8);
	lengths.b = cpu_to_be64(cryptlen * 8);

	ghash_do_update(1, dg, (void *)&lengths, &ctx->ghash_key, NULL,
			pmull_ghash_update_p64);

	put_unaligned_be64(dg[1], mac);
	put_unaligned_be64(dg[0], mac + 8);

	crypto_xor(tag, mac, AES_BLOCK_SIZE);
}

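/*
 * The SIMD path below processes the bulk in strides of two AES blocks, the
 * granularity the PMULL asm operates on, and precomputes two keystream
 * blocks in 'ks' for whatever tail remains. The NEON unit is yielded after
 * every chunk of the walk so that preemption is not held off for too long.
 */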
static int gcm_encrypt(struct aead_request *req)
{
	struct crypto_aead *aead = crypto_aead_reqtfm(req);
	struct gcm_aes_ctx *ctx = crypto_aead_ctx(aead);
	struct skcipher_walk walk;
	u8 iv[AES_BLOCK_SIZE];
	u8 ks[2 * AES_BLOCK_SIZE];
	u8 tag[AES_BLOCK_SIZE];
	u64 dg[2] = {};
	int nrounds = num_rounds(&ctx->aes_key);
	int err;

	if (req->assoclen)
		gcm_calculate_auth_mac(req, dg);

	memcpy(iv, req->iv, GCM_IV_SIZE);
	put_unaligned_be32(1, iv + GCM_IV_SIZE);

	err = skcipher_walk_aead_encrypt(&walk, req, false);

	if (likely(crypto_simd_usable() && walk.total >= 2 * AES_BLOCK_SIZE)) {
		u32 const *rk = NULL;

		kernel_neon_begin();
		pmull_gcm_encrypt_block(tag, iv, ctx->aes_key.key_enc, nrounds);
		put_unaligned_be32(2, iv + GCM_IV_SIZE);
		pmull_gcm_encrypt_block(ks, iv, NULL, nrounds);
		put_unaligned_be32(3, iv + GCM_IV_SIZE);
		pmull_gcm_encrypt_block(ks + AES_BLOCK_SIZE, iv, NULL, nrounds);
		put_unaligned_be32(4, iv + GCM_IV_SIZE);

		do {
			int blocks = walk.nbytes / (2 * AES_BLOCK_SIZE) * 2;

			if (rk)
				kernel_neon_begin();

			pmull_gcm_encrypt(blocks, dg, walk.dst.virt.addr,
					  walk.src.virt.addr, &ctx->ghash_key,
					  iv, rk, nrounds, ks);
			kernel_neon_end();

			err = skcipher_walk_done(&walk,
					walk.nbytes % (2 * AES_BLOCK_SIZE));

			rk = ctx->aes_key.key_enc;
		} while (walk.nbytes >= 2 * AES_BLOCK_SIZE);
	} else {
		__aes_arm64_encrypt(ctx->aes_key.key_enc, tag, iv, nrounds);
		put_unaligned_be32(2, iv + GCM_IV_SIZE);

		while (walk.nbytes >= (2 * AES_BLOCK_SIZE)) {
			const int blocks =
				walk.nbytes / (2 * AES_BLOCK_SIZE) * 2;
			u8 *dst = walk.dst.virt.addr;
			u8 *src = walk.src.virt.addr;
			int remaining = blocks;

			do {
				__aes_arm64_encrypt(ctx->aes_key.key_enc,
						    ks, iv, nrounds);
				crypto_xor_cpy(dst, src, ks, AES_BLOCK_SIZE);
				crypto_inc(iv, AES_BLOCK_SIZE);

				dst += AES_BLOCK_SIZE;
				src += AES_BLOCK_SIZE;
			} while (--remaining > 0);

			ghash_do_update(blocks, dg,
					walk.dst.virt.addr, &ctx->ghash_key,
					NULL, pmull_ghash_update_p64);

			err = skcipher_walk_done(&walk,
					walk.nbytes % (2 * AES_BLOCK_SIZE));
		}
		if (walk.nbytes) {
			__aes_arm64_encrypt(ctx->aes_key.key_enc, ks, iv,
					    nrounds);
			if (walk.nbytes > AES_BLOCK_SIZE) {
				crypto_inc(iv, AES_BLOCK_SIZE);
				__aes_arm64_encrypt(ctx->aes_key.key_enc,
						    ks + AES_BLOCK_SIZE, iv,
						    nrounds);
			}
		}
	}

	/* handle the tail */
	if (walk.nbytes) {
		u8 buf[GHASH_BLOCK_SIZE];
		unsigned int nbytes = walk.nbytes;
		u8 *dst = walk.dst.virt.addr;
		u8 *head = NULL;

		crypto_xor_cpy(walk.dst.virt.addr, walk.src.virt.addr, ks,
			       walk.nbytes);

		if (walk.nbytes > GHASH_BLOCK_SIZE) {
			head = dst;
			dst += GHASH_BLOCK_SIZE;
			nbytes %= GHASH_BLOCK_SIZE;
		}

		memcpy(buf, dst, nbytes);
		memset(buf + nbytes, 0, GHASH_BLOCK_SIZE - nbytes);
		ghash_do_update(!!nbytes, dg, buf, &ctx->ghash_key, head,
				pmull_ghash_update_p64);

		err = skcipher_walk_done(&walk, 0);
	}

	if (err)
		return err;

	gcm_final(req, ctx, dg, tag, req->cryptlen);

	/* copy authtag to end of dst */
	scatterwalk_map_and_copy(tag, req->dst, req->assoclen + req->cryptlen,
				 crypto_aead_authsize(aead), 1);

	return 0;
}

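/*
 * Decryption mirrors gcm_encrypt(), except that GHASH is computed over the
 * ciphertext, i.e., before CTR decryption, and the resulting tag is compared
 * with the one in the request using crypto_memneq() to avoid leaking its
 * value through timing.
 */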
static int gcm_decrypt(struct aead_request *req)
{
	struct crypto_aead *aead = crypto_aead_reqtfm(req);
	struct gcm_aes_ctx *ctx = crypto_aead_ctx(aead);
	unsigned int authsize = crypto_aead_authsize(aead);
	struct skcipher_walk walk;
	u8 iv[2 * AES_BLOCK_SIZE];
	u8 tag[AES_BLOCK_SIZE];
	u8 buf[2 * GHASH_BLOCK_SIZE];
	u64 dg[2] = {};
	int nrounds = num_rounds(&ctx->aes_key);
	int err;

	if (req->assoclen)
		gcm_calculate_auth_mac(req, dg);

	memcpy(iv, req->iv, GCM_IV_SIZE);
	put_unaligned_be32(1, iv + GCM_IV_SIZE);

	err = skcipher_walk_aead_decrypt(&walk, req, false);

	if (likely(crypto_simd_usable() && walk.total >= 2 * AES_BLOCK_SIZE)) {
		u32 const *rk = NULL;

		kernel_neon_begin();
		pmull_gcm_encrypt_block(tag, iv, ctx->aes_key.key_enc, nrounds);
		put_unaligned_be32(2, iv + GCM_IV_SIZE);

		do {
			int blocks = walk.nbytes / (2 * AES_BLOCK_SIZE) * 2;
			int rem = walk.total - blocks * AES_BLOCK_SIZE;

			if (rk)
				kernel_neon_begin();

			pmull_gcm_decrypt(blocks, dg, walk.dst.virt.addr,
					  walk.src.virt.addr, &ctx->ghash_key,
					  iv, rk, nrounds);

			/* check if this is the final iteration of the loop */
			if (rem < (2 * AES_BLOCK_SIZE)) {
				u8 *iv2 = iv + AES_BLOCK_SIZE;

				if (rem > AES_BLOCK_SIZE) {
					memcpy(iv2, iv, AES_BLOCK_SIZE);
					crypto_inc(iv2, AES_BLOCK_SIZE);
				}

				pmull_gcm_encrypt_block(iv, iv, NULL, nrounds);

				if (rem > AES_BLOCK_SIZE)
					pmull_gcm_encrypt_block(iv2, iv2, NULL,
								nrounds);
			}

			kernel_neon_end();

			err = skcipher_walk_done(&walk,
					walk.nbytes % (2 * AES_BLOCK_SIZE));

			rk = ctx->aes_key.key_enc;
		} while (walk.nbytes >= 2 * AES_BLOCK_SIZE);
	} else {
		__aes_arm64_encrypt(ctx->aes_key.key_enc, tag, iv, nrounds);
		put_unaligned_be32(2, iv + GCM_IV_SIZE);

		while (walk.nbytes >= (2 * AES_BLOCK_SIZE)) {
			int blocks = walk.nbytes / (2 * AES_BLOCK_SIZE) * 2;
			u8 *dst = walk.dst.virt.addr;
			u8 *src = walk.src.virt.addr;

			ghash_do_update(blocks, dg, walk.src.virt.addr,
					&ctx->ghash_key, NULL,
					pmull_ghash_update_p64);

			do {
				__aes_arm64_encrypt(ctx->aes_key.key_enc,
						    buf, iv, nrounds);
				crypto_xor_cpy(dst, src, buf, AES_BLOCK_SIZE);
				crypto_inc(iv, AES_BLOCK_SIZE);

				dst += AES_BLOCK_SIZE;
				src += AES_BLOCK_SIZE;
			} while (--blocks > 0);

			err = skcipher_walk_done(&walk,
					walk.nbytes % (2 * AES_BLOCK_SIZE));
		}
		if (walk.nbytes) {
			if (walk.nbytes > AES_BLOCK_SIZE) {
				u8 *iv2 = iv + AES_BLOCK_SIZE;

				memcpy(iv2, iv, AES_BLOCK_SIZE);
				crypto_inc(iv2, AES_BLOCK_SIZE);

				__aes_arm64_encrypt(ctx->aes_key.key_enc, iv2,
						    iv2, nrounds);
			}
			__aes_arm64_encrypt(ctx->aes_key.key_enc, iv, iv,
					    nrounds);
		}
	}

	/* handle the tail */
	if (walk.nbytes) {
		const u8 *src = walk.src.virt.addr;
		const u8 *head = NULL;
		unsigned int nbytes = walk.nbytes;

		if (walk.nbytes > GHASH_BLOCK_SIZE) {
			head = src;
			src += GHASH_BLOCK_SIZE;
			nbytes %= GHASH_BLOCK_SIZE;
		}

		memcpy(buf, src, nbytes);
		memset(buf + nbytes, 0, GHASH_BLOCK_SIZE - nbytes);
		ghash_do_update(!!nbytes, dg, buf, &ctx->ghash_key, head,
				pmull_ghash_update_p64);

		crypto_xor_cpy(walk.dst.virt.addr, walk.src.virt.addr, iv,
			       walk.nbytes);

		err = skcipher_walk_done(&walk, 0);
	}

	if (err)
		return err;

	gcm_final(req, ctx, dg, tag, req->cryptlen - authsize);

	/* compare calculated auth tag with the stored one */
	scatterwalk_map_and_copy(buf, req->src,
				 req->assoclen + req->cryptlen - authsize,
				 authsize, 0);

	if (crypto_memneq(tag, buf, authsize))
		return -EBADMSG;
	return 0;
}

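/*
 * The chunksize of two AES blocks advertises the stride in which the
 * skcipher walk feeds data to the PMULL code above.
 */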
static struct aead_alg gcm_aes_alg = {
	.ivsize			= GCM_IV_SIZE,
	.chunksize		= 2 * AES_BLOCK_SIZE,
	.maxauthsize		= AES_BLOCK_SIZE,
	.setkey			= gcm_setkey,
	.setauthsize		= gcm_setauthsize,
	.encrypt		= gcm_encrypt,
	.decrypt		= gcm_decrypt,

	.base.cra_name		= "gcm(aes)",
	.base.cra_driver_name	= "gcm-aes-ce",
	.base.cra_priority	= 300,
	.base.cra_blocksize	= 1,
	.base.cra_ctxsize	= sizeof(struct gcm_aes_ctx),
	.base.cra_module	= THIS_MODULE,
};

static int __init ghash_ce_mod_init(void)
{
	int ret;

	if (!cpu_have_named_feature(ASIMD))
		return -ENODEV;

	if (cpu_have_named_feature(PMULL))
		ret = crypto_register_shashes(ghash_alg,
					      ARRAY_SIZE(ghash_alg));
	else
		/* only register the first array element */
		ret = crypto_register_shash(ghash_alg);

	if (ret)
		return ret;

	if (cpu_have_named_feature(PMULL)) {
		ret = crypto_register_aead(&gcm_aes_alg);
		if (ret)
			crypto_unregister_shashes(ghash_alg,
						  ARRAY_SIZE(ghash_alg));
	}
	return ret;
}

static void __exit ghash_ce_mod_exit(void)
{
	if (cpu_have_named_feature(PMULL))
		crypto_unregister_shashes(ghash_alg, ARRAY_SIZE(ghash_alg));
	else
		crypto_unregister_shash(ghash_alg);
	crypto_unregister_aead(&gcm_aes_alg);
}

static const struct cpu_feature ghash_cpu_feature[] = {
	{ cpu_feature(PMULL) }, { }
};
MODULE_DEVICE_TABLE(cpu, ghash_cpu_feature);

module_init(ghash_ce_mod_init);
module_exit(ghash_ce_mod_exit);