// SPDX-License-Identifier: GPL-2.0-or-later
/*
 * Support for Intel AES-NI instructions. This file contains glue
 * code, the real AES implementation is in intel-aes_asm.S.
 *
 * Copyright (C) 2008, Intel Corp.
 *    Author: Huang Ying <ying.huang@intel.com>
 *
 * Added RFC4106 AES-GCM support for 128-bit keys under the AEAD
 * interface for 64-bit kernels.
 *    Authors: Adrian Hoban <adrian.hoban@intel.com>
 *             Gabriele Paoloni <gabriele.paoloni@intel.com>
 *             Tadeusz Struk (tadeusz.struk@intel.com)
 *             Aidan O'Mahony (aidan.o.mahony@intel.com)
 *    Copyright (c) 2010, Intel Corporation.
 */
#include <linux/hardirq.h>
#include <linux/types.h>
#include <linux/module.h>
#include <linux/err.h>
#include <crypto/algapi.h>
#include <crypto/aes.h>
#include <crypto/ctr.h>
#include <crypto/b128ops.h>
#include <crypto/gcm.h>
#include <crypto/xts.h>
#include <asm/cpu_device_id.h>
#include <asm/simd.h>
#include <crypto/scatterwalk.h>
#include <crypto/internal/aead.h>
#include <crypto/internal/simd.h>
#include <crypto/internal/skcipher.h>
#include <linux/workqueue.h>
#include <linux/spinlock.h>
#ifdef CONFIG_X86_64
#include <asm/crypto/glue_helper.h>
#endif
#define AESNI_ALIGN	16
#define AESNI_ALIGN_ATTR __attribute__ ((__aligned__(AESNI_ALIGN)))
#define AES_BLOCK_MASK	(~(AES_BLOCK_SIZE - 1))
#define RFC4106_HASH_SUBKEY_SIZE 16
#define AESNI_ALIGN_EXTRA ((AESNI_ALIGN - 1) & ~(CRYPTO_MINALIGN - 1))
#define CRYPTO_AES_CTX_SIZE (sizeof(struct crypto_aes_ctx) + AESNI_ALIGN_EXTRA)
#define XTS_AES_CTX_SIZE (sizeof(struct aesni_xts_ctx) + AESNI_ALIGN_EXTRA)
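
/*
 * Worked example of the sizing above (an illustration, assuming
 * CRYPTO_MINALIGN is 8 on this configuration): AESNI_ALIGN_EXTRA is
 * (16 - 1) & ~(8 - 1) = 8, so each context is allocated 8 bytes larger
 * than the struct itself. That slack is just enough for the alignment
 * helpers below to bump a CRYPTO_MINALIGN-aligned pointer up to the
 * next 16-byte boundary without overrunning the allocation.
 */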
/*
 * This data is stored at the end of the crypto_tfm struct.
 * It's a kind of per-"session" data storage location.
 * This needs to be 16-byte aligned.
 */
struct aesni_rfc4106_gcm_ctx {
	u8 hash_subkey[16] AESNI_ALIGN_ATTR;
	struct crypto_aes_ctx aes_key_expanded AESNI_ALIGN_ATTR;
	u8 nonce[4];
};
struct generic_gcmaes_ctx {
	u8 hash_subkey[16] AESNI_ALIGN_ATTR;
	struct crypto_aes_ctx aes_key_expanded AESNI_ALIGN_ATTR;
};

struct aesni_xts_ctx {
	u8 raw_tweak_ctx[sizeof(struct crypto_aes_ctx)] AESNI_ALIGN_ATTR;
	u8 raw_crypt_ctx[sizeof(struct crypto_aes_ctx)] AESNI_ALIGN_ATTR;
};
#define GCM_BLOCK_LEN 16

struct gcm_context_data {
	/* init, update and finalize context data */
	u8 aad_hash[GCM_BLOCK_LEN];
	u64 aad_length;
	u64 in_length;
	u8 partial_block_enc_key[GCM_BLOCK_LEN];
	u8 orig_IV[GCM_BLOCK_LEN];
	u8 current_counter[GCM_BLOCK_LEN];
	u64 partial_block_len;
	u64 unused;
	u8 hash_keys[GCM_BLOCK_LEN * 16];
};
asmlinkage int aesni_set_key(struct crypto_aes_ctx *ctx, const u8 *in_key,
			     unsigned int key_len);
asmlinkage void aesni_enc(struct crypto_aes_ctx *ctx, u8 *out,
			  const u8 *in);
asmlinkage void aesni_dec(struct crypto_aes_ctx *ctx, u8 *out,
			  const u8 *in);
asmlinkage void aesni_ecb_enc(struct crypto_aes_ctx *ctx, u8 *out,
			      const u8 *in, unsigned int len);
asmlinkage void aesni_ecb_dec(struct crypto_aes_ctx *ctx, u8 *out,
			      const u8 *in, unsigned int len);
asmlinkage void aesni_cbc_enc(struct crypto_aes_ctx *ctx, u8 *out,
			      const u8 *in, unsigned int len, u8 *iv);
asmlinkage void aesni_cbc_dec(struct crypto_aes_ctx *ctx, u8 *out,
			      const u8 *in, unsigned int len, u8 *iv);
#define AVX_GEN2_OPTSIZE 640
#define AVX_GEN4_OPTSIZE 4096
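
/*
 * The OPTSIZE values are the crossover points, in bytes, below which the
 * wider AVX/AVX2 GCM paths stop paying off: gcmaes_crypt_by_sg() below
 * falls back from the gen4 routines to the gen2 and SSE routines for
 * requests smaller than these thresholds.
 */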
#ifdef CONFIG_X86_64

static void (*aesni_ctr_enc_tfm)(struct crypto_aes_ctx *ctx, u8 *out,
			      const u8 *in, unsigned int len, u8 *iv);
asmlinkage void aesni_ctr_enc(struct crypto_aes_ctx *ctx, u8 *out,
			      const u8 *in, unsigned int len, u8 *iv);

asmlinkage void aesni_xts_crypt8(struct crypto_aes_ctx *ctx, u8 *out,
				 const u8 *in, bool enc, u8 *iv);
/* asmlinkage void aesni_gcm_enc()
 * void *ctx, AES Key schedule. Starts on a 16 byte boundary.
 * struct gcm_context_data. May be uninitialized.
 * u8 *out, Ciphertext output. Encrypt in-place is allowed.
 * const u8 *in, Plaintext input
 * unsigned long plaintext_len, Length of data in bytes for encryption.
 * u8 *iv, Pre-counter block j0: 12 byte IV concatenated with 0x00000001.
 *         16-byte aligned pointer.
 * u8 *hash_subkey, the Hash sub key input. Data starts on a 16-byte boundary.
 * const u8 *aad, Additional Authentication Data (AAD)
 * unsigned long aad_len, Length of AAD in bytes.
 * u8 *auth_tag, Authenticated Tag output.
 * unsigned long auth_tag_len), Authenticated Tag Length in bytes.
 *         Valid values are 16 (most likely), 12 or 8.
 */
asmlinkage void aesni_gcm_enc(void *ctx,
			struct gcm_context_data *gdata, u8 *out,
			const u8 *in, unsigned long plaintext_len, u8 *iv,
			u8 *hash_subkey, const u8 *aad, unsigned long aad_len,
			u8 *auth_tag, unsigned long auth_tag_len);
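
/*
 * Illustrative call of the one-shot entry point above (a sketch, not code
 * from this driver): encrypting len bytes of buf in place with a 16-byte
 * tag, once iv has been filled with the pre-counter block j0:
 *
 *	aesni_gcm_enc(aes_ctx, &gdata, buf, buf, len, iv,
 *		      hash_subkey, aad, aad_len, tag, 16);
 */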
/* asmlinkage void aesni_gcm_dec()
 * void *ctx, AES Key schedule. Starts on a 16 byte boundary.
 * struct gcm_context_data. May be uninitialized.
 * u8 *out, Plaintext output. Decrypt in-place is allowed.
 * const u8 *in, Ciphertext input
 * unsigned long ciphertext_len, Length of data in bytes for decryption.
 * u8 *iv, Pre-counter block j0: 12 byte IV concatenated with 0x00000001.
 *         16-byte aligned pointer.
 * u8 *hash_subkey, the Hash sub key input. Data starts on a 16-byte boundary.
 * const u8 *aad, Additional Authentication Data (AAD)
 * unsigned long aad_len, Length of AAD in bytes. With RFC4106 this is going
 *         to be 8 or 12 bytes
 * u8 *auth_tag, Authenticated Tag output.
 * unsigned long auth_tag_len) Authenticated Tag Length in bytes.
 *         Valid values are 16 (most likely), 12 or 8.
 */
asmlinkage void aesni_gcm_dec(void *ctx,
			struct gcm_context_data *gdata, u8 *out,
			const u8 *in, unsigned long ciphertext_len, u8 *iv,
			u8 *hash_subkey, const u8 *aad, unsigned long aad_len,
			u8 *auth_tag, unsigned long auth_tag_len);
/* Scatter / Gather routines, with args similar to above */
asmlinkage void aesni_gcm_init(void *ctx,
			       struct gcm_context_data *gdata,
			       u8 *iv,
			       u8 *hash_subkey, const u8 *aad,
			       unsigned long aad_len);
asmlinkage void aesni_gcm_enc_update(void *ctx,
				     struct gcm_context_data *gdata, u8 *out,
				     const u8 *in, unsigned long plaintext_len);
asmlinkage void aesni_gcm_dec_update(void *ctx,
				     struct gcm_context_data *gdata, u8 *out,
				     const u8 *in,
				     unsigned long ciphertext_len);
asmlinkage void aesni_gcm_finalize(void *ctx,
				   struct gcm_context_data *gdata,
				   u8 *auth_tag, unsigned long auth_tag_len);
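
/*
 * The scatter/gather routines are designed to be chained: one init, then
 * any number of enc_update/dec_update calls over successive chunks, then
 * one finalize to emit the tag. A minimal sketch of the sequence that
 * gcmaes_crypt_by_sg() below performs (FPU begin/end elided):
 *
 *	aesni_gcm_init(ctx, &data, iv, hash_subkey, aad, aad_len);
 *	aesni_gcm_enc_update(ctx, &data, dst, src, len);   (repeat per chunk)
 *	aesni_gcm_finalize(ctx, &data, tag, tag_len);
 */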
static const struct aesni_gcm_tfm_s {
	void (*init)(void *ctx, struct gcm_context_data *gdata, u8 *iv,
		     u8 *hash_subkey, const u8 *aad, unsigned long aad_len);
	void (*enc_update)(void *ctx, struct gcm_context_data *gdata, u8 *out,
			   const u8 *in, unsigned long plaintext_len);
	void (*dec_update)(void *ctx, struct gcm_context_data *gdata, u8 *out,
			   const u8 *in, unsigned long ciphertext_len);
	void (*finalize)(void *ctx, struct gcm_context_data *gdata,
			 u8 *auth_tag, unsigned long auth_tag_len);
} *aesni_gcm_tfm;
static const struct aesni_gcm_tfm_s aesni_gcm_tfm_sse = {
	.init = &aesni_gcm_init,
	.enc_update = &aesni_gcm_enc_update,
	.dec_update = &aesni_gcm_dec_update,
	.finalize = &aesni_gcm_finalize,
};
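
/*
 * aesni_gcm_tfm is the dispatch pointer used for all GCM requests; it is
 * pointed at this SSE table, or at the AVX/AVX2 tables below, once at
 * module load based on the CPU's features (see aesni_init()).
 */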
#ifdef CONFIG_AS_AVX
asmlinkage void aes_ctr_enc_128_avx_by8(const u8 *in, u8 *iv,
		void *keys, u8 *out, unsigned int num_bytes);
asmlinkage void aes_ctr_enc_192_avx_by8(const u8 *in, u8 *iv,
		void *keys, u8 *out, unsigned int num_bytes);
asmlinkage void aes_ctr_enc_256_avx_by8(const u8 *in, u8 *iv,
		void *keys, u8 *out, unsigned int num_bytes);
/*
 * asmlinkage void aesni_gcm_init_avx_gen2()
 * gcm_data *my_ctx_data, context data
 * u8 *hash_subkey, the Hash sub key input. Data starts on a 16-byte boundary.
 */
asmlinkage void aesni_gcm_init_avx_gen2(void *my_ctx_data,
					struct gcm_context_data *gdata,
					u8 *iv,
					u8 *hash_subkey,
					const u8 *aad,
					unsigned long aad_len);
asmlinkage void aesni_gcm_enc_update_avx_gen2(void *ctx,
				     struct gcm_context_data *gdata, u8 *out,
				     const u8 *in, unsigned long plaintext_len);
asmlinkage void aesni_gcm_dec_update_avx_gen2(void *ctx,
				     struct gcm_context_data *gdata, u8 *out,
				     const u8 *in,
				     unsigned long ciphertext_len);
asmlinkage void aesni_gcm_finalize_avx_gen2(void *ctx,
				   struct gcm_context_data *gdata,
				   u8 *auth_tag, unsigned long auth_tag_len);
asmlinkage void aesni_gcm_enc_avx_gen2(void *ctx,
				struct gcm_context_data *gdata, u8 *out,
				const u8 *in, unsigned long plaintext_len, u8 *iv,
				const u8 *aad, unsigned long aad_len,
				u8 *auth_tag, unsigned long auth_tag_len);

asmlinkage void aesni_gcm_dec_avx_gen2(void *ctx,
				struct gcm_context_data *gdata, u8 *out,
				const u8 *in, unsigned long ciphertext_len, u8 *iv,
				const u8 *aad, unsigned long aad_len,
				u8 *auth_tag, unsigned long auth_tag_len);

static const struct aesni_gcm_tfm_s aesni_gcm_tfm_avx_gen2 = {
	.init = &aesni_gcm_init_avx_gen2,
	.enc_update = &aesni_gcm_enc_update_avx_gen2,
	.dec_update = &aesni_gcm_dec_update_avx_gen2,
	.finalize = &aesni_gcm_finalize_avx_gen2,
};
#endif
#ifdef CONFIG_AS_AVX2
/*
 * asmlinkage void aesni_gcm_init_avx_gen4()
 * gcm_data *my_ctx_data, context data
 * u8 *hash_subkey, the Hash sub key input. Data starts on a 16-byte boundary.
 */
asmlinkage void aesni_gcm_init_avx_gen4(void *my_ctx_data,
					struct gcm_context_data *gdata,
					u8 *iv,
					u8 *hash_subkey,
					const u8 *aad,
					unsigned long aad_len);
asmlinkage void aesni_gcm_enc_update_avx_gen4(void *ctx,
				     struct gcm_context_data *gdata, u8 *out,
				     const u8 *in, unsigned long plaintext_len);
asmlinkage void aesni_gcm_dec_update_avx_gen4(void *ctx,
				     struct gcm_context_data *gdata, u8 *out,
				     const u8 *in,
				     unsigned long ciphertext_len);
asmlinkage void aesni_gcm_finalize_avx_gen4(void *ctx,
				   struct gcm_context_data *gdata,
				   u8 *auth_tag, unsigned long auth_tag_len);
asmlinkage void aesni_gcm_enc_avx_gen4(void *ctx,
				struct gcm_context_data *gdata, u8 *out,
				const u8 *in, unsigned long plaintext_len, u8 *iv,
				const u8 *aad, unsigned long aad_len,
				u8 *auth_tag, unsigned long auth_tag_len);

asmlinkage void aesni_gcm_dec_avx_gen4(void *ctx,
				struct gcm_context_data *gdata, u8 *out,
				const u8 *in, unsigned long ciphertext_len, u8 *iv,
				const u8 *aad, unsigned long aad_len,
				u8 *auth_tag, unsigned long auth_tag_len);

static const struct aesni_gcm_tfm_s aesni_gcm_tfm_avx_gen4 = {
	.init = &aesni_gcm_init_avx_gen4,
	.enc_update = &aesni_gcm_enc_update_avx_gen4,
	.dec_update = &aesni_gcm_dec_update_avx_gen4,
	.finalize = &aesni_gcm_finalize_avx_gen4,
};
#endif
static inline struct
aesni_rfc4106_gcm_ctx *aesni_rfc4106_gcm_ctx_get(struct crypto_aead *tfm)
{
	unsigned long align = AESNI_ALIGN;

	if (align <= crypto_tfm_ctx_alignment())
		align = 1;
	return PTR_ALIGN(crypto_aead_ctx(tfm), align);
}

static inline struct
generic_gcmaes_ctx *generic_gcmaes_ctx_get(struct crypto_aead *tfm)
{
	unsigned long align = AESNI_ALIGN;

	if (align <= crypto_tfm_ctx_alignment())
		align = 1;
	return PTR_ALIGN(crypto_aead_ctx(tfm), align);
}
#endif
static inline struct crypto_aes_ctx *aes_ctx(void *raw_ctx)
{
	unsigned long addr = (unsigned long)raw_ctx;
	unsigned long align = AESNI_ALIGN;

	if (align <= crypto_tfm_ctx_alignment())
		align = 1;
	return (struct crypto_aes_ctx *)ALIGN(addr, align);
}
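
/*
 * Worked example for the helper above (illustrative numbers only): with
 * crypto_tfm_ctx_alignment() == 8, a raw ctx ending in 0x08 is rounded up
 * to the next address ending in 0x10, which is why the *_CTX_SIZE macros
 * reserve AESNI_ALIGN_EXTRA spare bytes. If the tfm ctx is already aligned
 * to 16 or more, align degrades to 1 and the pointer is used as-is.
 */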
static int aes_set_key_common(struct crypto_tfm *tfm, void *raw_ctx,
			      const u8 *in_key, unsigned int key_len)
{
	struct crypto_aes_ctx *ctx = aes_ctx(raw_ctx);
	u32 *flags = &tfm->crt_flags;
	int err;

	if (key_len != AES_KEYSIZE_128 && key_len != AES_KEYSIZE_192 &&
	    key_len != AES_KEYSIZE_256) {
		*flags |= CRYPTO_TFM_RES_BAD_KEY_LEN;
		return -EINVAL;
	}

	if (!crypto_simd_usable())
		err = aes_expandkey(ctx, in_key, key_len);
	else {
		kernel_fpu_begin();
		err = aesni_set_key(ctx, in_key, key_len);
		kernel_fpu_end();
	}

	return err;
}
static int aes_set_key(struct crypto_tfm *tfm, const u8 *in_key,
		       unsigned int key_len)
{
	return aes_set_key_common(tfm, crypto_tfm_ctx(tfm), in_key, key_len);
}
static void aesni_encrypt(struct crypto_tfm *tfm, u8 *dst, const u8 *src)
{
	struct crypto_aes_ctx *ctx = aes_ctx(crypto_tfm_ctx(tfm));

	if (!crypto_simd_usable()) {
		aes_encrypt(ctx, dst, src);
	} else {
		kernel_fpu_begin();
		aesni_enc(ctx, dst, src);
		kernel_fpu_end();
	}
}
static void aesni_decrypt(struct crypto_tfm *tfm, u8 *dst, const u8 *src)
{
	struct crypto_aes_ctx *ctx = aes_ctx(crypto_tfm_ctx(tfm));

	if (!crypto_simd_usable()) {
		aes_decrypt(ctx, dst, src);
	} else {
		kernel_fpu_begin();
		aesni_dec(ctx, dst, src);
		kernel_fpu_end();
	}
}
static int aesni_skcipher_setkey(struct crypto_skcipher *tfm, const u8 *key,
				 unsigned int len)
{
	return aes_set_key_common(crypto_skcipher_tfm(tfm),
				  crypto_skcipher_ctx(tfm), key, len);
}
static int ecb_encrypt(struct skcipher_request *req)
{
	struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
	struct crypto_aes_ctx *ctx = aes_ctx(crypto_skcipher_ctx(tfm));
	struct skcipher_walk walk;
	unsigned int nbytes;
	int err;

	err = skcipher_walk_virt(&walk, req, true);

	kernel_fpu_begin();
	while ((nbytes = walk.nbytes)) {
		aesni_ecb_enc(ctx, walk.dst.virt.addr, walk.src.virt.addr,
			      nbytes & AES_BLOCK_MASK);
		nbytes &= AES_BLOCK_SIZE - 1;
		err = skcipher_walk_done(&walk, nbytes);
	}
	kernel_fpu_end();

	return err;
}
static int ecb_decrypt(struct skcipher_request *req)
{
	struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
	struct crypto_aes_ctx *ctx = aes_ctx(crypto_skcipher_ctx(tfm));
	struct skcipher_walk walk;
	unsigned int nbytes;
	int err;

	err = skcipher_walk_virt(&walk, req, true);

	kernel_fpu_begin();
	while ((nbytes = walk.nbytes)) {
		aesni_ecb_dec(ctx, walk.dst.virt.addr, walk.src.virt.addr,
			      nbytes & AES_BLOCK_MASK);
		nbytes &= AES_BLOCK_SIZE - 1;
		err = skcipher_walk_done(&walk, nbytes);
	}
	kernel_fpu_end();

	return err;
}
static int cbc_encrypt(struct skcipher_request *req)
{
	struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
	struct crypto_aes_ctx *ctx = aes_ctx(crypto_skcipher_ctx(tfm));
	struct skcipher_walk walk;
	unsigned int nbytes;
	int err;

	err = skcipher_walk_virt(&walk, req, true);

	kernel_fpu_begin();
	while ((nbytes = walk.nbytes)) {
		aesni_cbc_enc(ctx, walk.dst.virt.addr, walk.src.virt.addr,
			      nbytes & AES_BLOCK_MASK, walk.iv);
		nbytes &= AES_BLOCK_SIZE - 1;
		err = skcipher_walk_done(&walk, nbytes);
	}
	kernel_fpu_end();

	return err;
}
static int cbc_decrypt(struct skcipher_request *req)
{
	struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
	struct crypto_aes_ctx *ctx = aes_ctx(crypto_skcipher_ctx(tfm));
	struct skcipher_walk walk;
	unsigned int nbytes;
	int err;

	err = skcipher_walk_virt(&walk, req, true);

	kernel_fpu_begin();
	while ((nbytes = walk.nbytes)) {
		aesni_cbc_dec(ctx, walk.dst.virt.addr, walk.src.virt.addr,
			      nbytes & AES_BLOCK_MASK, walk.iv);
		nbytes &= AES_BLOCK_SIZE - 1;
		err = skcipher_walk_done(&walk, nbytes);
	}
	kernel_fpu_end();

	return err;
}
#ifdef CONFIG_X86_64
static void ctr_crypt_final(struct crypto_aes_ctx *ctx,
			    struct skcipher_walk *walk)
{
	u8 *ctrblk = walk->iv;
	u8 keystream[AES_BLOCK_SIZE];
	u8 *src = walk->src.virt.addr;
	u8 *dst = walk->dst.virt.addr;
	unsigned int nbytes = walk->nbytes;

	aesni_enc(ctx, keystream, ctrblk);
	crypto_xor_cpy(dst, keystream, src, nbytes);

	crypto_inc(ctrblk, AES_BLOCK_SIZE);
}
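
/*
 * CTR is a stream mode, so the tail of a request need not be a full
 * block: ctr_crypt_final() above encrypts one last counter block and
 * XORs only the remaining nbytes (< AES_BLOCK_SIZE) of payload with
 * that keystream, then bumps the counter for completeness.
 */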
#ifdef CONFIG_AS_AVX
static void aesni_ctr_enc_avx_tfm(struct crypto_aes_ctx *ctx, u8 *out,
			      const u8 *in, unsigned int len, u8 *iv)
{
	/*
	 * based on key length, override with the by8 version
	 * of ctr mode encryption/decryption for improved performance
	 * aes_set_key_common() ensures that key length is one of
	 * {128,192,256} bits
	 */
	if (ctx->key_length == AES_KEYSIZE_128)
		aes_ctr_enc_128_avx_by8(in, iv, (void *)ctx, out, len);
	else if (ctx->key_length == AES_KEYSIZE_192)
		aes_ctr_enc_192_avx_by8(in, iv, (void *)ctx, out, len);
	else
		aes_ctr_enc_256_avx_by8(in, iv, (void *)ctx, out, len);
}
#endif
static int ctr_crypt(struct skcipher_request *req)
{
	struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
	struct crypto_aes_ctx *ctx = aes_ctx(crypto_skcipher_ctx(tfm));
	struct skcipher_walk walk;
	unsigned int nbytes;
	int err;

	err = skcipher_walk_virt(&walk, req, true);

	kernel_fpu_begin();
	while ((nbytes = walk.nbytes) >= AES_BLOCK_SIZE) {
		aesni_ctr_enc_tfm(ctx, walk.dst.virt.addr, walk.src.virt.addr,
				  nbytes & AES_BLOCK_MASK, walk.iv);
		nbytes &= AES_BLOCK_SIZE - 1;
		err = skcipher_walk_done(&walk, nbytes);
	}
	if (walk.nbytes) {
		ctr_crypt_final(ctx, &walk);
		err = skcipher_walk_done(&walk, 0);
	}
	kernel_fpu_end();

	return err;
}
#endif
static int xts_aesni_setkey(struct crypto_skcipher *tfm, const u8 *key,
			    unsigned int keylen)
{
	struct aesni_xts_ctx *ctx = crypto_skcipher_ctx(tfm);
	int err;

	err = xts_verify_key(tfm, key, keylen);
	if (err)
		return err;

	keylen /= 2;

	/* first half of xts-key is for crypt */
	err = aes_set_key_common(crypto_skcipher_tfm(tfm), ctx->raw_crypt_ctx,
				 key, keylen);
	if (err)
		return err;

	/* second half of xts-key is for tweak */
	return aes_set_key_common(crypto_skcipher_tfm(tfm), ctx->raw_tweak_ctx,
				  key + keylen, keylen);
}
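
/*
 * Example of the split above (a property of the XTS key format, not of
 * this driver): a 64-byte xts(aes) key yields two independent AES-256
 * schedules, the first 32 bytes keying the data cipher and the last 32
 * bytes keying the tweak cipher; a 32-byte key likewise splits into two
 * AES-128 halves.
 */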
#ifdef CONFIG_X86_64

static void aesni_xts_tweak(void *ctx, u8 *out, const u8 *in)
{
	aesni_enc(ctx, out, in);
}

static void aesni_xts_enc(void *ctx, u128 *dst, const u128 *src, le128 *iv)
{
	glue_xts_crypt_128bit_one(ctx, dst, src, iv, GLUE_FUNC_CAST(aesni_enc));
}

static void aesni_xts_dec(void *ctx, u128 *dst, const u128 *src, le128 *iv)
{
	glue_xts_crypt_128bit_one(ctx, dst, src, iv, GLUE_FUNC_CAST(aesni_dec));
}

static void aesni_xts_enc8(void *ctx, u128 *dst, const u128 *src, le128 *iv)
{
	aesni_xts_crypt8(ctx, (u8 *)dst, (const u8 *)src, true, (u8 *)iv);
}

static void aesni_xts_dec8(void *ctx, u128 *dst, const u128 *src, le128 *iv)
{
	aesni_xts_crypt8(ctx, (u8 *)dst, (const u8 *)src, false, (u8 *)iv);
}
static const struct common_glue_ctx aesni_enc_xts = {
	.num_funcs = 2,
	.fpu_blocks_limit = 1,

	.funcs = { {
		.num_blocks = 8,
		.fn_u = { .xts = GLUE_XTS_FUNC_CAST(aesni_xts_enc8) }
	}, {
		.num_blocks = 1,
		.fn_u = { .xts = GLUE_XTS_FUNC_CAST(aesni_xts_enc) }
	} }
};
static const struct common_glue_ctx aesni_dec_xts = {
	.num_funcs = 2,
	.fpu_blocks_limit = 1,

	.funcs = { {
		.num_blocks = 8,
		.fn_u = { .xts = GLUE_XTS_FUNC_CAST(aesni_xts_dec8) }
	}, {
		.num_blocks = 1,
		.fn_u = { .xts = GLUE_XTS_FUNC_CAST(aesni_xts_dec) }
	} }
};
static int xts_encrypt(struct skcipher_request *req)
{
	struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
	struct aesni_xts_ctx *ctx = crypto_skcipher_ctx(tfm);

	return glue_xts_req_128bit(&aesni_enc_xts, req,
				   XTS_TWEAK_CAST(aesni_xts_tweak),
				   aes_ctx(ctx->raw_tweak_ctx),
				   aes_ctx(ctx->raw_crypt_ctx),
				   false);
}
static int xts_decrypt(struct skcipher_request *req)
{
	struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
	struct aesni_xts_ctx *ctx = crypto_skcipher_ctx(tfm);

	return glue_xts_req_128bit(&aesni_dec_xts, req,
				   XTS_TWEAK_CAST(aesni_xts_tweak),
				   aes_ctx(ctx->raw_tweak_ctx),
				   aes_ctx(ctx->raw_crypt_ctx),
				   true);
}
static int
rfc4106_set_hash_subkey(u8 *hash_subkey, const u8 *key, unsigned int key_len)
{
	struct crypto_aes_ctx ctx;
	int ret;

	ret = aes_expandkey(&ctx, key, key_len);
	if (ret)
		return ret;

	/* Clear the data in the hash sub key container to zero.*/
	/* We want to cipher all zeros to create the hash sub key. */
	memset(hash_subkey, 0, RFC4106_HASH_SUBKEY_SIZE);

	aes_encrypt(&ctx, hash_subkey, hash_subkey);

	memzero_explicit(&ctx, sizeof(ctx));
	return 0;
}
static int common_rfc4106_set_key(struct crypto_aead *aead, const u8 *key,
				  unsigned int key_len)
{
	struct aesni_rfc4106_gcm_ctx *ctx = aesni_rfc4106_gcm_ctx_get(aead);

	if (key_len < 4) {
		crypto_aead_set_flags(aead, CRYPTO_TFM_RES_BAD_KEY_LEN);
		return -EINVAL;
	}
	/*Account for 4 byte nonce at the end.*/
	key_len -= 4;

	memcpy(ctx->nonce, key + key_len, sizeof(ctx->nonce));

	return aes_set_key_common(crypto_aead_tfm(aead),
				  &ctx->aes_key_expanded, key, key_len) ?:
	       rfc4106_set_hash_subkey(ctx->hash_subkey, key, key_len);
}
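
/*
 * RFC4106 key layout handled above (per the RFC, not driver-specific):
 * the last 4 bytes of the key material are a salt that is never fed to
 * AES; it becomes the fixed first third of every per-request IV. For
 * example, a 20-byte key is a 16-byte AES-128 key followed by the salt.
 */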
/* This is the Integrity Check Value (aka the authentication tag) length and can
 * be 8, 12 or 16 bytes long. */
static int common_rfc4106_set_authsize(struct crypto_aead *aead,
				       unsigned int authsize)
{
	switch (authsize) {
	case 8:
	case 12:
	case 16:
		break;
	default:
		return -EINVAL;
	}

	return 0;
}
static int generic_gcmaes_set_authsize(struct crypto_aead *tfm,
				       unsigned int authsize)
{
	switch (authsize) {
	case 4:
	case 8:
	case 12:
	case 13:
	case 14:
	case 15:
	case 16:
		break;
	default:
		return -EINVAL;
	}

	return 0;
}
static int gcmaes_crypt_by_sg(bool enc, struct aead_request *req,
			      unsigned int assoclen, u8 *hash_subkey,
			      u8 *iv, void *aes_ctx)
{
	struct crypto_aead *tfm = crypto_aead_reqtfm(req);
	unsigned long auth_tag_len = crypto_aead_authsize(tfm);
	const struct aesni_gcm_tfm_s *gcm_tfm = aesni_gcm_tfm;
	struct gcm_context_data data AESNI_ALIGN_ATTR;
	struct scatter_walk dst_sg_walk = {};
	unsigned long left = req->cryptlen;
	unsigned long len, srclen, dstlen;
	struct scatter_walk assoc_sg_walk;
	struct scatter_walk src_sg_walk;
	struct scatterlist src_start[2];
	struct scatterlist dst_start[2];
	struct scatterlist *src_sg;
	struct scatterlist *dst_sg;
	u8 *src, *dst, *assoc;
	u8 *assocmem = NULL;
	u8 authTag[16];
	if (!enc)
		left -= auth_tag_len;

#ifdef CONFIG_AS_AVX2
	if (left < AVX_GEN4_OPTSIZE && gcm_tfm == &aesni_gcm_tfm_avx_gen4)
		gcm_tfm = &aesni_gcm_tfm_avx_gen2;
#endif
#ifdef CONFIG_AS_AVX
	if (left < AVX_GEN2_OPTSIZE && gcm_tfm == &aesni_gcm_tfm_avx_gen2)
		gcm_tfm = &aesni_gcm_tfm_sse;
#endif
	/* Linearize assoc, if not already linear */
	if (req->src->length >= assoclen && req->src->length &&
	    (!PageHighMem(sg_page(req->src)) ||
	     req->src->offset + req->src->length <= PAGE_SIZE)) {
		scatterwalk_start(&assoc_sg_walk, req->src);
		assoc = scatterwalk_map(&assoc_sg_walk);
	} else {
		/* assoc can be any length, so must be on heap */
		assocmem = kmalloc(assoclen, GFP_ATOMIC);
		if (unlikely(!assocmem))
			return -ENOMEM;
		assoc = assocmem;

		scatterwalk_map_and_copy(assoc, req->src, 0, assoclen, 0);
	}
	if (left) {
		src_sg = scatterwalk_ffwd(src_start, req->src, req->assoclen);
		scatterwalk_start(&src_sg_walk, src_sg);
		if (req->src != req->dst) {
			dst_sg = scatterwalk_ffwd(dst_start, req->dst,
						  req->assoclen);
			scatterwalk_start(&dst_sg_walk, dst_sg);
		}
	}
	kernel_fpu_begin();
	gcm_tfm->init(aes_ctx, &data, iv,
		      hash_subkey, assoc, assoclen);
	if (req->src != req->dst) {
		while (left) {
			src = scatterwalk_map(&src_sg_walk);
			dst = scatterwalk_map(&dst_sg_walk);
			srclen = scatterwalk_clamp(&src_sg_walk, left);
			dstlen = scatterwalk_clamp(&dst_sg_walk, left);
			len = min(srclen, dstlen);
			if (len) {
				if (enc)
					gcm_tfm->enc_update(aes_ctx, &data,
							    dst, src, len);
				else
					gcm_tfm->dec_update(aes_ctx, &data,
							    dst, src, len);
			}
			left -= len;

			scatterwalk_unmap(src);
			scatterwalk_unmap(dst);
			scatterwalk_advance(&src_sg_walk, len);
			scatterwalk_advance(&dst_sg_walk, len);
			scatterwalk_done(&src_sg_walk, 0, left);
			scatterwalk_done(&dst_sg_walk, 1, left);
		}
	} else {
		while (left) {
			dst = src = scatterwalk_map(&src_sg_walk);
			len = scatterwalk_clamp(&src_sg_walk, left);
			if (len) {
				if (enc)
					gcm_tfm->enc_update(aes_ctx, &data,
							    src, src, len);
				else
					gcm_tfm->dec_update(aes_ctx, &data,
							    src, src, len);
			}
			left -= len;
			scatterwalk_unmap(src);
			scatterwalk_advance(&src_sg_walk, len);
			scatterwalk_done(&src_sg_walk, 1, left);
		}
	}
	gcm_tfm->finalize(aes_ctx, &data, authTag, auth_tag_len);
	kernel_fpu_end();
	if (!assocmem)
		scatterwalk_unmap(assoc);
	else
		kfree(assocmem);

	if (!enc) {
		u8 authTagMsg[16];

		/* Copy out original authTag */
		scatterwalk_map_and_copy(authTagMsg, req->src,
					 req->assoclen + req->cryptlen -
					 auth_tag_len,
					 auth_tag_len, 0);

		/* Compare generated tag with passed in tag. */
		return crypto_memneq(authTagMsg, authTag, auth_tag_len) ?
			-EBADMSG : 0;
	}

	/* Copy in the authTag */
	scatterwalk_map_and_copy(authTag, req->dst,
				 req->assoclen + req->cryptlen,
				 auth_tag_len, 1);

	return 0;
}
static int gcmaes_encrypt(struct aead_request *req, unsigned int assoclen,
			  u8 *hash_subkey, u8 *iv, void *aes_ctx)
{
	return gcmaes_crypt_by_sg(true, req, assoclen, hash_subkey, iv,
				  aes_ctx);
}

static int gcmaes_decrypt(struct aead_request *req, unsigned int assoclen,
			  u8 *hash_subkey, u8 *iv, void *aes_ctx)
{
	return gcmaes_crypt_by_sg(false, req, assoclen, hash_subkey, iv,
				  aes_ctx);
}
static int helper_rfc4106_encrypt(struct aead_request *req)
{
	struct crypto_aead *tfm = crypto_aead_reqtfm(req);
	struct aesni_rfc4106_gcm_ctx *ctx = aesni_rfc4106_gcm_ctx_get(tfm);
	void *aes_ctx = &(ctx->aes_key_expanded);
	u8 iv[16] __attribute__ ((__aligned__(AESNI_ALIGN)));
	unsigned int i;
	__be32 counter = cpu_to_be32(1);

	/*
	 * Assuming we are supporting rfc4106 64-bit extended sequence
	 * numbers, we need the AAD length to be 16 or 20 bytes.
	 */
	if (unlikely(req->assoclen != 16 && req->assoclen != 20))
		return -EINVAL;

	/* IV below built */
	for (i = 0; i < 4; i++)
		*(iv+i) = ctx->nonce[i];
	for (i = 0; i < 8; i++)
		*(iv+4+i) = req->iv[i];
	*((__be32 *)(iv+12)) = counter;

	return gcmaes_encrypt(req, req->assoclen - 8, ctx->hash_subkey, iv,
			      aes_ctx);
}
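
/*
 * Layout of the 16-byte pre-counter block assembled above (read straight
 * from the loops, shown here for reference):
 *
 *	iv[0..3]   = ctx->nonce   (salt from the tail of the key)
 *	iv[4..11]  = req->iv      (8-byte explicit IV from the packet)
 *	iv[12..15] = 0x00000001   (big-endian block counter)
 *
 * which is exactly the j0 block the aesni_gcm_* routines expect.
 */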
static int helper_rfc4106_decrypt(struct aead_request *req)
{
	__be32 counter = cpu_to_be32(1);
	struct crypto_aead *tfm = crypto_aead_reqtfm(req);
	struct aesni_rfc4106_gcm_ctx *ctx = aesni_rfc4106_gcm_ctx_get(tfm);
	void *aes_ctx = &(ctx->aes_key_expanded);
	u8 iv[16] __attribute__ ((__aligned__(AESNI_ALIGN)));
	unsigned int i;

	/*
	 * Assuming we are supporting rfc4106 64-bit extended sequence
	 * numbers, we need the AAD length to be 16 or 20 bytes.
	 */
	if (unlikely(req->assoclen != 16 && req->assoclen != 20))
		return -EINVAL;

	/* IV below built */
	for (i = 0; i < 4; i++)
		*(iv+i) = ctx->nonce[i];
	for (i = 0; i < 8; i++)
		*(iv+4+i) = req->iv[i];
	*((__be32 *)(iv+12)) = counter;

	return gcmaes_decrypt(req, req->assoclen - 8, ctx->hash_subkey, iv,
			      aes_ctx);
}
#endif
static struct crypto_alg aesni_cipher_alg = {
	.cra_name		= "aes",
	.cra_driver_name	= "aes-aesni",
	.cra_priority		= 300,
	.cra_flags		= CRYPTO_ALG_TYPE_CIPHER,
	.cra_blocksize		= AES_BLOCK_SIZE,
	.cra_ctxsize		= CRYPTO_AES_CTX_SIZE,
	.cra_module		= THIS_MODULE,
	.cra_u	= {
		.cipher	= {
			.cia_min_keysize	= AES_MIN_KEY_SIZE,
			.cia_max_keysize	= AES_MAX_KEY_SIZE,
			.cia_setkey		= aes_set_key,
			.cia_encrypt		= aesni_encrypt,
			.cia_decrypt		= aesni_decrypt
		}
	}
};
static struct skcipher_alg aesni_skciphers[] = {
	{
		.base = {
			.cra_name		= "__ecb(aes)",
			.cra_driver_name	= "__ecb-aes-aesni",
			.cra_priority		= 400,
			.cra_flags		= CRYPTO_ALG_INTERNAL,
			.cra_blocksize		= AES_BLOCK_SIZE,
			.cra_ctxsize		= CRYPTO_AES_CTX_SIZE,
			.cra_module		= THIS_MODULE,
		},
		.min_keysize	= AES_MIN_KEY_SIZE,
		.max_keysize	= AES_MAX_KEY_SIZE,
		.setkey		= aesni_skcipher_setkey,
		.encrypt	= ecb_encrypt,
		.decrypt	= ecb_decrypt,
942 .cra_name = "__cbc(aes)",
943 .cra_driver_name = "__cbc-aes-aesni",
945 .cra_flags = CRYPTO_ALG_INTERNAL,
946 .cra_blocksize = AES_BLOCK_SIZE,
947 .cra_ctxsize = CRYPTO_AES_CTX_SIZE,
948 .cra_module = THIS_MODULE,
950 .min_keysize = AES_MIN_KEY_SIZE,
951 .max_keysize = AES_MAX_KEY_SIZE,
952 .ivsize = AES_BLOCK_SIZE,
953 .setkey = aesni_skcipher_setkey,
954 .encrypt = cbc_encrypt,
955 .decrypt = cbc_decrypt,
959 .cra_name = "__ctr(aes)",
960 .cra_driver_name = "__ctr-aes-aesni",
962 .cra_flags = CRYPTO_ALG_INTERNAL,
964 .cra_ctxsize = CRYPTO_AES_CTX_SIZE,
965 .cra_module = THIS_MODULE,
967 .min_keysize = AES_MIN_KEY_SIZE,
968 .max_keysize = AES_MAX_KEY_SIZE,
969 .ivsize = AES_BLOCK_SIZE,
970 .chunksize = AES_BLOCK_SIZE,
971 .setkey = aesni_skcipher_setkey,
972 .encrypt = ctr_crypt,
973 .decrypt = ctr_crypt,
976 .cra_name = "__xts(aes)",
977 .cra_driver_name = "__xts-aes-aesni",
979 .cra_flags = CRYPTO_ALG_INTERNAL,
980 .cra_blocksize = AES_BLOCK_SIZE,
981 .cra_ctxsize = XTS_AES_CTX_SIZE,
982 .cra_module = THIS_MODULE,
984 .min_keysize = 2 * AES_MIN_KEY_SIZE,
985 .max_keysize = 2 * AES_MAX_KEY_SIZE,
986 .ivsize = AES_BLOCK_SIZE,
987 .setkey = xts_aesni_setkey,
988 .encrypt = xts_encrypt,
989 .decrypt = xts_decrypt,
static
struct simd_skcipher_alg *aesni_simd_skciphers[ARRAY_SIZE(aesni_skciphers)];
#ifdef CONFIG_X86_64
static int generic_gcmaes_set_key(struct crypto_aead *aead, const u8 *key,
				  unsigned int key_len)
{
	struct generic_gcmaes_ctx *ctx = generic_gcmaes_ctx_get(aead);

	return aes_set_key_common(crypto_aead_tfm(aead),
				  &ctx->aes_key_expanded, key, key_len) ?:
	       rfc4106_set_hash_subkey(ctx->hash_subkey, key, key_len);
}
static int generic_gcmaes_encrypt(struct aead_request *req)
{
	struct crypto_aead *tfm = crypto_aead_reqtfm(req);
	struct generic_gcmaes_ctx *ctx = generic_gcmaes_ctx_get(tfm);
	void *aes_ctx = &(ctx->aes_key_expanded);
	u8 iv[16] __attribute__ ((__aligned__(AESNI_ALIGN)));
	__be32 counter = cpu_to_be32(1);

	memcpy(iv, req->iv, 12);
	*((__be32 *)(iv+12)) = counter;

	return gcmaes_encrypt(req, req->assoclen, ctx->hash_subkey, iv,
			      aes_ctx);
}
static int generic_gcmaes_decrypt(struct aead_request *req)
{
	__be32 counter = cpu_to_be32(1);
	struct crypto_aead *tfm = crypto_aead_reqtfm(req);
	struct generic_gcmaes_ctx *ctx = generic_gcmaes_ctx_get(tfm);
	void *aes_ctx = &(ctx->aes_key_expanded);
	u8 iv[16] __attribute__ ((__aligned__(AESNI_ALIGN)));

	memcpy(iv, req->iv, 12);
	*((__be32 *)(iv+12)) = counter;

	return gcmaes_decrypt(req, req->assoclen, ctx->hash_subkey, iv,
			      aes_ctx);
}
static struct aead_alg aesni_aeads[] = { {
	.setkey			= common_rfc4106_set_key,
	.setauthsize		= common_rfc4106_set_authsize,
	.encrypt		= helper_rfc4106_encrypt,
	.decrypt		= helper_rfc4106_decrypt,
	.ivsize			= GCM_RFC4106_IV_SIZE,
	.maxauthsize		= 16,
	.base = {
		.cra_name		= "__rfc4106(gcm(aes))",
		.cra_driver_name	= "__rfc4106-gcm-aesni",
		.cra_priority		= 400,
		.cra_flags		= CRYPTO_ALG_INTERNAL,
		.cra_blocksize		= 1,
		.cra_ctxsize		= sizeof(struct aesni_rfc4106_gcm_ctx),
		.cra_alignmask		= AESNI_ALIGN - 1,
		.cra_module		= THIS_MODULE,
	},
}, {
	.setkey			= generic_gcmaes_set_key,
	.setauthsize		= generic_gcmaes_set_authsize,
	.encrypt		= generic_gcmaes_encrypt,
	.decrypt		= generic_gcmaes_decrypt,
	.ivsize			= GCM_AES_IV_SIZE,
	.maxauthsize		= 16,
	.base = {
		.cra_name		= "__gcm(aes)",
		.cra_driver_name	= "__generic-gcm-aesni",
		.cra_priority		= 400,
		.cra_flags		= CRYPTO_ALG_INTERNAL,
		.cra_blocksize		= 1,
		.cra_ctxsize		= sizeof(struct generic_gcmaes_ctx),
		.cra_alignmask		= AESNI_ALIGN - 1,
		.cra_module		= THIS_MODULE,
	},
} };
#else
static struct aead_alg aesni_aeads[0];
#endif
static struct simd_aead_alg *aesni_simd_aeads[ARRAY_SIZE(aesni_aeads)];
static const struct x86_cpu_id aesni_cpu_id[] = {
	X86_FEATURE_MATCH(X86_FEATURE_AES),
	{}
};
MODULE_DEVICE_TABLE(x86cpu, aesni_cpu_id);
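
/*
 * The device table above lets userspace (udev/modprobe) autoload this
 * module on any CPU that advertises the AES feature flag, instead of
 * requiring manual module configuration.
 */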
static int __init aesni_init(void)
{
	int err;

	if (!x86_match_cpu(aesni_cpu_id))
		return -ENODEV;
#ifdef CONFIG_X86_64
#ifdef CONFIG_AS_AVX2
	if (boot_cpu_has(X86_FEATURE_AVX2)) {
		pr_info("AVX2 version of gcm_enc/dec engaged.\n");
		aesni_gcm_tfm = &aesni_gcm_tfm_avx_gen4;
	} else
#endif
#ifdef CONFIG_AS_AVX
	if (boot_cpu_has(X86_FEATURE_AVX)) {
		pr_info("AVX version of gcm_enc/dec engaged.\n");
		aesni_gcm_tfm = &aesni_gcm_tfm_avx_gen2;
	} else
#endif
	{
		pr_info("SSE version of gcm_enc/dec engaged.\n");
		aesni_gcm_tfm = &aesni_gcm_tfm_sse;
	}
	aesni_ctr_enc_tfm = aesni_ctr_enc;
#ifdef CONFIG_AS_AVX
	if (boot_cpu_has(X86_FEATURE_AVX)) {
		/* optimize performance of ctr mode encryption transform */
		aesni_ctr_enc_tfm = aesni_ctr_enc_avx_tfm;
		pr_info("AES CTR mode by8 optimization enabled\n");
	}
#endif
#endif
	err = crypto_register_alg(&aesni_cipher_alg);
	if (err)
		return err;

	err = simd_register_skciphers_compat(aesni_skciphers,
					     ARRAY_SIZE(aesni_skciphers),
					     aesni_simd_skciphers);
	if (err)
		goto unregister_cipher;

	err = simd_register_aeads_compat(aesni_aeads, ARRAY_SIZE(aesni_aeads),
					 aesni_simd_aeads);
	if (err)
		goto unregister_skciphers;

	return 0;

unregister_skciphers:
	simd_unregister_skciphers(aesni_skciphers, ARRAY_SIZE(aesni_skciphers),
				  aesni_simd_skciphers);
unregister_cipher:
	crypto_unregister_alg(&aesni_cipher_alg);
	return err;
}
static void __exit aesni_exit(void)
{
	simd_unregister_aeads(aesni_aeads, ARRAY_SIZE(aesni_aeads),
			      aesni_simd_aeads);
	simd_unregister_skciphers(aesni_skciphers, ARRAY_SIZE(aesni_skciphers),
				  aesni_simd_skciphers);
	crypto_unregister_alg(&aesni_cipher_alg);
}
late_initcall(aesni_init);
module_exit(aesni_exit);

MODULE_DESCRIPTION("Rijndael (AES) Cipher Algorithm, Intel AES-NI instructions optimized");
MODULE_LICENSE("GPL");
MODULE_ALIAS_CRYPTO("aes");