crypto: remove CRYPTO_TFM_RES_BAD_KEY_LEN
[linux-block.git] / arch / x86 / crypto / aesni-intel_glue.c

// SPDX-License-Identifier: GPL-2.0-or-later
/*
 * Support for Intel AES-NI instructions. This file contains glue
 * code; the real AES implementation is in aesni-intel_asm.S.
 *
 * Copyright (C) 2008, Intel Corp.
 *    Author: Huang Ying <ying.huang@intel.com>
 *
 * Added RFC4106 AES-GCM support for 128-bit keys under the AEAD
 * interface for 64-bit kernels.
 *    Authors: Adrian Hoban <adrian.hoban@intel.com>
 *             Gabriele Paoloni <gabriele.paoloni@intel.com>
 *             Tadeusz Struk (tadeusz.struk@intel.com)
 *             Aidan O'Mahony (aidan.o.mahony@intel.com)
 *    Copyright (c) 2010, Intel Corporation.
 */

#include <linux/hardirq.h>
#include <linux/types.h>
#include <linux/module.h>
#include <linux/err.h>
#include <crypto/algapi.h>
#include <crypto/aes.h>
#include <crypto/ctr.h>
#include <crypto/b128ops.h>
#include <crypto/gcm.h>
#include <crypto/xts.h>
#include <asm/cpu_device_id.h>
#include <asm/simd.h>
#include <crypto/scatterwalk.h>
#include <crypto/internal/aead.h>
#include <crypto/internal/simd.h>
#include <crypto/internal/skcipher.h>
#include <linux/workqueue.h>
#include <linux/spinlock.h>
#ifdef CONFIG_X86_64
#include <asm/crypto/glue_helper.h>
#endif

#define AESNI_ALIGN	16
#define AESNI_ALIGN_ATTR __attribute__ ((__aligned__(AESNI_ALIGN)))
#define AES_BLOCK_MASK	(~(AES_BLOCK_SIZE - 1))
#define RFC4106_HASH_SUBKEY_SIZE 16
#define AESNI_ALIGN_EXTRA ((AESNI_ALIGN - 1) & ~(CRYPTO_MINALIGN - 1))
#define CRYPTO_AES_CTX_SIZE (sizeof(struct crypto_aes_ctx) + AESNI_ALIGN_EXTRA)
#define XTS_AES_CTX_SIZE (sizeof(struct aesni_xts_ctx) + AESNI_ALIGN_EXTRA)

/* This data is stored at the end of the crypto_tfm struct.
 * It's a type of per "session" data storage location.
 * This needs to be 16 byte aligned.
 */
struct aesni_rfc4106_gcm_ctx {
	u8 hash_subkey[16] AESNI_ALIGN_ATTR;
	struct crypto_aes_ctx aes_key_expanded AESNI_ALIGN_ATTR;
	u8 nonce[4];
};

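/*
 * Note: ->nonce above holds the 4-byte salt that RFC 4106 appends to the AES
 * key material; it becomes the first four bytes of the per-request GCM IV
 * (see common_rfc4106_set_key() and helper_rfc4106_encrypt() below).
 */
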
struct generic_gcmaes_ctx {
	u8 hash_subkey[16] AESNI_ALIGN_ATTR;
	struct crypto_aes_ctx aes_key_expanded AESNI_ALIGN_ATTR;
};

struct aesni_xts_ctx {
	u8 raw_tweak_ctx[sizeof(struct crypto_aes_ctx)] AESNI_ALIGN_ATTR;
	u8 raw_crypt_ctx[sizeof(struct crypto_aes_ctx)] AESNI_ALIGN_ATTR;
};

#define GCM_BLOCK_LEN 16

struct gcm_context_data {
	/* init, update and finalize context data */
	u8 aad_hash[GCM_BLOCK_LEN];
	u64 aad_length;
	u64 in_length;
	u8 partial_block_enc_key[GCM_BLOCK_LEN];
	u8 orig_IV[GCM_BLOCK_LEN];
	u8 current_counter[GCM_BLOCK_LEN];
	u64 partial_block_len;
	u64 unused;
	u8 hash_keys[GCM_BLOCK_LEN * 16];
};

asmlinkage int aesni_set_key(struct crypto_aes_ctx *ctx, const u8 *in_key,
			     unsigned int key_len);
asmlinkage void aesni_enc(const void *ctx, u8 *out, const u8 *in);
asmlinkage void aesni_dec(const void *ctx, u8 *out, const u8 *in);
asmlinkage void aesni_ecb_enc(struct crypto_aes_ctx *ctx, u8 *out,
			      const u8 *in, unsigned int len);
asmlinkage void aesni_ecb_dec(struct crypto_aes_ctx *ctx, u8 *out,
			      const u8 *in, unsigned int len);
asmlinkage void aesni_cbc_enc(struct crypto_aes_ctx *ctx, u8 *out,
			      const u8 *in, unsigned int len, u8 *iv);
asmlinkage void aesni_cbc_dec(struct crypto_aes_ctx *ctx, u8 *out,
			      const u8 *in, unsigned int len, u8 *iv);

#define AVX_GEN2_OPTSIZE 640
#define AVX_GEN4_OPTSIZE 4096

#ifdef CONFIG_X86_64

static void (*aesni_ctr_enc_tfm)(struct crypto_aes_ctx *ctx, u8 *out,
				 const u8 *in, unsigned int len, u8 *iv);
asmlinkage void aesni_ctr_enc(struct crypto_aes_ctx *ctx, u8 *out,
			      const u8 *in, unsigned int len, u8 *iv);

asmlinkage void aesni_xts_crypt8(const struct crypto_aes_ctx *ctx, u8 *out,
				 const u8 *in, bool enc, le128 *iv);

/* asmlinkage void aesni_gcm_enc()
 * void *ctx,  AES Key schedule. Starts on a 16 byte boundary.
 * struct gcm_context_data.  May be uninitialized.
 * u8 *out, Ciphertext output. Encrypt in-place is allowed.
 * const u8 *in, Plaintext input
 * unsigned long plaintext_len, Length of data in bytes for encryption.
 * u8 *iv, Pre-counter block j0: 12 byte IV concatenated with 0x00000001.
 *         16-byte aligned pointer.
 * u8 *hash_subkey, the Hash sub key input. Data starts on a 16-byte boundary.
 * const u8 *aad, Additional Authentication Data (AAD)
 * unsigned long aad_len, Length of AAD in bytes.
 * u8 *auth_tag, Authenticated Tag output.
 * unsigned long auth_tag_len), Authenticated Tag Length in bytes.
 *         Valid values are 16 (most likely), 12 or 8.
 */
asmlinkage void aesni_gcm_enc(void *ctx,
			struct gcm_context_data *gdata, u8 *out,
			const u8 *in, unsigned long plaintext_len, u8 *iv,
			u8 *hash_subkey, const u8 *aad, unsigned long aad_len,
			u8 *auth_tag, unsigned long auth_tag_len);

/* asmlinkage void aesni_gcm_dec()
 * void *ctx, AES Key schedule. Starts on a 16 byte boundary.
 * struct gcm_context_data.  May be uninitialized.
 * u8 *out, Plaintext output. Decrypt in-place is allowed.
 * const u8 *in, Ciphertext input
 * unsigned long ciphertext_len, Length of data in bytes for decryption.
 * u8 *iv, Pre-counter block j0: 12 byte IV concatenated with 0x00000001.
 *         16-byte aligned pointer.
 * u8 *hash_subkey, the Hash sub key input. Data starts on a 16-byte boundary.
 * const u8 *aad, Additional Authentication Data (AAD)
 * unsigned long aad_len, Length of AAD in bytes. With RFC4106 this is going
 *         to be 8 or 12 bytes
 * u8 *auth_tag, Authenticated Tag output.
 * unsigned long auth_tag_len) Authenticated Tag Length in bytes.
 *         Valid values are 16 (most likely), 12 or 8.
 */
asmlinkage void aesni_gcm_dec(void *ctx,
			struct gcm_context_data *gdata, u8 *out,
			const u8 *in, unsigned long ciphertext_len, u8 *iv,
			u8 *hash_subkey, const u8 *aad, unsigned long aad_len,
			u8 *auth_tag, unsigned long auth_tag_len);

/* Scatter / Gather routines, with args similar to above */
asmlinkage void aesni_gcm_init(void *ctx,
			       struct gcm_context_data *gdata,
			       u8 *iv,
			       u8 *hash_subkey, const u8 *aad,
			       unsigned long aad_len);
asmlinkage void aesni_gcm_enc_update(void *ctx,
				     struct gcm_context_data *gdata, u8 *out,
				     const u8 *in, unsigned long plaintext_len);
asmlinkage void aesni_gcm_dec_update(void *ctx,
				     struct gcm_context_data *gdata, u8 *out,
				     const u8 *in,
				     unsigned long ciphertext_len);
asmlinkage void aesni_gcm_finalize(void *ctx,
				   struct gcm_context_data *gdata,
				   u8 *auth_tag, unsigned long auth_tag_len);

static const struct aesni_gcm_tfm_s {
	void (*init)(void *ctx, struct gcm_context_data *gdata, u8 *iv,
		     u8 *hash_subkey, const u8 *aad, unsigned long aad_len);
	void (*enc_update)(void *ctx, struct gcm_context_data *gdata, u8 *out,
			   const u8 *in, unsigned long plaintext_len);
	void (*dec_update)(void *ctx, struct gcm_context_data *gdata, u8 *out,
			   const u8 *in, unsigned long ciphertext_len);
	void (*finalize)(void *ctx, struct gcm_context_data *gdata,
			 u8 *auth_tag, unsigned long auth_tag_len);
} *aesni_gcm_tfm;

static const struct aesni_gcm_tfm_s aesni_gcm_tfm_sse = {
	.init = &aesni_gcm_init,
	.enc_update = &aesni_gcm_enc_update,
	.dec_update = &aesni_gcm_dec_update,
	.finalize = &aesni_gcm_finalize,
};

#ifdef CONFIG_AS_AVX
asmlinkage void aes_ctr_enc_128_avx_by8(const u8 *in, u8 *iv,
		void *keys, u8 *out, unsigned int num_bytes);
asmlinkage void aes_ctr_enc_192_avx_by8(const u8 *in, u8 *iv,
		void *keys, u8 *out, unsigned int num_bytes);
asmlinkage void aes_ctr_enc_256_avx_by8(const u8 *in, u8 *iv,
		void *keys, u8 *out, unsigned int num_bytes);
/*
 * asmlinkage void aesni_gcm_init_avx_gen2()
 * gcm_data *my_ctx_data, context data
 * u8 *hash_subkey,  the Hash sub key input. Data starts on a 16-byte boundary.
 */
asmlinkage void aesni_gcm_init_avx_gen2(void *my_ctx_data,
					struct gcm_context_data *gdata,
					u8 *iv,
					u8 *hash_subkey,
					const u8 *aad,
					unsigned long aad_len);

asmlinkage void aesni_gcm_enc_update_avx_gen2(void *ctx,
				     struct gcm_context_data *gdata, u8 *out,
				     const u8 *in, unsigned long plaintext_len);
asmlinkage void aesni_gcm_dec_update_avx_gen2(void *ctx,
				     struct gcm_context_data *gdata, u8 *out,
				     const u8 *in,
				     unsigned long ciphertext_len);
asmlinkage void aesni_gcm_finalize_avx_gen2(void *ctx,
				   struct gcm_context_data *gdata,
				   u8 *auth_tag, unsigned long auth_tag_len);

asmlinkage void aesni_gcm_enc_avx_gen2(void *ctx,
				struct gcm_context_data *gdata, u8 *out,
			const u8 *in, unsigned long plaintext_len, u8 *iv,
			const u8 *aad, unsigned long aad_len,
			u8 *auth_tag, unsigned long auth_tag_len);

asmlinkage void aesni_gcm_dec_avx_gen2(void *ctx,
				struct gcm_context_data *gdata, u8 *out,
			const u8 *in, unsigned long ciphertext_len, u8 *iv,
			const u8 *aad, unsigned long aad_len,
			u8 *auth_tag, unsigned long auth_tag_len);

static const struct aesni_gcm_tfm_s aesni_gcm_tfm_avx_gen2 = {
	.init = &aesni_gcm_init_avx_gen2,
	.enc_update = &aesni_gcm_enc_update_avx_gen2,
	.dec_update = &aesni_gcm_dec_update_avx_gen2,
	.finalize = &aesni_gcm_finalize_avx_gen2,
};

#endif

#ifdef CONFIG_AS_AVX2
/*
 * asmlinkage void aesni_gcm_init_avx_gen4()
 * gcm_data *my_ctx_data, context data
 * u8 *hash_subkey,  the Hash sub key input. Data starts on a 16-byte boundary.
 */
asmlinkage void aesni_gcm_init_avx_gen4(void *my_ctx_data,
					struct gcm_context_data *gdata,
					u8 *iv,
					u8 *hash_subkey,
					const u8 *aad,
					unsigned long aad_len);

asmlinkage void aesni_gcm_enc_update_avx_gen4(void *ctx,
				     struct gcm_context_data *gdata, u8 *out,
				     const u8 *in, unsigned long plaintext_len);
asmlinkage void aesni_gcm_dec_update_avx_gen4(void *ctx,
				     struct gcm_context_data *gdata, u8 *out,
				     const u8 *in,
				     unsigned long ciphertext_len);
asmlinkage void aesni_gcm_finalize_avx_gen4(void *ctx,
				   struct gcm_context_data *gdata,
				   u8 *auth_tag, unsigned long auth_tag_len);

asmlinkage void aesni_gcm_enc_avx_gen4(void *ctx,
				struct gcm_context_data *gdata, u8 *out,
			const u8 *in, unsigned long plaintext_len, u8 *iv,
			const u8 *aad, unsigned long aad_len,
			u8 *auth_tag, unsigned long auth_tag_len);

asmlinkage void aesni_gcm_dec_avx_gen4(void *ctx,
				struct gcm_context_data *gdata, u8 *out,
			const u8 *in, unsigned long ciphertext_len, u8 *iv,
			const u8 *aad, unsigned long aad_len,
			u8 *auth_tag, unsigned long auth_tag_len);

static const struct aesni_gcm_tfm_s aesni_gcm_tfm_avx_gen4 = {
	.init = &aesni_gcm_init_avx_gen4,
	.enc_update = &aesni_gcm_enc_update_avx_gen4,
	.dec_update = &aesni_gcm_dec_update_avx_gen4,
	.finalize = &aesni_gcm_finalize_avx_gen4,
};

#endif

static inline struct
aesni_rfc4106_gcm_ctx *aesni_rfc4106_gcm_ctx_get(struct crypto_aead *tfm)
{
	unsigned long align = AESNI_ALIGN;

	if (align <= crypto_tfm_ctx_alignment())
		align = 1;
	return PTR_ALIGN(crypto_aead_ctx(tfm), align);
}

static inline struct
generic_gcmaes_ctx *generic_gcmaes_ctx_get(struct crypto_aead *tfm)
{
	unsigned long align = AESNI_ALIGN;

	if (align <= crypto_tfm_ctx_alignment())
		align = 1;
	return PTR_ALIGN(crypto_aead_ctx(tfm), align);
}
#endif

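/*
 * The crypto API only guarantees CRYPTO_MINALIGN for the tfm context, so the
 * AES context is over-allocated by AESNI_ALIGN_EXTRA (see CRYPTO_AES_CTX_SIZE
 * above) and the raw pointer is rounded up here to the 16-byte alignment the
 * AES-NI assembly expects.
 */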
static inline struct crypto_aes_ctx *aes_ctx(void *raw_ctx)
{
	unsigned long addr = (unsigned long)raw_ctx;
	unsigned long align = AESNI_ALIGN;

	if (align <= crypto_tfm_ctx_alignment())
		align = 1;
	return (struct crypto_aes_ctx *)ALIGN(addr, align);
}

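/*
 * Key expansion runs in the AES-NI assembly (aesni_set_key()) when the FPU is
 * usable; otherwise it falls back to the generic aes_expandkey(), so setkey
 * also works from contexts where SIMD registers must not be touched.
 */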
static int aes_set_key_common(struct crypto_tfm *tfm, void *raw_ctx,
			      const u8 *in_key, unsigned int key_len)
{
	struct crypto_aes_ctx *ctx = aes_ctx(raw_ctx);
	int err;

	if (key_len != AES_KEYSIZE_128 && key_len != AES_KEYSIZE_192 &&
	    key_len != AES_KEYSIZE_256)
		return -EINVAL;

	if (!crypto_simd_usable())
		err = aes_expandkey(ctx, in_key, key_len);
	else {
		kernel_fpu_begin();
		err = aesni_set_key(ctx, in_key, key_len);
		kernel_fpu_end();
	}

	return err;
}

static int aes_set_key(struct crypto_tfm *tfm, const u8 *in_key,
		       unsigned int key_len)
{
	return aes_set_key_common(tfm, crypto_tfm_ctx(tfm), in_key, key_len);
}

static void aesni_encrypt(struct crypto_tfm *tfm, u8 *dst, const u8 *src)
{
	struct crypto_aes_ctx *ctx = aes_ctx(crypto_tfm_ctx(tfm));

	if (!crypto_simd_usable()) {
		aes_encrypt(ctx, dst, src);
	} else {
		kernel_fpu_begin();
		aesni_enc(ctx, dst, src);
		kernel_fpu_end();
	}
}

static void aesni_decrypt(struct crypto_tfm *tfm, u8 *dst, const u8 *src)
{
	struct crypto_aes_ctx *ctx = aes_ctx(crypto_tfm_ctx(tfm));

	if (!crypto_simd_usable()) {
		aes_decrypt(ctx, dst, src);
	} else {
		kernel_fpu_begin();
		aesni_dec(ctx, dst, src);
		kernel_fpu_end();
	}
}

static int aesni_skcipher_setkey(struct crypto_skcipher *tfm, const u8 *key,
				 unsigned int len)
{
	return aes_set_key_common(crypto_skcipher_tfm(tfm),
				  crypto_skcipher_ctx(tfm), key, len);
}

static int ecb_encrypt(struct skcipher_request *req)
{
	struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
	struct crypto_aes_ctx *ctx = aes_ctx(crypto_skcipher_ctx(tfm));
	struct skcipher_walk walk;
	unsigned int nbytes;
	int err;

	err = skcipher_walk_virt(&walk, req, true);

	kernel_fpu_begin();
	while ((nbytes = walk.nbytes)) {
		aesni_ecb_enc(ctx, walk.dst.virt.addr, walk.src.virt.addr,
			      nbytes & AES_BLOCK_MASK);
		nbytes &= AES_BLOCK_SIZE - 1;
		err = skcipher_walk_done(&walk, nbytes);
	}
	kernel_fpu_end();

	return err;
}

static int ecb_decrypt(struct skcipher_request *req)
{
	struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
	struct crypto_aes_ctx *ctx = aes_ctx(crypto_skcipher_ctx(tfm));
	struct skcipher_walk walk;
	unsigned int nbytes;
	int err;

	err = skcipher_walk_virt(&walk, req, true);

	kernel_fpu_begin();
	while ((nbytes = walk.nbytes)) {
		aesni_ecb_dec(ctx, walk.dst.virt.addr, walk.src.virt.addr,
			      nbytes & AES_BLOCK_MASK);
		nbytes &= AES_BLOCK_SIZE - 1;
		err = skcipher_walk_done(&walk, nbytes);
	}
	kernel_fpu_end();

	return err;
}

static int cbc_encrypt(struct skcipher_request *req)
{
	struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
	struct crypto_aes_ctx *ctx = aes_ctx(crypto_skcipher_ctx(tfm));
	struct skcipher_walk walk;
	unsigned int nbytes;
	int err;

	err = skcipher_walk_virt(&walk, req, true);

	kernel_fpu_begin();
	while ((nbytes = walk.nbytes)) {
		aesni_cbc_enc(ctx, walk.dst.virt.addr, walk.src.virt.addr,
			      nbytes & AES_BLOCK_MASK, walk.iv);
		nbytes &= AES_BLOCK_SIZE - 1;
		err = skcipher_walk_done(&walk, nbytes);
	}
	kernel_fpu_end();

	return err;
}

static int cbc_decrypt(struct skcipher_request *req)
{
	struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
	struct crypto_aes_ctx *ctx = aes_ctx(crypto_skcipher_ctx(tfm));
	struct skcipher_walk walk;
	unsigned int nbytes;
	int err;

	err = skcipher_walk_virt(&walk, req, true);

	kernel_fpu_begin();
	while ((nbytes = walk.nbytes)) {
		aesni_cbc_dec(ctx, walk.dst.virt.addr, walk.src.virt.addr,
			      nbytes & AES_BLOCK_MASK, walk.iv);
		nbytes &= AES_BLOCK_SIZE - 1;
		err = skcipher_walk_done(&walk, nbytes);
	}
	kernel_fpu_end();

	return err;
}

#ifdef CONFIG_X86_64
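/*
 * CTR mode is a stream cipher: a final block shorter than AES_BLOCK_SIZE is
 * handled below by encrypting the counter block and XOR-ing only the
 * remaining bytes of keystream into the output.
 */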
static void ctr_crypt_final(struct crypto_aes_ctx *ctx,
			    struct skcipher_walk *walk)
{
	u8 *ctrblk = walk->iv;
	u8 keystream[AES_BLOCK_SIZE];
	u8 *src = walk->src.virt.addr;
	u8 *dst = walk->dst.virt.addr;
	unsigned int nbytes = walk->nbytes;

	aesni_enc(ctx, keystream, ctrblk);
	crypto_xor_cpy(dst, keystream, src, nbytes);

	crypto_inc(ctrblk, AES_BLOCK_SIZE);
}

#ifdef CONFIG_AS_AVX
static void aesni_ctr_enc_avx_tfm(struct crypto_aes_ctx *ctx, u8 *out,
			      const u8 *in, unsigned int len, u8 *iv)
{
	/*
	 * based on key length, override with the by8 version
	 * of ctr mode encryption/decryption for improved performance
	 * aes_set_key_common() ensures that key length is one of
	 * {128,192,256}
	 */
	if (ctx->key_length == AES_KEYSIZE_128)
		aes_ctr_enc_128_avx_by8(in, iv, (void *)ctx, out, len);
	else if (ctx->key_length == AES_KEYSIZE_192)
		aes_ctr_enc_192_avx_by8(in, iv, (void *)ctx, out, len);
	else
		aes_ctr_enc_256_avx_by8(in, iv, (void *)ctx, out, len);
}
#endif

static int ctr_crypt(struct skcipher_request *req)
{
	struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
	struct crypto_aes_ctx *ctx = aes_ctx(crypto_skcipher_ctx(tfm));
	struct skcipher_walk walk;
	unsigned int nbytes;
	int err;

	err = skcipher_walk_virt(&walk, req, true);

	kernel_fpu_begin();
	while ((nbytes = walk.nbytes) >= AES_BLOCK_SIZE) {
		aesni_ctr_enc_tfm(ctx, walk.dst.virt.addr, walk.src.virt.addr,
				  nbytes & AES_BLOCK_MASK, walk.iv);
		nbytes &= AES_BLOCK_SIZE - 1;
		err = skcipher_walk_done(&walk, nbytes);
	}
	if (walk.nbytes) {
		ctr_crypt_final(ctx, &walk);
		err = skcipher_walk_done(&walk, 0);
	}
	kernel_fpu_end();

	return err;
}

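/*
 * XTS takes a double-length key: xts_verify_key() checks it, the first half
 * becomes the data-encryption key and the second half the tweak key. The
 * tweak is always *encrypted* (with aesni_enc), even on the decrypt path.
 */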
static int xts_aesni_setkey(struct crypto_skcipher *tfm, const u8 *key,
			    unsigned int keylen)
{
	struct aesni_xts_ctx *ctx = crypto_skcipher_ctx(tfm);
	int err;

	err = xts_verify_key(tfm, key, keylen);
	if (err)
		return err;

	keylen /= 2;

	/* first half of xts-key is for crypt */
	err = aes_set_key_common(crypto_skcipher_tfm(tfm), ctx->raw_crypt_ctx,
				 key, keylen);
	if (err)
		return err;

	/* second half of xts-key is for tweak */
	return aes_set_key_common(crypto_skcipher_tfm(tfm), ctx->raw_tweak_ctx,
				  key + keylen, keylen);
}

static void aesni_xts_enc(const void *ctx, u8 *dst, const u8 *src, le128 *iv)
{
	glue_xts_crypt_128bit_one(ctx, dst, src, iv, aesni_enc);
}

static void aesni_xts_dec(const void *ctx, u8 *dst, const u8 *src, le128 *iv)
{
	glue_xts_crypt_128bit_one(ctx, dst, src, iv, aesni_dec);
}

static void aesni_xts_enc8(const void *ctx, u8 *dst, const u8 *src, le128 *iv)
{
	aesni_xts_crypt8(ctx, dst, src, true, iv);
}

static void aesni_xts_dec8(const void *ctx, u8 *dst, const u8 *src, le128 *iv)
{
	aesni_xts_crypt8(ctx, dst, src, false, iv);
}

static const struct common_glue_ctx aesni_enc_xts = {
	.num_funcs = 2,
	.fpu_blocks_limit = 1,

	.funcs = { {
		.num_blocks = 8,
		.fn_u = { .xts = aesni_xts_enc8 }
	}, {
		.num_blocks = 1,
		.fn_u = { .xts = aesni_xts_enc }
	} }
};

static const struct common_glue_ctx aesni_dec_xts = {
	.num_funcs = 2,
	.fpu_blocks_limit = 1,

	.funcs = { {
		.num_blocks = 8,
		.fn_u = { .xts = aesni_xts_dec8 }
	}, {
		.num_blocks = 1,
		.fn_u = { .xts = aesni_xts_dec }
	} }
};

static int xts_encrypt(struct skcipher_request *req)
{
	struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
	struct aesni_xts_ctx *ctx = crypto_skcipher_ctx(tfm);

	return glue_xts_req_128bit(&aesni_enc_xts, req, aesni_enc,
				   aes_ctx(ctx->raw_tweak_ctx),
				   aes_ctx(ctx->raw_crypt_ctx),
				   false);
}

static int xts_decrypt(struct skcipher_request *req)
{
	struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
	struct aesni_xts_ctx *ctx = crypto_skcipher_ctx(tfm);

	return glue_xts_req_128bit(&aesni_dec_xts, req, aesni_enc,
				   aes_ctx(ctx->raw_tweak_ctx),
				   aes_ctx(ctx->raw_crypt_ctx),
				   true);
}

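/*
 * The GHASH hash subkey H is defined by the GCM specification as the
 * encryption of the all-zero block under the AES key, which is exactly what
 * the helper below computes (using a temporary on-stack key schedule that is
 * wiped afterwards).
 */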
static int
rfc4106_set_hash_subkey(u8 *hash_subkey, const u8 *key, unsigned int key_len)
{
	struct crypto_aes_ctx ctx;
	int ret;

	ret = aes_expandkey(&ctx, key, key_len);
	if (ret)
		return ret;

	/* Clear the data in the hash sub key container to zero.*/
	/* We want to cipher all zeros to create the hash sub key. */
	memset(hash_subkey, 0, RFC4106_HASH_SUBKEY_SIZE);

	aes_encrypt(&ctx, hash_subkey, hash_subkey);

	memzero_explicit(&ctx, sizeof(ctx));
	return 0;
}

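/*
 * For rfc4106(gcm(aes)) the key material supplied by the user is the AES key
 * followed by a 4-byte salt ("nonce"). The salt is split off here and later
 * prepended to the 8-byte per-request IV to form the 12-byte GCM nonce.
 */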
static int common_rfc4106_set_key(struct crypto_aead *aead, const u8 *key,
				  unsigned int key_len)
{
	struct aesni_rfc4106_gcm_ctx *ctx = aesni_rfc4106_gcm_ctx_get(aead);

	if (key_len < 4)
		return -EINVAL;

	/* Account for 4 byte nonce at the end. */
	key_len -= 4;

	memcpy(ctx->nonce, key + key_len, sizeof(ctx->nonce));

	return aes_set_key_common(crypto_aead_tfm(aead),
				  &ctx->aes_key_expanded, key, key_len) ?:
	       rfc4106_set_hash_subkey(ctx->hash_subkey, key, key_len);
}

/* This is the Integrity Check Value (aka the authentication tag) length and can
 * be 8, 12 or 16 bytes long. */
static int common_rfc4106_set_authsize(struct crypto_aead *aead,
				       unsigned int authsize)
{
	switch (authsize) {
	case 8:
	case 12:
	case 16:
		break;
	default:
		return -EINVAL;
	}

	return 0;
}

static int generic_gcmaes_set_authsize(struct crypto_aead *tfm,
				       unsigned int authsize)
{
	switch (authsize) {
	case 4:
	case 8:
	case 12:
	case 13:
	case 14:
	case 15:
	case 16:
		break;
	default:
		return -EINVAL;
	}

	return 0;
}

static int gcmaes_crypt_by_sg(bool enc, struct aead_request *req,
			      unsigned int assoclen, u8 *hash_subkey,
			      u8 *iv, void *aes_ctx)
{
	struct crypto_aead *tfm = crypto_aead_reqtfm(req);
	unsigned long auth_tag_len = crypto_aead_authsize(tfm);
	const struct aesni_gcm_tfm_s *gcm_tfm = aesni_gcm_tfm;
	struct gcm_context_data data AESNI_ALIGN_ATTR;
	struct scatter_walk dst_sg_walk = {};
	unsigned long left = req->cryptlen;
	unsigned long len, srclen, dstlen;
	struct scatter_walk assoc_sg_walk;
	struct scatter_walk src_sg_walk;
	struct scatterlist src_start[2];
	struct scatterlist dst_start[2];
	struct scatterlist *src_sg;
	struct scatterlist *dst_sg;
	u8 *src, *dst, *assoc;
	u8 *assocmem = NULL;
	u8 authTag[16];

	if (!enc)
		left -= auth_tag_len;

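	/*
	 * Pick a narrower implementation for short requests: below the
	 * AVX_GEN4/AVX_GEN2 thresholds the wider code paths are not worth
	 * their setup cost, so fall back from avx_gen4 to avx_gen2 to SSE.
	 */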
#ifdef CONFIG_AS_AVX2
	if (left < AVX_GEN4_OPTSIZE && gcm_tfm == &aesni_gcm_tfm_avx_gen4)
		gcm_tfm = &aesni_gcm_tfm_avx_gen2;
#endif
#ifdef CONFIG_AS_AVX
	if (left < AVX_GEN2_OPTSIZE && gcm_tfm == &aesni_gcm_tfm_avx_gen2)
		gcm_tfm = &aesni_gcm_tfm_sse;
#endif

	/* Linearize assoc, if not already linear */
	if (req->src->length >= assoclen && req->src->length &&
	    (!PageHighMem(sg_page(req->src)) ||
	     req->src->offset + req->src->length <= PAGE_SIZE)) {
		scatterwalk_start(&assoc_sg_walk, req->src);
		assoc = scatterwalk_map(&assoc_sg_walk);
	} else {
		/* assoc can be any length, so must be on heap */
		assocmem = kmalloc(assoclen, GFP_ATOMIC);
		if (unlikely(!assocmem))
			return -ENOMEM;
		assoc = assocmem;

		scatterwalk_map_and_copy(assoc, req->src, 0, assoclen, 0);
	}

	if (left) {
		src_sg = scatterwalk_ffwd(src_start, req->src, req->assoclen);
		scatterwalk_start(&src_sg_walk, src_sg);
		if (req->src != req->dst) {
			dst_sg = scatterwalk_ffwd(dst_start, req->dst,
						  req->assoclen);
			scatterwalk_start(&dst_sg_walk, dst_sg);
		}
	}

	kernel_fpu_begin();
	gcm_tfm->init(aes_ctx, &data, iv,
		      hash_subkey, assoc, assoclen);
	if (req->src != req->dst) {
		while (left) {
			src = scatterwalk_map(&src_sg_walk);
			dst = scatterwalk_map(&dst_sg_walk);
			srclen = scatterwalk_clamp(&src_sg_walk, left);
			dstlen = scatterwalk_clamp(&dst_sg_walk, left);
			len = min(srclen, dstlen);
			if (len) {
				if (enc)
					gcm_tfm->enc_update(aes_ctx, &data,
							    dst, src, len);
				else
					gcm_tfm->dec_update(aes_ctx, &data,
							    dst, src, len);
			}
			left -= len;

			scatterwalk_unmap(src);
			scatterwalk_unmap(dst);
			scatterwalk_advance(&src_sg_walk, len);
			scatterwalk_advance(&dst_sg_walk, len);
			scatterwalk_done(&src_sg_walk, 0, left);
			scatterwalk_done(&dst_sg_walk, 1, left);
		}
	} else {
		while (left) {
			dst = src = scatterwalk_map(&src_sg_walk);
			len = scatterwalk_clamp(&src_sg_walk, left);
			if (len) {
				if (enc)
					gcm_tfm->enc_update(aes_ctx, &data,
							    src, src, len);
				else
					gcm_tfm->dec_update(aes_ctx, &data,
							    src, src, len);
			}
			left -= len;
			scatterwalk_unmap(src);
			scatterwalk_advance(&src_sg_walk, len);
			scatterwalk_done(&src_sg_walk, 1, left);
		}
	}
	gcm_tfm->finalize(aes_ctx, &data, authTag, auth_tag_len);
	kernel_fpu_end();

	if (!assocmem)
		scatterwalk_unmap(assoc);
	else
		kfree(assocmem);

	if (!enc) {
		u8 authTagMsg[16];

		/* Copy out original authTag */
		scatterwalk_map_and_copy(authTagMsg, req->src,
					 req->assoclen + req->cryptlen -
					 auth_tag_len,
					 auth_tag_len, 0);

		/* Compare generated tag with passed in tag. */
		return crypto_memneq(authTagMsg, authTag, auth_tag_len) ?
			-EBADMSG : 0;
	}

	/* Copy in the authTag */
	scatterwalk_map_and_copy(authTag, req->dst,
				 req->assoclen + req->cryptlen,
				 auth_tag_len, 1);

	return 0;
}

static int gcmaes_encrypt(struct aead_request *req, unsigned int assoclen,
			  u8 *hash_subkey, u8 *iv, void *aes_ctx)
{
	return gcmaes_crypt_by_sg(true, req, assoclen, hash_subkey, iv,
				  aes_ctx);
}

static int gcmaes_decrypt(struct aead_request *req, unsigned int assoclen,
			  u8 *hash_subkey, u8 *iv, void *aes_ctx)
{
	return gcmaes_crypt_by_sg(false, req, assoclen, hash_subkey, iv,
				  aes_ctx);
}

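/*
 * rfc4106 (AES-GCM for IPsec ESP): the 12-byte GCM nonce is the 4-byte salt
 * stored at setkey time followed by the 8-byte explicit IV carried in the
 * request, with the 32-bit block counter appended starting at 1. The
 * associated data presented here carries the 8-byte explicit IV at its end,
 * which is why assoclen must be 16 or 20 and 8 is subtracted before calling
 * into the GCM code.
 */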
static int helper_rfc4106_encrypt(struct aead_request *req)
{
	struct crypto_aead *tfm = crypto_aead_reqtfm(req);
	struct aesni_rfc4106_gcm_ctx *ctx = aesni_rfc4106_gcm_ctx_get(tfm);
	void *aes_ctx = &(ctx->aes_key_expanded);
	u8 iv[16] __attribute__ ((__aligned__(AESNI_ALIGN)));
	unsigned int i;
	__be32 counter = cpu_to_be32(1);

	/*
	 * Assuming we are supporting rfc4106 64-bit extended sequence
	 * numbers, the AAD length needs to be 16 or 20 bytes.
	 */
	if (unlikely(req->assoclen != 16 && req->assoclen != 20))
		return -EINVAL;

	/* IV below built */
	for (i = 0; i < 4; i++)
		*(iv+i) = ctx->nonce[i];
	for (i = 0; i < 8; i++)
		*(iv+4+i) = req->iv[i];
	*((__be32 *)(iv+12)) = counter;

	return gcmaes_encrypt(req, req->assoclen - 8, ctx->hash_subkey, iv,
			      aes_ctx);
}

static int helper_rfc4106_decrypt(struct aead_request *req)
{
	__be32 counter = cpu_to_be32(1);
	struct crypto_aead *tfm = crypto_aead_reqtfm(req);
	struct aesni_rfc4106_gcm_ctx *ctx = aesni_rfc4106_gcm_ctx_get(tfm);
	void *aes_ctx = &(ctx->aes_key_expanded);
	u8 iv[16] __attribute__ ((__aligned__(AESNI_ALIGN)));
	unsigned int i;

	/*
	 * Assuming we are supporting rfc4106 64-bit extended sequence
	 * numbers, the AAD length needs to be 16 or 20 bytes.
	 */
	if (unlikely(req->assoclen != 16 && req->assoclen != 20))
		return -EINVAL;

	/* IV below built */
	for (i = 0; i < 4; i++)
		*(iv+i) = ctx->nonce[i];
	for (i = 0; i < 8; i++)
		*(iv+4+i) = req->iv[i];
	*((__be32 *)(iv+12)) = counter;

	return gcmaes_decrypt(req, req->assoclen - 8, ctx->hash_subkey, iv,
			      aes_ctx);
}
#endif

static struct crypto_alg aesni_cipher_alg = {
	.cra_name		= "aes",
	.cra_driver_name	= "aes-aesni",
	.cra_priority		= 300,
	.cra_flags		= CRYPTO_ALG_TYPE_CIPHER,
	.cra_blocksize		= AES_BLOCK_SIZE,
	.cra_ctxsize		= CRYPTO_AES_CTX_SIZE,
	.cra_module		= THIS_MODULE,
	.cra_u	= {
		.cipher	= {
			.cia_min_keysize	= AES_MIN_KEY_SIZE,
			.cia_max_keysize	= AES_MAX_KEY_SIZE,
			.cia_setkey		= aes_set_key,
			.cia_encrypt		= aesni_encrypt,
			.cia_decrypt		= aesni_decrypt
		}
	}
};

static struct skcipher_alg aesni_skciphers[] = {
	{
		.base = {
			.cra_name		= "__ecb(aes)",
			.cra_driver_name	= "__ecb-aes-aesni",
			.cra_priority		= 400,
			.cra_flags		= CRYPTO_ALG_INTERNAL,
			.cra_blocksize		= AES_BLOCK_SIZE,
			.cra_ctxsize		= CRYPTO_AES_CTX_SIZE,
			.cra_module		= THIS_MODULE,
		},
		.min_keysize	= AES_MIN_KEY_SIZE,
		.max_keysize	= AES_MAX_KEY_SIZE,
		.setkey		= aesni_skcipher_setkey,
		.encrypt	= ecb_encrypt,
		.decrypt	= ecb_decrypt,
	}, {
		.base = {
			.cra_name		= "__cbc(aes)",
			.cra_driver_name	= "__cbc-aes-aesni",
			.cra_priority		= 400,
			.cra_flags		= CRYPTO_ALG_INTERNAL,
			.cra_blocksize		= AES_BLOCK_SIZE,
			.cra_ctxsize		= CRYPTO_AES_CTX_SIZE,
			.cra_module		= THIS_MODULE,
		},
		.min_keysize	= AES_MIN_KEY_SIZE,
		.max_keysize	= AES_MAX_KEY_SIZE,
		.ivsize		= AES_BLOCK_SIZE,
		.setkey		= aesni_skcipher_setkey,
		.encrypt	= cbc_encrypt,
		.decrypt	= cbc_decrypt,
#ifdef CONFIG_X86_64
	}, {
		.base = {
			.cra_name		= "__ctr(aes)",
			.cra_driver_name	= "__ctr-aes-aesni",
			.cra_priority		= 400,
			.cra_flags		= CRYPTO_ALG_INTERNAL,
			.cra_blocksize		= 1,
			.cra_ctxsize		= CRYPTO_AES_CTX_SIZE,
			.cra_module		= THIS_MODULE,
		},
		.min_keysize	= AES_MIN_KEY_SIZE,
		.max_keysize	= AES_MAX_KEY_SIZE,
		.ivsize		= AES_BLOCK_SIZE,
		.chunksize	= AES_BLOCK_SIZE,
		.setkey		= aesni_skcipher_setkey,
		.encrypt	= ctr_crypt,
		.decrypt	= ctr_crypt,
	}, {
		.base = {
			.cra_name		= "__xts(aes)",
			.cra_driver_name	= "__xts-aes-aesni",
			.cra_priority		= 401,
			.cra_flags		= CRYPTO_ALG_INTERNAL,
			.cra_blocksize		= AES_BLOCK_SIZE,
			.cra_ctxsize		= XTS_AES_CTX_SIZE,
			.cra_module		= THIS_MODULE,
		},
		.min_keysize	= 2 * AES_MIN_KEY_SIZE,
		.max_keysize	= 2 * AES_MAX_KEY_SIZE,
		.ivsize		= AES_BLOCK_SIZE,
		.setkey		= xts_aesni_setkey,
		.encrypt	= xts_encrypt,
		.decrypt	= xts_decrypt,
#endif
	}
};

static
struct simd_skcipher_alg *aesni_simd_skciphers[ARRAY_SIZE(aesni_skciphers)];

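/*
 * The "__" prefixed algorithms above are marked CRYPTO_ALG_INTERNAL and are
 * never handed to users directly; simd_register_skciphers_compat() in
 * aesni_init() wraps each one in a SIMD helper that defers to cryptd when the
 * FPU is not usable in the calling context.
 */
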
#ifdef CONFIG_X86_64
static int generic_gcmaes_set_key(struct crypto_aead *aead, const u8 *key,
				  unsigned int key_len)
{
	struct generic_gcmaes_ctx *ctx = generic_gcmaes_ctx_get(aead);

	return aes_set_key_common(crypto_aead_tfm(aead),
				  &ctx->aes_key_expanded, key, key_len) ?:
	       rfc4106_set_hash_subkey(ctx->hash_subkey, key, key_len);
}

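/*
 * For plain gcm(aes) the request carries a 12-byte IV; the initial counter
 * block J0 is that IV with a 32-bit big-endian counter of 1 appended, as the
 * GCM specification requires for 96-bit IVs.
 */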
static int generic_gcmaes_encrypt(struct aead_request *req)
{
	struct crypto_aead *tfm = crypto_aead_reqtfm(req);
	struct generic_gcmaes_ctx *ctx = generic_gcmaes_ctx_get(tfm);
	void *aes_ctx = &(ctx->aes_key_expanded);
	u8 iv[16] __attribute__ ((__aligned__(AESNI_ALIGN)));
	__be32 counter = cpu_to_be32(1);

	memcpy(iv, req->iv, 12);
	*((__be32 *)(iv+12)) = counter;

	return gcmaes_encrypt(req, req->assoclen, ctx->hash_subkey, iv,
			      aes_ctx);
}

static int generic_gcmaes_decrypt(struct aead_request *req)
{
	__be32 counter = cpu_to_be32(1);
	struct crypto_aead *tfm = crypto_aead_reqtfm(req);
	struct generic_gcmaes_ctx *ctx = generic_gcmaes_ctx_get(tfm);
	void *aes_ctx = &(ctx->aes_key_expanded);
	u8 iv[16] __attribute__ ((__aligned__(AESNI_ALIGN)));

	memcpy(iv, req->iv, 12);
	*((__be32 *)(iv+12)) = counter;

	return gcmaes_decrypt(req, req->assoclen, ctx->hash_subkey, iv,
			      aes_ctx);
}

static struct aead_alg aesni_aeads[] = { {
	.setkey			= common_rfc4106_set_key,
	.setauthsize		= common_rfc4106_set_authsize,
	.encrypt		= helper_rfc4106_encrypt,
	.decrypt		= helper_rfc4106_decrypt,
	.ivsize			= GCM_RFC4106_IV_SIZE,
	.maxauthsize		= 16,
	.base = {
		.cra_name		= "__rfc4106(gcm(aes))",
		.cra_driver_name	= "__rfc4106-gcm-aesni",
		.cra_priority		= 400,
		.cra_flags		= CRYPTO_ALG_INTERNAL,
		.cra_blocksize		= 1,
		.cra_ctxsize		= sizeof(struct aesni_rfc4106_gcm_ctx),
		.cra_alignmask		= AESNI_ALIGN - 1,
		.cra_module		= THIS_MODULE,
	},
}, {
	.setkey			= generic_gcmaes_set_key,
	.setauthsize		= generic_gcmaes_set_authsize,
	.encrypt		= generic_gcmaes_encrypt,
	.decrypt		= generic_gcmaes_decrypt,
	.ivsize			= GCM_AES_IV_SIZE,
	.maxauthsize		= 16,
	.base = {
		.cra_name		= "__gcm(aes)",
		.cra_driver_name	= "__generic-gcm-aesni",
		.cra_priority		= 400,
		.cra_flags		= CRYPTO_ALG_INTERNAL,
		.cra_blocksize		= 1,
		.cra_ctxsize		= sizeof(struct generic_gcmaes_ctx),
		.cra_alignmask		= AESNI_ALIGN - 1,
		.cra_module		= THIS_MODULE,
	},
} };
#else
static struct aead_alg aesni_aeads[0];
#endif

static struct simd_aead_alg *aesni_simd_aeads[ARRAY_SIZE(aesni_aeads)];

static const struct x86_cpu_id aesni_cpu_id[] = {
	X86_FEATURE_MATCH(X86_FEATURE_AES),
	{}
};
MODULE_DEVICE_TABLE(x86cpu, aesni_cpu_id);

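/*
 * Module init probes the CPU once and picks the fastest available GCM and CTR
 * back ends (AVX2, then AVX, then SSE for GCM; the AVX "by8" routine for CTR)
 * before registering the algorithms with the crypto API.
 */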
static int __init aesni_init(void)
{
	int err;

	if (!x86_match_cpu(aesni_cpu_id))
		return -ENODEV;
#ifdef CONFIG_X86_64
#ifdef CONFIG_AS_AVX2
	if (boot_cpu_has(X86_FEATURE_AVX2)) {
		pr_info("AVX2 version of gcm_enc/dec engaged.\n");
		aesni_gcm_tfm = &aesni_gcm_tfm_avx_gen4;
	} else
#endif
#ifdef CONFIG_AS_AVX
	if (boot_cpu_has(X86_FEATURE_AVX)) {
		pr_info("AVX version of gcm_enc/dec engaged.\n");
		aesni_gcm_tfm = &aesni_gcm_tfm_avx_gen2;
	} else
#endif
	{
		pr_info("SSE version of gcm_enc/dec engaged.\n");
		aesni_gcm_tfm = &aesni_gcm_tfm_sse;
	}
	aesni_ctr_enc_tfm = aesni_ctr_enc;
#ifdef CONFIG_AS_AVX
	if (boot_cpu_has(X86_FEATURE_AVX)) {
		/* optimize performance of ctr mode encryption transform */
		aesni_ctr_enc_tfm = aesni_ctr_enc_avx_tfm;
		pr_info("AES CTR mode by8 optimization enabled\n");
	}
#endif
#endif

	err = crypto_register_alg(&aesni_cipher_alg);
	if (err)
		return err;

	err = simd_register_skciphers_compat(aesni_skciphers,
					     ARRAY_SIZE(aesni_skciphers),
					     aesni_simd_skciphers);
	if (err)
		goto unregister_cipher;

	err = simd_register_aeads_compat(aesni_aeads, ARRAY_SIZE(aesni_aeads),
					 aesni_simd_aeads);
	if (err)
		goto unregister_skciphers;

	return 0;

unregister_skciphers:
	simd_unregister_skciphers(aesni_skciphers, ARRAY_SIZE(aesni_skciphers),
				  aesni_simd_skciphers);
unregister_cipher:
	crypto_unregister_alg(&aesni_cipher_alg);
	return err;
}

static void __exit aesni_exit(void)
{
	simd_unregister_aeads(aesni_aeads, ARRAY_SIZE(aesni_aeads),
			      aesni_simd_aeads);
	simd_unregister_skciphers(aesni_skciphers, ARRAY_SIZE(aesni_skciphers),
				  aesni_simd_skciphers);
	crypto_unregister_alg(&aesni_cipher_alg);
}

late_initcall(aesni_init);
module_exit(aesni_exit);

MODULE_DESCRIPTION("Rijndael (AES) Cipher Algorithm, Intel AES-NI instructions optimized");
MODULE_LICENSE("GPL");
MODULE_ALIAS_CRYPTO("aes");