crypto: aes-ni - Don't print message with KERN_ERR on old system
[linux-block.git] / arch / x86 / crypto / aesni-intel_glue.c
/*
 * Support for Intel AES-NI instructions. This file contains glue
 * code; the real AES implementation is in aesni-intel_asm.S.
 *
 * Copyright (C) 2008, Intel Corp.
 * Author: Huang Ying <ying.huang@intel.com>
 *
 * This program is free software; you can redistribute it and/or modify
 * it under the terms of the GNU General Public License as published by
 * the Free Software Foundation; either version 2 of the License, or
 * (at your option) any later version.
 */

#include <linux/hardirq.h>
#include <linux/types.h>
#include <linux/crypto.h>
#include <linux/err.h>
#include <crypto/algapi.h>
#include <crypto/aes.h>
#include <crypto/cryptd.h>
#include <asm/i387.h>
#include <asm/aes.h>

#if defined(CONFIG_CRYPTO_CTR) || defined(CONFIG_CRYPTO_CTR_MODULE)
#define HAS_CTR
#endif

#if defined(CONFIG_CRYPTO_LRW) || defined(CONFIG_CRYPTO_LRW_MODULE)
#define HAS_LRW
#endif

#if defined(CONFIG_CRYPTO_PCBC) || defined(CONFIG_CRYPTO_PCBC_MODULE)
#define HAS_PCBC
#endif

#if defined(CONFIG_CRYPTO_XTS) || defined(CONFIG_CRYPTO_XTS_MODULE)
#define HAS_XTS
#endif

struct async_aes_ctx {
	struct cryptd_ablkcipher *cryptd_tfm;
};

#define AESNI_ALIGN	16
#define AES_BLOCK_MASK	(~(AES_BLOCK_SIZE-1))

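/*
 * Assembler routines implemented in aesni-intel_asm.S.  They use the
 * SSE/AES-NI registers, so they may only be called between
 * kernel_fpu_begin() and kernel_fpu_end() (see the callers below).
 */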
asmlinkage int aesni_set_key(struct crypto_aes_ctx *ctx, const u8 *in_key,
			     unsigned int key_len);
asmlinkage void aesni_enc(struct crypto_aes_ctx *ctx, u8 *out,
			  const u8 *in);
asmlinkage void aesni_dec(struct crypto_aes_ctx *ctx, u8 *out,
			  const u8 *in);
asmlinkage void aesni_ecb_enc(struct crypto_aes_ctx *ctx, u8 *out,
			      const u8 *in, unsigned int len);
asmlinkage void aesni_ecb_dec(struct crypto_aes_ctx *ctx, u8 *out,
			      const u8 *in, unsigned int len);
asmlinkage void aesni_cbc_enc(struct crypto_aes_ctx *ctx, u8 *out,
			      const u8 *in, unsigned int len, u8 *iv);
asmlinkage void aesni_cbc_dec(struct crypto_aes_ctx *ctx, u8 *out,
			      const u8 *in, unsigned int len, u8 *iv);

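/*
 * In an interrupt with CR0.TS clear, the interrupted task still owns
 * the live FPU/SSE register state, so the AES-NI instructions cannot
 * be used without corrupting it; the callers below fall back to the
 * non-SSE implementations in that case.
 */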
static inline int kernel_fpu_using(void)
{
	if (in_interrupt() && !(read_cr0() & X86_CR0_TS))
		return 1;
	return 0;
}

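/*
 * The AES-NI instructions require a 16-byte aligned key schedule.  The
 * context is over-allocated by AESNI_ALIGN - 1 bytes (see cra_ctxsize
 * below) and rounded up here whenever the API's own context alignment
 * is smaller than AESNI_ALIGN.
 */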
static inline struct crypto_aes_ctx *aes_ctx(void *raw_ctx)
{
	unsigned long addr = (unsigned long)raw_ctx;
	unsigned long align = AESNI_ALIGN;

	if (align <= crypto_tfm_ctx_alignment())
		align = 1;
	return (struct crypto_aes_ctx *)ALIGN(addr, align);
}

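/*
 * Expand the key with the AES-NI instruction when the FPU is available;
 * otherwise fall back to the generic C key expansion, which needs no
 * SSE state.
 */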
static int aes_set_key_common(struct crypto_tfm *tfm, void *raw_ctx,
			      const u8 *in_key, unsigned int key_len)
{
	struct crypto_aes_ctx *ctx = aes_ctx(raw_ctx);
	u32 *flags = &tfm->crt_flags;
	int err;

	if (key_len != AES_KEYSIZE_128 && key_len != AES_KEYSIZE_192 &&
	    key_len != AES_KEYSIZE_256) {
		*flags |= CRYPTO_TFM_RES_BAD_KEY_LEN;
		return -EINVAL;
	}

	if (kernel_fpu_using())
		err = crypto_aes_expand_key(ctx, in_key, key_len);
	else {
		kernel_fpu_begin();
		err = aesni_set_key(ctx, in_key, key_len);
		kernel_fpu_end();
	}

	return err;
}

static int aes_set_key(struct crypto_tfm *tfm, const u8 *in_key,
		       unsigned int key_len)
{
	return aes_set_key_common(tfm, crypto_tfm_ctx(tfm), in_key, key_len);
}

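/*
 * Single-block encrypt/decrypt.  When the FPU cannot be used, fall
 * back to the plain x86 assembler implementation of the cipher.
 */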
static void aes_encrypt(struct crypto_tfm *tfm, u8 *dst, const u8 *src)
{
	struct crypto_aes_ctx *ctx = aes_ctx(crypto_tfm_ctx(tfm));

	if (kernel_fpu_using())
		crypto_aes_encrypt_x86(ctx, dst, src);
	else {
		kernel_fpu_begin();
		aesni_enc(ctx, dst, src);
		kernel_fpu_end();
	}
}

static void aes_decrypt(struct crypto_tfm *tfm, u8 *dst, const u8 *src)
{
	struct crypto_aes_ctx *ctx = aes_ctx(crypto_tfm_ctx(tfm));

	if (kernel_fpu_using())
		crypto_aes_decrypt_x86(ctx, dst, src);
	else {
		kernel_fpu_begin();
		aesni_dec(ctx, dst, src);
		kernel_fpu_end();
	}
}

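/*
 * Synchronous single-block cipher.  Priority 300 ranks it above the
 * generic C and plain x86 assembler AES implementations.
 */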
static struct crypto_alg aesni_alg = {
	.cra_name		= "aes",
	.cra_driver_name	= "aes-aesni",
	.cra_priority		= 300,
	.cra_flags		= CRYPTO_ALG_TYPE_CIPHER,
	.cra_blocksize		= AES_BLOCK_SIZE,
	.cra_ctxsize		= sizeof(struct crypto_aes_ctx)+AESNI_ALIGN-1,
	.cra_alignmask		= 0,
	.cra_module		= THIS_MODULE,
	.cra_list		= LIST_HEAD_INIT(aesni_alg.cra_list),
	.cra_u	= {
		.cipher	= {
			.cia_min_keysize	= AES_MIN_KEY_SIZE,
			.cia_max_keysize	= AES_MAX_KEY_SIZE,
			.cia_setkey		= aes_set_key,
			.cia_encrypt		= aes_encrypt,
			.cia_decrypt		= aes_decrypt
		}
	}
};

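/*
 * The "__" variants call the AES-NI instructions without claiming the
 * FPU themselves; they are only valid inside a kernel_fpu_begin()/end()
 * section.  The "fpu" template used by the ctr/lrw/pcbc/xts wrappers
 * below provides that section, and priority 0 plus the "__" name prefix
 * keep these from being selected by ordinary users of "aes".
 */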
static void __aes_encrypt(struct crypto_tfm *tfm, u8 *dst, const u8 *src)
{
	struct crypto_aes_ctx *ctx = aes_ctx(crypto_tfm_ctx(tfm));

	aesni_enc(ctx, dst, src);
}

static void __aes_decrypt(struct crypto_tfm *tfm, u8 *dst, const u8 *src)
{
	struct crypto_aes_ctx *ctx = aes_ctx(crypto_tfm_ctx(tfm));

	aesni_dec(ctx, dst, src);
}

static struct crypto_alg __aesni_alg = {
	.cra_name		= "__aes-aesni",
	.cra_driver_name	= "__driver-aes-aesni",
	.cra_priority		= 0,
	.cra_flags		= CRYPTO_ALG_TYPE_CIPHER,
	.cra_blocksize		= AES_BLOCK_SIZE,
	.cra_ctxsize		= sizeof(struct crypto_aes_ctx)+AESNI_ALIGN-1,
	.cra_alignmask		= 0,
	.cra_module		= THIS_MODULE,
	.cra_list		= LIST_HEAD_INIT(__aesni_alg.cra_list),
	.cra_u	= {
		.cipher	= {
			.cia_min_keysize	= AES_MIN_KEY_SIZE,
			.cia_max_keysize	= AES_MAX_KEY_SIZE,
			.cia_setkey		= aes_set_key,
			.cia_encrypt		= __aes_encrypt,
			.cia_decrypt		= __aes_decrypt
		}
	}
};

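/*
 * Multi-block ECB/CBC walks.  Each walk runs inside a single
 * kernel_fpu_begin()/end() section, so CRYPTO_TFM_REQ_MAY_SLEEP is
 * cleared first: blkcipher_walk_done() must not sleep while we hold
 * the FPU.
 */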
static int ecb_encrypt(struct blkcipher_desc *desc,
		       struct scatterlist *dst, struct scatterlist *src,
		       unsigned int nbytes)
{
	struct crypto_aes_ctx *ctx = aes_ctx(crypto_blkcipher_ctx(desc->tfm));
	struct blkcipher_walk walk;
	int err;

	blkcipher_walk_init(&walk, dst, src, nbytes);
	err = blkcipher_walk_virt(desc, &walk);
	desc->flags &= ~CRYPTO_TFM_REQ_MAY_SLEEP;

	kernel_fpu_begin();
	while ((nbytes = walk.nbytes)) {
		aesni_ecb_enc(ctx, walk.dst.virt.addr, walk.src.virt.addr,
			      nbytes & AES_BLOCK_MASK);
		nbytes &= AES_BLOCK_SIZE - 1;
		err = blkcipher_walk_done(desc, &walk, nbytes);
	}
	kernel_fpu_end();

	return err;
}

static int ecb_decrypt(struct blkcipher_desc *desc,
		       struct scatterlist *dst, struct scatterlist *src,
		       unsigned int nbytes)
{
	struct crypto_aes_ctx *ctx = aes_ctx(crypto_blkcipher_ctx(desc->tfm));
	struct blkcipher_walk walk;
	int err;

	blkcipher_walk_init(&walk, dst, src, nbytes);
	err = blkcipher_walk_virt(desc, &walk);
	desc->flags &= ~CRYPTO_TFM_REQ_MAY_SLEEP;

	kernel_fpu_begin();
	while ((nbytes = walk.nbytes)) {
		aesni_ecb_dec(ctx, walk.dst.virt.addr, walk.src.virt.addr,
			      nbytes & AES_BLOCK_MASK);
		nbytes &= AES_BLOCK_SIZE - 1;
		err = blkcipher_walk_done(desc, &walk, nbytes);
	}
	kernel_fpu_end();

	return err;
}

static struct crypto_alg blk_ecb_alg = {
	.cra_name		= "__ecb-aes-aesni",
	.cra_driver_name	= "__driver-ecb-aes-aesni",
	.cra_priority		= 0,
	.cra_flags		= CRYPTO_ALG_TYPE_BLKCIPHER,
	.cra_blocksize		= AES_BLOCK_SIZE,
	.cra_ctxsize		= sizeof(struct crypto_aes_ctx)+AESNI_ALIGN-1,
	.cra_alignmask		= 0,
	.cra_type		= &crypto_blkcipher_type,
	.cra_module		= THIS_MODULE,
	.cra_list		= LIST_HEAD_INIT(blk_ecb_alg.cra_list),
	.cra_u = {
		.blkcipher = {
			.min_keysize	= AES_MIN_KEY_SIZE,
			.max_keysize	= AES_MAX_KEY_SIZE,
			.setkey		= aes_set_key,
			.encrypt	= ecb_encrypt,
			.decrypt	= ecb_decrypt,
		},
	},
};

static int cbc_encrypt(struct blkcipher_desc *desc,
		       struct scatterlist *dst, struct scatterlist *src,
		       unsigned int nbytes)
{
	struct crypto_aes_ctx *ctx = aes_ctx(crypto_blkcipher_ctx(desc->tfm));
	struct blkcipher_walk walk;
	int err;

	blkcipher_walk_init(&walk, dst, src, nbytes);
	err = blkcipher_walk_virt(desc, &walk);
	desc->flags &= ~CRYPTO_TFM_REQ_MAY_SLEEP;

	kernel_fpu_begin();
	while ((nbytes = walk.nbytes)) {
		aesni_cbc_enc(ctx, walk.dst.virt.addr, walk.src.virt.addr,
			      nbytes & AES_BLOCK_MASK, walk.iv);
		nbytes &= AES_BLOCK_SIZE - 1;
		err = blkcipher_walk_done(desc, &walk, nbytes);
	}
	kernel_fpu_end();

	return err;
}

static int cbc_decrypt(struct blkcipher_desc *desc,
		       struct scatterlist *dst, struct scatterlist *src,
		       unsigned int nbytes)
{
	struct crypto_aes_ctx *ctx = aes_ctx(crypto_blkcipher_ctx(desc->tfm));
	struct blkcipher_walk walk;
	int err;

	blkcipher_walk_init(&walk, dst, src, nbytes);
	err = blkcipher_walk_virt(desc, &walk);
	desc->flags &= ~CRYPTO_TFM_REQ_MAY_SLEEP;

	kernel_fpu_begin();
	while ((nbytes = walk.nbytes)) {
		aesni_cbc_dec(ctx, walk.dst.virt.addr, walk.src.virt.addr,
			      nbytes & AES_BLOCK_MASK, walk.iv);
		nbytes &= AES_BLOCK_SIZE - 1;
		err = blkcipher_walk_done(desc, &walk, nbytes);
	}
	kernel_fpu_end();

	return err;
}

static struct crypto_alg blk_cbc_alg = {
	.cra_name		= "__cbc-aes-aesni",
	.cra_driver_name	= "__driver-cbc-aes-aesni",
	.cra_priority		= 0,
	.cra_flags		= CRYPTO_ALG_TYPE_BLKCIPHER,
	.cra_blocksize		= AES_BLOCK_SIZE,
	.cra_ctxsize		= sizeof(struct crypto_aes_ctx)+AESNI_ALIGN-1,
	.cra_alignmask		= 0,
	.cra_type		= &crypto_blkcipher_type,
	.cra_module		= THIS_MODULE,
	.cra_list		= LIST_HEAD_INIT(blk_cbc_alg.cra_list),
	.cra_u = {
		.blkcipher = {
			.min_keysize	= AES_MIN_KEY_SIZE,
			.max_keysize	= AES_MAX_KEY_SIZE,
			.setkey		= aes_set_key,
			.encrypt	= cbc_encrypt,
			.decrypt	= cbc_decrypt,
		},
	},
};

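/*
 * Asynchronous wrappers.  setkey is forwarded to the underlying cryptd
 * child transform; request flags and result flags are propagated in
 * both directions.
 */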
static int ablk_set_key(struct crypto_ablkcipher *tfm, const u8 *key,
			unsigned int key_len)
{
	struct async_aes_ctx *ctx = crypto_ablkcipher_ctx(tfm);
	struct crypto_ablkcipher *child = &ctx->cryptd_tfm->base;
	int err;

	crypto_ablkcipher_clear_flags(child, CRYPTO_TFM_REQ_MASK);
	crypto_ablkcipher_set_flags(child, crypto_ablkcipher_get_flags(tfm)
				    & CRYPTO_TFM_REQ_MASK);
	err = crypto_ablkcipher_setkey(child, key, key_len);
	crypto_ablkcipher_set_flags(tfm, crypto_ablkcipher_get_flags(child)
				    & CRYPTO_TFM_RES_MASK);
	return err;
}

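/*
 * If the FPU is unavailable in the current context, hand the request
 * to the cryptd workqueue, which will run it later in process context;
 * otherwise call the underlying synchronous blkcipher directly.
 */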
static int ablk_encrypt(struct ablkcipher_request *req)
{
	struct crypto_ablkcipher *tfm = crypto_ablkcipher_reqtfm(req);
	struct async_aes_ctx *ctx = crypto_ablkcipher_ctx(tfm);

	if (kernel_fpu_using()) {
		struct ablkcipher_request *cryptd_req =
			ablkcipher_request_ctx(req);
		memcpy(cryptd_req, req, sizeof(*req));
		ablkcipher_request_set_tfm(cryptd_req, &ctx->cryptd_tfm->base);
		return crypto_ablkcipher_encrypt(cryptd_req);
	} else {
		struct blkcipher_desc desc;
		desc.tfm = cryptd_ablkcipher_child(ctx->cryptd_tfm);
		desc.info = req->info;
		desc.flags = 0;
		return crypto_blkcipher_crt(desc.tfm)->encrypt(
			&desc, req->dst, req->src, req->nbytes);
	}
}

static int ablk_decrypt(struct ablkcipher_request *req)
{
	struct crypto_ablkcipher *tfm = crypto_ablkcipher_reqtfm(req);
	struct async_aes_ctx *ctx = crypto_ablkcipher_ctx(tfm);

	if (kernel_fpu_using()) {
		struct ablkcipher_request *cryptd_req =
			ablkcipher_request_ctx(req);
		memcpy(cryptd_req, req, sizeof(*req));
		ablkcipher_request_set_tfm(cryptd_req, &ctx->cryptd_tfm->base);
		return crypto_ablkcipher_decrypt(cryptd_req);
	} else {
		struct blkcipher_desc desc;
		desc.tfm = cryptd_ablkcipher_child(ctx->cryptd_tfm);
		desc.info = req->info;
		desc.flags = 0;
		return crypto_blkcipher_crt(desc.tfm)->decrypt(
			&desc, req->dst, req->src, req->nbytes);
	}
}

static void ablk_exit(struct crypto_tfm *tfm)
{
	struct async_aes_ctx *ctx = crypto_tfm_ctx(tfm);

	cryptd_free_ablkcipher(ctx->cryptd_tfm);
}

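/*
 * The request context is sized to hold a copy of the request plus the
 * child's own request context, so ablk_encrypt()/ablk_decrypt() can
 * re-target a request at the cryptd transform without allocating.
 */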
static void ablk_init_common(struct crypto_tfm *tfm,
			     struct cryptd_ablkcipher *cryptd_tfm)
{
	struct async_aes_ctx *ctx = crypto_tfm_ctx(tfm);

	ctx->cryptd_tfm = cryptd_tfm;
	tfm->crt_ablkcipher.reqsize = sizeof(struct ablkcipher_request) +
		crypto_ablkcipher_reqsize(&cryptd_tfm->base);
}

static int ablk_ecb_init(struct crypto_tfm *tfm)
{
	struct cryptd_ablkcipher *cryptd_tfm;

	cryptd_tfm = cryptd_alloc_ablkcipher("__driver-ecb-aes-aesni", 0, 0);
	if (IS_ERR(cryptd_tfm))
		return PTR_ERR(cryptd_tfm);
	ablk_init_common(tfm, cryptd_tfm);
	return 0;
}

static struct crypto_alg ablk_ecb_alg = {
	.cra_name		= "ecb(aes)",
	.cra_driver_name	= "ecb-aes-aesni",
	.cra_priority		= 400,
	.cra_flags		= CRYPTO_ALG_TYPE_ABLKCIPHER|CRYPTO_ALG_ASYNC,
	.cra_blocksize		= AES_BLOCK_SIZE,
	.cra_ctxsize		= sizeof(struct async_aes_ctx),
	.cra_alignmask		= 0,
	.cra_type		= &crypto_ablkcipher_type,
	.cra_module		= THIS_MODULE,
	.cra_list		= LIST_HEAD_INIT(ablk_ecb_alg.cra_list),
	.cra_init		= ablk_ecb_init,
	.cra_exit		= ablk_exit,
	.cra_u = {
		.ablkcipher = {
			.min_keysize	= AES_MIN_KEY_SIZE,
			.max_keysize	= AES_MAX_KEY_SIZE,
			.setkey		= ablk_set_key,
			.encrypt	= ablk_encrypt,
			.decrypt	= ablk_decrypt,
		},
	},
};

static int ablk_cbc_init(struct crypto_tfm *tfm)
{
	struct cryptd_ablkcipher *cryptd_tfm;

	cryptd_tfm = cryptd_alloc_ablkcipher("__driver-cbc-aes-aesni", 0, 0);
	if (IS_ERR(cryptd_tfm))
		return PTR_ERR(cryptd_tfm);
	ablk_init_common(tfm, cryptd_tfm);
	return 0;
}

static struct crypto_alg ablk_cbc_alg = {
	.cra_name		= "cbc(aes)",
	.cra_driver_name	= "cbc-aes-aesni",
	.cra_priority		= 400,
	.cra_flags		= CRYPTO_ALG_TYPE_ABLKCIPHER|CRYPTO_ALG_ASYNC,
	.cra_blocksize		= AES_BLOCK_SIZE,
	.cra_ctxsize		= sizeof(struct async_aes_ctx),
	.cra_alignmask		= 0,
	.cra_type		= &crypto_ablkcipher_type,
	.cra_module		= THIS_MODULE,
	.cra_list		= LIST_HEAD_INIT(ablk_cbc_alg.cra_list),
	.cra_init		= ablk_cbc_init,
	.cra_exit		= ablk_exit,
	.cra_u = {
		.ablkcipher = {
			.min_keysize	= AES_MIN_KEY_SIZE,
			.max_keysize	= AES_MAX_KEY_SIZE,
			.ivsize		= AES_BLOCK_SIZE,
			.setkey		= ablk_set_key,
			.encrypt	= ablk_encrypt,
			.decrypt	= ablk_decrypt,
		},
	},
};

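/*
 * Example (a sketch, not part of the original file): callers reach
 * these algorithms through the regular crypto API; with priority 400
 * the AES-NI versions win the lookup, e.g.:
 *
 *	struct crypto_ablkcipher *tfm;
 *
 *	tfm = crypto_alloc_ablkcipher("cbc(aes)", 0, 0);
 *	if (IS_ERR(tfm))
 *		return PTR_ERR(tfm);
 *	crypto_ablkcipher_setkey(tfm, key, AES_KEYSIZE_128);
 *	...
 *	crypto_free_ablkcipher(tfm);
 */

/*
 * The optional modes below reuse the bare "__driver-aes-aesni" cipher,
 * wrapped in the corresponding mode template inside the "fpu" template,
 * which brackets the whole request in one kernel_fpu_begin()/end()
 * section.
 */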
#ifdef HAS_CTR
static int ablk_ctr_init(struct crypto_tfm *tfm)
{
	struct cryptd_ablkcipher *cryptd_tfm;

	cryptd_tfm = cryptd_alloc_ablkcipher("fpu(ctr(__driver-aes-aesni))",
					     0, 0);
	if (IS_ERR(cryptd_tfm))
		return PTR_ERR(cryptd_tfm);
	ablk_init_common(tfm, cryptd_tfm);
	return 0;
}

static struct crypto_alg ablk_ctr_alg = {
	.cra_name		= "ctr(aes)",
	.cra_driver_name	= "ctr-aes-aesni",
	.cra_priority		= 400,
	.cra_flags		= CRYPTO_ALG_TYPE_ABLKCIPHER|CRYPTO_ALG_ASYNC,
	.cra_blocksize		= 1,
	.cra_ctxsize		= sizeof(struct async_aes_ctx),
	.cra_alignmask		= 0,
	.cra_type		= &crypto_ablkcipher_type,
	.cra_module		= THIS_MODULE,
	.cra_list		= LIST_HEAD_INIT(ablk_ctr_alg.cra_list),
	.cra_init		= ablk_ctr_init,
	.cra_exit		= ablk_exit,
	.cra_u = {
		.ablkcipher = {
			.min_keysize	= AES_MIN_KEY_SIZE,
			.max_keysize	= AES_MAX_KEY_SIZE,
			.ivsize		= AES_BLOCK_SIZE,
			.setkey		= ablk_set_key,
			.encrypt	= ablk_encrypt,
			.decrypt	= ablk_decrypt,
			.geniv		= "chainiv",
		},
	},
};
#endif

#ifdef HAS_LRW
static int ablk_lrw_init(struct crypto_tfm *tfm)
{
	struct cryptd_ablkcipher *cryptd_tfm;

	cryptd_tfm = cryptd_alloc_ablkcipher("fpu(lrw(__driver-aes-aesni))",
					     0, 0);
	if (IS_ERR(cryptd_tfm))
		return PTR_ERR(cryptd_tfm);
	ablk_init_common(tfm, cryptd_tfm);
	return 0;
}

static struct crypto_alg ablk_lrw_alg = {
	.cra_name		= "lrw(aes)",
	.cra_driver_name	= "lrw-aes-aesni",
	.cra_priority		= 400,
	.cra_flags		= CRYPTO_ALG_TYPE_ABLKCIPHER|CRYPTO_ALG_ASYNC,
	.cra_blocksize		= AES_BLOCK_SIZE,
	.cra_ctxsize		= sizeof(struct async_aes_ctx),
	.cra_alignmask		= 0,
	.cra_type		= &crypto_ablkcipher_type,
	.cra_module		= THIS_MODULE,
	.cra_list		= LIST_HEAD_INIT(ablk_lrw_alg.cra_list),
	.cra_init		= ablk_lrw_init,
	.cra_exit		= ablk_exit,
	.cra_u = {
		.ablkcipher = {
			.min_keysize	= AES_MIN_KEY_SIZE + AES_BLOCK_SIZE,
			.max_keysize	= AES_MAX_KEY_SIZE + AES_BLOCK_SIZE,
			.ivsize		= AES_BLOCK_SIZE,
			.setkey		= ablk_set_key,
			.encrypt	= ablk_encrypt,
			.decrypt	= ablk_decrypt,
		},
	},
};
#endif

#ifdef HAS_PCBC
static int ablk_pcbc_init(struct crypto_tfm *tfm)
{
	struct cryptd_ablkcipher *cryptd_tfm;

	cryptd_tfm = cryptd_alloc_ablkcipher("fpu(pcbc(__driver-aes-aesni))",
					     0, 0);
	if (IS_ERR(cryptd_tfm))
		return PTR_ERR(cryptd_tfm);
	ablk_init_common(tfm, cryptd_tfm);
	return 0;
}

static struct crypto_alg ablk_pcbc_alg = {
	.cra_name		= "pcbc(aes)",
	.cra_driver_name	= "pcbc-aes-aesni",
	.cra_priority		= 400,
	.cra_flags		= CRYPTO_ALG_TYPE_ABLKCIPHER|CRYPTO_ALG_ASYNC,
	.cra_blocksize		= AES_BLOCK_SIZE,
	.cra_ctxsize		= sizeof(struct async_aes_ctx),
	.cra_alignmask		= 0,
	.cra_type		= &crypto_ablkcipher_type,
	.cra_module		= THIS_MODULE,
	.cra_list		= LIST_HEAD_INIT(ablk_pcbc_alg.cra_list),
	.cra_init		= ablk_pcbc_init,
	.cra_exit		= ablk_exit,
	.cra_u = {
		.ablkcipher = {
			.min_keysize	= AES_MIN_KEY_SIZE,
			.max_keysize	= AES_MAX_KEY_SIZE,
			.ivsize		= AES_BLOCK_SIZE,
			.setkey		= ablk_set_key,
			.encrypt	= ablk_encrypt,
			.decrypt	= ablk_decrypt,
		},
	},
};
#endif

#ifdef HAS_XTS
static int ablk_xts_init(struct crypto_tfm *tfm)
{
	struct cryptd_ablkcipher *cryptd_tfm;

	cryptd_tfm = cryptd_alloc_ablkcipher("fpu(xts(__driver-aes-aesni))",
					     0, 0);
	if (IS_ERR(cryptd_tfm))
		return PTR_ERR(cryptd_tfm);
	ablk_init_common(tfm, cryptd_tfm);
	return 0;
}

static struct crypto_alg ablk_xts_alg = {
	.cra_name		= "xts(aes)",
	.cra_driver_name	= "xts-aes-aesni",
	.cra_priority		= 400,
	.cra_flags		= CRYPTO_ALG_TYPE_ABLKCIPHER|CRYPTO_ALG_ASYNC,
	.cra_blocksize		= AES_BLOCK_SIZE,
	.cra_ctxsize		= sizeof(struct async_aes_ctx),
	.cra_alignmask		= 0,
	.cra_type		= &crypto_ablkcipher_type,
	.cra_module		= THIS_MODULE,
	.cra_list		= LIST_HEAD_INIT(ablk_xts_alg.cra_list),
	.cra_init		= ablk_xts_init,
	.cra_exit		= ablk_exit,
	.cra_u = {
		.ablkcipher = {
			.min_keysize	= 2 * AES_MIN_KEY_SIZE,
			.max_keysize	= 2 * AES_MAX_KEY_SIZE,
			.ivsize		= AES_BLOCK_SIZE,
			.setkey		= ablk_set_key,
			.encrypt	= ablk_encrypt,
			.decrypt	= ablk_decrypt,
		},
	},
};
#endif

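/*
 * Missing AES-NI support is the normal case on older processors, so
 * the detection message is printed with KERN_INFO rather than
 * KERN_ERR.  Registration failures unwind through the chained error
 * labels in reverse order.
 */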
static int __init aesni_init(void)
{
	int err;

	if (!cpu_has_aes) {
		printk(KERN_INFO "Intel AES-NI instructions are not detected.\n");
		return -ENODEV;
	}
	if ((err = crypto_register_alg(&aesni_alg)))
		goto aes_err;
	if ((err = crypto_register_alg(&__aesni_alg)))
		goto __aes_err;
	if ((err = crypto_register_alg(&blk_ecb_alg)))
		goto blk_ecb_err;
	if ((err = crypto_register_alg(&blk_cbc_alg)))
		goto blk_cbc_err;
	if ((err = crypto_register_alg(&ablk_ecb_alg)))
		goto ablk_ecb_err;
	if ((err = crypto_register_alg(&ablk_cbc_alg)))
		goto ablk_cbc_err;
#ifdef HAS_CTR
	if ((err = crypto_register_alg(&ablk_ctr_alg)))
		goto ablk_ctr_err;
#endif
#ifdef HAS_LRW
	if ((err = crypto_register_alg(&ablk_lrw_alg)))
		goto ablk_lrw_err;
#endif
#ifdef HAS_PCBC
	if ((err = crypto_register_alg(&ablk_pcbc_alg)))
		goto ablk_pcbc_err;
#endif
#ifdef HAS_XTS
	if ((err = crypto_register_alg(&ablk_xts_alg)))
		goto ablk_xts_err;
#endif

	return err;

#ifdef HAS_XTS
ablk_xts_err:
#endif
#ifdef HAS_PCBC
	crypto_unregister_alg(&ablk_pcbc_alg);
ablk_pcbc_err:
#endif
#ifdef HAS_LRW
	crypto_unregister_alg(&ablk_lrw_alg);
ablk_lrw_err:
#endif
#ifdef HAS_CTR
	crypto_unregister_alg(&ablk_ctr_alg);
ablk_ctr_err:
#endif
	crypto_unregister_alg(&ablk_cbc_alg);
ablk_cbc_err:
	crypto_unregister_alg(&ablk_ecb_alg);
ablk_ecb_err:
	crypto_unregister_alg(&blk_cbc_alg);
blk_cbc_err:
	crypto_unregister_alg(&blk_ecb_alg);
blk_ecb_err:
	crypto_unregister_alg(&__aesni_alg);
__aes_err:
	crypto_unregister_alg(&aesni_alg);
aes_err:
	return err;
}

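/* Unregister everything in the reverse order of registration. */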
static void __exit aesni_exit(void)
{
#ifdef HAS_XTS
	crypto_unregister_alg(&ablk_xts_alg);
#endif
#ifdef HAS_PCBC
	crypto_unregister_alg(&ablk_pcbc_alg);
#endif
#ifdef HAS_LRW
	crypto_unregister_alg(&ablk_lrw_alg);
#endif
#ifdef HAS_CTR
	crypto_unregister_alg(&ablk_ctr_alg);
#endif
	crypto_unregister_alg(&ablk_cbc_alg);
	crypto_unregister_alg(&ablk_ecb_alg);
	crypto_unregister_alg(&blk_cbc_alg);
	crypto_unregister_alg(&blk_ecb_alg);
	crypto_unregister_alg(&__aesni_alg);
	crypto_unregister_alg(&aesni_alg);
}

module_init(aesni_init);
module_exit(aesni_exit);

MODULE_DESCRIPTION("Rijndael (AES) Cipher Algorithm, Intel AES-NI instructions optimized");
MODULE_LICENSE("GPL");
MODULE_ALIAS("aes");