sparc64: Adjust crypto priorities.
arch/sparc/crypto/aes_glue.c (history from linux-block.git; commit annotations stripped)
1/* Glue code for AES encryption optimized for sparc64 crypto opcodes.
2 *
3 * This is based largely upon arch/x86/crypto/aesni-intel_glue.c
4 *
5 * Copyright (C) 2008, Intel Corp.
6 * Author: Huang Ying <ying.huang@intel.com>
7 *
8 * Added RFC4106 AES-GCM support for 128-bit keys under the AEAD
9 * interface for 64-bit kernels.
10 * Authors: Adrian Hoban <adrian.hoban@intel.com>
11 * Gabriele Paoloni <gabriele.paoloni@intel.com>
12 * Tadeusz Struk (tadeusz.struk@intel.com)
13 * Aidan O'Mahony (aidan.o.mahony@intel.com)
14 * Copyright (c) 2010, Intel Corporation.
15 */
16
17#include <linux/crypto.h>
18#include <linux/init.h>
19#include <linux/module.h>
20#include <linux/mm.h>
21#include <linux/types.h>
22#include <crypto/algapi.h>
23#include <crypto/aes.h>
24
25#include <asm/fpumacro.h>
26#include <asm/pstate.h>
27#include <asm/elf.h>
28
10803624
DM
29#include "opcodes.h"
30
/* Dispatch table binding one AES key size (128/192/256 bits) to its
 * sparc64 crypto-opcode assembler routines.  Bulk (ECB/CBC/CTR)
 * routines take the length in bytes; the key schedule is passed as an
 * array of u64s.
 */
struct aes_ops {
	void (*encrypt)(const u64 *key, const u32 *input, u32 *output);
	void (*decrypt)(const u64 *key, const u32 *input, u32 *output);
	/* Preload the expanded key schedule into the FPU registers; the
	 * bulk routines below assume the keys are already resident. */
	void (*load_encrypt_keys)(const u64 *key);
	void (*load_decrypt_keys)(const u64 *key);
	void (*ecb_encrypt)(const u64 *key, const u64 *input, u64 *output,
			    unsigned int len);
	void (*ecb_decrypt)(const u64 *key, const u64 *input, u64 *output,
			    unsigned int len);
	void (*cbc_encrypt)(const u64 *key, const u64 *input, u64 *output,
			    unsigned int len, u64 *iv);
	void (*cbc_decrypt)(const u64 *key, const u64 *input, u64 *output,
			    unsigned int len, u64 *iv);
	void (*ctr_crypt)(const u64 *key, const u64 *input, u64 *output,
			  unsigned int len, u64 *iv);
};

/* Per-tfm context, set up by aes_set_key(). */
struct crypto_sparc64_aes_ctx {
	struct aes_ops *ops;	/* routines matching key_length */
	u64 key[AES_MAX_KEYLENGTH / sizeof(u64)];	/* expanded key schedule */
	u32 key_length;		/* original key length in bytes */
	u32 expanded_key_length;	/* bytes of schedule in use (0xb0/0xd0/0xf0) */
};
54
/* Assembler entry points, one set per key size (implemented in
 * aes_asm.S using the sparc64 crypto opcodes).
 */
extern void aes_sparc64_encrypt_128(const u64 *key, const u32 *input,
				    u32 *output);
extern void aes_sparc64_encrypt_192(const u64 *key, const u32 *input,
				    u32 *output);
extern void aes_sparc64_encrypt_256(const u64 *key, const u32 *input,
				    u32 *output);

extern void aes_sparc64_decrypt_128(const u64 *key, const u32 *input,
				    u32 *output);
extern void aes_sparc64_decrypt_192(const u64 *key, const u32 *input,
				    u32 *output);
extern void aes_sparc64_decrypt_256(const u64 *key, const u32 *input,
				    u32 *output);

extern void aes_sparc64_load_encrypt_keys_128(const u64 *key);
extern void aes_sparc64_load_encrypt_keys_192(const u64 *key);
extern void aes_sparc64_load_encrypt_keys_256(const u64 *key);

extern void aes_sparc64_load_decrypt_keys_128(const u64 *key);
extern void aes_sparc64_load_decrypt_keys_192(const u64 *key);
extern void aes_sparc64_load_decrypt_keys_256(const u64 *key);

extern void aes_sparc64_ecb_encrypt_128(const u64 *key, const u64 *input,
					u64 *output, unsigned int len);
extern void aes_sparc64_ecb_encrypt_192(const u64 *key, const u64 *input,
					u64 *output, unsigned int len);
extern void aes_sparc64_ecb_encrypt_256(const u64 *key, const u64 *input,
					u64 *output, unsigned int len);

extern void aes_sparc64_ecb_decrypt_128(const u64 *key, const u64 *input,
					u64 *output, unsigned int len);
extern void aes_sparc64_ecb_decrypt_192(const u64 *key, const u64 *input,
					u64 *output, unsigned int len);
extern void aes_sparc64_ecb_decrypt_256(const u64 *key, const u64 *input,
					u64 *output, unsigned int len);

extern void aes_sparc64_cbc_encrypt_128(const u64 *key, const u64 *input,
					u64 *output, unsigned int len,
					u64 *iv);

extern void aes_sparc64_cbc_encrypt_192(const u64 *key, const u64 *input,
					u64 *output, unsigned int len,
					u64 *iv);

extern void aes_sparc64_cbc_encrypt_256(const u64 *key, const u64 *input,
					u64 *output, unsigned int len,
					u64 *iv);

extern void aes_sparc64_cbc_decrypt_128(const u64 *key, const u64 *input,
					u64 *output, unsigned int len,
					u64 *iv);

extern void aes_sparc64_cbc_decrypt_192(const u64 *key, const u64 *input,
					u64 *output, unsigned int len,
					u64 *iv);

extern void aes_sparc64_cbc_decrypt_256(const u64 *key, const u64 *input,
					u64 *output, unsigned int len,
					u64 *iv);

extern void aes_sparc64_ctr_crypt_128(const u64 *key, const u64 *input,
				      u64 *output, unsigned int len,
				      u64 *iv);
extern void aes_sparc64_ctr_crypt_192(const u64 *key, const u64 *input,
				      u64 *output, unsigned int len,
				      u64 *iv);
extern void aes_sparc64_ctr_crypt_256(const u64 *key, const u64 *input,
				      u64 *output, unsigned int len,
				      u64 *iv);

/* One ops table per key size; aes_set_key() points ctx->ops at the
 * matching table.
 */
struct aes_ops aes128_ops = {
	.encrypt		= aes_sparc64_encrypt_128,
	.decrypt		= aes_sparc64_decrypt_128,
	.load_encrypt_keys	= aes_sparc64_load_encrypt_keys_128,
	.load_decrypt_keys	= aes_sparc64_load_decrypt_keys_128,
	.ecb_encrypt		= aes_sparc64_ecb_encrypt_128,
	.ecb_decrypt		= aes_sparc64_ecb_decrypt_128,
	.cbc_encrypt		= aes_sparc64_cbc_encrypt_128,
	.cbc_decrypt		= aes_sparc64_cbc_decrypt_128,
	.ctr_crypt		= aes_sparc64_ctr_crypt_128,
};

struct aes_ops aes192_ops = {
	.encrypt		= aes_sparc64_encrypt_192,
	.decrypt		= aes_sparc64_decrypt_192,
	.load_encrypt_keys	= aes_sparc64_load_encrypt_keys_192,
	.load_decrypt_keys	= aes_sparc64_load_decrypt_keys_192,
	.ecb_encrypt		= aes_sparc64_ecb_encrypt_192,
	.ecb_decrypt		= aes_sparc64_ecb_decrypt_192,
	.cbc_encrypt		= aes_sparc64_cbc_encrypt_192,
	.cbc_decrypt		= aes_sparc64_cbc_decrypt_192,
	.ctr_crypt		= aes_sparc64_ctr_crypt_192,
};

struct aes_ops aes256_ops = {
	.encrypt		= aes_sparc64_encrypt_256,
	.decrypt		= aes_sparc64_decrypt_256,
	.load_encrypt_keys	= aes_sparc64_load_encrypt_keys_256,
	.load_decrypt_keys	= aes_sparc64_load_decrypt_keys_256,
	.ecb_encrypt		= aes_sparc64_ecb_encrypt_256,
	.ecb_decrypt		= aes_sparc64_ecb_decrypt_256,
	.cbc_encrypt		= aes_sparc64_cbc_encrypt_256,
	.cbc_decrypt		= aes_sparc64_cbc_decrypt_256,
	.ctr_crypt		= aes_sparc64_ctr_crypt_256,
};

160
9bf4852d
DM
161extern void aes_sparc64_key_expand(const u32 *in_key, u64 *output_key,
162 unsigned int key_len);
163
164static int aes_set_key(struct crypto_tfm *tfm, const u8 *in_key,
165 unsigned int key_len)
166{
167 struct crypto_sparc64_aes_ctx *ctx = crypto_tfm_ctx(tfm);
168 u32 *flags = &tfm->crt_flags;
169
170 switch (key_len) {
171 case AES_KEYSIZE_128:
172 ctx->expanded_key_length = 0xb0;
0bdcaf74 173 ctx->ops = &aes128_ops;
9bf4852d
DM
174 break;
175
176 case AES_KEYSIZE_192:
177 ctx->expanded_key_length = 0xd0;
0bdcaf74 178 ctx->ops = &aes192_ops;
9bf4852d
DM
179 break;
180
181 case AES_KEYSIZE_256:
182 ctx->expanded_key_length = 0xf0;
0bdcaf74 183 ctx->ops = &aes256_ops;
9bf4852d
DM
184 break;
185
186 default:
187 *flags |= CRYPTO_TFM_RES_BAD_KEY_LEN;
188 return -EINVAL;
189 }
190
191 aes_sparc64_key_expand((const u32 *)in_key, &ctx->key[0], key_len);
192 ctx->key_length = key_len;
193
194 return 0;
195}
196
9bf4852d
DM
197static void aes_encrypt(struct crypto_tfm *tfm, u8 *dst, const u8 *src)
198{
199 struct crypto_sparc64_aes_ctx *ctx = crypto_tfm_ctx(tfm);
200
0bdcaf74 201 ctx->ops->encrypt(&ctx->key[0], (const u32 *) src, (u32 *) dst);
9bf4852d
DM
202}
203
9bf4852d
DM
204static void aes_decrypt(struct crypto_tfm *tfm, u8 *dst, const u8 *src)
205{
206 struct crypto_sparc64_aes_ctx *ctx = crypto_tfm_ctx(tfm);
207
0bdcaf74 208 ctx->ops->decrypt(&ctx->key[0], (const u32 *) src, (u32 *) dst);
9bf4852d
DM
209}
210
/* Round a byte count down to a whole number of AES blocks
 * (AES_BLOCK_SIZE is a power of two). */
#define AES_BLOCK_MASK	(~(AES_BLOCK_SIZE-1))

9bf4852d
DM
213static int ecb_encrypt(struct blkcipher_desc *desc,
214 struct scatterlist *dst, struct scatterlist *src,
215 unsigned int nbytes)
216{
217 struct crypto_sparc64_aes_ctx *ctx = crypto_blkcipher_ctx(desc->tfm);
218 struct blkcipher_walk walk;
219 int err;
220
221 blkcipher_walk_init(&walk, dst, src, nbytes);
222 err = blkcipher_walk_virt(desc, &walk);
223
0bdcaf74 224 ctx->ops->load_encrypt_keys(&ctx->key[0]);
9bf4852d
DM
225 while ((nbytes = walk.nbytes)) {
226 unsigned int block_len = nbytes & AES_BLOCK_MASK;
227
228 if (likely(block_len)) {
0bdcaf74
DM
229 ctx->ops->ecb_encrypt(&ctx->key[0],
230 (const u64 *)walk.src.virt.addr,
231 (u64 *) walk.dst.virt.addr,
232 block_len);
9bf4852d
DM
233 }
234 nbytes &= AES_BLOCK_SIZE - 1;
235 err = blkcipher_walk_done(desc, &walk, nbytes);
236 }
237 fprs_write(0);
238 return err;
239}
240
9bf4852d
DM
241static int ecb_decrypt(struct blkcipher_desc *desc,
242 struct scatterlist *dst, struct scatterlist *src,
243 unsigned int nbytes)
244{
245 struct crypto_sparc64_aes_ctx *ctx = crypto_blkcipher_ctx(desc->tfm);
246 struct blkcipher_walk walk;
247 u64 *key_end;
248 int err;
249
250 blkcipher_walk_init(&walk, dst, src, nbytes);
251 err = blkcipher_walk_virt(desc, &walk);
252
0bdcaf74 253 ctx->ops->load_decrypt_keys(&ctx->key[0]);
9bf4852d
DM
254 key_end = &ctx->key[ctx->expanded_key_length / sizeof(u64)];
255 while ((nbytes = walk.nbytes)) {
256 unsigned int block_len = nbytes & AES_BLOCK_MASK;
257
0bdcaf74
DM
258 if (likely(block_len)) {
259 ctx->ops->ecb_decrypt(key_end,
260 (const u64 *) walk.src.virt.addr,
261 (u64 *) walk.dst.virt.addr, block_len);
262 }
9bf4852d
DM
263 nbytes &= AES_BLOCK_SIZE - 1;
264 err = blkcipher_walk_done(desc, &walk, nbytes);
265 }
266 fprs_write(0);
267
268 return err;
269}
270
9bf4852d
DM
271static int cbc_encrypt(struct blkcipher_desc *desc,
272 struct scatterlist *dst, struct scatterlist *src,
273 unsigned int nbytes)
274{
275 struct crypto_sparc64_aes_ctx *ctx = crypto_blkcipher_ctx(desc->tfm);
276 struct blkcipher_walk walk;
277 int err;
278
279 blkcipher_walk_init(&walk, dst, src, nbytes);
280 err = blkcipher_walk_virt(desc, &walk);
281
0bdcaf74 282 ctx->ops->load_encrypt_keys(&ctx->key[0]);
9bf4852d
DM
283 while ((nbytes = walk.nbytes)) {
284 unsigned int block_len = nbytes & AES_BLOCK_MASK;
285
286 if (likely(block_len)) {
0bdcaf74
DM
287 ctx->ops->cbc_encrypt(&ctx->key[0],
288 (const u64 *)walk.src.virt.addr,
289 (u64 *) walk.dst.virt.addr,
290 block_len, (u64 *) walk.iv);
9bf4852d
DM
291 }
292 nbytes &= AES_BLOCK_SIZE - 1;
293 err = blkcipher_walk_done(desc, &walk, nbytes);
294 }
295 fprs_write(0);
296 return err;
297}
298
9bf4852d
DM
299static int cbc_decrypt(struct blkcipher_desc *desc,
300 struct scatterlist *dst, struct scatterlist *src,
301 unsigned int nbytes)
302{
303 struct crypto_sparc64_aes_ctx *ctx = crypto_blkcipher_ctx(desc->tfm);
304 struct blkcipher_walk walk;
305 u64 *key_end;
306 int err;
307
308 blkcipher_walk_init(&walk, dst, src, nbytes);
309 err = blkcipher_walk_virt(desc, &walk);
310
0bdcaf74 311 ctx->ops->load_decrypt_keys(&ctx->key[0]);
9bf4852d
DM
312 key_end = &ctx->key[ctx->expanded_key_length / sizeof(u64)];
313 while ((nbytes = walk.nbytes)) {
314 unsigned int block_len = nbytes & AES_BLOCK_MASK;
315
0bdcaf74
DM
316 if (likely(block_len)) {
317 ctx->ops->cbc_decrypt(key_end,
318 (const u64 *) walk.src.virt.addr,
319 (u64 *) walk.dst.virt.addr,
320 block_len, (u64 *) walk.iv);
321 }
9bf4852d
DM
322 nbytes &= AES_BLOCK_SIZE - 1;
323 err = blkcipher_walk_done(desc, &walk, nbytes);
324 }
325 fprs_write(0);
326
327 return err;
328}
329
9fd130ec
DM
330static int ctr_crypt(struct blkcipher_desc *desc,
331 struct scatterlist *dst, struct scatterlist *src,
332 unsigned int nbytes)
333{
334 struct crypto_sparc64_aes_ctx *ctx = crypto_blkcipher_ctx(desc->tfm);
335 struct blkcipher_walk walk;
336 int err;
337
338 blkcipher_walk_init(&walk, dst, src, nbytes);
339 err = blkcipher_walk_virt(desc, &walk);
340
341 ctx->ops->load_encrypt_keys(&ctx->key[0]);
342 while ((nbytes = walk.nbytes)) {
343 unsigned int block_len = nbytes & AES_BLOCK_MASK;
344
345 if (likely(block_len)) {
346 ctx->ops->ctr_crypt(&ctx->key[0],
347 (const u64 *)walk.src.virt.addr,
348 (u64 *) walk.dst.virt.addr,
349 block_len, (u64 *) walk.iv);
350 }
351 nbytes &= AES_BLOCK_SIZE - 1;
352 err = blkcipher_walk_done(desc, &walk, nbytes);
353 }
354 fprs_write(0);
355 return err;
356}
357
/* Registered algorithms: bare cipher plus ECB/CBC/CTR blkcipher modes.
 * SPARC_CR_OPCODE_PRIORITY (see opcodes.h) ranks these above the
 * generic C implementations.
 *
 * alignmask 3 for the cipher and 7 for the block modes presumably
 * reflect the 32-bit vs 64-bit loads done by the assembler routines —
 * TODO confirm against aes_asm.S.
 *
 * NOTE(review): ctr(aes) is registered with cra_blocksize ==
 * AES_BLOCK_SIZE, so requests must be block multiples even though CTR
 * is conceptually a stream cipher; upstream later switched this to a
 * blocksize of 1 with explicit final-block handling.
 */
static struct crypto_alg algs[] = { {
	.cra_name		= "aes",
	.cra_driver_name	= "aes-sparc64",
	.cra_priority		= SPARC_CR_OPCODE_PRIORITY,
	.cra_flags		= CRYPTO_ALG_TYPE_CIPHER,
	.cra_blocksize		= AES_BLOCK_SIZE,
	.cra_ctxsize		= sizeof(struct crypto_sparc64_aes_ctx),
	.cra_alignmask		= 3,
	.cra_module		= THIS_MODULE,
	.cra_u	= {
		.cipher	= {
			.cia_min_keysize	= AES_MIN_KEY_SIZE,
			.cia_max_keysize	= AES_MAX_KEY_SIZE,
			.cia_setkey		= aes_set_key,
			.cia_encrypt		= aes_encrypt,
			.cia_decrypt		= aes_decrypt
		}
	}
}, {
	.cra_name		= "ecb(aes)",
	.cra_driver_name	= "ecb-aes-sparc64",
	.cra_priority		= SPARC_CR_OPCODE_PRIORITY,
	.cra_flags		= CRYPTO_ALG_TYPE_BLKCIPHER,
	.cra_blocksize		= AES_BLOCK_SIZE,
	.cra_ctxsize		= sizeof(struct crypto_sparc64_aes_ctx),
	.cra_alignmask		= 7,
	.cra_type		= &crypto_blkcipher_type,
	.cra_module		= THIS_MODULE,
	.cra_u = {
		.blkcipher = {
			.min_keysize	= AES_MIN_KEY_SIZE,
			.max_keysize	= AES_MAX_KEY_SIZE,
			.setkey		= aes_set_key,
			.encrypt	= ecb_encrypt,
			.decrypt	= ecb_decrypt,
		},
	},
}, {
	.cra_name		= "cbc(aes)",
	.cra_driver_name	= "cbc-aes-sparc64",
	.cra_priority		= SPARC_CR_OPCODE_PRIORITY,
	.cra_flags		= CRYPTO_ALG_TYPE_BLKCIPHER,
	.cra_blocksize		= AES_BLOCK_SIZE,
	.cra_ctxsize		= sizeof(struct crypto_sparc64_aes_ctx),
	.cra_alignmask		= 7,
	.cra_type		= &crypto_blkcipher_type,
	.cra_module		= THIS_MODULE,
	.cra_u = {
		.blkcipher = {
			.min_keysize	= AES_MIN_KEY_SIZE,
			.max_keysize	= AES_MAX_KEY_SIZE,
			.setkey		= aes_set_key,
			.encrypt	= cbc_encrypt,
			.decrypt	= cbc_decrypt,
		},
	},
}, {
	.cra_name		= "ctr(aes)",
	.cra_driver_name	= "ctr-aes-sparc64",
	.cra_priority		= SPARC_CR_OPCODE_PRIORITY,
	.cra_flags		= CRYPTO_ALG_TYPE_BLKCIPHER,
	.cra_blocksize		= AES_BLOCK_SIZE,
	.cra_ctxsize		= sizeof(struct crypto_sparc64_aes_ctx),
	.cra_alignmask		= 7,
	.cra_type		= &crypto_blkcipher_type,
	.cra_module		= THIS_MODULE,
	.cra_u = {
		.blkcipher = {
			.min_keysize	= AES_MIN_KEY_SIZE,
			.max_keysize	= AES_MAX_KEY_SIZE,
			.setkey		= aes_set_key,
			/* CTR decrypt == encrypt */
			.encrypt	= ctr_crypt,
			.decrypt	= ctr_crypt,
		},
	},
} };
434
435static bool __init sparc64_has_aes_opcode(void)
436{
437 unsigned long cfr;
438
439 if (!(sparc64_elf_hwcap & HWCAP_SPARC_CRYPTO))
440 return false;
441
442 __asm__ __volatile__("rd %%asr26, %0" : "=r" (cfr));
443 if (!(cfr & CFR_AES))
444 return false;
445
446 return true;
447}
448
449static int __init aes_sparc64_mod_init(void)
450{
451 int i;
452
453 for (i = 0; i < ARRAY_SIZE(algs); i++)
454 INIT_LIST_HEAD(&algs[i].cra_list);
455
456 if (sparc64_has_aes_opcode()) {
457 pr_info("Using sparc64 aes opcodes optimized AES implementation\n");
458 return crypto_register_algs(algs, ARRAY_SIZE(algs));
459 }
460 pr_info("sparc64 aes opcodes not available.\n");
461 return -ENODEV;
462}
463
/* Module exit: unregister everything registered at init. */
static void __exit aes_sparc64_mod_fini(void)
{
	crypto_unregister_algs(algs, ARRAY_SIZE(algs));
}
468
469module_init(aes_sparc64_mod_init);
470module_exit(aes_sparc64_mod_fini);
471
472MODULE_LICENSE("GPL");
473MODULE_DESCRIPTION("AES Secure Hash Algorithm, sparc64 aes opcode accelerated");
474
475MODULE_ALIAS("aes");