crypto: aes - rename local routines to prevent future clashes
[linux-block.git] / arch / sparc / crypto / aes_glue.c
CommitLineData
// SPDX-License-Identifier: GPL-2.0-only
/* Glue code for AES encryption optimized for sparc64 crypto opcodes.
 *
 * This is based largely upon arch/x86/crypto/aesni-intel_glue.c
 *
 * Copyright (C) 2008, Intel Corp.
 *    Author: Huang Ying <ying.huang@intel.com>
 *
 * Added RFC4106 AES-GCM support for 128-bit keys under the AEAD
 * interface for 64-bit kernels.
 *    Authors: Adrian Hoban <adrian.hoban@intel.com>
 *             Gabriele Paoloni <gabriele.paoloni@intel.com>
 *             Tadeusz Struk (tadeusz.struk@intel.com)
 *             Aidan O'Mahony (aidan.o.mahony@intel.com)
 * Copyright (c) 2010, Intel Corporation.
 */
17
71741680
DM
18#define pr_fmt(fmt) KBUILD_MODNAME ": " fmt
19
9bf4852d
DM
20#include <linux/crypto.h>
21#include <linux/init.h>
22#include <linux/module.h>
23#include <linux/mm.h>
24#include <linux/types.h>
25#include <crypto/algapi.h>
26#include <crypto/aes.h>
27
28#include <asm/fpumacro.h>
29#include <asm/pstate.h>
30#include <asm/elf.h>
31
10803624
DM
32#include "opcodes.h"
33
0bdcaf74
DM
/* Dispatch table of key-size-specific assembler routines.  One static
 * instance exists per AES key size (128/192/256); aes_set_key() points
 * the per-tfm context at the matching table.  "key" is always the
 * expanded key schedule, "len" is a byte count of whole blocks.
 */
struct aes_ops {
	void (*encrypt)(const u64 *key, const u32 *input, u32 *output);
	void (*decrypt)(const u64 *key, const u32 *input, u32 *output);
	void (*load_encrypt_keys)(const u64 *key);
	void (*load_decrypt_keys)(const u64 *key);
	void (*ecb_encrypt)(const u64 *key, const u64 *input, u64 *output,
			    unsigned int len);
	void (*ecb_decrypt)(const u64 *key, const u64 *input, u64 *output,
			    unsigned int len);
	void (*cbc_encrypt)(const u64 *key, const u64 *input, u64 *output,
			    unsigned int len, u64 *iv);
	void (*cbc_decrypt)(const u64 *key, const u64 *input, u64 *output,
			    unsigned int len, u64 *iv);
	void (*ctr_crypt)(const u64 *key, const u64 *input, u64 *output,
			  unsigned int len, u64 *iv);
};
50
/* Per-transform context: the selected ops table plus the expanded key
 * schedule produced by aes_sparc64_key_expand().
 */
struct crypto_sparc64_aes_ctx {
	struct aes_ops *ops;
	u64 key[AES_MAX_KEYLENGTH / sizeof(u64)];
	u32 key_length;			/* raw key length in bytes */
	u32 expanded_key_length;	/* expanded schedule size in bytes */
};
57
0bdcaf74
DM
/* Entry points into the sparc64 AES assembler, one routine per key
 * size.  Every "key" argument is the expanded schedule from
 * aes_sparc64_key_expand().  Note that the ECB/CBC *decrypt* routines
 * are invoked with a pointer to the END of the schedule (see the
 * key_end computation in ecb_decrypt()/cbc_decrypt()); presumably they
 * consume round keys in reverse order — confirm against the .S file.
 */
extern void aes_sparc64_encrypt_128(const u64 *key, const u32 *input,
				    u32 *output);
extern void aes_sparc64_encrypt_192(const u64 *key, const u32 *input,
				    u32 *output);
extern void aes_sparc64_encrypt_256(const u64 *key, const u32 *input,
				    u32 *output);

extern void aes_sparc64_decrypt_128(const u64 *key, const u32 *input,
				    u32 *output);
extern void aes_sparc64_decrypt_192(const u64 *key, const u32 *input,
				    u32 *output);
extern void aes_sparc64_decrypt_256(const u64 *key, const u32 *input,
				    u32 *output);

/* Load the round keys into FPU registers ahead of a bulk operation. */
extern void aes_sparc64_load_encrypt_keys_128(const u64 *key);
extern void aes_sparc64_load_encrypt_keys_192(const u64 *key);
extern void aes_sparc64_load_encrypt_keys_256(const u64 *key);

extern void aes_sparc64_load_decrypt_keys_128(const u64 *key);
extern void aes_sparc64_load_decrypt_keys_192(const u64 *key);
extern void aes_sparc64_load_decrypt_keys_256(const u64 *key);

extern void aes_sparc64_ecb_encrypt_128(const u64 *key, const u64 *input,
					u64 *output, unsigned int len);
extern void aes_sparc64_ecb_encrypt_192(const u64 *key, const u64 *input,
					u64 *output, unsigned int len);
extern void aes_sparc64_ecb_encrypt_256(const u64 *key, const u64 *input,
					u64 *output, unsigned int len);

extern void aes_sparc64_ecb_decrypt_128(const u64 *key, const u64 *input,
					u64 *output, unsigned int len);
extern void aes_sparc64_ecb_decrypt_192(const u64 *key, const u64 *input,
					u64 *output, unsigned int len);
extern void aes_sparc64_ecb_decrypt_256(const u64 *key, const u64 *input,
					u64 *output, unsigned int len);

extern void aes_sparc64_cbc_encrypt_128(const u64 *key, const u64 *input,
					u64 *output, unsigned int len,
					u64 *iv);

extern void aes_sparc64_cbc_encrypt_192(const u64 *key, const u64 *input,
					u64 *output, unsigned int len,
					u64 *iv);

extern void aes_sparc64_cbc_encrypt_256(const u64 *key, const u64 *input,
					u64 *output, unsigned int len,
					u64 *iv);

extern void aes_sparc64_cbc_decrypt_128(const u64 *key, const u64 *input,
					u64 *output, unsigned int len,
					u64 *iv);

extern void aes_sparc64_cbc_decrypt_192(const u64 *key, const u64 *input,
					u64 *output, unsigned int len,
					u64 *iv);

extern void aes_sparc64_cbc_decrypt_256(const u64 *key, const u64 *input,
					u64 *output, unsigned int len,
					u64 *iv);

extern void aes_sparc64_ctr_crypt_128(const u64 *key, const u64 *input,
				      u64 *output, unsigned int len,
				      u64 *iv);
extern void aes_sparc64_ctr_crypt_192(const u64 *key, const u64 *input,
				      u64 *output, unsigned int len,
				      u64 *iv);
extern void aes_sparc64_ctr_crypt_256(const u64 *key, const u64 *input,
				      u64 *output, unsigned int len,
				      u64 *iv);
127
/* Ops tables binding each key size to its assembler routines. */
static struct aes_ops aes128_ops = {
	.encrypt		= aes_sparc64_encrypt_128,
	.decrypt		= aes_sparc64_decrypt_128,
	.load_encrypt_keys	= aes_sparc64_load_encrypt_keys_128,
	.load_decrypt_keys	= aes_sparc64_load_decrypt_keys_128,
	.ecb_encrypt		= aes_sparc64_ecb_encrypt_128,
	.ecb_decrypt		= aes_sparc64_ecb_decrypt_128,
	.cbc_encrypt		= aes_sparc64_cbc_encrypt_128,
	.cbc_decrypt		= aes_sparc64_cbc_decrypt_128,
	.ctr_crypt		= aes_sparc64_ctr_crypt_128,
};

static struct aes_ops aes192_ops = {
	.encrypt		= aes_sparc64_encrypt_192,
	.decrypt		= aes_sparc64_decrypt_192,
	.load_encrypt_keys	= aes_sparc64_load_encrypt_keys_192,
	.load_decrypt_keys	= aes_sparc64_load_decrypt_keys_192,
	.ecb_encrypt		= aes_sparc64_ecb_encrypt_192,
	.ecb_decrypt		= aes_sparc64_ecb_decrypt_192,
	.cbc_encrypt		= aes_sparc64_cbc_encrypt_192,
	.cbc_decrypt		= aes_sparc64_cbc_decrypt_192,
	.ctr_crypt		= aes_sparc64_ctr_crypt_192,
};

static struct aes_ops aes256_ops = {
	.encrypt		= aes_sparc64_encrypt_256,
	.decrypt		= aes_sparc64_decrypt_256,
	.load_encrypt_keys	= aes_sparc64_load_encrypt_keys_256,
	.load_decrypt_keys	= aes_sparc64_load_decrypt_keys_256,
	.ecb_encrypt		= aes_sparc64_ecb_encrypt_256,
	.ecb_decrypt		= aes_sparc64_ecb_decrypt_256,
	.cbc_encrypt		= aes_sparc64_cbc_encrypt_256,
	.cbc_decrypt		= aes_sparc64_cbc_decrypt_256,
	.ctr_crypt		= aes_sparc64_ctr_crypt_256,
};
163
9bf4852d
DM
164extern void aes_sparc64_key_expand(const u32 *in_key, u64 *output_key,
165 unsigned int key_len);
166
167static int aes_set_key(struct crypto_tfm *tfm, const u8 *in_key,
168 unsigned int key_len)
169{
170 struct crypto_sparc64_aes_ctx *ctx = crypto_tfm_ctx(tfm);
171 u32 *flags = &tfm->crt_flags;
172
173 switch (key_len) {
174 case AES_KEYSIZE_128:
175 ctx->expanded_key_length = 0xb0;
0bdcaf74 176 ctx->ops = &aes128_ops;
9bf4852d
DM
177 break;
178
179 case AES_KEYSIZE_192:
180 ctx->expanded_key_length = 0xd0;
0bdcaf74 181 ctx->ops = &aes192_ops;
9bf4852d
DM
182 break;
183
184 case AES_KEYSIZE_256:
185 ctx->expanded_key_length = 0xf0;
0bdcaf74 186 ctx->ops = &aes256_ops;
9bf4852d
DM
187 break;
188
189 default:
190 *flags |= CRYPTO_TFM_RES_BAD_KEY_LEN;
191 return -EINVAL;
192 }
193
194 aes_sparc64_key_expand((const u32 *)in_key, &ctx->key[0], key_len);
195 ctx->key_length = key_len;
196
197 return 0;
198}
199
724ecd3c 200static void crypto_aes_encrypt(struct crypto_tfm *tfm, u8 *dst, const u8 *src)
9bf4852d
DM
201{
202 struct crypto_sparc64_aes_ctx *ctx = crypto_tfm_ctx(tfm);
203
0bdcaf74 204 ctx->ops->encrypt(&ctx->key[0], (const u32 *) src, (u32 *) dst);
9bf4852d
DM
205}
206
724ecd3c 207static void crypto_aes_decrypt(struct crypto_tfm *tfm, u8 *dst, const u8 *src)
9bf4852d
DM
208{
209 struct crypto_sparc64_aes_ctx *ctx = crypto_tfm_ctx(tfm);
210
0bdcaf74 211 ctx->ops->decrypt(&ctx->key[0], (const u32 *) src, (u32 *) dst);
9bf4852d
DM
212}
213
9bf4852d
DM
214#define AES_BLOCK_MASK (~(AES_BLOCK_SIZE-1))
215
9bf4852d
DM
216static int ecb_encrypt(struct blkcipher_desc *desc,
217 struct scatterlist *dst, struct scatterlist *src,
218 unsigned int nbytes)
219{
220 struct crypto_sparc64_aes_ctx *ctx = crypto_blkcipher_ctx(desc->tfm);
221 struct blkcipher_walk walk;
222 int err;
223
224 blkcipher_walk_init(&walk, dst, src, nbytes);
225 err = blkcipher_walk_virt(desc, &walk);
b35d282e 226 desc->flags &= ~CRYPTO_TFM_REQ_MAY_SLEEP;
9bf4852d 227
0bdcaf74 228 ctx->ops->load_encrypt_keys(&ctx->key[0]);
9bf4852d
DM
229 while ((nbytes = walk.nbytes)) {
230 unsigned int block_len = nbytes & AES_BLOCK_MASK;
231
232 if (likely(block_len)) {
0bdcaf74
DM
233 ctx->ops->ecb_encrypt(&ctx->key[0],
234 (const u64 *)walk.src.virt.addr,
235 (u64 *) walk.dst.virt.addr,
236 block_len);
9bf4852d
DM
237 }
238 nbytes &= AES_BLOCK_SIZE - 1;
239 err = blkcipher_walk_done(desc, &walk, nbytes);
240 }
241 fprs_write(0);
242 return err;
243}
244
9bf4852d
DM
/* ECB-decrypt a scatterlist.  Unlike the encrypt path, the assembler
 * decrypt routine is handed key_end — a pointer one element past the
 * expanded schedule (presumably it reads round keys in reverse; confirm
 * against the assembler source).
 */
static int ecb_decrypt(struct blkcipher_desc *desc,
		       struct scatterlist *dst, struct scatterlist *src,
		       unsigned int nbytes)
{
	struct crypto_sparc64_aes_ctx *ctx = crypto_blkcipher_ctx(desc->tfm);
	struct blkcipher_walk walk;
	u64 *key_end;
	int err;

	blkcipher_walk_init(&walk, dst, src, nbytes);
	err = blkcipher_walk_virt(desc, &walk);
	/* Walk runs with round keys live in FPU regs; do not sleep. */
	desc->flags &= ~CRYPTO_TFM_REQ_MAY_SLEEP;

	ctx->ops->load_decrypt_keys(&ctx->key[0]);
	key_end = &ctx->key[ctx->expanded_key_length / sizeof(u64)];
	while ((nbytes = walk.nbytes)) {
		unsigned int block_len = nbytes & AES_BLOCK_MASK;

		if (likely(block_len)) {
			ctx->ops->ecb_decrypt(key_end,
					      (const u64 *) walk.src.virt.addr,
					      (u64 *) walk.dst.virt.addr, block_len);
		}
		nbytes &= AES_BLOCK_SIZE - 1;
		err = blkcipher_walk_done(desc, &walk, nbytes);
	}
	/* Clear %fprs: FPU registers no longer hold live key state. */
	fprs_write(0);

	return err;
}
275
9bf4852d
DM
/* CBC-encrypt a scatterlist.  The chaining value is passed through
 * walk.iv to the assembler routine (non-const, so presumably updated
 * in place across calls — confirm against the assembler source).
 */
static int cbc_encrypt(struct blkcipher_desc *desc,
		       struct scatterlist *dst, struct scatterlist *src,
		       unsigned int nbytes)
{
	struct crypto_sparc64_aes_ctx *ctx = crypto_blkcipher_ctx(desc->tfm);
	struct blkcipher_walk walk;
	int err;

	blkcipher_walk_init(&walk, dst, src, nbytes);
	err = blkcipher_walk_virt(desc, &walk);
	/* Walk runs with round keys live in FPU regs; do not sleep. */
	desc->flags &= ~CRYPTO_TFM_REQ_MAY_SLEEP;

	ctx->ops->load_encrypt_keys(&ctx->key[0]);
	while ((nbytes = walk.nbytes)) {
		unsigned int block_len = nbytes & AES_BLOCK_MASK;

		if (likely(block_len)) {
			ctx->ops->cbc_encrypt(&ctx->key[0],
					      (const u64 *)walk.src.virt.addr,
					      (u64 *) walk.dst.virt.addr,
					      block_len, (u64 *) walk.iv);
		}
		nbytes &= AES_BLOCK_SIZE - 1;
		err = blkcipher_walk_done(desc, &walk, nbytes);
	}
	/* Clear %fprs: FPU registers no longer hold live key state. */
	fprs_write(0);
	return err;
}
304
9bf4852d
DM
305static int cbc_decrypt(struct blkcipher_desc *desc,
306 struct scatterlist *dst, struct scatterlist *src,
307 unsigned int nbytes)
308{
309 struct crypto_sparc64_aes_ctx *ctx = crypto_blkcipher_ctx(desc->tfm);
310 struct blkcipher_walk walk;
311 u64 *key_end;
312 int err;
313
314 blkcipher_walk_init(&walk, dst, src, nbytes);
315 err = blkcipher_walk_virt(desc, &walk);
b35d282e 316 desc->flags &= ~CRYPTO_TFM_REQ_MAY_SLEEP;
9bf4852d 317
0bdcaf74 318 ctx->ops->load_decrypt_keys(&ctx->key[0]);
9bf4852d
DM
319 key_end = &ctx->key[ctx->expanded_key_length / sizeof(u64)];
320 while ((nbytes = walk.nbytes)) {
321 unsigned int block_len = nbytes & AES_BLOCK_MASK;
322
0bdcaf74
DM
323 if (likely(block_len)) {
324 ctx->ops->cbc_decrypt(key_end,
325 (const u64 *) walk.src.virt.addr,
326 (u64 *) walk.dst.virt.addr,
327 block_len, (u64 *) walk.iv);
328 }
9bf4852d
DM
329 nbytes &= AES_BLOCK_SIZE - 1;
330 err = blkcipher_walk_done(desc, &walk, nbytes);
331 }
332 fprs_write(0);
333
334 return err;
335}
336
a8d97cef
DM
/* Finish the trailing partial block of a CTR request: ECB-encrypt the
 * counter block into a keystream buffer, XOR only the remaining bytes
 * into dst, then bump the counter.  The caller (ctr_crypt()) has
 * already loaded the encrypt keys into the FPU.
 */
static void ctr_crypt_final(struct crypto_sparc64_aes_ctx *ctx,
			    struct blkcipher_walk *walk)
{
	u8 *ctrblk = walk->iv;
	u64 keystream[AES_BLOCK_SIZE / sizeof(u64)];
	u8 *src = walk->src.virt.addr;
	u8 *dst = walk->dst.virt.addr;
	unsigned int nbytes = walk->nbytes;	/* < AES_BLOCK_SIZE here */

	/* One ECB encryption of the counter yields the keystream block. */
	ctx->ops->ecb_encrypt(&ctx->key[0], (const u64 *)ctrblk,
			      keystream, AES_BLOCK_SIZE);
	crypto_xor_cpy(dst, (u8 *) keystream, src, nbytes);
	crypto_inc(ctrblk, AES_BLOCK_SIZE);
}
351
9fd130ec
DM
352static int ctr_crypt(struct blkcipher_desc *desc,
353 struct scatterlist *dst, struct scatterlist *src,
354 unsigned int nbytes)
355{
356 struct crypto_sparc64_aes_ctx *ctx = crypto_blkcipher_ctx(desc->tfm);
357 struct blkcipher_walk walk;
358 int err;
359
360 blkcipher_walk_init(&walk, dst, src, nbytes);
a8d97cef
DM
361 err = blkcipher_walk_virt_block(desc, &walk, AES_BLOCK_SIZE);
362 desc->flags &= ~CRYPTO_TFM_REQ_MAY_SLEEP;
9fd130ec
DM
363
364 ctx->ops->load_encrypt_keys(&ctx->key[0]);
a8d97cef 365 while ((nbytes = walk.nbytes) >= AES_BLOCK_SIZE) {
9fd130ec
DM
366 unsigned int block_len = nbytes & AES_BLOCK_MASK;
367
368 if (likely(block_len)) {
369 ctx->ops->ctr_crypt(&ctx->key[0],
370 (const u64 *)walk.src.virt.addr,
371 (u64 *) walk.dst.virt.addr,
372 block_len, (u64 *) walk.iv);
373 }
374 nbytes &= AES_BLOCK_SIZE - 1;
375 err = blkcipher_walk_done(desc, &walk, nbytes);
376 }
a8d97cef
DM
377 if (walk.nbytes) {
378 ctr_crypt_final(ctx, &walk);
379 err = blkcipher_walk_done(desc, &walk, 0);
380 }
9fd130ec
DM
381 fprs_write(0);
382 return err;
383}
384
9bf4852d
DM
/* Algorithm registrations: the bare "aes" single-block cipher plus the
 * ecb/cbc/ctr blkcipher modes.  All share crypto_sparc64_aes_ctx and
 * aes_set_key(); SPARC_CR_OPCODE_PRIORITY outranks the generic C
 * implementations so these are preferred when the opcodes exist.
 */
static struct crypto_alg algs[] = { {
	.cra_name		= "aes",
	.cra_driver_name	= "aes-sparc64",
	.cra_priority		= SPARC_CR_OPCODE_PRIORITY,
	.cra_flags		= CRYPTO_ALG_TYPE_CIPHER,
	.cra_blocksize		= AES_BLOCK_SIZE,
	.cra_ctxsize		= sizeof(struct crypto_sparc64_aes_ctx),
	.cra_alignmask		= 3,
	.cra_module		= THIS_MODULE,
	.cra_u	= {
		.cipher	= {
			.cia_min_keysize	= AES_MIN_KEY_SIZE,
			.cia_max_keysize	= AES_MAX_KEY_SIZE,
			.cia_setkey		= aes_set_key,
			.cia_encrypt		= crypto_aes_encrypt,
			.cia_decrypt		= crypto_aes_decrypt
		}
	}
}, {
	.cra_name		= "ecb(aes)",
	.cra_driver_name	= "ecb-aes-sparc64",
	.cra_priority		= SPARC_CR_OPCODE_PRIORITY,
	.cra_flags		= CRYPTO_ALG_TYPE_BLKCIPHER,
	.cra_blocksize		= AES_BLOCK_SIZE,
	.cra_ctxsize		= sizeof(struct crypto_sparc64_aes_ctx),
	.cra_alignmask		= 7,	/* bulk routines want u64 alignment */
	.cra_type		= &crypto_blkcipher_type,
	.cra_module		= THIS_MODULE,
	.cra_u = {
		.blkcipher = {
			.min_keysize	= AES_MIN_KEY_SIZE,
			.max_keysize	= AES_MAX_KEY_SIZE,
			.setkey		= aes_set_key,
			.encrypt	= ecb_encrypt,
			.decrypt	= ecb_decrypt,
		},
	},
}, {
	.cra_name		= "cbc(aes)",
	.cra_driver_name	= "cbc-aes-sparc64",
	.cra_priority		= SPARC_CR_OPCODE_PRIORITY,
	.cra_flags		= CRYPTO_ALG_TYPE_BLKCIPHER,
	.cra_blocksize		= AES_BLOCK_SIZE,
	.cra_ctxsize		= sizeof(struct crypto_sparc64_aes_ctx),
	.cra_alignmask		= 7,
	.cra_type		= &crypto_blkcipher_type,
	.cra_module		= THIS_MODULE,
	.cra_u = {
		.blkcipher = {
			.min_keysize	= AES_MIN_KEY_SIZE,
			.max_keysize	= AES_MAX_KEY_SIZE,
			.ivsize		= AES_BLOCK_SIZE,
			.setkey		= aes_set_key,
			.encrypt	= cbc_encrypt,
			.decrypt	= cbc_decrypt,
		},
	},
}, {
	.cra_name		= "ctr(aes)",
	.cra_driver_name	= "ctr-aes-sparc64",
	.cra_priority		= SPARC_CR_OPCODE_PRIORITY,
	.cra_flags		= CRYPTO_ALG_TYPE_BLKCIPHER,
	.cra_blocksize		= 1,	/* CTR is a stream mode */
	.cra_ctxsize		= sizeof(struct crypto_sparc64_aes_ctx),
	.cra_alignmask		= 7,
	.cra_type		= &crypto_blkcipher_type,
	.cra_module		= THIS_MODULE,
	.cra_u = {
		.blkcipher = {
			.min_keysize	= AES_MIN_KEY_SIZE,
			.max_keysize	= AES_MAX_KEY_SIZE,
			.ivsize		= AES_BLOCK_SIZE,
			.setkey		= aes_set_key,
			/* encrypt == decrypt in CTR mode */
			.encrypt	= ctr_crypt,
			.decrypt	= ctr_crypt,
		},
	},
} };
463
464static bool __init sparc64_has_aes_opcode(void)
465{
466 unsigned long cfr;
467
468 if (!(sparc64_elf_hwcap & HWCAP_SPARC_CRYPTO))
469 return false;
470
471 __asm__ __volatile__("rd %%asr26, %0" : "=r" (cfr));
472 if (!(cfr & CFR_AES))
473 return false;
474
475 return true;
476}
477
478static int __init aes_sparc64_mod_init(void)
479{
9bf4852d
DM
480 if (sparc64_has_aes_opcode()) {
481 pr_info("Using sparc64 aes opcodes optimized AES implementation\n");
482 return crypto_register_algs(algs, ARRAY_SIZE(algs));
483 }
484 pr_info("sparc64 aes opcodes not available.\n");
485 return -ENODEV;
486}
487
/* Module exit: undo the registration done in aes_sparc64_mod_init(). */
static void __exit aes_sparc64_mod_fini(void)
{
	crypto_unregister_algs(algs, ARRAY_SIZE(algs));
}
492
493module_init(aes_sparc64_mod_init);
494module_exit(aes_sparc64_mod_fini);
495
496MODULE_LICENSE("GPL");
b0126417 497MODULE_DESCRIPTION("Rijndael (AES) Cipher Algorithm, sparc64 aes opcode accelerated");
9bf4852d 498
5d26a105 499MODULE_ALIAS_CRYPTO("aes");
226f7cea
DM
500
501#include "crop_devid.c"