// SPDX-License-Identifier: GPL-2.0-or-later
/*
 * Glue Code for x86_64/AVX2/AES-NI assembler optimized version of Camellia
 *
 * Copyright © 2013 Jussi Kivilinna <jussi.kivilinna@mbnet.fi>
 */

#include <asm/crypto/camellia.h>
#include <asm/crypto/glue_helper.h>
#include <crypto/algapi.h>
#include <crypto/internal/simd.h>
#include <crypto/xts.h>
#include <linux/crypto.h>
#include <linux/err.h>
#include <linux/module.h>
#include <linux/types.h>

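/*
 * Blocks handled per call by the widest assembler routines: the 16-way
 * functions are reused from the AVX/AES-NI Camellia module, while the
 * 32-way functions come from the AVX2 assembler backing this glue code.
 */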
#define CAMELLIA_AESNI_PARALLEL_BLOCKS 16
#define CAMELLIA_AESNI_AVX2_PARALLEL_BLOCKS 32

/* 32-way AVX2/AES-NI parallel cipher functions */
asmlinkage void camellia_ecb_enc_32way(struct camellia_ctx *ctx, u8 *dst,
                                       const u8 *src);
asmlinkage void camellia_ecb_dec_32way(struct camellia_ctx *ctx, u8 *dst,
                                       const u8 *src);

asmlinkage void camellia_cbc_dec_32way(struct camellia_ctx *ctx, u8 *dst,
                                       const u8 *src);
asmlinkage void camellia_ctr_32way(struct camellia_ctx *ctx, u8 *dst,
                                   const u8 *src, le128 *iv);

asmlinkage void camellia_xts_enc_32way(struct camellia_ctx *ctx, u8 *dst,
                                       const u8 *src, le128 *iv);
asmlinkage void camellia_xts_dec_32way(struct camellia_ctx *ctx, u8 *dst,
                                       const u8 *src, le128 *iv);

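/*
 * Dispatch tables for the common glue helper.  Entries run from the widest
 * batch down to a single block; the helper walks them and uses the largest
 * .num_blocks that still fits the data remaining in the walk.
 * .fpu_blocks_limit is the smallest batch for which taking the FPU
 * (kernel_fpu_begin()) is considered worthwhile.
 */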
static const struct common_glue_ctx camellia_enc = {
        .num_funcs = 4,
        .fpu_blocks_limit = CAMELLIA_AESNI_PARALLEL_BLOCKS,

        .funcs = { {
                .num_blocks = CAMELLIA_AESNI_AVX2_PARALLEL_BLOCKS,
                .fn_u = { .ecb = GLUE_FUNC_CAST(camellia_ecb_enc_32way) }
        }, {
                .num_blocks = CAMELLIA_AESNI_PARALLEL_BLOCKS,
                .fn_u = { .ecb = GLUE_FUNC_CAST(camellia_ecb_enc_16way) }
        }, {
                .num_blocks = 2,
                .fn_u = { .ecb = GLUE_FUNC_CAST(camellia_enc_blk_2way) }
        }, {
                .num_blocks = 1,
                .fn_u = { .ecb = GLUE_FUNC_CAST(camellia_enc_blk) }
        } }
};

static const struct common_glue_ctx camellia_ctr = {
        .num_funcs = 4,
        .fpu_blocks_limit = CAMELLIA_AESNI_PARALLEL_BLOCKS,

        .funcs = { {
                .num_blocks = CAMELLIA_AESNI_AVX2_PARALLEL_BLOCKS,
                .fn_u = { .ctr = GLUE_CTR_FUNC_CAST(camellia_ctr_32way) }
        }, {
                .num_blocks = CAMELLIA_AESNI_PARALLEL_BLOCKS,
                .fn_u = { .ctr = GLUE_CTR_FUNC_CAST(camellia_ctr_16way) }
        }, {
                .num_blocks = 2,
                .fn_u = { .ctr = GLUE_CTR_FUNC_CAST(camellia_crypt_ctr_2way) }
        }, {
                .num_blocks = 1,
                .fn_u = { .ctr = GLUE_CTR_FUNC_CAST(camellia_crypt_ctr) }
        } }
};

static const struct common_glue_ctx camellia_enc_xts = {
        .num_funcs = 3,
        .fpu_blocks_limit = CAMELLIA_AESNI_PARALLEL_BLOCKS,

        .funcs = { {
                .num_blocks = CAMELLIA_AESNI_AVX2_PARALLEL_BLOCKS,
                .fn_u = { .xts = GLUE_XTS_FUNC_CAST(camellia_xts_enc_32way) }
        }, {
                .num_blocks = CAMELLIA_AESNI_PARALLEL_BLOCKS,
                .fn_u = { .xts = GLUE_XTS_FUNC_CAST(camellia_xts_enc_16way) }
        }, {
                .num_blocks = 1,
                .fn_u = { .xts = GLUE_XTS_FUNC_CAST(camellia_xts_enc) }
        } }
};

static const struct common_glue_ctx camellia_dec = {
        .num_funcs = 4,
        .fpu_blocks_limit = CAMELLIA_AESNI_PARALLEL_BLOCKS,

        .funcs = { {
                .num_blocks = CAMELLIA_AESNI_AVX2_PARALLEL_BLOCKS,
                .fn_u = { .ecb = GLUE_FUNC_CAST(camellia_ecb_dec_32way) }
        }, {
                .num_blocks = CAMELLIA_AESNI_PARALLEL_BLOCKS,
                .fn_u = { .ecb = GLUE_FUNC_CAST(camellia_ecb_dec_16way) }
        }, {
                .num_blocks = 2,
                .fn_u = { .ecb = GLUE_FUNC_CAST(camellia_dec_blk_2way) }
        }, {
                .num_blocks = 1,
                .fn_u = { .ecb = GLUE_FUNC_CAST(camellia_dec_blk) }
        } }
};

static const struct common_glue_ctx camellia_dec_cbc = {
        .num_funcs = 4,
        .fpu_blocks_limit = CAMELLIA_AESNI_PARALLEL_BLOCKS,

        .funcs = { {
                .num_blocks = CAMELLIA_AESNI_AVX2_PARALLEL_BLOCKS,
                .fn_u = { .cbc = GLUE_CBC_FUNC_CAST(camellia_cbc_dec_32way) }
        }, {
                .num_blocks = CAMELLIA_AESNI_PARALLEL_BLOCKS,
                .fn_u = { .cbc = GLUE_CBC_FUNC_CAST(camellia_cbc_dec_16way) }
        }, {
                .num_blocks = 2,
                .fn_u = { .cbc = GLUE_CBC_FUNC_CAST(camellia_decrypt_cbc_2way) }
        }, {
                .num_blocks = 1,
                .fn_u = { .cbc = GLUE_CBC_FUNC_CAST(camellia_dec_blk) }
        } }
};

static const struct common_glue_ctx camellia_dec_xts = {
        .num_funcs = 3,
        .fpu_blocks_limit = CAMELLIA_AESNI_PARALLEL_BLOCKS,

        .funcs = { {
                .num_blocks = CAMELLIA_AESNI_AVX2_PARALLEL_BLOCKS,
                .fn_u = { .xts = GLUE_XTS_FUNC_CAST(camellia_xts_dec_32way) }
        }, {
                .num_blocks = CAMELLIA_AESNI_PARALLEL_BLOCKS,
                .fn_u = { .xts = GLUE_XTS_FUNC_CAST(camellia_xts_dec_16way) }
        }, {
                .num_blocks = 1,
                .fn_u = { .xts = GLUE_XTS_FUNC_CAST(camellia_xts_dec) }
        } }
};

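/* Key expansion is shared with the other x86 Camellia implementations. */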
static int camellia_setkey(struct crypto_skcipher *tfm, const u8 *key,
                           unsigned int keylen)
{
        return __camellia_setkey(crypto_skcipher_ctx(tfm), key, keylen,
                                 &tfm->base.crt_flags);
}

static int ecb_encrypt(struct skcipher_request *req)
{
        return glue_ecb_req_128bit(&camellia_enc, req);
}

static int ecb_decrypt(struct skcipher_request *req)
{
        return glue_ecb_req_128bit(&camellia_dec, req);
}

static int cbc_encrypt(struct skcipher_request *req)
{
        return glue_cbc_encrypt_req_128bit(GLUE_FUNC_CAST(camellia_enc_blk),
                                           req);
}

static int cbc_decrypt(struct skcipher_request *req)
{
        return glue_cbc_decrypt_req_128bit(&camellia_dec_cbc, req);
}

static int ctr_crypt(struct skcipher_request *req)
{
        return glue_ctr_req_128bit(&camellia_ctr, req);
}

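/*
 * XTS splits the key in two: ->tweak_ctx encrypts the IV into the initial
 * tweak (always with the single-block encrypt function, even on the decrypt
 * path), while ->crypt_ctx is used for the data blocks themselves.
 */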
static int xts_encrypt(struct skcipher_request *req)
{
        struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
        struct camellia_xts_ctx *ctx = crypto_skcipher_ctx(tfm);

        return glue_xts_req_128bit(&camellia_enc_xts, req,
                                   XTS_TWEAK_CAST(camellia_enc_blk),
                                   &ctx->tweak_ctx, &ctx->crypt_ctx);
}

static int xts_decrypt(struct skcipher_request *req)
{
        struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
        struct camellia_xts_ctx *ctx = crypto_skcipher_ctx(tfm);

        return glue_xts_req_128bit(&camellia_dec_xts, req,
                                   XTS_TWEAK_CAST(camellia_enc_blk),
                                   &ctx->tweak_ctx, &ctx->crypt_ctx);
}

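/*
 * The "__" name prefix and CRYPTO_ALG_INTERNAL mark these algorithms as not
 * directly selectable; they are only reachable through the SIMD wrappers
 * registered below, which take the FPU when it is usable and defer to
 * cryptd otherwise.
 */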
static struct skcipher_alg camellia_algs[] = {
        {
                .base.cra_name = "__ecb(camellia)",
                .base.cra_driver_name = "__ecb-camellia-aesni-avx2",
                .base.cra_priority = 500,
                .base.cra_flags = CRYPTO_ALG_INTERNAL,
                .base.cra_blocksize = CAMELLIA_BLOCK_SIZE,
                .base.cra_ctxsize = sizeof(struct camellia_ctx),
                .base.cra_module = THIS_MODULE,
                .min_keysize = CAMELLIA_MIN_KEY_SIZE,
                .max_keysize = CAMELLIA_MAX_KEY_SIZE,
                .setkey = camellia_setkey,
                .encrypt = ecb_encrypt,
                .decrypt = ecb_decrypt,
        }, {
                .base.cra_name = "__cbc(camellia)",
                .base.cra_driver_name = "__cbc-camellia-aesni-avx2",
                .base.cra_priority = 500,
                .base.cra_flags = CRYPTO_ALG_INTERNAL,
                .base.cra_blocksize = CAMELLIA_BLOCK_SIZE,
                .base.cra_ctxsize = sizeof(struct camellia_ctx),
                .base.cra_module = THIS_MODULE,
                .min_keysize = CAMELLIA_MIN_KEY_SIZE,
                .max_keysize = CAMELLIA_MAX_KEY_SIZE,
                .ivsize = CAMELLIA_BLOCK_SIZE,
                .setkey = camellia_setkey,
                .encrypt = cbc_encrypt,
                .decrypt = cbc_decrypt,
        }, {
                .base.cra_name = "__ctr(camellia)",
                .base.cra_driver_name = "__ctr-camellia-aesni-avx2",
                .base.cra_priority = 500,
                .base.cra_flags = CRYPTO_ALG_INTERNAL,
                .base.cra_blocksize = 1,
                .base.cra_ctxsize = sizeof(struct camellia_ctx),
                .base.cra_module = THIS_MODULE,
                .min_keysize = CAMELLIA_MIN_KEY_SIZE,
                .max_keysize = CAMELLIA_MAX_KEY_SIZE,
                .ivsize = CAMELLIA_BLOCK_SIZE,
                .chunksize = CAMELLIA_BLOCK_SIZE,
                .setkey = camellia_setkey,
                .encrypt = ctr_crypt,
                .decrypt = ctr_crypt,
        }, {
                .base.cra_name = "__xts(camellia)",
                .base.cra_driver_name = "__xts-camellia-aesni-avx2",
                .base.cra_priority = 500,
                .base.cra_flags = CRYPTO_ALG_INTERNAL,
                .base.cra_blocksize = CAMELLIA_BLOCK_SIZE,
                .base.cra_ctxsize = sizeof(struct camellia_xts_ctx),
                .base.cra_module = THIS_MODULE,
                .min_keysize = 2 * CAMELLIA_MIN_KEY_SIZE,
                .max_keysize = 2 * CAMELLIA_MAX_KEY_SIZE,
                .ivsize = CAMELLIA_BLOCK_SIZE,
                .setkey = xts_camellia_setkey,
                .encrypt = xts_encrypt,
                .decrypt = xts_decrypt,
        },
};

static struct simd_skcipher_alg *camellia_simd_algs[ARRAY_SIZE(camellia_algs)];

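/*
 * The assembler requires AVX2, AES-NI and OS-enabled extended state saving
 * (OSXSAVE with XMM/YMM state enabled), so refuse to load when any of these
 * is missing.
 */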
static int __init camellia_aesni_init(void)
{
        const char *feature_name;

        if (!boot_cpu_has(X86_FEATURE_AVX) ||
            !boot_cpu_has(X86_FEATURE_AVX2) ||
            !boot_cpu_has(X86_FEATURE_AES) ||
            !boot_cpu_has(X86_FEATURE_OSXSAVE)) {
                pr_info("AVX2 or AES-NI instructions are not detected.\n");
                return -ENODEV;
        }

        if (!cpu_has_xfeatures(XFEATURE_MASK_SSE | XFEATURE_MASK_YMM,
                               &feature_name)) {
                pr_info("CPU feature '%s' is not supported.\n", feature_name);
                return -ENODEV;
        }

        return simd_register_skciphers_compat(camellia_algs,
                                              ARRAY_SIZE(camellia_algs),
                                              camellia_simd_algs);
}

static void __exit camellia_aesni_fini(void)
{
        simd_unregister_skciphers(camellia_algs, ARRAY_SIZE(camellia_algs),
                                  camellia_simd_algs);
}

module_init(camellia_aesni_init);
module_exit(camellia_aesni_fini);

MODULE_LICENSE("GPL");
MODULE_DESCRIPTION("Camellia Cipher Algorithm, AES-NI/AVX2 optimized");
MODULE_ALIAS_CRYPTO("camellia");
MODULE_ALIAS_CRYPTO("camellia-asm");
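
/*
 * Usage sketch (illustrative only, not part of this driver): a kernel
 * consumer requests the plain algorithm name and the crypto core selects
 * this driver by priority when the CPU supports it:
 *
 *	struct crypto_skcipher *tfm;
 *
 *	tfm = crypto_alloc_skcipher("xts(camellia)", 0, 0);
 *	if (IS_ERR(tfm))
 *		return PTR_ERR(tfm);
 *	err = crypto_skcipher_setkey(tfm, key, keylen);
 *	...
 *	crypto_free_skcipher(tfm);
 */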