/*
 * Cryptographic API.
 *
 * s390 implementation of the AES Cipher Algorithm.
 *
 * s390 Version:
 *   Copyright IBM Corp. 2005,2007
 *   Author(s): Jan Glauber (jang@de.ibm.com)
 *		Sebastian Siewior <sebastian@breakpoint.cc> SW-Fallback
 *
 * Derived from "crypto/aes_generic.c"
 *
 * This program is free software; you can redistribute it and/or modify it
 * under the terms of the GNU General Public License as published by the Free
 * Software Foundation; either version 2 of the License, or (at your option)
 * any later version.
 *
 */

#include <crypto/aes.h>
#include <crypto/algapi.h>
#include <linux/err.h>
#include <linux/module.h>
#include <linux/init.h>
#include "crypt_s390.h"

#define AES_KEYLEN_128		1
#define AES_KEYLEN_192		2
#define AES_KEYLEN_256		4

static char keylen_flag;

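/*
 * Per-tfm context: the raw key and its length for the CPACF instructions,
 * the function codes chosen at setkey time (enc/dec), the CBC IV buffer,
 * and a software fallback tfm used when the hardware does not support the
 * requested key length.
 */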
struct s390_aes_ctx {
	u8 iv[AES_BLOCK_SIZE];
	u8 key[AES_MAX_KEY_SIZE];
	long enc;
	long dec;
	int key_len;
	union {
		struct crypto_blkcipher *blk;
		struct crypto_cipher *cip;
	} fallback;
};

/*
 * Check if the key_len is supported by the HW.
 * Returns 0 if it is, a positive number if it is not and software
 * fallback is required, or a negative number if the key size is not valid.
 */
static int need_fallback(unsigned int key_len)
{
	switch (key_len) {
	case 16:
		if (!(keylen_flag & AES_KEYLEN_128))
			return 1;
		break;
	case 24:
		if (!(keylen_flag & AES_KEYLEN_192))
			return 1;
		break;
	case 32:
		if (!(keylen_flag & AES_KEYLEN_256))
			return 1;
		break;
	default:
		return -1;
	}
	return 0;
}

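/*
 * Forward the key to the software fallback cipher for key lengths the
 * hardware cannot handle, mirroring the request flags into the fallback
 * tfm and the result flags back into the caller's tfm.
 */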
static int setkey_fallback_cip(struct crypto_tfm *tfm, const u8 *in_key,
			       unsigned int key_len)
{
	struct s390_aes_ctx *sctx = crypto_tfm_ctx(tfm);
	int ret;

	sctx->fallback.cip->base.crt_flags &= ~CRYPTO_TFM_REQ_MASK;
	sctx->fallback.cip->base.crt_flags |= (tfm->crt_flags &
			CRYPTO_TFM_REQ_MASK);

	ret = crypto_cipher_setkey(sctx->fallback.cip, in_key, key_len);
	if (ret) {
		tfm->crt_flags &= ~CRYPTO_TFM_RES_MASK;
		tfm->crt_flags |= (sctx->fallback.cip->base.crt_flags &
				CRYPTO_TFM_RES_MASK);
	}
	return ret;
}

static int aes_set_key(struct crypto_tfm *tfm, const u8 *in_key,
		       unsigned int key_len)
{
	struct s390_aes_ctx *sctx = crypto_tfm_ctx(tfm);
	u32 *flags = &tfm->crt_flags;
	int ret;

	ret = need_fallback(key_len);
	if (ret < 0) {
		*flags |= CRYPTO_TFM_RES_BAD_KEY_LEN;
		return -EINVAL;
	}

	sctx->key_len = key_len;
	if (!ret) {
		memcpy(sctx->key, in_key, key_len);
		return 0;
	}

	return setkey_fallback_cip(tfm, in_key, key_len);
}

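/*
 * Single-block en-/decryption: hand one AES block to the KM (cipher
 * message) instruction, or to the allocated software fallback cipher if
 * the key length is not supported by the hardware.
 */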
static void aes_encrypt(struct crypto_tfm *tfm, u8 *out, const u8 *in)
{
	const struct s390_aes_ctx *sctx = crypto_tfm_ctx(tfm);

	if (unlikely(need_fallback(sctx->key_len))) {
		crypto_cipher_encrypt_one(sctx->fallback.cip, out, in);
		return;
	}

	switch (sctx->key_len) {
	case 16:
		crypt_s390_km(KM_AES_128_ENCRYPT, &sctx->key, out, in,
			      AES_BLOCK_SIZE);
		break;
	case 24:
		crypt_s390_km(KM_AES_192_ENCRYPT, &sctx->key, out, in,
			      AES_BLOCK_SIZE);
		break;
	case 32:
		crypt_s390_km(KM_AES_256_ENCRYPT, &sctx->key, out, in,
			      AES_BLOCK_SIZE);
		break;
	}
}

static void aes_decrypt(struct crypto_tfm *tfm, u8 *out, const u8 *in)
{
	const struct s390_aes_ctx *sctx = crypto_tfm_ctx(tfm);

	if (unlikely(need_fallback(sctx->key_len))) {
		crypto_cipher_decrypt_one(sctx->fallback.cip, out, in);
		return;
	}

	switch (sctx->key_len) {
	case 16:
		crypt_s390_km(KM_AES_128_DECRYPT, &sctx->key, out, in,
			      AES_BLOCK_SIZE);
		break;
	case 24:
		crypt_s390_km(KM_AES_192_DECRYPT, &sctx->key, out, in,
			      AES_BLOCK_SIZE);
		break;
	case 32:
		crypt_s390_km(KM_AES_256_DECRYPT, &sctx->key, out, in,
			      AES_BLOCK_SIZE);
		break;
	}
}

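/*
 * tfm init/exit hooks for the single-block "aes" algorithm: allocate and
 * release the software fallback cipher that is used when need_fallback()
 * reports an unsupported key length.
 */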
static int fallback_init_cip(struct crypto_tfm *tfm)
{
	const char *name = tfm->__crt_alg->cra_name;
	struct s390_aes_ctx *sctx = crypto_tfm_ctx(tfm);

	sctx->fallback.cip = crypto_alloc_cipher(name, 0,
			CRYPTO_ALG_ASYNC | CRYPTO_ALG_NEED_FALLBACK);

	if (IS_ERR(sctx->fallback.cip)) {
		printk(KERN_ERR "Error allocating fallback algo %s\n", name);
		return PTR_ERR(sctx->fallback.cip);
	}

	return 0;
}

static void fallback_exit_cip(struct crypto_tfm *tfm)
{
	struct s390_aes_ctx *sctx = crypto_tfm_ctx(tfm);

	crypto_free_cipher(sctx->fallback.cip);
	sctx->fallback.cip = NULL;
}

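/* Single-block AES cipher, accelerated by the KM instruction. */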
static struct crypto_alg aes_alg = {
	.cra_name		= "aes",
	.cra_driver_name	= "aes-s390",
	.cra_priority		= CRYPT_S390_PRIORITY,
	.cra_flags		= CRYPTO_ALG_TYPE_CIPHER |
				  CRYPTO_ALG_NEED_FALLBACK,
	.cra_blocksize		= AES_BLOCK_SIZE,
	.cra_ctxsize		= sizeof(struct s390_aes_ctx),
	.cra_module		= THIS_MODULE,
	.cra_list		= LIST_HEAD_INIT(aes_alg.cra_list),
	.cra_init		= fallback_init_cip,
	.cra_exit		= fallback_exit_cip,
	.cra_u			= {
		.cipher = {
			.cia_min_keysize	= AES_MIN_KEY_SIZE,
			.cia_max_keysize	= AES_MAX_KEY_SIZE,
			.cia_setkey		= aes_set_key,
			.cia_encrypt		= aes_encrypt,
			.cia_decrypt		= aes_decrypt,
		}
	}
};

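/*
 * Same as setkey_fallback_cip(), but for the blkcipher fallback used by
 * the ecb(aes) and cbc(aes) modes.
 */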
static int setkey_fallback_blk(struct crypto_tfm *tfm, const u8 *key,
			       unsigned int len)
{
	struct s390_aes_ctx *sctx = crypto_tfm_ctx(tfm);
	int ret;

	sctx->fallback.blk->base.crt_flags &= ~CRYPTO_TFM_REQ_MASK;
	sctx->fallback.blk->base.crt_flags |= (tfm->crt_flags &
			CRYPTO_TFM_REQ_MASK);

	ret = crypto_blkcipher_setkey(sctx->fallback.blk, key, len);
	if (ret) {
		tfm->crt_flags &= ~CRYPTO_TFM_RES_MASK;
		tfm->crt_flags |= (sctx->fallback.blk->base.crt_flags &
				CRYPTO_TFM_RES_MASK);
	}
	return ret;
}

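/*
 * Run a request through the software fallback blkcipher by temporarily
 * swapping desc->tfm; the original tfm is restored before returning.
 */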
static int fallback_blk_dec(struct blkcipher_desc *desc,
		struct scatterlist *dst, struct scatterlist *src,
		unsigned int nbytes)
{
	int ret;
	struct crypto_blkcipher *tfm;
	struct s390_aes_ctx *sctx = crypto_blkcipher_ctx(desc->tfm);

	tfm = desc->tfm;
	desc->tfm = sctx->fallback.blk;

	ret = crypto_blkcipher_decrypt_iv(desc, dst, src, nbytes);

	desc->tfm = tfm;
	return ret;
}

static int fallback_blk_enc(struct blkcipher_desc *desc,
		struct scatterlist *dst, struct scatterlist *src,
		unsigned int nbytes)
{
	int ret;
	struct crypto_blkcipher *tfm;
	struct s390_aes_ctx *sctx = crypto_blkcipher_ctx(desc->tfm);

	tfm = desc->tfm;
	desc->tfm = sctx->fallback.blk;

	ret = crypto_blkcipher_encrypt_iv(desc, dst, src, nbytes);

	desc->tfm = tfm;
	return ret;
}

static int ecb_aes_set_key(struct crypto_tfm *tfm, const u8 *in_key,
			   unsigned int key_len)
{
	struct s390_aes_ctx *sctx = crypto_tfm_ctx(tfm);
	int ret;

	ret = need_fallback(key_len);
	if (ret > 0) {
		sctx->key_len = key_len;
		return setkey_fallback_blk(tfm, in_key, key_len);
	}

	switch (key_len) {
	case 16:
		sctx->enc = KM_AES_128_ENCRYPT;
		sctx->dec = KM_AES_128_DECRYPT;
		break;
	case 24:
		sctx->enc = KM_AES_192_ENCRYPT;
		sctx->dec = KM_AES_192_DECRYPT;
		break;
	case 32:
		sctx->enc = KM_AES_256_ENCRYPT;
		sctx->dec = KM_AES_256_DECRYPT;
		break;
	}

	return aes_set_key(tfm, in_key, key_len);
}

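/*
 * ECB walk: process only complete AES blocks per iteration and feed them
 * to the KM instruction; partial trailing bytes are handed back to the
 * blkcipher walk.
 */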
static int ecb_aes_crypt(struct blkcipher_desc *desc, long func, void *param,
			 struct blkcipher_walk *walk)
{
	int ret = blkcipher_walk_virt(desc, walk);
	unsigned int nbytes;

	while ((nbytes = walk->nbytes)) {
		/* only use complete blocks */
		unsigned int n = nbytes & ~(AES_BLOCK_SIZE - 1);
		u8 *out = walk->dst.virt.addr;
		u8 *in = walk->src.virt.addr;

		ret = crypt_s390_km(func, param, out, in, n);
		BUG_ON((ret < 0) || (ret != n));

		nbytes &= AES_BLOCK_SIZE - 1;
		ret = blkcipher_walk_done(desc, walk, nbytes);
	}

	return ret;
}

static int ecb_aes_encrypt(struct blkcipher_desc *desc,
			   struct scatterlist *dst, struct scatterlist *src,
			   unsigned int nbytes)
{
	struct s390_aes_ctx *sctx = crypto_blkcipher_ctx(desc->tfm);
	struct blkcipher_walk walk;

	if (unlikely(need_fallback(sctx->key_len)))
		return fallback_blk_enc(desc, dst, src, nbytes);

	blkcipher_walk_init(&walk, dst, src, nbytes);
	return ecb_aes_crypt(desc, sctx->enc, sctx->key, &walk);
}

static int ecb_aes_decrypt(struct blkcipher_desc *desc,
			   struct scatterlist *dst, struct scatterlist *src,
			   unsigned int nbytes)
{
	struct s390_aes_ctx *sctx = crypto_blkcipher_ctx(desc->tfm);
	struct blkcipher_walk walk;

	if (unlikely(need_fallback(sctx->key_len)))
		return fallback_blk_dec(desc, dst, src, nbytes);

	blkcipher_walk_init(&walk, dst, src, nbytes);
	return ecb_aes_crypt(desc, sctx->dec, sctx->key, &walk);
}

static int fallback_init_blk(struct crypto_tfm *tfm)
{
	const char *name = tfm->__crt_alg->cra_name;
	struct s390_aes_ctx *sctx = crypto_tfm_ctx(tfm);

	sctx->fallback.blk = crypto_alloc_blkcipher(name, 0,
			CRYPTO_ALG_ASYNC | CRYPTO_ALG_NEED_FALLBACK);

	if (IS_ERR(sctx->fallback.blk)) {
		printk(KERN_ERR "Error allocating fallback algo %s\n", name);
		return PTR_ERR(sctx->fallback.blk);
	}

	return 0;
}

static void fallback_exit_blk(struct crypto_tfm *tfm)
{
	struct s390_aes_ctx *sctx = crypto_tfm_ctx(tfm);

	crypto_free_blkcipher(sctx->fallback.blk);
	sctx->fallback.blk = NULL;
}

static struct crypto_alg ecb_aes_alg = {
	.cra_name		= "ecb(aes)",
	.cra_driver_name	= "ecb-aes-s390",
	.cra_priority		= CRYPT_S390_COMPOSITE_PRIORITY,
	.cra_flags		= CRYPTO_ALG_TYPE_BLKCIPHER |
				  CRYPTO_ALG_NEED_FALLBACK,
	.cra_blocksize		= AES_BLOCK_SIZE,
	.cra_ctxsize		= sizeof(struct s390_aes_ctx),
	.cra_type		= &crypto_blkcipher_type,
	.cra_module		= THIS_MODULE,
	.cra_list		= LIST_HEAD_INIT(ecb_aes_alg.cra_list),
	.cra_init		= fallback_init_blk,
	.cra_exit		= fallback_exit_blk,
	.cra_u			= {
		.blkcipher = {
			.min_keysize		= AES_MIN_KEY_SIZE,
			.max_keysize		= AES_MAX_KEY_SIZE,
			.setkey			= ecb_aes_set_key,
			.encrypt		= ecb_aes_encrypt,
			.decrypt		= ecb_aes_decrypt,
		}
	}
};

static int cbc_aes_set_key(struct crypto_tfm *tfm, const u8 *in_key,
			   unsigned int key_len)
{
	struct s390_aes_ctx *sctx = crypto_tfm_ctx(tfm);
	int ret;

	ret = need_fallback(key_len);
	if (ret > 0) {
		sctx->key_len = key_len;
		return setkey_fallback_blk(tfm, in_key, key_len);
	}

	switch (key_len) {
	case 16:
		sctx->enc = KMC_AES_128_ENCRYPT;
		sctx->dec = KMC_AES_128_DECRYPT;
		break;
	case 24:
		sctx->enc = KMC_AES_192_ENCRYPT;
		sctx->dec = KMC_AES_192_DECRYPT;
		break;
	case 32:
		sctx->enc = KMC_AES_256_ENCRYPT;
		sctx->dec = KMC_AES_256_DECRYPT;
		break;
	}

	return aes_set_key(tfm, in_key, key_len);
}

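/*
 * CBC walk: the KMC instruction keeps the chaining value in its parameter
 * block, so the IV is copied into the param area before the walk and
 * written back to walk->iv once all blocks have been processed.
 */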
static int cbc_aes_crypt(struct blkcipher_desc *desc, long func, void *param,
			 struct blkcipher_walk *walk)
{
	int ret = blkcipher_walk_virt(desc, walk);
	unsigned int nbytes = walk->nbytes;

	if (!nbytes)
		goto out;

	memcpy(param, walk->iv, AES_BLOCK_SIZE);
	do {
		/* only use complete blocks */
		unsigned int n = nbytes & ~(AES_BLOCK_SIZE - 1);
		u8 *out = walk->dst.virt.addr;
		u8 *in = walk->src.virt.addr;

		ret = crypt_s390_kmc(func, param, out, in, n);
		BUG_ON((ret < 0) || (ret != n));

		nbytes &= AES_BLOCK_SIZE - 1;
		ret = blkcipher_walk_done(desc, walk, nbytes);
	} while ((nbytes = walk->nbytes));
	memcpy(walk->iv, param, AES_BLOCK_SIZE);

out:
	return ret;
}

static int cbc_aes_encrypt(struct blkcipher_desc *desc,
			   struct scatterlist *dst, struct scatterlist *src,
			   unsigned int nbytes)
{
	struct s390_aes_ctx *sctx = crypto_blkcipher_ctx(desc->tfm);
	struct blkcipher_walk walk;

	if (unlikely(need_fallback(sctx->key_len)))
		return fallback_blk_enc(desc, dst, src, nbytes);

	blkcipher_walk_init(&walk, dst, src, nbytes);
	return cbc_aes_crypt(desc, sctx->enc, sctx->iv, &walk);
}

static int cbc_aes_decrypt(struct blkcipher_desc *desc,
			   struct scatterlist *dst, struct scatterlist *src,
			   unsigned int nbytes)
{
	struct s390_aes_ctx *sctx = crypto_blkcipher_ctx(desc->tfm);
	struct blkcipher_walk walk;

	if (unlikely(need_fallback(sctx->key_len)))
		return fallback_blk_dec(desc, dst, src, nbytes);

	blkcipher_walk_init(&walk, dst, src, nbytes);
	return cbc_aes_crypt(desc, sctx->dec, sctx->iv, &walk);
}

static struct crypto_alg cbc_aes_alg = {
	.cra_name		= "cbc(aes)",
	.cra_driver_name	= "cbc-aes-s390",
	.cra_priority		= CRYPT_S390_COMPOSITE_PRIORITY,
	.cra_flags		= CRYPTO_ALG_TYPE_BLKCIPHER |
				  CRYPTO_ALG_NEED_FALLBACK,
	.cra_blocksize		= AES_BLOCK_SIZE,
	.cra_ctxsize		= sizeof(struct s390_aes_ctx),
	.cra_type		= &crypto_blkcipher_type,
	.cra_module		= THIS_MODULE,
	.cra_list		= LIST_HEAD_INIT(cbc_aes_alg.cra_list),
	.cra_init		= fallback_init_blk,
	.cra_exit		= fallback_exit_blk,
	.cra_u			= {
		.blkcipher = {
			.min_keysize		= AES_MIN_KEY_SIZE,
			.max_keysize		= AES_MAX_KEY_SIZE,
			.ivsize			= AES_BLOCK_SIZE,
			.setkey			= cbc_aes_set_key,
			.encrypt		= cbc_aes_encrypt,
			.decrypt		= cbc_aes_decrypt,
		}
	}
};

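/*
 * Probe which AES key lengths the CPACF hardware supports, then register
 * the cipher and the ecb/cbc blkcipher algorithms; registration is rolled
 * back in reverse order on failure.
 */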
static int __init aes_s390_init(void)
{
	int ret;

	if (crypt_s390_func_available(KM_AES_128_ENCRYPT))
		keylen_flag |= AES_KEYLEN_128;
	if (crypt_s390_func_available(KM_AES_192_ENCRYPT))
		keylen_flag |= AES_KEYLEN_192;
	if (crypt_s390_func_available(KM_AES_256_ENCRYPT))
		keylen_flag |= AES_KEYLEN_256;

	if (!keylen_flag)
		return -EOPNOTSUPP;

	/* z9 109 and z9 BC/EC only support 128 bit key length */
	if (keylen_flag == AES_KEYLEN_128)
		printk(KERN_INFO
		       "aes_s390: hardware acceleration only available for "
		       "128 bit keys\n");

	ret = crypto_register_alg(&aes_alg);
	if (ret)
		goto aes_err;

	ret = crypto_register_alg(&ecb_aes_alg);
	if (ret)
		goto ecb_aes_err;

	ret = crypto_register_alg(&cbc_aes_alg);
	if (ret)
		goto cbc_aes_err;

out:
	return ret;

cbc_aes_err:
	crypto_unregister_alg(&ecb_aes_alg);
ecb_aes_err:
	crypto_unregister_alg(&aes_alg);
aes_err:
	goto out;
}

static void __exit aes_s390_fini(void)
{
	crypto_unregister_alg(&cbc_aes_alg);
	crypto_unregister_alg(&ecb_aes_alg);
	crypto_unregister_alg(&aes_alg);
}

module_init(aes_s390_init);
module_exit(aes_s390_fini);

MODULE_ALIAS("aes");

MODULE_DESCRIPTION("Rijndael (AES) Cipher Algorithm");
MODULE_LICENSE("GPL");