crypto: remove CRYPTO_TFM_RES_BAD_KEY_LEN
[linux-block.git] / drivers / crypto / inside-secure / safexcel_cipher.c
CommitLineData
301422e3 1// SPDX-License-Identifier: GPL-2.0
1b44c5a6
AT
2/*
3 * Copyright (C) 2017 Marvell
4 *
5 * Antoine Tenart <antoine.tenart@free-electrons.com>
1b44c5a6
AT
6 */
7
4a593fb3 8#include <asm/unaligned.h>
1b44c5a6
AT
9#include <linux/device.h>
10#include <linux/dma-mapping.h>
11#include <linux/dmapool.h>
f6beaea3 12#include <crypto/aead.h>
1b44c5a6 13#include <crypto/aes.h>
f6beaea3 14#include <crypto/authenc.h>
4a593fb3 15#include <crypto/chacha.h>
f26882a3 16#include <crypto/ctr.h>
04007b0e 17#include <crypto/internal/des.h>
3e450886
PL
18#include <crypto/gcm.h>
19#include <crypto/ghash.h>
a6061921 20#include <crypto/poly1305.h>
f6beaea3 21#include <crypto/sha.h>
1769f704 22#include <crypto/sm3.h>
fcca797d 23#include <crypto/sm4.h>
c7da38a7 24#include <crypto/xts.h>
1b44c5a6 25#include <crypto/skcipher.h>
f6beaea3 26#include <crypto/internal/aead.h>
1eb7b403 27#include <crypto/internal/skcipher.h>
1b44c5a6
AT
28
29#include "safexcel.h"
30
/* Direction of a cipher operation as programmed into the engine. */
enum safexcel_cipher_direction {
	SAFEXCEL_ENCRYPT,
	SAFEXCEL_DECRYPT,
};
35
/* Base cipher algorithms supported by the EIP97/EIP197 engine. */
enum safexcel_cipher_alg {
	SAFEXCEL_DES,
	SAFEXCEL_3DES,
	SAFEXCEL_AES,
	SAFEXCEL_CHACHA20,
	SAFEXCEL_SM4,
};
43
1b44c5a6
AT
44struct safexcel_cipher_ctx {
45 struct safexcel_context base;
46 struct safexcel_crypto_priv *priv;
47
1b44c5a6 48 u32 mode;
a7dea8c0 49 enum safexcel_cipher_alg alg;
098e51e5
PL
50 u8 aead; /* !=0=AEAD, 2=IPSec ESP AEAD, 3=IPsec ESP GMAC */
51 u8 xcm; /* 0=authenc, 1=GCM, 2 reserved for CCM */
52 u8 aadskip;
53 u8 blocksz;
54 u32 ivmask;
55 u32 ctrinit;
1b44c5a6 56
c7da38a7 57 __le32 key[16];
54f9e8fa 58 u32 nonce;
c7da38a7 59 unsigned int key_len, xts;
f6beaea3
AT
60
61 /* All the below is AEAD specific */
a7dea8c0 62 u32 hash_alg;
f6beaea3 63 u32 state_sz;
13a1bb93
PL
64 __be32 ipad[SHA512_DIGEST_SIZE / sizeof(u32)];
65 __be32 opad[SHA512_DIGEST_SIZE / sizeof(u32)];
3e450886
PL
66
67 struct crypto_cipher *hkaes;
a6061921 68 struct crypto_aead *fback;
1b44c5a6
AT
69};
70
1eb7b403 71struct safexcel_cipher_req {
847ccfc5 72 enum safexcel_cipher_direction direction;
89332590
AT
73 /* Number of result descriptors associated to the request */
74 unsigned int rdescs;
1eb7b403 75 bool needs_inv;
19b347b3 76 int nr_src, nr_dst;
1eb7b403
OH
77};
78
098e51e5
PL
79static int safexcel_skcipher_iv(struct safexcel_cipher_ctx *ctx, u8 *iv,
80 struct safexcel_command_desc *cdesc)
1b44c5a6 81{
098e51e5 82 if (ctx->mode == CONTEXT_CONTROL_CRYPTO_MODE_CTR_LOAD) {
493e289c 83 cdesc->control_data.options |= EIP197_OPTION_4_TOKEN_IV_CMD;
493e289c
PL
84 /* 32 bit nonce */
85 cdesc->control_data.token[0] = ctx->nonce;
86 /* 64 bit IV part */
87 memcpy(&cdesc->control_data.token[1], iv, 8);
098e51e5
PL
88 /* 32 bit counter, start at 0 or 1 (big endian!) */
89 cdesc->control_data.token[3] =
90 (__force u32)cpu_to_be32(ctx->ctrinit);
91 return 4;
92 }
93 if (ctx->alg == SAFEXCEL_CHACHA20) {
3e450886 94 cdesc->control_data.options |= EIP197_OPTION_4_TOKEN_IV_CMD;
a19052d4
PL
95 /* 96 bit nonce part */
96 memcpy(&cdesc->control_data.token[0], &iv[4], 12);
97 /* 32 bit counter */
98 cdesc->control_data.token[3] = *(u32 *)iv;
098e51e5 99 return 4;
493e289c
PL
100 }
101
098e51e5
PL
102 cdesc->control_data.options |= ctx->ivmask;
103 memcpy(cdesc->control_data.token, iv, ctx->blocksz);
104 return ctx->blocksz / sizeof(u32);
0e17e362
PL
105}
106
107static void safexcel_skcipher_token(struct safexcel_cipher_ctx *ctx, u8 *iv,
108 struct safexcel_command_desc *cdesc,
098e51e5 109 struct safexcel_token *atoken,
0e17e362
PL
110 u32 length)
111{
112 struct safexcel_token *token;
098e51e5 113 int ivlen;
0e17e362 114
098e51e5
PL
115 ivlen = safexcel_skcipher_iv(ctx, iv, cdesc);
116 if (ivlen == 4) {
117 /* No space in cdesc, instruction moves to atoken */
118 cdesc->additional_cdata_size = 1;
119 token = atoken;
120 } else {
121 /* Everything fits in cdesc */
122 token = (struct safexcel_token *)(cdesc->control_data.token + 2);
123 /* Need to pad with NOP */
124 eip197_noop_token(&token[1]);
125 }
1b44c5a6 126
098e51e5
PL
127 token->opcode = EIP197_TOKEN_OPCODE_DIRECTION;
128 token->packet_length = length;
129 token->stat = EIP197_TOKEN_STAT_LAST_PACKET |
130 EIP197_TOKEN_STAT_LAST_HASH;
131 token->instructions = EIP197_TOKEN_INS_LAST |
132 EIP197_TOKEN_INS_TYPE_CRYPTO |
133 EIP197_TOKEN_INS_TYPE_OUTPUT;
134}
1b44c5a6 135
098e51e5
PL
136static void safexcel_aead_iv(struct safexcel_cipher_ctx *ctx, u8 *iv,
137 struct safexcel_command_desc *cdesc)
138{
139 if (ctx->mode == CONTEXT_CONTROL_CRYPTO_MODE_CTR_LOAD ||
140 ctx->aead & EIP197_AEAD_TYPE_IPSEC_ESP) { /* _ESP and _ESP_GMAC */
141 /* 32 bit nonce */
142 cdesc->control_data.token[0] = ctx->nonce;
143 /* 64 bit IV part */
144 memcpy(&cdesc->control_data.token[1], iv, 8);
145 /* 32 bit counter, start at 0 or 1 (big endian!) */
146 cdesc->control_data.token[3] =
147 (__force u32)cpu_to_be32(ctx->ctrinit);
148 return;
149 }
150 if (ctx->xcm == EIP197_XCM_MODE_GCM || ctx->alg == SAFEXCEL_CHACHA20) {
151 /* 96 bit IV part */
152 memcpy(&cdesc->control_data.token[0], iv, 12);
153 /* 32 bit counter, start at 0 or 1 (big endian!) */
154 cdesc->control_data.token[3] =
155 (__force u32)cpu_to_be32(ctx->ctrinit);
156 return;
157 }
158 /* CBC */
159 memcpy(cdesc->control_data.token, iv, ctx->blocksz);
1b44c5a6
AT
160}
161
f6beaea3
AT
162static void safexcel_aead_token(struct safexcel_cipher_ctx *ctx, u8 *iv,
163 struct safexcel_command_desc *cdesc,
098e51e5 164 struct safexcel_token *atoken,
f6beaea3
AT
165 enum safexcel_cipher_direction direction,
166 u32 cryptlen, u32 assoclen, u32 digestsize)
167{
098e51e5
PL
168 struct safexcel_token *aadref;
169 int atoksize = 2; /* Start with minimum size */
170 int assocadj = assoclen - ctx->aadskip, aadalign;
f6beaea3 171
098e51e5
PL
172 /* Always 4 dwords of embedded IV for AEAD modes */
173 cdesc->control_data.options |= EIP197_OPTION_4_TOKEN_IV_CMD;
f6beaea3 174
098e51e5 175 if (direction == SAFEXCEL_DECRYPT)
d2d9e6fd
PL
176 cryptlen -= digestsize;
177
098e51e5
PL
178 if (unlikely(ctx->xcm == EIP197_XCM_MODE_CCM)) {
179 /* Construct IV block B0 for the CBC-MAC */
180 u8 *final_iv = (u8 *)cdesc->control_data.token;
181 u8 *cbcmaciv = (u8 *)&atoken[1];
182 __le32 *aadlen = (__le32 *)&atoken[5];
183
184 if (ctx->aead == EIP197_AEAD_TYPE_IPSEC_ESP) {
185 /* Length + nonce */
186 cdesc->control_data.token[0] = ctx->nonce;
187 /* Fixup flags byte */
188 *(__le32 *)cbcmaciv =
189 cpu_to_le32(ctx->nonce |
190 ((assocadj > 0) << 6) |
191 ((digestsize - 2) << 2));
192 /* 64 bit IV part */
193 memcpy(&cdesc->control_data.token[1], iv, 8);
194 memcpy(cbcmaciv + 4, iv, 8);
195 /* Start counter at 0 */
196 cdesc->control_data.token[3] = 0;
197 /* Message length */
198 *(__be32 *)(cbcmaciv + 12) = cpu_to_be32(cryptlen);
199 } else {
200 /* Variable length IV part */
201 memcpy(final_iv, iv, 15 - iv[0]);
202 memcpy(cbcmaciv, iv, 15 - iv[0]);
203 /* Start variable length counter at 0 */
204 memset(final_iv + 15 - iv[0], 0, iv[0] + 1);
205 memset(cbcmaciv + 15 - iv[0], 0, iv[0] - 1);
206 /* fixup flags byte */
207 cbcmaciv[0] |= ((assocadj > 0) << 6) |
208 ((digestsize - 2) << 2);
209 /* insert lower 2 bytes of message length */
210 cbcmaciv[14] = cryptlen >> 8;
211 cbcmaciv[15] = cryptlen & 255;
212 }
a6061921 213
098e51e5
PL
214 atoken->opcode = EIP197_TOKEN_OPCODE_INSERT;
215 atoken->packet_length = AES_BLOCK_SIZE +
216 ((assocadj > 0) << 1);
217 atoken->stat = 0;
218 atoken->instructions = EIP197_TOKEN_INS_ORIGIN_TOKEN |
219 EIP197_TOKEN_INS_TYPE_HASH;
220
221 if (likely(assocadj)) {
222 *aadlen = cpu_to_le32((assocadj >> 8) |
223 (assocadj & 255) << 8);
224 atoken += 6;
225 atoksize += 7;
226 } else {
227 atoken += 5;
228 atoksize += 6;
229 }
3e450886 230
098e51e5
PL
231 /* Process AAD data */
232 aadref = atoken;
233 atoken->opcode = EIP197_TOKEN_OPCODE_DIRECTION;
234 atoken->packet_length = assocadj;
235 atoken->stat = 0;
236 atoken->instructions = EIP197_TOKEN_INS_TYPE_HASH;
237 atoken++;
238
239 /* For CCM only, align AAD data towards hash engine */
240 atoken->opcode = EIP197_TOKEN_OPCODE_INSERT;
241 aadalign = (assocadj + 2) & 15;
242 atoken->packet_length = assocadj && aadalign ?
243 16 - aadalign :
244 0;
245 if (likely(cryptlen)) {
246 atoken->stat = 0;
247 atoken->instructions = EIP197_TOKEN_INS_TYPE_HASH;
92c60cef 248 } else {
098e51e5
PL
249 atoken->stat = EIP197_TOKEN_STAT_LAST_HASH;
250 atoken->instructions = EIP197_TOKEN_INS_LAST |
251 EIP197_TOKEN_INS_TYPE_HASH;
92c60cef 252 }
098e51e5
PL
253 } else {
254 safexcel_aead_iv(ctx, iv, cdesc);
255
256 /* Process AAD data */
257 aadref = atoken;
258 atoken->opcode = EIP197_TOKEN_OPCODE_DIRECTION;
259 atoken->packet_length = assocadj;
260 atoken->stat = EIP197_TOKEN_STAT_LAST_HASH;
261 atoken->instructions = EIP197_TOKEN_INS_LAST |
262 EIP197_TOKEN_INS_TYPE_HASH;
0e17e362 263 }
098e51e5 264 atoken++;
3e450886 265
098e51e5
PL
266 if (ctx->aead == EIP197_AEAD_TYPE_IPSEC_ESP) {
267 /* For ESP mode (and not GMAC), skip over the IV */
268 atoken->opcode = EIP197_TOKEN_OPCODE_DIRECTION;
269 atoken->packet_length = EIP197_AEAD_IPSEC_IV_SIZE;
270 atoken->stat = 0;
271 atoken->instructions = 0;
272 atoken++;
273 atoksize++;
274 } else if (unlikely(ctx->alg == SAFEXCEL_CHACHA20 &&
275 direction == SAFEXCEL_DECRYPT)) {
276 /* Poly-chacha decryption needs a dummy NOP here ... */
277 atoken->opcode = EIP197_TOKEN_OPCODE_INSERT;
278 atoken->packet_length = 16; /* According to Op Manual */
279 atoken->stat = 0;
280 atoken->instructions = 0;
281 atoken++;
282 atoksize++;
283 }
4eb76faf 284
098e51e5
PL
285 if (ctx->xcm) {
286 /* For GCM and CCM, obtain enc(Y0) */
287 atoken->opcode = EIP197_TOKEN_OPCODE_INSERT_REMRES;
288 atoken->packet_length = 0;
289 atoken->stat = 0;
290 atoken->instructions = AES_BLOCK_SIZE;
291 atoken++;
292
293 atoken->opcode = EIP197_TOKEN_OPCODE_INSERT;
294 atoken->packet_length = AES_BLOCK_SIZE;
295 atoken->stat = 0;
296 atoken->instructions = EIP197_TOKEN_INS_TYPE_OUTPUT |
297 EIP197_TOKEN_INS_TYPE_CRYPTO;
298 atoken++;
299 atoksize += 2;
300 }
4eb76faf 301
098e51e5
PL
302 if (likely(cryptlen || ctx->alg == SAFEXCEL_CHACHA20)) {
303 /* Fixup stat field for AAD direction instruction */
304 aadref->stat = 0;
4eb76faf 305
098e51e5
PL
306 /* Process crypto data */
307 atoken->opcode = EIP197_TOKEN_OPCODE_DIRECTION;
308 atoken->packet_length = cryptlen;
4eb76faf 309
098e51e5
PL
310 if (unlikely(ctx->aead == EIP197_AEAD_TYPE_IPSEC_ESP_GMAC)) {
311 /* Fixup instruction field for AAD dir instruction */
312 aadref->instructions = EIP197_TOKEN_INS_TYPE_HASH;
3e450886 313
098e51e5
PL
314 /* Do not send to crypt engine in case of GMAC */
315 atoken->instructions = EIP197_TOKEN_INS_LAST |
316 EIP197_TOKEN_INS_TYPE_HASH |
317 EIP197_TOKEN_INS_TYPE_OUTPUT;
318 } else {
319 atoken->instructions = EIP197_TOKEN_INS_LAST |
320 EIP197_TOKEN_INS_TYPE_CRYPTO |
321 EIP197_TOKEN_INS_TYPE_HASH |
322 EIP197_TOKEN_INS_TYPE_OUTPUT;
4eb76faf 323 }
3e450886 324
098e51e5
PL
325 cryptlen &= 15;
326 if (unlikely(ctx->xcm == EIP197_XCM_MODE_CCM && cryptlen)) {
327 atoken->stat = 0;
328 /* For CCM only, pad crypto data to the hash engine */
329 atoken++;
330 atoksize++;
331 atoken->opcode = EIP197_TOKEN_OPCODE_INSERT;
332 atoken->packet_length = 16 - cryptlen;
333 atoken->stat = EIP197_TOKEN_STAT_LAST_HASH;
334 atoken->instructions = EIP197_TOKEN_INS_TYPE_HASH;
4eb76faf 335 } else {
098e51e5 336 atoken->stat = EIP197_TOKEN_STAT_LAST_HASH;
4eb76faf 337 }
098e51e5
PL
338 atoken++;
339 atoksize++;
340 }
341
342 if (direction == SAFEXCEL_ENCRYPT) {
343 /* Append ICV */
344 atoken->opcode = EIP197_TOKEN_OPCODE_INSERT;
345 atoken->packet_length = digestsize;
346 atoken->stat = EIP197_TOKEN_STAT_LAST_HASH |
347 EIP197_TOKEN_STAT_LAST_PACKET;
348 atoken->instructions = EIP197_TOKEN_INS_TYPE_OUTPUT |
349 EIP197_TOKEN_INS_INSERT_HASH_DIGEST;
350 } else {
351 /* Extract ICV */
352 atoken->opcode = EIP197_TOKEN_OPCODE_RETRIEVE;
353 atoken->packet_length = digestsize;
354 atoken->stat = EIP197_TOKEN_STAT_LAST_HASH |
355 EIP197_TOKEN_STAT_LAST_PACKET;
356 atoken->instructions = EIP197_TOKEN_INS_INSERT_HASH_DIGEST;
357 atoken++;
358 atoksize++;
359
360 /* Verify ICV */
361 atoken->opcode = EIP197_TOKEN_OPCODE_VERIFY;
362 atoken->packet_length = digestsize |
363 EIP197_TOKEN_HASH_RESULT_VERIFY;
364 atoken->stat = EIP197_TOKEN_STAT_LAST_HASH |
365 EIP197_TOKEN_STAT_LAST_PACKET;
366 atoken->instructions = EIP197_TOKEN_INS_TYPE_OUTPUT;
3e450886 367 }
098e51e5
PL
368
369 /* Fixup length of the token in the command descriptor */
370 cdesc->additional_cdata_size = atoksize;
f6beaea3
AT
371}
372
8ac1283e
AT
373static int safexcel_skcipher_aes_setkey(struct crypto_skcipher *ctfm,
374 const u8 *key, unsigned int len)
1b44c5a6
AT
375{
376 struct crypto_tfm *tfm = crypto_skcipher_tfm(ctfm);
377 struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
871df319 378 struct safexcel_crypto_priv *priv = ctx->priv;
1b44c5a6
AT
379 struct crypto_aes_ctx aes;
380 int ret, i;
381
363a90c2 382 ret = aes_expandkey(&aes, key, len);
674f368a 383 if (ret)
1b44c5a6 384 return ret;
1b44c5a6 385
53c83e91 386 if (priv->flags & EIP197_TRC_CACHE && ctx->base.ctxr_dma) {
c4daf4cc 387 for (i = 0; i < len / sizeof(u32); i++) {
13a1bb93 388 if (le32_to_cpu(ctx->key[i]) != aes.key_enc[i]) {
c4daf4cc
OH
389 ctx->base.needs_inv = true;
390 break;
391 }
1b44c5a6
AT
392 }
393 }
394
395 for (i = 0; i < len / sizeof(u32); i++)
396 ctx->key[i] = cpu_to_le32(aes.key_enc[i]);
397
398 ctx->key_len = len;
399
400 memzero_explicit(&aes, sizeof(aes));
401 return 0;
402}
403
77cdd4ef
PL
404static int safexcel_aead_setkey(struct crypto_aead *ctfm, const u8 *key,
405 unsigned int len)
f6beaea3
AT
406{
407 struct crypto_tfm *tfm = crypto_aead_tfm(ctfm);
408 struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
409 struct safexcel_ahash_export_state istate, ostate;
410 struct safexcel_crypto_priv *priv = ctx->priv;
411 struct crypto_authenc_keys keys;
0e17e362 412 struct crypto_aes_ctx aes;
13a1bb93 413 int err = -EINVAL, i;
f6beaea3 414
1769f704 415 if (unlikely(crypto_authenc_extractkeys(&keys, key, len)))
f6beaea3
AT
416 goto badkey;
417
0e17e362 418 if (ctx->mode == CONTEXT_CONTROL_CRYPTO_MODE_CTR_LOAD) {
1769f704
PL
419 /* Must have at least space for the nonce here */
420 if (unlikely(keys.enckeylen < CTR_RFC3686_NONCE_SIZE))
0e17e362
PL
421 goto badkey;
422 /* last 4 bytes of key are the nonce! */
f26882a3
PL
423 ctx->nonce = *(u32 *)(keys.enckey + keys.enckeylen -
424 CTR_RFC3686_NONCE_SIZE);
0e17e362 425 /* exclude the nonce here */
1769f704 426 keys.enckeylen -= CTR_RFC3686_NONCE_SIZE;
0e17e362 427 }
f6beaea3
AT
428
429 /* Encryption key */
0e17e362 430 switch (ctx->alg) {
bb7679b8
PL
431 case SAFEXCEL_DES:
432 err = verify_aead_des_key(ctfm, keys.enckey, keys.enckeylen);
433 if (unlikely(err))
674f368a 434 goto badkey;
bb7679b8 435 break;
0e17e362 436 case SAFEXCEL_3DES:
21f5a15e 437 err = verify_aead_des3_key(ctfm, keys.enckey, keys.enckeylen);
77cdd4ef 438 if (unlikely(err))
674f368a 439 goto badkey;
0e17e362
PL
440 break;
441 case SAFEXCEL_AES:
442 err = aes_expandkey(&aes, keys.enckey, keys.enckeylen);
443 if (unlikely(err))
444 goto badkey;
445 break;
1769f704
PL
446 case SAFEXCEL_SM4:
447 if (unlikely(keys.enckeylen != SM4_KEY_SIZE))
448 goto badkey;
449 break;
0e17e362
PL
450 default:
451 dev_err(priv->dev, "aead: unsupported cipher algorithm\n");
452 goto badkey;
77cdd4ef
PL
453 }
454
13a1bb93
PL
455 if (priv->flags & EIP197_TRC_CACHE && ctx->base.ctxr_dma) {
456 for (i = 0; i < keys.enckeylen / sizeof(u32); i++) {
b8151220
PL
457 if (le32_to_cpu(ctx->key[i]) !=
458 ((u32 *)keys.enckey)[i]) {
13a1bb93
PL
459 ctx->base.needs_inv = true;
460 break;
461 }
462 }
463 }
f6beaea3
AT
464
465 /* Auth key */
a7dea8c0 466 switch (ctx->hash_alg) {
01ba061d
AT
467 case CONTEXT_CONTROL_CRYPTO_ALG_SHA1:
468 if (safexcel_hmac_setkey("safexcel-sha1", keys.authkey,
469 keys.authkeylen, &istate, &ostate))
470 goto badkey;
471 break;
678b2878
AT
472 case CONTEXT_CONTROL_CRYPTO_ALG_SHA224:
473 if (safexcel_hmac_setkey("safexcel-sha224", keys.authkey,
474 keys.authkeylen, &istate, &ostate))
475 goto badkey;
476 break;
477 case CONTEXT_CONTROL_CRYPTO_ALG_SHA256:
478 if (safexcel_hmac_setkey("safexcel-sha256", keys.authkey,
479 keys.authkeylen, &istate, &ostate))
480 goto badkey;
481 break;
ea23cb53
AT
482 case CONTEXT_CONTROL_CRYPTO_ALG_SHA384:
483 if (safexcel_hmac_setkey("safexcel-sha384", keys.authkey,
484 keys.authkeylen, &istate, &ostate))
485 goto badkey;
486 break;
87eee125
AT
487 case CONTEXT_CONTROL_CRYPTO_ALG_SHA512:
488 if (safexcel_hmac_setkey("safexcel-sha512", keys.authkey,
489 keys.authkeylen, &istate, &ostate))
490 goto badkey;
491 break;
1769f704
PL
492 case CONTEXT_CONTROL_CRYPTO_ALG_SM3:
493 if (safexcel_hmac_setkey("safexcel-sm3", keys.authkey,
494 keys.authkeylen, &istate, &ostate))
495 goto badkey;
496 break;
678b2878 497 default:
1a61af28 498 dev_err(priv->dev, "aead: unsupported hash algorithm\n");
f6beaea3 499 goto badkey;
678b2878 500 }
f6beaea3
AT
501
502 crypto_aead_set_flags(ctfm, crypto_aead_get_flags(ctfm) &
503 CRYPTO_TFM_RES_MASK);
504
53c83e91 505 if (priv->flags & EIP197_TRC_CACHE && ctx->base.ctxr_dma &&
f6beaea3
AT
506 (memcmp(ctx->ipad, istate.state, ctx->state_sz) ||
507 memcmp(ctx->opad, ostate.state, ctx->state_sz)))
508 ctx->base.needs_inv = true;
509
510 /* Now copy the keys into the context */
13a1bb93 511 for (i = 0; i < keys.enckeylen / sizeof(u32); i++)
b8151220 512 ctx->key[i] = cpu_to_le32(((u32 *)keys.enckey)[i]);
f6beaea3
AT
513 ctx->key_len = keys.enckeylen;
514
515 memcpy(ctx->ipad, &istate.state, ctx->state_sz);
516 memcpy(ctx->opad, &ostate.state, ctx->state_sz);
517
518 memzero_explicit(&keys, sizeof(keys));
519 return 0;
520
521badkey:
f6beaea3 522 memzero_explicit(&keys, sizeof(keys));
0e17e362 523 return err;
f6beaea3
AT
524}
525
1b44c5a6 526static int safexcel_context_control(struct safexcel_cipher_ctx *ctx,
847ccfc5 527 struct crypto_async_request *async,
8ac1283e 528 struct safexcel_cipher_req *sreq,
1b44c5a6
AT
529 struct safexcel_command_desc *cdesc)
530{
531 struct safexcel_crypto_priv *priv = ctx->priv;
d2d9e6fd
PL
532 int ctrl_size = ctx->key_len / sizeof(u32);
533
534 cdesc->control_data.control1 = ctx->mode;
1b44c5a6 535
f6beaea3 536 if (ctx->aead) {
d2d9e6fd 537 /* Take in account the ipad+opad digests */
3e450886
PL
538 if (ctx->xcm) {
539 ctrl_size += ctx->state_sz / sizeof(u32);
d2d9e6fd 540 cdesc->control_data.control0 =
d2d9e6fd 541 CONTEXT_CONTROL_KEY_EN |
3e450886 542 CONTEXT_CONTROL_DIGEST_XCM |
d2d9e6fd
PL
543 ctx->hash_alg |
544 CONTEXT_CONTROL_SIZE(ctrl_size);
a6061921
PL
545 } else if (ctx->alg == SAFEXCEL_CHACHA20) {
546 /* Chacha20-Poly1305 */
547 cdesc->control_data.control0 =
548 CONTEXT_CONTROL_KEY_EN |
549 CONTEXT_CONTROL_CRYPTO_ALG_CHACHA20 |
550 (sreq->direction == SAFEXCEL_ENCRYPT ?
551 CONTEXT_CONTROL_TYPE_ENCRYPT_HASH_OUT :
552 CONTEXT_CONTROL_TYPE_HASH_DECRYPT_IN) |
553 ctx->hash_alg |
554 CONTEXT_CONTROL_SIZE(ctrl_size);
555 return 0;
3e450886
PL
556 } else {
557 ctrl_size += ctx->state_sz / sizeof(u32) * 2;
d2d9e6fd 558 cdesc->control_data.control0 =
d2d9e6fd 559 CONTEXT_CONTROL_KEY_EN |
3e450886 560 CONTEXT_CONTROL_DIGEST_HMAC |
d2d9e6fd
PL
561 ctx->hash_alg |
562 CONTEXT_CONTROL_SIZE(ctrl_size);
3e450886 563 }
4eb76faf 564
92c60cef
PL
565 if (sreq->direction == SAFEXCEL_ENCRYPT &&
566 (ctx->xcm == EIP197_XCM_MODE_CCM ||
567 ctx->aead == EIP197_AEAD_TYPE_IPSEC_ESP_GMAC))
568 cdesc->control_data.control0 |=
569 CONTEXT_CONTROL_TYPE_HASH_ENCRYPT_OUT;
570 else if (sreq->direction == SAFEXCEL_ENCRYPT)
571 cdesc->control_data.control0 |=
572 CONTEXT_CONTROL_TYPE_ENCRYPT_HASH_OUT;
573 else if (ctx->xcm == EIP197_XCM_MODE_CCM)
574 cdesc->control_data.control0 |=
575 CONTEXT_CONTROL_TYPE_DECRYPT_HASH_IN;
3e450886
PL
576 else
577 cdesc->control_data.control0 |=
92c60cef 578 CONTEXT_CONTROL_TYPE_HASH_DECRYPT_IN;
f6beaea3 579 } else {
d2d9e6fd
PL
580 if (sreq->direction == SAFEXCEL_ENCRYPT)
581 cdesc->control_data.control0 =
582 CONTEXT_CONTROL_TYPE_CRYPTO_OUT |
583 CONTEXT_CONTROL_KEY_EN |
584 CONTEXT_CONTROL_SIZE(ctrl_size);
585 else
586 cdesc->control_data.control0 =
587 CONTEXT_CONTROL_TYPE_CRYPTO_IN |
588 CONTEXT_CONTROL_KEY_EN |
589 CONTEXT_CONTROL_SIZE(ctrl_size);
f6beaea3 590 }
1b44c5a6 591
a7dea8c0 592 if (ctx->alg == SAFEXCEL_DES) {
d2d9e6fd
PL
593 cdesc->control_data.control0 |=
594 CONTEXT_CONTROL_CRYPTO_ALG_DES;
62469879 595 } else if (ctx->alg == SAFEXCEL_3DES) {
d2d9e6fd
PL
596 cdesc->control_data.control0 |=
597 CONTEXT_CONTROL_CRYPTO_ALG_3DES;
a7dea8c0 598 } else if (ctx->alg == SAFEXCEL_AES) {
c7da38a7 599 switch (ctx->key_len >> ctx->xts) {
a7dea8c0 600 case AES_KEYSIZE_128:
d2d9e6fd
PL
601 cdesc->control_data.control0 |=
602 CONTEXT_CONTROL_CRYPTO_ALG_AES128;
a7dea8c0
OH
603 break;
604 case AES_KEYSIZE_192:
d2d9e6fd
PL
605 cdesc->control_data.control0 |=
606 CONTEXT_CONTROL_CRYPTO_ALG_AES192;
a7dea8c0
OH
607 break;
608 case AES_KEYSIZE_256:
d2d9e6fd
PL
609 cdesc->control_data.control0 |=
610 CONTEXT_CONTROL_CRYPTO_ALG_AES256;
a7dea8c0
OH
611 break;
612 default:
613 dev_err(priv->dev, "aes keysize not supported: %u\n",
c7da38a7 614 ctx->key_len >> ctx->xts);
a7dea8c0
OH
615 return -EINVAL;
616 }
4a593fb3
PL
617 } else if (ctx->alg == SAFEXCEL_CHACHA20) {
618 cdesc->control_data.control0 |=
619 CONTEXT_CONTROL_CRYPTO_ALG_CHACHA20;
fcca797d
PL
620 } else if (ctx->alg == SAFEXCEL_SM4) {
621 cdesc->control_data.control0 |=
622 CONTEXT_CONTROL_CRYPTO_ALG_SM4;
1b44c5a6 623 }
fef0cfe5 624
1b44c5a6
AT
625 return 0;
626}
627
1eb7b403
OH
628static int safexcel_handle_req_result(struct safexcel_crypto_priv *priv, int ring,
629 struct crypto_async_request *async,
8ac1283e
AT
630 struct scatterlist *src,
631 struct scatterlist *dst,
632 unsigned int cryptlen,
633 struct safexcel_cipher_req *sreq,
1eb7b403 634 bool *should_complete, int *ret)
1b44c5a6 635{
5bdb6e6a
PL
636 struct skcipher_request *areq = skcipher_request_cast(async);
637 struct crypto_skcipher *skcipher = crypto_skcipher_reqtfm(areq);
638 struct safexcel_cipher_ctx *ctx = crypto_skcipher_ctx(skcipher);
1b44c5a6
AT
639 struct safexcel_result_desc *rdesc;
640 int ndesc = 0;
641
642 *ret = 0;
643
89332590
AT
644 if (unlikely(!sreq->rdescs))
645 return 0;
646
647 while (sreq->rdescs--) {
1b44c5a6
AT
648 rdesc = safexcel_ring_next_rptr(priv, &priv->ring[ring].rdr);
649 if (IS_ERR(rdesc)) {
650 dev_err(priv->dev,
651 "cipher: result: could not retrieve the result descriptor\n");
652 *ret = PTR_ERR(rdesc);
653 break;
654 }
655
bdfd1909
AT
656 if (likely(!*ret))
657 *ret = safexcel_rdesc_check_errors(priv, rdesc);
1b44c5a6
AT
658
659 ndesc++;
89332590 660 }
1b44c5a6
AT
661
662 safexcel_complete(priv, ring);
1b44c5a6 663
8ac1283e 664 if (src == dst) {
19b347b3 665 dma_unmap_sg(priv->dev, src, sreq->nr_src, DMA_BIDIRECTIONAL);
1b44c5a6 666 } else {
19b347b3
PL
667 dma_unmap_sg(priv->dev, src, sreq->nr_src, DMA_TO_DEVICE);
668 dma_unmap_sg(priv->dev, dst, sreq->nr_dst, DMA_FROM_DEVICE);
1b44c5a6
AT
669 }
670
5bdb6e6a
PL
671 /*
672 * Update IV in req from last crypto output word for CBC modes
673 */
674 if ((!ctx->aead) && (ctx->mode == CONTEXT_CONTROL_CRYPTO_MODE_CBC) &&
675 (sreq->direction == SAFEXCEL_ENCRYPT)) {
676 /* For encrypt take the last output word */
19b347b3 677 sg_pcopy_to_buffer(dst, sreq->nr_dst, areq->iv,
5bdb6e6a
PL
678 crypto_skcipher_ivsize(skcipher),
679 (cryptlen -
680 crypto_skcipher_ivsize(skcipher)));
681 }
682
1b44c5a6
AT
683 *should_complete = true;
684
685 return ndesc;
686}
687
a7dea8c0 688static int safexcel_send_req(struct crypto_async_request *base, int ring,
8ac1283e
AT
689 struct safexcel_cipher_req *sreq,
690 struct scatterlist *src, struct scatterlist *dst,
f6beaea3
AT
691 unsigned int cryptlen, unsigned int assoclen,
692 unsigned int digestsize, u8 *iv, int *commands,
8ac1283e 693 int *results)
1b44c5a6 694{
5bdb6e6a
PL
695 struct skcipher_request *areq = skcipher_request_cast(base);
696 struct crypto_skcipher *skcipher = crypto_skcipher_reqtfm(areq);
8ac1283e 697 struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(base->tfm);
1b44c5a6
AT
698 struct safexcel_crypto_priv *priv = ctx->priv;
699 struct safexcel_command_desc *cdesc;
19b347b3 700 struct safexcel_command_desc *first_cdesc = NULL;
e5c8ee1f 701 struct safexcel_result_desc *rdesc, *first_rdesc = NULL;
1b44c5a6 702 struct scatterlist *sg;
19b347b3
PL
703 unsigned int totlen;
704 unsigned int totlen_src = cryptlen + assoclen;
705 unsigned int totlen_dst = totlen_src;
098e51e5 706 struct safexcel_token *atoken;
19b347b3
PL
707 int n_cdesc = 0, n_rdesc = 0;
708 int queued, i, ret = 0;
709 bool first = true;
1b44c5a6 710
19b347b3
PL
711 sreq->nr_src = sg_nents_for_len(src, totlen_src);
712
713 if (ctx->aead) {
714 /*
715 * AEAD has auth tag appended to output for encrypt and
716 * removed from the output for decrypt!
717 */
718 if (sreq->direction == SAFEXCEL_DECRYPT)
719 totlen_dst -= digestsize;
720 else
721 totlen_dst += digestsize;
722
723 memcpy(ctx->base.ctxr->data + ctx->key_len / sizeof(u32),
724 ctx->ipad, ctx->state_sz);
3e450886
PL
725 if (!ctx->xcm)
726 memcpy(ctx->base.ctxr->data + (ctx->key_len +
727 ctx->state_sz) / sizeof(u32), ctx->opad,
728 ctx->state_sz);
19b347b3
PL
729 } else if ((ctx->mode == CONTEXT_CONTROL_CRYPTO_MODE_CBC) &&
730 (sreq->direction == SAFEXCEL_DECRYPT)) {
5bdb6e6a
PL
731 /*
732 * Save IV from last crypto input word for CBC modes in decrypt
733 * direction. Need to do this first in case of inplace operation
734 * as it will be overwritten.
735 */
19b347b3 736 sg_pcopy_to_buffer(src, sreq->nr_src, areq->iv,
5bdb6e6a 737 crypto_skcipher_ivsize(skcipher),
19b347b3 738 (totlen_src -
5bdb6e6a
PL
739 crypto_skcipher_ivsize(skcipher)));
740 }
741
19b347b3
PL
742 sreq->nr_dst = sg_nents_for_len(dst, totlen_dst);
743
744 /*
745 * Remember actual input length, source buffer length may be
746 * updated in case of inline operation below.
747 */
748 totlen = totlen_src;
749 queued = totlen_src;
750
8ac1283e 751 if (src == dst) {
19b347b3
PL
752 sreq->nr_src = max(sreq->nr_src, sreq->nr_dst);
753 sreq->nr_dst = sreq->nr_src;
754 if (unlikely((totlen_src || totlen_dst) &&
755 (sreq->nr_src <= 0))) {
756 dev_err(priv->dev, "In-place buffer not large enough (need %d bytes)!",
757 max(totlen_src, totlen_dst));
1b44c5a6 758 return -EINVAL;
19b347b3
PL
759 }
760 dma_map_sg(priv->dev, src, sreq->nr_src, DMA_BIDIRECTIONAL);
1b44c5a6 761 } else {
19b347b3
PL
762 if (unlikely(totlen_src && (sreq->nr_src <= 0))) {
763 dev_err(priv->dev, "Source buffer not large enough (need %d bytes)!",
764 totlen_src);
1b44c5a6 765 return -EINVAL;
19b347b3
PL
766 }
767 dma_map_sg(priv->dev, src, sreq->nr_src, DMA_TO_DEVICE);
1b44c5a6 768
19b347b3
PL
769 if (unlikely(totlen_dst && (sreq->nr_dst <= 0))) {
770 dev_err(priv->dev, "Dest buffer not large enough (need %d bytes)!",
771 totlen_dst);
772 dma_unmap_sg(priv->dev, src, sreq->nr_src,
773 DMA_TO_DEVICE);
1b44c5a6
AT
774 return -EINVAL;
775 }
19b347b3 776 dma_map_sg(priv->dev, dst, sreq->nr_dst, DMA_FROM_DEVICE);
1b44c5a6
AT
777 }
778
779 memcpy(ctx->base.ctxr->data, ctx->key, ctx->key_len);
780
cb97aa94
PL
781 if (!totlen) {
782 /*
783 * The EIP97 cannot deal with zero length input packets!
784 * So stuff a dummy command descriptor indicating a 1 byte
785 * (dummy) input packet, using the context record as source.
786 */
787 first_cdesc = safexcel_add_cdesc(priv, ring,
788 1, 1, ctx->base.ctxr_dma,
789 1, 1, ctx->base.ctxr_dma,
790 &atoken);
791 if (IS_ERR(first_cdesc)) {
792 /* No space left in the command descriptor ring */
793 ret = PTR_ERR(first_cdesc);
794 goto cdesc_rollback;
795 }
796 n_cdesc = 1;
797 goto skip_cdesc;
798 }
f6beaea3 799
1b44c5a6 800 /* command descriptors */
19b347b3 801 for_each_sg(src, sg, sreq->nr_src, i) {
1b44c5a6
AT
802 int len = sg_dma_len(sg);
803
804 /* Do not overflow the request */
cb97aa94 805 if (queued < len)
1b44c5a6
AT
806 len = queued;
807
19b347b3
PL
808 cdesc = safexcel_add_cdesc(priv, ring, !n_cdesc,
809 !(queued - len),
f6beaea3 810 sg_dma_address(sg), len, totlen,
098e51e5 811 ctx->base.ctxr_dma, &atoken);
1b44c5a6
AT
812 if (IS_ERR(cdesc)) {
813 /* No space left in the command descriptor ring */
814 ret = PTR_ERR(cdesc);
815 goto cdesc_rollback;
816 }
1b44c5a6 817
cb97aa94 818 if (!n_cdesc)
19b347b3 819 first_cdesc = cdesc;
1b44c5a6 820
cb97aa94 821 n_cdesc++;
1b44c5a6
AT
822 queued -= len;
823 if (!queued)
824 break;
825 }
cb97aa94 826skip_cdesc:
19b347b3
PL
827 /* Add context control words and token to first command descriptor */
828 safexcel_context_control(ctx, base, sreq, first_cdesc);
829 if (ctx->aead)
098e51e5 830 safexcel_aead_token(ctx, iv, first_cdesc, atoken,
19b347b3
PL
831 sreq->direction, cryptlen,
832 assoclen, digestsize);
833 else
098e51e5 834 safexcel_skcipher_token(ctx, iv, first_cdesc, atoken,
19b347b3
PL
835 cryptlen);
836
1b44c5a6 837 /* result descriptors */
19b347b3
PL
838 for_each_sg(dst, sg, sreq->nr_dst, i) {
839 bool last = (i == sreq->nr_dst - 1);
1b44c5a6
AT
840 u32 len = sg_dma_len(sg);
841
19b347b3
PL
842 /* only allow the part of the buffer we know we need */
843 if (len > totlen_dst)
844 len = totlen_dst;
845 if (unlikely(!len))
846 break;
847 totlen_dst -= len;
848
849 /* skip over AAD space in buffer - not written */
850 if (assoclen) {
851 if (assoclen >= len) {
852 assoclen -= len;
853 continue;
854 }
855 rdesc = safexcel_add_rdesc(priv, ring, first, last,
856 sg_dma_address(sg) +
857 assoclen,
858 len - assoclen);
859 assoclen = 0;
860 } else {
861 rdesc = safexcel_add_rdesc(priv, ring, first, last,
862 sg_dma_address(sg),
863 len);
864 }
1b44c5a6
AT
865 if (IS_ERR(rdesc)) {
866 /* No space left in the result descriptor ring */
867 ret = PTR_ERR(rdesc);
868 goto rdesc_rollback;
869 }
19b347b3 870 if (first) {
9744fec9 871 first_rdesc = rdesc;
19b347b3
PL
872 first = false;
873 }
1b44c5a6
AT
874 n_rdesc++;
875 }
876
19b347b3
PL
877 if (unlikely(first)) {
878 /*
879 * Special case: AEAD decrypt with only AAD data.
880 * In this case there is NO output data from the engine,
881 * but the engine still needs a result descriptor!
882 * Create a dummy one just for catching the result token.
883 */
884 rdesc = safexcel_add_rdesc(priv, ring, true, true, 0, 0);
885 if (IS_ERR(rdesc)) {
886 /* No space left in the result descriptor ring */
887 ret = PTR_ERR(rdesc);
888 goto rdesc_rollback;
889 }
890 first_rdesc = rdesc;
891 n_rdesc = 1;
892 }
893
9744fec9 894 safexcel_rdr_req_set(priv, ring, first_rdesc, base);
97858434 895
1b44c5a6 896 *commands = n_cdesc;
152bdf4c 897 *results = n_rdesc;
1b44c5a6
AT
898 return 0;
899
900rdesc_rollback:
901 for (i = 0; i < n_rdesc; i++)
902 safexcel_ring_rollback_wptr(priv, &priv->ring[ring].rdr);
903cdesc_rollback:
904 for (i = 0; i < n_cdesc; i++)
905 safexcel_ring_rollback_wptr(priv, &priv->ring[ring].cdr);
906
8ac1283e 907 if (src == dst) {
19b347b3 908 dma_unmap_sg(priv->dev, src, sreq->nr_src, DMA_BIDIRECTIONAL);
1b44c5a6 909 } else {
19b347b3
PL
910 dma_unmap_sg(priv->dev, src, sreq->nr_src, DMA_TO_DEVICE);
911 dma_unmap_sg(priv->dev, dst, sreq->nr_dst, DMA_FROM_DEVICE);
1b44c5a6
AT
912 }
913
914 return ret;
915}
916
/*
 * Reap the result descriptor(s) of a context-invalidation request.
 *
 * When the tfm is being torn down (ctx->base.exit_inv) the DMA context
 * record is freed and the request completes.  Otherwise the invalidation
 * was done on behalf of a live crypto request, which is re-enqueued on a
 * freshly selected ring so it can now run with a clean context.
 *
 * Returns the number of result descriptors consumed; the first error seen
 * (if any) is reported through *ret.
 */
static int safexcel_handle_inv_result(struct safexcel_crypto_priv *priv,
				      int ring,
				      struct crypto_async_request *base,
				      struct safexcel_cipher_req *sreq,
				      bool *should_complete, int *ret)
{
	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(base->tfm);
	struct safexcel_result_desc *rdesc;
	int ndesc = 0, enq_ret;

	*ret = 0;

	if (unlikely(!sreq->rdescs))
		return 0;

	while (sreq->rdescs--) {
		rdesc = safexcel_ring_next_rptr(priv, &priv->ring[ring].rdr);
		if (IS_ERR(rdesc)) {
			dev_err(priv->dev,
				"cipher: invalidate: could not retrieve the result descriptor\n");
			*ret = PTR_ERR(rdesc);
			break;
		}

		/* only record the first error across all descriptors */
		if (likely(!*ret))
			*ret = safexcel_rdesc_check_errors(priv, rdesc);

		ndesc++;
	}

	safexcel_complete(priv, ring);

	if (ctx->base.exit_inv) {
		/* tfm teardown: release the context record and finish */
		dma_pool_free(priv->context_pool, ctx->base.ctxr,
			      ctx->base.ctxr_dma);

		*should_complete = true;

		return ndesc;
	}

	/* resubmit the original request now that its context is clean */
	ring = safexcel_select_ring(priv);
	ctx->base.ring = ring;

	spin_lock_bh(&priv->ring[ring].queue_lock);
	enq_ret = crypto_enqueue_request(&priv->ring[ring].queue, base);
	spin_unlock_bh(&priv->ring[ring].queue_lock);

	if (enq_ret != -EINPROGRESS)
		*ret = enq_ret;

	queue_work(priv->ring[ring].workqueue,
		   &priv->ring[ring].work_data.work);

	/* not done yet: completion happens when the resubmission finishes */
	*should_complete = false;

	return ndesc;
}
975
8ac1283e
AT
/*
 * Result-ring handler for skcipher requests: route to the invalidation
 * path when this request carried a context invalidation, otherwise to
 * the regular data-path result handling.
 */
static int safexcel_skcipher_handle_result(struct safexcel_crypto_priv *priv,
					   int ring,
					   struct crypto_async_request *async,
					   bool *should_complete, int *ret)
{
	struct skcipher_request *req = skcipher_request_cast(async);
	struct safexcel_cipher_req *sreq = skcipher_request_ctx(req);
	int err;

	if (sreq->needs_inv) {
		/* consume the flag so the resubmitted request runs normally */
		sreq->needs_inv = false;
		err = safexcel_handle_inv_result(priv, ring, async, sreq,
						 should_complete, ret);
	} else {
		err = safexcel_handle_req_result(priv, ring, async, req->src,
						 req->dst, req->cryptlen, sreq,
						 should_complete, ret);
	}

	return err;
}
997
f6beaea3
AT
/*
 * Result-ring handler for AEAD requests.  Same dispatch as the skcipher
 * variant, but the data length passed down includes the authentication
 * tag appended after the ciphertext.
 */
static int safexcel_aead_handle_result(struct safexcel_crypto_priv *priv,
				       int ring,
				       struct crypto_async_request *async,
				       bool *should_complete, int *ret)
{
	struct aead_request *req = aead_request_cast(async);
	struct crypto_aead *tfm = crypto_aead_reqtfm(req);
	struct safexcel_cipher_req *sreq = aead_request_ctx(req);
	int err;

	if (sreq->needs_inv) {
		sreq->needs_inv = false;
		err = safexcel_handle_inv_result(priv, ring, async, sreq,
						 should_complete, ret);
	} else {
		err = safexcel_handle_req_result(priv, ring, async, req->src,
						 req->dst,
						 req->cryptlen + crypto_aead_authsize(tfm),
						 sreq, should_complete, ret);
	}

	return err;
}
1021
/*
 * Queue an invalidation command for this tfm's cached engine context.
 * An invalidation always uses exactly one command and one result
 * descriptor, reported back through *commands / *results.
 */
static int safexcel_cipher_send_inv(struct crypto_async_request *base,
				    int ring, int *commands, int *results)
{
	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(base->tfm);
	struct safexcel_crypto_priv *priv = ctx->priv;
	int ret;

	ret = safexcel_invalidate_cache(base, priv, ctx->base.ctxr_dma, ring);
	if (unlikely(ret))
		return ret;

	*commands = 1;
	*results = 1;

	return 0;
}
1038
/*
 * Ring "send" hook for skcipher requests: either emit a context
 * invalidation, or build and queue the actual cipher descriptors.
 * The number of result descriptors is cached in sreq->rdescs for the
 * result handler.
 */
static int safexcel_skcipher_send(struct crypto_async_request *async, int ring,
				  int *commands, int *results)
{
	struct skcipher_request *req = skcipher_request_cast(async);
	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(req->base.tfm);
	struct safexcel_cipher_req *sreq = skcipher_request_ctx(req);
	struct safexcel_crypto_priv *priv = ctx->priv;
	int ret;

	/* needs_inv can only be set on engines with a transform cache */
	BUG_ON(!(priv->flags & EIP197_TRC_CACHE) && sreq->needs_inv);

	if (sreq->needs_inv) {
		ret = safexcel_cipher_send_inv(async, ring, commands, results);
	} else {
		struct crypto_skcipher *skcipher = crypto_skcipher_reqtfm(req);
		u8 input_iv[AES_BLOCK_SIZE];

		/*
		 * Save input IV in case of CBC decrypt mode
		 * Will be overwritten with output IV prior to use!
		 */
		memcpy(input_iv, req->iv, crypto_skcipher_ivsize(skcipher));

		ret = safexcel_send_req(async, ring, sreq, req->src,
					req->dst, req->cryptlen, 0, 0, input_iv,
					commands, results);
	}

	sreq->rdescs = *results;
	return ret;
}
1070
/*
 * Ring "send" hook for AEAD requests: invalidation or normal descriptor
 * build, passing the associated-data length and auth tag size down to
 * safexcel_send_req().
 */
static int safexcel_aead_send(struct crypto_async_request *async, int ring,
			      int *commands, int *results)
{
	struct aead_request *req = aead_request_cast(async);
	struct crypto_aead *tfm = crypto_aead_reqtfm(req);
	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(req->base.tfm);
	struct safexcel_cipher_req *sreq = aead_request_ctx(req);
	struct safexcel_crypto_priv *priv = ctx->priv;
	int ret;

	/* needs_inv can only be set on engines with a transform cache */
	BUG_ON(!(priv->flags & EIP197_TRC_CACHE) && sreq->needs_inv);

	if (sreq->needs_inv)
		ret = safexcel_cipher_send_inv(async, ring, commands, results);
	else
		ret = safexcel_send_req(async, ring, sreq, req->src, req->dst,
					req->cryptlen, req->assoclen,
					crypto_aead_authsize(tfm), req->iv,
					commands, results);
	sreq->rdescs = *results;
	return ret;
}
1093
8ac1283e
AT
/*
 * Synchronously invalidate this tfm's engine context: mark the request
 * as an exit-invalidation, enqueue it on the tfm's current ring and
 * block until the completion callback fires.
 *
 * Called from cra_exit paths, so sleeping here is allowed.
 */
static int safexcel_cipher_exit_inv(struct crypto_tfm *tfm,
				    struct crypto_async_request *base,
				    struct safexcel_cipher_req *sreq,
				    struct safexcel_inv_result *result)
{
	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
	struct safexcel_crypto_priv *priv = ctx->priv;
	int ring = ctx->base.ring;

	init_completion(&result->completion);

	ctx = crypto_tfm_ctx(base->tfm);
	/* tell the result handler to free the context, not resubmit */
	ctx->base.exit_inv = true;
	sreq->needs_inv = true;

	spin_lock_bh(&priv->ring[ring].queue_lock);
	crypto_enqueue_request(&priv->ring[ring].queue, base);
	spin_unlock_bh(&priv->ring[ring].queue_lock);

	queue_work(priv->ring[ring].workqueue,
		   &priv->ring[ring].work_data.work);

	wait_for_completion(&result->completion);

	if (result->error) {
		dev_warn(priv->dev,
			"cipher: sync: invalidate: completion error %d\n",
			 result->error);
		return result->error;
	}

	return 0;
}
1127
/*
 * Build an on-stack dummy skcipher request and use it to synchronously
 * invalidate the tfm's cached engine context.
 */
static int safexcel_skcipher_exit_inv(struct crypto_tfm *tfm)
{
	EIP197_REQUEST_ON_STACK(req, skcipher, EIP197_SKCIPHER_REQ_SIZE);
	struct safexcel_cipher_req *sreq = skcipher_request_ctx(req);
	struct safexcel_inv_result result = {};

	memset(req, 0, sizeof(struct skcipher_request));

	skcipher_request_set_callback(req, CRYPTO_TFM_REQ_MAY_BACKLOG,
				      safexcel_inv_complete, &result);
	skcipher_request_set_tfm(req, __crypto_skcipher_cast(tfm));

	return safexcel_cipher_exit_inv(tfm, &req->base, sreq, &result);
}
1142
f6beaea3
AT
/*
 * AEAD counterpart of safexcel_skcipher_exit_inv(): synchronous context
 * invalidation through an on-stack dummy aead request.
 */
static int safexcel_aead_exit_inv(struct crypto_tfm *tfm)
{
	EIP197_REQUEST_ON_STACK(req, aead, EIP197_AEAD_REQ_SIZE);
	struct safexcel_cipher_req *sreq = aead_request_ctx(req);
	struct safexcel_inv_result result = {};

	memset(req, 0, sizeof(struct aead_request));

	aead_request_set_callback(req, CRYPTO_TFM_REQ_MAY_BACKLOG,
				  safexcel_inv_complete, &result);
	aead_request_set_tfm(req, __crypto_aead_cast(tfm));

	return safexcel_cipher_exit_inv(tfm, &req->base, sreq, &result);
}
1157
/*
 * Common entry point for (a)symmetric requests: allocate the per-tfm
 * engine context record on first use, flag a pending invalidation if the
 * key changed under a cached context, then enqueue the request on the
 * tfm's ring and kick the ring worker.
 */
static int safexcel_queue_req(struct crypto_async_request *base,
			      struct safexcel_cipher_req *sreq,
			      enum safexcel_cipher_direction dir)
{
	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(base->tfm);
	struct safexcel_crypto_priv *priv = ctx->priv;
	int ret, ring;

	sreq->needs_inv = false;
	sreq->direction = dir;

	if (ctx->base.ctxr) {
		/* context exists: invalidate it first if the key changed */
		if (priv->flags & EIP197_TRC_CACHE && ctx->base.needs_inv) {
			sreq->needs_inv = true;
			ctx->base.needs_inv = false;
		}
	} else {
		/* first request on this tfm: pick a ring, alloc a context */
		ctx->base.ring = safexcel_select_ring(priv);
		ctx->base.ctxr = dma_pool_zalloc(priv->context_pool,
						 EIP197_GFP_FLAGS(*base),
						 &ctx->base.ctxr_dma);
		if (!ctx->base.ctxr)
			return -ENOMEM;
	}

	ring = ctx->base.ring;

	spin_lock_bh(&priv->ring[ring].queue_lock);
	ret = crypto_enqueue_request(&priv->ring[ring].queue, base);
	spin_unlock_bh(&priv->ring[ring].queue_lock);

	queue_work(priv->ring[ring].workqueue,
		   &priv->ring[ring].work_data.work);

	return ret;
}
1194
93369b5d 1195static int safexcel_encrypt(struct skcipher_request *req)
1b44c5a6 1196{
a7dea8c0 1197 return safexcel_queue_req(&req->base, skcipher_request_ctx(req),
93369b5d 1198 SAFEXCEL_ENCRYPT);
1b44c5a6
AT
1199}
1200
93369b5d 1201static int safexcel_decrypt(struct skcipher_request *req)
1b44c5a6 1202{
a7dea8c0 1203 return safexcel_queue_req(&req->base, skcipher_request_ctx(req),
93369b5d 1204 SAFEXCEL_DECRYPT);
1b44c5a6
AT
1205}
1206
/*
 * Common skcipher tfm init: hook up the send/result callbacks and set
 * the defaults later refined by the per-algorithm cra_init wrappers.
 */
static int safexcel_skcipher_cra_init(struct crypto_tfm *tfm)
{
	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
	struct safexcel_alg_template *tmpl =
		container_of(tfm->__crt_alg, struct safexcel_alg_template,
			     alg.skcipher.base);

	crypto_skcipher_set_reqsize(__crypto_skcipher_cast(tfm),
				    sizeof(struct safexcel_cipher_req));

	ctx->priv = tmpl->priv;

	ctx->base.send = safexcel_skcipher_send;
	ctx->base.handle_result = safexcel_skcipher_handle_result;
	/* defaults; per-mode init functions may override */
	ctx->ivmask = EIP197_OPTION_4_TOKEN_IV_CMD;
	ctx->ctrinit = 1;
	return 0;
}
1225
/*
 * Common tfm teardown: scrub key material and the engine context record.
 *
 * Returns nonzero (-ENOMEM) when no context was ever allocated, which
 * callers use as a "nothing to invalidate, bail out early" signal.
 */
static int safexcel_cipher_cra_exit(struct crypto_tfm *tfm)
{
	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);

	memzero_explicit(ctx->key, sizeof(ctx->key));

	/* context not allocated, skip invalidation */
	if (!ctx->base.ctxr)
		return -ENOMEM;

	memzero_explicit(ctx->base.ctxr->data, sizeof(ctx->base.ctxr->data));
	return 0;
}
1239
/*
 * skcipher tfm exit: on engines with a transform cache the context must
 * be invalidated in hardware first; otherwise it can be freed directly.
 */
static void safexcel_skcipher_cra_exit(struct crypto_tfm *tfm)
{
	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
	struct safexcel_crypto_priv *priv = ctx->priv;
	int ret;

	/* nonzero means no context was allocated: nothing more to do */
	if (safexcel_cipher_cra_exit(tfm))
		return;

	if (priv->flags & EIP197_TRC_CACHE) {
		ret = safexcel_skcipher_exit_inv(tfm);
		if (ret)
			dev_warn(priv->dev, "skcipher: invalidation error %d\n",
				 ret);
	} else {
		dma_pool_free(priv->context_pool, ctx->base.ctxr,
			      ctx->base.ctxr_dma);
	}
}
1259
f6beaea3
AT
/*
 * AEAD tfm exit: same shape as the skcipher exit, using the AEAD
 * invalidation helper.
 */
static void safexcel_aead_cra_exit(struct crypto_tfm *tfm)
{
	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
	struct safexcel_crypto_priv *priv = ctx->priv;
	int ret;

	/* nonzero means no context was allocated: nothing more to do */
	if (safexcel_cipher_cra_exit(tfm))
		return;

	if (priv->flags & EIP197_TRC_CACHE) {
		ret = safexcel_aead_exit_inv(tfm);
		if (ret)
			dev_warn(priv->dev, "aead: invalidation error %d\n",
				 ret);
	} else {
		dma_pool_free(priv->context_pool, ctx->base.ctxr,
			      ctx->base.ctxr_dma);
	}
}
1279
93369b5d
PL
/* tfm init for ecb(aes): ECB needs no IV, hence blocksz 0 / 2-word IV opt. */
static int safexcel_skcipher_aes_ecb_cra_init(struct crypto_tfm *tfm)
{
	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);

	safexcel_skcipher_cra_init(tfm);
	ctx->alg  = SAFEXCEL_AES;
	ctx->mode = CONTEXT_CONTROL_CRYPTO_MODE_ECB;
	ctx->blocksz = 0;
	ctx->ivmask = EIP197_OPTION_2_TOKEN_IV_CMD;
	return 0;
}
1291
1b44c5a6
AT
/* ecb(aes) skcipher registration, offloaded to the inside-secure engine. */
struct safexcel_alg_template safexcel_alg_ecb_aes = {
	.type = SAFEXCEL_ALG_TYPE_SKCIPHER,
	.algo_mask = SAFEXCEL_ALG_AES,
	.alg.skcipher = {
		.setkey = safexcel_skcipher_aes_setkey,
		.encrypt = safexcel_encrypt,
		.decrypt = safexcel_decrypt,
		.min_keysize = AES_MIN_KEY_SIZE,
		.max_keysize = AES_MAX_KEY_SIZE,
		.base = {
			.cra_name = "ecb(aes)",
			.cra_driver_name = "safexcel-ecb-aes",
			.cra_priority = SAFEXCEL_CRA_PRIORITY,
			.cra_flags = CRYPTO_ALG_ASYNC |
				     CRYPTO_ALG_KERN_DRIVER_ONLY,
			.cra_blocksize = AES_BLOCK_SIZE,
			.cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
			.cra_alignmask = 0,
			.cra_init = safexcel_skcipher_aes_ecb_cra_init,
			.cra_exit = safexcel_skcipher_cra_exit,
			.cra_module = THIS_MODULE,
		},
	},
};
1316
/* tfm init for cbc(aes). */
static int safexcel_skcipher_aes_cbc_cra_init(struct crypto_tfm *tfm)
{
	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);

	safexcel_skcipher_cra_init(tfm);
	ctx->alg  = SAFEXCEL_AES;
	ctx->blocksz = AES_BLOCK_SIZE;
	ctx->mode = CONTEXT_CONTROL_CRYPTO_MODE_CBC;
	return 0;
}
1327
/* cbc(aes) skcipher registration. */
struct safexcel_alg_template safexcel_alg_cbc_aes = {
	.type = SAFEXCEL_ALG_TYPE_SKCIPHER,
	.algo_mask = SAFEXCEL_ALG_AES,
	.alg.skcipher = {
		.setkey = safexcel_skcipher_aes_setkey,
		.encrypt = safexcel_encrypt,
		.decrypt = safexcel_decrypt,
		.min_keysize = AES_MIN_KEY_SIZE,
		.max_keysize = AES_MAX_KEY_SIZE,
		.ivsize = AES_BLOCK_SIZE,
		.base = {
			.cra_name = "cbc(aes)",
			.cra_driver_name = "safexcel-cbc-aes",
			.cra_priority = SAFEXCEL_CRA_PRIORITY,
			.cra_flags = CRYPTO_ALG_ASYNC |
				     CRYPTO_ALG_KERN_DRIVER_ONLY,
			.cra_blocksize = AES_BLOCK_SIZE,
			.cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
			.cra_alignmask = 0,
			.cra_init = safexcel_skcipher_aes_cbc_cra_init,
			.cra_exit = safexcel_skcipher_cra_exit,
			.cra_module = THIS_MODULE,
		},
	},
};
1353
/* tfm init for cfb(aes). */
static int safexcel_skcipher_aes_cfb_cra_init(struct crypto_tfm *tfm)
{
	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);

	safexcel_skcipher_cra_init(tfm);
	ctx->alg  = SAFEXCEL_AES;
	ctx->blocksz = AES_BLOCK_SIZE;
	ctx->mode = CONTEXT_CONTROL_CRYPTO_MODE_CFB;
	return 0;
}
1364
/* cfb(aes) skcipher registration; stream mode, so cra_blocksize is 1. */
struct safexcel_alg_template safexcel_alg_cfb_aes = {
	.type = SAFEXCEL_ALG_TYPE_SKCIPHER,
	.algo_mask = SAFEXCEL_ALG_AES | SAFEXCEL_ALG_AES_XFB,
	.alg.skcipher = {
		.setkey = safexcel_skcipher_aes_setkey,
		.encrypt = safexcel_encrypt,
		.decrypt = safexcel_decrypt,
		.min_keysize = AES_MIN_KEY_SIZE,
		.max_keysize = AES_MAX_KEY_SIZE,
		.ivsize = AES_BLOCK_SIZE,
		.base = {
			.cra_name = "cfb(aes)",
			.cra_driver_name = "safexcel-cfb-aes",
			.cra_priority = SAFEXCEL_CRA_PRIORITY,
			.cra_flags = CRYPTO_ALG_ASYNC |
				     CRYPTO_ALG_KERN_DRIVER_ONLY,
			.cra_blocksize = 1,
			.cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
			.cra_alignmask = 0,
			.cra_init = safexcel_skcipher_aes_cfb_cra_init,
			.cra_exit = safexcel_skcipher_cra_exit,
			.cra_module = THIS_MODULE,
		},
	},
};
1390
/* tfm init for ofb(aes). */
static int safexcel_skcipher_aes_ofb_cra_init(struct crypto_tfm *tfm)
{
	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);

	safexcel_skcipher_cra_init(tfm);
	ctx->alg  = SAFEXCEL_AES;
	ctx->blocksz = AES_BLOCK_SIZE;
	ctx->mode = CONTEXT_CONTROL_CRYPTO_MODE_OFB;
	return 0;
}
1401
/* ofb(aes) skcipher registration; stream mode, so cra_blocksize is 1. */
struct safexcel_alg_template safexcel_alg_ofb_aes = {
	.type = SAFEXCEL_ALG_TYPE_SKCIPHER,
	.algo_mask = SAFEXCEL_ALG_AES | SAFEXCEL_ALG_AES_XFB,
	.alg.skcipher = {
		.setkey = safexcel_skcipher_aes_setkey,
		.encrypt = safexcel_encrypt,
		.decrypt = safexcel_decrypt,
		.min_keysize = AES_MIN_KEY_SIZE,
		.max_keysize = AES_MAX_KEY_SIZE,
		.ivsize = AES_BLOCK_SIZE,
		.base = {
			.cra_name = "ofb(aes)",
			.cra_driver_name = "safexcel-ofb-aes",
			.cra_priority = SAFEXCEL_CRA_PRIORITY,
			.cra_flags = CRYPTO_ALG_ASYNC |
				     CRYPTO_ALG_KERN_DRIVER_ONLY,
			.cra_blocksize = 1,
			.cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
			.cra_alignmask = 0,
			.cra_init = safexcel_skcipher_aes_ofb_cra_init,
			.cra_exit = safexcel_skcipher_cra_exit,
			.cra_module = THIS_MODULE,
		},
	},
};
f6beaea3 1427
54f9e8fa
PL
1428static int safexcel_skcipher_aesctr_setkey(struct crypto_skcipher *ctfm,
1429 const u8 *key, unsigned int len)
1430{
1431 struct crypto_tfm *tfm = crypto_skcipher_tfm(ctfm);
1432 struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
1433 struct safexcel_crypto_priv *priv = ctx->priv;
1434 struct crypto_aes_ctx aes;
1435 int ret, i;
1436 unsigned int keylen;
1437
1438 /* last 4 bytes of key are the nonce! */
f26882a3 1439 ctx->nonce = *(u32 *)(key + len - CTR_RFC3686_NONCE_SIZE);
54f9e8fa 1440 /* exclude the nonce here */
f26882a3 1441 keylen = len - CTR_RFC3686_NONCE_SIZE;
54f9e8fa 1442 ret = aes_expandkey(&aes, key, keylen);
674f368a 1443 if (ret)
54f9e8fa 1444 return ret;
54f9e8fa
PL
1445
1446 if (priv->flags & EIP197_TRC_CACHE && ctx->base.ctxr_dma) {
1447 for (i = 0; i < keylen / sizeof(u32); i++) {
13a1bb93 1448 if (le32_to_cpu(ctx->key[i]) != aes.key_enc[i]) {
54f9e8fa
PL
1449 ctx->base.needs_inv = true;
1450 break;
1451 }
1452 }
1453 }
1454
1455 for (i = 0; i < keylen / sizeof(u32); i++)
1456 ctx->key[i] = cpu_to_le32(aes.key_enc[i]);
1457
1458 ctx->key_len = keylen;
1459
1460 memzero_explicit(&aes, sizeof(aes));
1461 return 0;
1462}
1463
93369b5d
PL
/* tfm init for rfc3686(ctr(aes)). */
static int safexcel_skcipher_aes_ctr_cra_init(struct crypto_tfm *tfm)
{
	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);

	safexcel_skcipher_cra_init(tfm);
	ctx->alg  = SAFEXCEL_AES;
	ctx->blocksz = AES_BLOCK_SIZE;
	ctx->mode = CONTEXT_CONTROL_CRYPTO_MODE_CTR_LOAD;
	return 0;
}
1474
54f9e8fa
PL
/* rfc3686(ctr(aes)) skcipher registration. */
struct safexcel_alg_template safexcel_alg_ctr_aes = {
	.type = SAFEXCEL_ALG_TYPE_SKCIPHER,
	.algo_mask = SAFEXCEL_ALG_AES,
	.alg.skcipher = {
		.setkey = safexcel_skcipher_aesctr_setkey,
		.encrypt = safexcel_encrypt,
		.decrypt = safexcel_decrypt,
		/* Add nonce size */
		.min_keysize = AES_MIN_KEY_SIZE + CTR_RFC3686_NONCE_SIZE,
		.max_keysize = AES_MAX_KEY_SIZE + CTR_RFC3686_NONCE_SIZE,
		.ivsize = CTR_RFC3686_IV_SIZE,
		.base = {
			.cra_name = "rfc3686(ctr(aes))",
			.cra_driver_name = "safexcel-ctr-aes",
			.cra_priority = SAFEXCEL_CRA_PRIORITY,
			.cra_flags = CRYPTO_ALG_ASYNC |
				     CRYPTO_ALG_KERN_DRIVER_ONLY,
			.cra_blocksize = 1,
			.cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
			.cra_alignmask = 0,
			.cra_init = safexcel_skcipher_aes_ctr_cra_init,
			.cra_exit = safexcel_skcipher_cra_exit,
			.cra_module = THIS_MODULE,
		},
	},
};
1501
a7dea8c0
OH
/*
 * Set a single-DES key.  verify_skcipher_des_key() enforces the kernel's
 * DES weak-key policy; a changed key under a cached engine context marks
 * the context for invalidation.
 */
static int safexcel_des_setkey(struct crypto_skcipher *ctfm, const u8 *key,
			       unsigned int len)
{
	struct safexcel_cipher_ctx *ctx = crypto_skcipher_ctx(ctfm);
	struct safexcel_crypto_priv *priv = ctx->priv;
	int ret;

	ret = verify_skcipher_des_key(ctfm, key);
	if (ret)
		return ret;

	/* if context exits and key changed, need to invalidate it */
	if (priv->flags & EIP197_TRC_CACHE && ctx->base.ctxr_dma)
		if (memcmp(ctx->key, key, len))
			ctx->base.needs_inv = true;

	memcpy(ctx->key, key, len);
	ctx->key_len = len;

	return 0;
}
1523
93369b5d
PL
/* tfm init for cbc(des). */
static int safexcel_skcipher_des_cbc_cra_init(struct crypto_tfm *tfm)
{
	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);

	safexcel_skcipher_cra_init(tfm);
	ctx->alg  = SAFEXCEL_DES;
	ctx->blocksz = DES_BLOCK_SIZE;
	/* DES uses an 8-byte IV: 2 command-token words */
	ctx->ivmask = EIP197_OPTION_2_TOKEN_IV_CMD;
	ctx->mode = CONTEXT_CONTROL_CRYPTO_MODE_CBC;
	return 0;
}
1535
a7dea8c0
OH
/* cbc(des) skcipher registration. */
struct safexcel_alg_template safexcel_alg_cbc_des = {
	.type = SAFEXCEL_ALG_TYPE_SKCIPHER,
	.algo_mask = SAFEXCEL_ALG_DES,
	.alg.skcipher = {
		.setkey = safexcel_des_setkey,
		.encrypt = safexcel_encrypt,
		.decrypt = safexcel_decrypt,
		.min_keysize = DES_KEY_SIZE,
		.max_keysize = DES_KEY_SIZE,
		.ivsize = DES_BLOCK_SIZE,
		.base = {
			.cra_name = "cbc(des)",
			.cra_driver_name = "safexcel-cbc-des",
			.cra_priority = SAFEXCEL_CRA_PRIORITY,
			.cra_flags = CRYPTO_ALG_ASYNC |
				     CRYPTO_ALG_KERN_DRIVER_ONLY,
			.cra_blocksize = DES_BLOCK_SIZE,
			.cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
			.cra_alignmask = 0,
			.cra_init = safexcel_skcipher_des_cbc_cra_init,
			.cra_exit = safexcel_skcipher_cra_exit,
			.cra_module = THIS_MODULE,
		},
	},
};
1561
/* tfm init for ecb(des): no IV. */
static int safexcel_skcipher_des_ecb_cra_init(struct crypto_tfm *tfm)
{
	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);

	safexcel_skcipher_cra_init(tfm);
	ctx->alg  = SAFEXCEL_DES;
	ctx->mode = CONTEXT_CONTROL_CRYPTO_MODE_ECB;
	ctx->blocksz = 0;
	ctx->ivmask = EIP197_OPTION_2_TOKEN_IV_CMD;
	return 0;
}
1573
/* ecb(des) skcipher registration. */
struct safexcel_alg_template safexcel_alg_ecb_des = {
	.type = SAFEXCEL_ALG_TYPE_SKCIPHER,
	.algo_mask = SAFEXCEL_ALG_DES,
	.alg.skcipher = {
		.setkey = safexcel_des_setkey,
		.encrypt = safexcel_encrypt,
		.decrypt = safexcel_decrypt,
		.min_keysize = DES_KEY_SIZE,
		.max_keysize = DES_KEY_SIZE,
		.base = {
			.cra_name = "ecb(des)",
			.cra_driver_name = "safexcel-ecb-des",
			.cra_priority = SAFEXCEL_CRA_PRIORITY,
			.cra_flags = CRYPTO_ALG_ASYNC |
				     CRYPTO_ALG_KERN_DRIVER_ONLY,
			.cra_blocksize = DES_BLOCK_SIZE,
			.cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
			.cra_alignmask = 0,
			.cra_init = safexcel_skcipher_des_ecb_cra_init,
			.cra_exit = safexcel_skcipher_cra_exit,
			.cra_module = THIS_MODULE,
		},
	},
};
62469879 1598
62469879
OH
/*
 * Set a triple-DES (EDE) key.  verify_skcipher_des3_key() enforces the
 * kernel's 3DES key policy (distinct subkeys); a changed key under a
 * cached engine context marks the context for invalidation.
 */
static int safexcel_des3_ede_setkey(struct crypto_skcipher *ctfm,
				    const u8 *key, unsigned int len)
{
	struct safexcel_cipher_ctx *ctx = crypto_skcipher_ctx(ctfm);
	struct safexcel_crypto_priv *priv = ctx->priv;
	int err;

	err = verify_skcipher_des3_key(ctfm, key);
	if (err)
		return err;

	/* if context exits and key changed, need to invalidate it */
	if (priv->flags & EIP197_TRC_CACHE && ctx->base.ctxr_dma)
		if (memcmp(ctx->key, key, len))
			ctx->base.needs_inv = true;

	memcpy(ctx->key, key, len);
	ctx->key_len = len;

	return 0;
}
1620
93369b5d
PL
/* tfm init for cbc(des3_ede). */
static int safexcel_skcipher_des3_cbc_cra_init(struct crypto_tfm *tfm)
{
	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);

	safexcel_skcipher_cra_init(tfm);
	ctx->alg  = SAFEXCEL_3DES;
	ctx->blocksz = DES3_EDE_BLOCK_SIZE;
	/* 8-byte IV: 2 command-token words */
	ctx->ivmask = EIP197_OPTION_2_TOKEN_IV_CMD;
	ctx->mode = CONTEXT_CONTROL_CRYPTO_MODE_CBC;
	return 0;
}
1632
62469879
OH
/* cbc(des3_ede) skcipher registration. */
struct safexcel_alg_template safexcel_alg_cbc_des3_ede = {
	.type = SAFEXCEL_ALG_TYPE_SKCIPHER,
	.algo_mask = SAFEXCEL_ALG_DES,
	.alg.skcipher = {
		.setkey = safexcel_des3_ede_setkey,
		.encrypt = safexcel_encrypt,
		.decrypt = safexcel_decrypt,
		.min_keysize = DES3_EDE_KEY_SIZE,
		.max_keysize = DES3_EDE_KEY_SIZE,
		.ivsize = DES3_EDE_BLOCK_SIZE,
		.base = {
			.cra_name = "cbc(des3_ede)",
			.cra_driver_name = "safexcel-cbc-des3_ede",
			.cra_priority = SAFEXCEL_CRA_PRIORITY,
			.cra_flags = CRYPTO_ALG_ASYNC |
				     CRYPTO_ALG_KERN_DRIVER_ONLY,
			.cra_blocksize = DES3_EDE_BLOCK_SIZE,
			.cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
			.cra_alignmask = 0,
			.cra_init = safexcel_skcipher_des3_cbc_cra_init,
			.cra_exit = safexcel_skcipher_cra_exit,
			.cra_module = THIS_MODULE,
		},
	},
};
1658
/* tfm init for ecb(des3_ede): no IV. */
static int safexcel_skcipher_des3_ecb_cra_init(struct crypto_tfm *tfm)
{
	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);

	safexcel_skcipher_cra_init(tfm);
	ctx->alg  = SAFEXCEL_3DES;
	ctx->mode = CONTEXT_CONTROL_CRYPTO_MODE_ECB;
	ctx->blocksz = 0;
	ctx->ivmask = EIP197_OPTION_2_TOKEN_IV_CMD;
	return 0;
}
1670
/* ecb(des3_ede) skcipher registration. */
struct safexcel_alg_template safexcel_alg_ecb_des3_ede = {
	.type = SAFEXCEL_ALG_TYPE_SKCIPHER,
	.algo_mask = SAFEXCEL_ALG_DES,
	.alg.skcipher = {
		.setkey = safexcel_des3_ede_setkey,
		.encrypt = safexcel_encrypt,
		.decrypt = safexcel_decrypt,
		.min_keysize = DES3_EDE_KEY_SIZE,
		.max_keysize = DES3_EDE_KEY_SIZE,
		.base = {
			.cra_name = "ecb(des3_ede)",
			.cra_driver_name = "safexcel-ecb-des3_ede",
			.cra_priority = SAFEXCEL_CRA_PRIORITY,
			.cra_flags = CRYPTO_ALG_ASYNC |
				     CRYPTO_ALG_KERN_DRIVER_ONLY,
			.cra_blocksize = DES3_EDE_BLOCK_SIZE,
			.cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
			.cra_alignmask = 0,
			.cra_init = safexcel_skcipher_des3_ecb_cra_init,
			.cra_exit = safexcel_skcipher_cra_exit,
			.cra_module = THIS_MODULE,
		},
	},
};
1695
93369b5d 1696static int safexcel_aead_encrypt(struct aead_request *req)
f6beaea3
AT
1697{
1698 struct safexcel_cipher_req *creq = aead_request_ctx(req);
1699
93369b5d 1700 return safexcel_queue_req(&req->base, creq, SAFEXCEL_ENCRYPT);
f6beaea3
AT
1701}
1702
93369b5d 1703static int safexcel_aead_decrypt(struct aead_request *req)
f6beaea3
AT
1704{
1705 struct safexcel_cipher_req *creq = aead_request_ctx(req);
1706
93369b5d 1707 return safexcel_queue_req(&req->base, creq, SAFEXCEL_DECRYPT);
f6beaea3
AT
1708}
1709
/*
 * Common AEAD tfm init: hook up the AEAD send/result callbacks and set
 * AES-CBC defaults that the per-hash cra_init wrappers refine.
 */
static int safexcel_aead_cra_init(struct crypto_tfm *tfm)
{
	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
	struct safexcel_alg_template *tmpl =
		container_of(tfm->__crt_alg, struct safexcel_alg_template,
			     alg.aead.base);

	crypto_aead_set_reqsize(__crypto_aead_cast(tfm),
				sizeof(struct safexcel_cipher_req));

	ctx->priv = tmpl->priv;

	ctx->alg  = SAFEXCEL_AES; /* default */
	ctx->blocksz = AES_BLOCK_SIZE;
	ctx->ivmask = EIP197_OPTION_4_TOKEN_IV_CMD;
	ctx->ctrinit = 1;
	ctx->mode = CONTEXT_CONTROL_CRYPTO_MODE_CBC; /* default */
	ctx->aead = true;
	ctx->base.send = safexcel_aead_send;
	ctx->base.handle_result = safexcel_aead_handle_result;
	return 0;
}
1732
01ba061d
AT
/* AEAD tfm init: HMAC-SHA1 authentication. */
static int safexcel_aead_sha1_cra_init(struct crypto_tfm *tfm)
{
	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);

	safexcel_aead_cra_init(tfm);
	ctx->hash_alg = CONTEXT_CONTROL_CRYPTO_ALG_SHA1;
	ctx->state_sz = SHA1_DIGEST_SIZE;
	return 0;
}
1742
/* authenc(hmac(sha1),cbc(aes)) AEAD registration. */
struct safexcel_alg_template safexcel_alg_authenc_hmac_sha1_cbc_aes = {
	.type = SAFEXCEL_ALG_TYPE_AEAD,
	.algo_mask = SAFEXCEL_ALG_AES | SAFEXCEL_ALG_SHA1,
	.alg.aead = {
		.setkey = safexcel_aead_setkey,
		.encrypt = safexcel_aead_encrypt,
		.decrypt = safexcel_aead_decrypt,
		.ivsize = AES_BLOCK_SIZE,
		.maxauthsize = SHA1_DIGEST_SIZE,
		.base = {
			.cra_name = "authenc(hmac(sha1),cbc(aes))",
			.cra_driver_name = "safexcel-authenc-hmac-sha1-cbc-aes",
			.cra_priority = SAFEXCEL_CRA_PRIORITY,
			.cra_flags = CRYPTO_ALG_ASYNC |
				     CRYPTO_ALG_KERN_DRIVER_ONLY,
			.cra_blocksize = AES_BLOCK_SIZE,
			.cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
			.cra_alignmask = 0,
			.cra_init = safexcel_aead_sha1_cra_init,
			.cra_exit = safexcel_aead_cra_exit,
			.cra_module = THIS_MODULE,
		},
	},
};
1767
f6beaea3
AT
/* AEAD tfm init: HMAC-SHA256 authentication. */
static int safexcel_aead_sha256_cra_init(struct crypto_tfm *tfm)
{
	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);

	safexcel_aead_cra_init(tfm);
	ctx->hash_alg = CONTEXT_CONTROL_CRYPTO_ALG_SHA256;
	ctx->state_sz = SHA256_DIGEST_SIZE;
	return 0;
}
1777
/* authenc(hmac(sha256),cbc(aes)) AEAD registration. */
struct safexcel_alg_template safexcel_alg_authenc_hmac_sha256_cbc_aes = {
	.type = SAFEXCEL_ALG_TYPE_AEAD,
	.algo_mask = SAFEXCEL_ALG_AES | SAFEXCEL_ALG_SHA2_256,
	.alg.aead = {
		.setkey = safexcel_aead_setkey,
		.encrypt = safexcel_aead_encrypt,
		.decrypt = safexcel_aead_decrypt,
		.ivsize = AES_BLOCK_SIZE,
		.maxauthsize = SHA256_DIGEST_SIZE,
		.base = {
			.cra_name = "authenc(hmac(sha256),cbc(aes))",
			.cra_driver_name = "safexcel-authenc-hmac-sha256-cbc-aes",
			.cra_priority = SAFEXCEL_CRA_PRIORITY,
			.cra_flags = CRYPTO_ALG_ASYNC |
				     CRYPTO_ALG_KERN_DRIVER_ONLY,
			.cra_blocksize = AES_BLOCK_SIZE,
			.cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
			.cra_alignmask = 0,
			.cra_init = safexcel_aead_sha256_cra_init,
			.cra_exit = safexcel_aead_cra_exit,
			.cra_module = THIS_MODULE,
		},
	},
};
678b2878
AT
1802
/* AEAD tfm init: HMAC-SHA224; SHA224 shares SHA256's internal state size. */
static int safexcel_aead_sha224_cra_init(struct crypto_tfm *tfm)
{
	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);

	safexcel_aead_cra_init(tfm);
	ctx->hash_alg = CONTEXT_CONTROL_CRYPTO_ALG_SHA224;
	ctx->state_sz = SHA256_DIGEST_SIZE;
	return 0;
}
1812
/* authenc(hmac(sha224),cbc(aes)) AEAD registration. */
struct safexcel_alg_template safexcel_alg_authenc_hmac_sha224_cbc_aes = {
	.type = SAFEXCEL_ALG_TYPE_AEAD,
	.algo_mask = SAFEXCEL_ALG_AES | SAFEXCEL_ALG_SHA2_256,
	.alg.aead = {
		.setkey = safexcel_aead_setkey,
		.encrypt = safexcel_aead_encrypt,
		.decrypt = safexcel_aead_decrypt,
		.ivsize = AES_BLOCK_SIZE,
		.maxauthsize = SHA224_DIGEST_SIZE,
		.base = {
			.cra_name = "authenc(hmac(sha224),cbc(aes))",
			.cra_driver_name = "safexcel-authenc-hmac-sha224-cbc-aes",
			.cra_priority = SAFEXCEL_CRA_PRIORITY,
			.cra_flags = CRYPTO_ALG_ASYNC |
				     CRYPTO_ALG_KERN_DRIVER_ONLY,
			.cra_blocksize = AES_BLOCK_SIZE,
			.cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
			.cra_alignmask = 0,
			.cra_init = safexcel_aead_sha224_cra_init,
			.cra_exit = safexcel_aead_cra_exit,
			.cra_module = THIS_MODULE,
		},
	},
};
87eee125
AT
1837
/* AEAD tfm init: HMAC-SHA512 authentication. */
static int safexcel_aead_sha512_cra_init(struct crypto_tfm *tfm)
{
	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);

	safexcel_aead_cra_init(tfm);
	ctx->hash_alg = CONTEXT_CONTROL_CRYPTO_ALG_SHA512;
	ctx->state_sz = SHA512_DIGEST_SIZE;
	return 0;
}
1847
/* authenc(hmac(sha512),cbc(aes)) AEAD registration. */
struct safexcel_alg_template safexcel_alg_authenc_hmac_sha512_cbc_aes = {
	.type = SAFEXCEL_ALG_TYPE_AEAD,
	.algo_mask = SAFEXCEL_ALG_AES | SAFEXCEL_ALG_SHA2_512,
	.alg.aead = {
		.setkey = safexcel_aead_setkey,
		.encrypt = safexcel_aead_encrypt,
		.decrypt = safexcel_aead_decrypt,
		.ivsize = AES_BLOCK_SIZE,
		.maxauthsize = SHA512_DIGEST_SIZE,
		.base = {
			.cra_name = "authenc(hmac(sha512),cbc(aes))",
			.cra_driver_name = "safexcel-authenc-hmac-sha512-cbc-aes",
			.cra_priority = SAFEXCEL_CRA_PRIORITY,
			.cra_flags = CRYPTO_ALG_ASYNC |
				     CRYPTO_ALG_KERN_DRIVER_ONLY,
			.cra_blocksize = AES_BLOCK_SIZE,
			.cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
			.cra_alignmask = 0,
			.cra_init = safexcel_aead_sha512_cra_init,
			.cra_exit = safexcel_aead_cra_exit,
			.cra_module = THIS_MODULE,
		},
	},
};
ea23cb53
AT
1872
/* Init AEAD context for HMAC-SHA384 authentication (AES-CBC cipher default). */
static int safexcel_aead_sha384_cra_init(struct crypto_tfm *tfm)
{
	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);

	safexcel_aead_cra_init(tfm);
	ctx->hash_alg = CONTEXT_CONTROL_CRYPTO_ALG_SHA384;
	/* SHA-384 is truncated SHA-512: engine state is SHA-512 sized */
	ctx->state_sz = SHA512_DIGEST_SIZE;
	return 0;
}

/* Template for authenc(hmac(sha384),cbc(aes)) */
struct safexcel_alg_template safexcel_alg_authenc_hmac_sha384_cbc_aes = {
	.type = SAFEXCEL_ALG_TYPE_AEAD,
	.algo_mask = SAFEXCEL_ALG_AES | SAFEXCEL_ALG_SHA2_512,
	.alg.aead = {
		.setkey = safexcel_aead_setkey,
		.encrypt = safexcel_aead_encrypt,
		.decrypt = safexcel_aead_decrypt,
		.ivsize = AES_BLOCK_SIZE,
		.maxauthsize = SHA384_DIGEST_SIZE,
		.base = {
			.cra_name = "authenc(hmac(sha384),cbc(aes))",
			.cra_driver_name = "safexcel-authenc-hmac-sha384-cbc-aes",
			.cra_priority = SAFEXCEL_CRA_PRIORITY,
			.cra_flags = CRYPTO_ALG_ASYNC |
				     CRYPTO_ALG_KERN_DRIVER_ONLY,
			.cra_blocksize = AES_BLOCK_SIZE,
			.cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
			.cra_alignmask = 0,
			.cra_init = safexcel_aead_sha384_cra_init,
			.cra_exit = safexcel_aead_cra_exit,
			.cra_module = THIS_MODULE,
		},
	},
};
77cdd4ef 1907
0e17e362
PL
/* Init AEAD context for HMAC-SHA1 with 3DES-CBC as the cipher. */
static int safexcel_aead_sha1_des3_cra_init(struct crypto_tfm *tfm)
{
	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);

	safexcel_aead_sha1_cra_init(tfm);
	ctx->alg = SAFEXCEL_3DES; /* override default */
	ctx->blocksz = DES3_EDE_BLOCK_SIZE;
	ctx->ivmask = EIP197_OPTION_2_TOKEN_IV_CMD;
	return 0;
}

/* Template for authenc(hmac(sha1),cbc(des3_ede)) */
struct safexcel_alg_template safexcel_alg_authenc_hmac_sha1_cbc_des3_ede = {
	.type = SAFEXCEL_ALG_TYPE_AEAD,
	.algo_mask = SAFEXCEL_ALG_DES | SAFEXCEL_ALG_SHA1,
	.alg.aead = {
		.setkey = safexcel_aead_setkey,
		.encrypt = safexcel_aead_encrypt,
		.decrypt = safexcel_aead_decrypt,
		.ivsize = DES3_EDE_BLOCK_SIZE,
		.maxauthsize = SHA1_DIGEST_SIZE,
		.base = {
			.cra_name = "authenc(hmac(sha1),cbc(des3_ede))",
			.cra_driver_name = "safexcel-authenc-hmac-sha1-cbc-des3_ede",
			.cra_priority = SAFEXCEL_CRA_PRIORITY,
			.cra_flags = CRYPTO_ALG_ASYNC |
				     CRYPTO_ALG_KERN_DRIVER_ONLY,
			.cra_blocksize = DES3_EDE_BLOCK_SIZE,
			.cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
			.cra_alignmask = 0,
			.cra_init = safexcel_aead_sha1_des3_cra_init,
			.cra_exit = safexcel_aead_cra_exit,
			.cra_module = THIS_MODULE,
		},
	},
};
1943
f0a8bdf0
PL
/* Init AEAD context for HMAC-SHA256 with 3DES-CBC as the cipher. */
static int safexcel_aead_sha256_des3_cra_init(struct crypto_tfm *tfm)
{
	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);

	safexcel_aead_sha256_cra_init(tfm);
	ctx->alg = SAFEXCEL_3DES; /* override default */
	ctx->blocksz = DES3_EDE_BLOCK_SIZE;
	ctx->ivmask = EIP197_OPTION_2_TOKEN_IV_CMD;
	return 0;
}

/* Template for authenc(hmac(sha256),cbc(des3_ede)) */
struct safexcel_alg_template safexcel_alg_authenc_hmac_sha256_cbc_des3_ede = {
	.type = SAFEXCEL_ALG_TYPE_AEAD,
	.algo_mask = SAFEXCEL_ALG_DES | SAFEXCEL_ALG_SHA2_256,
	.alg.aead = {
		.setkey = safexcel_aead_setkey,
		.encrypt = safexcel_aead_encrypt,
		.decrypt = safexcel_aead_decrypt,
		.ivsize = DES3_EDE_BLOCK_SIZE,
		.maxauthsize = SHA256_DIGEST_SIZE,
		.base = {
			.cra_name = "authenc(hmac(sha256),cbc(des3_ede))",
			.cra_driver_name = "safexcel-authenc-hmac-sha256-cbc-des3_ede",
			.cra_priority = SAFEXCEL_CRA_PRIORITY,
			.cra_flags = CRYPTO_ALG_ASYNC |
				     CRYPTO_ALG_KERN_DRIVER_ONLY,
			.cra_blocksize = DES3_EDE_BLOCK_SIZE,
			.cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
			.cra_alignmask = 0,
			.cra_init = safexcel_aead_sha256_des3_cra_init,
			.cra_exit = safexcel_aead_cra_exit,
			.cra_module = THIS_MODULE,
		},
	},
};
1979
/* Init AEAD context for HMAC-SHA224 with 3DES-CBC as the cipher. */
static int safexcel_aead_sha224_des3_cra_init(struct crypto_tfm *tfm)
{
	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);

	safexcel_aead_sha224_cra_init(tfm);
	ctx->alg = SAFEXCEL_3DES; /* override default */
	ctx->blocksz = DES3_EDE_BLOCK_SIZE;
	ctx->ivmask = EIP197_OPTION_2_TOKEN_IV_CMD;
	return 0;
}

/* Template for authenc(hmac(sha224),cbc(des3_ede)) */
struct safexcel_alg_template safexcel_alg_authenc_hmac_sha224_cbc_des3_ede = {
	.type = SAFEXCEL_ALG_TYPE_AEAD,
	.algo_mask = SAFEXCEL_ALG_DES | SAFEXCEL_ALG_SHA2_256,
	.alg.aead = {
		.setkey = safexcel_aead_setkey,
		.encrypt = safexcel_aead_encrypt,
		.decrypt = safexcel_aead_decrypt,
		.ivsize = DES3_EDE_BLOCK_SIZE,
		.maxauthsize = SHA224_DIGEST_SIZE,
		.base = {
			.cra_name = "authenc(hmac(sha224),cbc(des3_ede))",
			.cra_driver_name = "safexcel-authenc-hmac-sha224-cbc-des3_ede",
			.cra_priority = SAFEXCEL_CRA_PRIORITY,
			.cra_flags = CRYPTO_ALG_ASYNC |
				     CRYPTO_ALG_KERN_DRIVER_ONLY,
			.cra_blocksize = DES3_EDE_BLOCK_SIZE,
			.cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
			.cra_alignmask = 0,
			.cra_init = safexcel_aead_sha224_des3_cra_init,
			.cra_exit = safexcel_aead_cra_exit,
			.cra_module = THIS_MODULE,
		},
	},
};
2015
/* Init AEAD context for HMAC-SHA512 with 3DES-CBC as the cipher. */
static int safexcel_aead_sha512_des3_cra_init(struct crypto_tfm *tfm)
{
	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);

	safexcel_aead_sha512_cra_init(tfm);
	ctx->alg = SAFEXCEL_3DES; /* override default */
	ctx->blocksz = DES3_EDE_BLOCK_SIZE;
	ctx->ivmask = EIP197_OPTION_2_TOKEN_IV_CMD;
	return 0;
}

/* Template for authenc(hmac(sha512),cbc(des3_ede)) */
struct safexcel_alg_template safexcel_alg_authenc_hmac_sha512_cbc_des3_ede = {
	.type = SAFEXCEL_ALG_TYPE_AEAD,
	.algo_mask = SAFEXCEL_ALG_DES | SAFEXCEL_ALG_SHA2_512,
	.alg.aead = {
		.setkey = safexcel_aead_setkey,
		.encrypt = safexcel_aead_encrypt,
		.decrypt = safexcel_aead_decrypt,
		.ivsize = DES3_EDE_BLOCK_SIZE,
		.maxauthsize = SHA512_DIGEST_SIZE,
		.base = {
			.cra_name = "authenc(hmac(sha512),cbc(des3_ede))",
			.cra_driver_name = "safexcel-authenc-hmac-sha512-cbc-des3_ede",
			.cra_priority = SAFEXCEL_CRA_PRIORITY,
			.cra_flags = CRYPTO_ALG_ASYNC |
				     CRYPTO_ALG_KERN_DRIVER_ONLY,
			.cra_blocksize = DES3_EDE_BLOCK_SIZE,
			.cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
			.cra_alignmask = 0,
			.cra_init = safexcel_aead_sha512_des3_cra_init,
			.cra_exit = safexcel_aead_cra_exit,
			.cra_module = THIS_MODULE,
		},
	},
};
2051
/* Init AEAD context for HMAC-SHA384 with 3DES-CBC as the cipher. */
static int safexcel_aead_sha384_des3_cra_init(struct crypto_tfm *tfm)
{
	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);

	safexcel_aead_sha384_cra_init(tfm);
	ctx->alg = SAFEXCEL_3DES; /* override default */
	ctx->blocksz = DES3_EDE_BLOCK_SIZE;
	ctx->ivmask = EIP197_OPTION_2_TOKEN_IV_CMD;
	return 0;
}

/* Template for authenc(hmac(sha384),cbc(des3_ede)) */
struct safexcel_alg_template safexcel_alg_authenc_hmac_sha384_cbc_des3_ede = {
	.type = SAFEXCEL_ALG_TYPE_AEAD,
	.algo_mask = SAFEXCEL_ALG_DES | SAFEXCEL_ALG_SHA2_512,
	.alg.aead = {
		.setkey = safexcel_aead_setkey,
		.encrypt = safexcel_aead_encrypt,
		.decrypt = safexcel_aead_decrypt,
		.ivsize = DES3_EDE_BLOCK_SIZE,
		.maxauthsize = SHA384_DIGEST_SIZE,
		.base = {
			.cra_name = "authenc(hmac(sha384),cbc(des3_ede))",
			.cra_driver_name = "safexcel-authenc-hmac-sha384-cbc-des3_ede",
			.cra_priority = SAFEXCEL_CRA_PRIORITY,
			.cra_flags = CRYPTO_ALG_ASYNC |
				     CRYPTO_ALG_KERN_DRIVER_ONLY,
			.cra_blocksize = DES3_EDE_BLOCK_SIZE,
			.cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
			.cra_alignmask = 0,
			.cra_init = safexcel_aead_sha384_des3_cra_init,
			.cra_exit = safexcel_aead_cra_exit,
			.cra_module = THIS_MODULE,
		},
	},
};
2087
bb7679b8
PL
/* Init AEAD context for HMAC-SHA1 with single-DES-CBC as the cipher. */
static int safexcel_aead_sha1_des_cra_init(struct crypto_tfm *tfm)
{
	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);

	safexcel_aead_sha1_cra_init(tfm);
	ctx->alg = SAFEXCEL_DES; /* override default */
	ctx->blocksz = DES_BLOCK_SIZE;
	ctx->ivmask = EIP197_OPTION_2_TOKEN_IV_CMD;
	return 0;
}

/* Template for authenc(hmac(sha1),cbc(des)) */
struct safexcel_alg_template safexcel_alg_authenc_hmac_sha1_cbc_des = {
	.type = SAFEXCEL_ALG_TYPE_AEAD,
	.algo_mask = SAFEXCEL_ALG_DES | SAFEXCEL_ALG_SHA1,
	.alg.aead = {
		.setkey = safexcel_aead_setkey,
		.encrypt = safexcel_aead_encrypt,
		.decrypt = safexcel_aead_decrypt,
		.ivsize = DES_BLOCK_SIZE,
		.maxauthsize = SHA1_DIGEST_SIZE,
		.base = {
			.cra_name = "authenc(hmac(sha1),cbc(des))",
			.cra_driver_name = "safexcel-authenc-hmac-sha1-cbc-des",
			.cra_priority = SAFEXCEL_CRA_PRIORITY,
			.cra_flags = CRYPTO_ALG_ASYNC |
				     CRYPTO_ALG_KERN_DRIVER_ONLY,
			.cra_blocksize = DES_BLOCK_SIZE,
			.cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
			.cra_alignmask = 0,
			.cra_init = safexcel_aead_sha1_des_cra_init,
			.cra_exit = safexcel_aead_cra_exit,
			.cra_module = THIS_MODULE,
		},
	},
};
2123
457a6fdf
PL
/* Init AEAD context for HMAC-SHA256 with single-DES-CBC as the cipher. */
static int safexcel_aead_sha256_des_cra_init(struct crypto_tfm *tfm)
{
	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);

	safexcel_aead_sha256_cra_init(tfm);
	ctx->alg = SAFEXCEL_DES; /* override default */
	ctx->blocksz = DES_BLOCK_SIZE;
	ctx->ivmask = EIP197_OPTION_2_TOKEN_IV_CMD;
	return 0;
}

/* Template for authenc(hmac(sha256),cbc(des)) */
struct safexcel_alg_template safexcel_alg_authenc_hmac_sha256_cbc_des = {
	.type = SAFEXCEL_ALG_TYPE_AEAD,
	.algo_mask = SAFEXCEL_ALG_DES | SAFEXCEL_ALG_SHA2_256,
	.alg.aead = {
		.setkey = safexcel_aead_setkey,
		.encrypt = safexcel_aead_encrypt,
		.decrypt = safexcel_aead_decrypt,
		.ivsize = DES_BLOCK_SIZE,
		.maxauthsize = SHA256_DIGEST_SIZE,
		.base = {
			.cra_name = "authenc(hmac(sha256),cbc(des))",
			.cra_driver_name = "safexcel-authenc-hmac-sha256-cbc-des",
			.cra_priority = SAFEXCEL_CRA_PRIORITY,
			.cra_flags = CRYPTO_ALG_ASYNC |
				     CRYPTO_ALG_KERN_DRIVER_ONLY,
			.cra_blocksize = DES_BLOCK_SIZE,
			.cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
			.cra_alignmask = 0,
			.cra_init = safexcel_aead_sha256_des_cra_init,
			.cra_exit = safexcel_aead_cra_exit,
			.cra_module = THIS_MODULE,
		},
	},
};
2159
/* Init AEAD context for HMAC-SHA224 with single-DES-CBC as the cipher. */
static int safexcel_aead_sha224_des_cra_init(struct crypto_tfm *tfm)
{
	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);

	safexcel_aead_sha224_cra_init(tfm);
	ctx->alg = SAFEXCEL_DES; /* override default */
	ctx->blocksz = DES_BLOCK_SIZE;
	ctx->ivmask = EIP197_OPTION_2_TOKEN_IV_CMD;
	return 0;
}

/* Template for authenc(hmac(sha224),cbc(des)) */
struct safexcel_alg_template safexcel_alg_authenc_hmac_sha224_cbc_des = {
	.type = SAFEXCEL_ALG_TYPE_AEAD,
	.algo_mask = SAFEXCEL_ALG_DES | SAFEXCEL_ALG_SHA2_256,
	.alg.aead = {
		.setkey = safexcel_aead_setkey,
		.encrypt = safexcel_aead_encrypt,
		.decrypt = safexcel_aead_decrypt,
		.ivsize = DES_BLOCK_SIZE,
		.maxauthsize = SHA224_DIGEST_SIZE,
		.base = {
			.cra_name = "authenc(hmac(sha224),cbc(des))",
			.cra_driver_name = "safexcel-authenc-hmac-sha224-cbc-des",
			.cra_priority = SAFEXCEL_CRA_PRIORITY,
			.cra_flags = CRYPTO_ALG_ASYNC |
				     CRYPTO_ALG_KERN_DRIVER_ONLY,
			.cra_blocksize = DES_BLOCK_SIZE,
			.cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
			.cra_alignmask = 0,
			.cra_init = safexcel_aead_sha224_des_cra_init,
			.cra_exit = safexcel_aead_cra_exit,
			.cra_module = THIS_MODULE,
		},
	},
};
2195
/* Init AEAD context for HMAC-SHA512 with single-DES-CBC as the cipher. */
static int safexcel_aead_sha512_des_cra_init(struct crypto_tfm *tfm)
{
	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);

	safexcel_aead_sha512_cra_init(tfm);
	ctx->alg = SAFEXCEL_DES; /* override default */
	ctx->blocksz = DES_BLOCK_SIZE;
	ctx->ivmask = EIP197_OPTION_2_TOKEN_IV_CMD;
	return 0;
}

/* Template for authenc(hmac(sha512),cbc(des)) */
struct safexcel_alg_template safexcel_alg_authenc_hmac_sha512_cbc_des = {
	.type = SAFEXCEL_ALG_TYPE_AEAD,
	.algo_mask = SAFEXCEL_ALG_DES | SAFEXCEL_ALG_SHA2_512,
	.alg.aead = {
		.setkey = safexcel_aead_setkey,
		.encrypt = safexcel_aead_encrypt,
		.decrypt = safexcel_aead_decrypt,
		.ivsize = DES_BLOCK_SIZE,
		.maxauthsize = SHA512_DIGEST_SIZE,
		.base = {
			.cra_name = "authenc(hmac(sha512),cbc(des))",
			.cra_driver_name = "safexcel-authenc-hmac-sha512-cbc-des",
			.cra_priority = SAFEXCEL_CRA_PRIORITY,
			.cra_flags = CRYPTO_ALG_ASYNC |
				     CRYPTO_ALG_KERN_DRIVER_ONLY,
			.cra_blocksize = DES_BLOCK_SIZE,
			.cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
			.cra_alignmask = 0,
			.cra_init = safexcel_aead_sha512_des_cra_init,
			.cra_exit = safexcel_aead_cra_exit,
			.cra_module = THIS_MODULE,
		},
	},
};
2231
/* Init AEAD context for HMAC-SHA384 with single-DES-CBC as the cipher. */
static int safexcel_aead_sha384_des_cra_init(struct crypto_tfm *tfm)
{
	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);

	safexcel_aead_sha384_cra_init(tfm);
	ctx->alg = SAFEXCEL_DES; /* override default */
	ctx->blocksz = DES_BLOCK_SIZE;
	ctx->ivmask = EIP197_OPTION_2_TOKEN_IV_CMD;
	return 0;
}

/* Template for authenc(hmac(sha384),cbc(des)) */
struct safexcel_alg_template safexcel_alg_authenc_hmac_sha384_cbc_des = {
	.type = SAFEXCEL_ALG_TYPE_AEAD,
	.algo_mask = SAFEXCEL_ALG_DES | SAFEXCEL_ALG_SHA2_512,
	.alg.aead = {
		.setkey = safexcel_aead_setkey,
		.encrypt = safexcel_aead_encrypt,
		.decrypt = safexcel_aead_decrypt,
		.ivsize = DES_BLOCK_SIZE,
		.maxauthsize = SHA384_DIGEST_SIZE,
		.base = {
			.cra_name = "authenc(hmac(sha384),cbc(des))",
			.cra_driver_name = "safexcel-authenc-hmac-sha384-cbc-des",
			.cra_priority = SAFEXCEL_CRA_PRIORITY,
			.cra_flags = CRYPTO_ALG_ASYNC |
				     CRYPTO_ALG_KERN_DRIVER_ONLY,
			.cra_blocksize = DES_BLOCK_SIZE,
			.cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
			.cra_alignmask = 0,
			.cra_init = safexcel_aead_sha384_des_cra_init,
			.cra_exit = safexcel_aead_cra_exit,
			.cra_module = THIS_MODULE,
		},
	},
};
2267
0e17e362
PL
/* Init AEAD context for HMAC-SHA1 with AES-CTR (RFC3686) as the cipher. */
static int safexcel_aead_sha1_ctr_cra_init(struct crypto_tfm *tfm)
{
	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);

	safexcel_aead_sha1_cra_init(tfm);
	ctx->mode = CONTEXT_CONTROL_CRYPTO_MODE_CTR_LOAD; /* override default */
	return 0;
}

/* Template for authenc(hmac(sha1),rfc3686(ctr(aes))) */
struct safexcel_alg_template safexcel_alg_authenc_hmac_sha1_ctr_aes = {
	.type = SAFEXCEL_ALG_TYPE_AEAD,
	.algo_mask = SAFEXCEL_ALG_AES | SAFEXCEL_ALG_SHA1,
	.alg.aead = {
		.setkey = safexcel_aead_setkey,
		.encrypt = safexcel_aead_encrypt,
		.decrypt = safexcel_aead_decrypt,
		.ivsize = CTR_RFC3686_IV_SIZE,
		.maxauthsize = SHA1_DIGEST_SIZE,
		.base = {
			.cra_name = "authenc(hmac(sha1),rfc3686(ctr(aes)))",
			.cra_driver_name = "safexcel-authenc-hmac-sha1-ctr-aes",
			.cra_priority = SAFEXCEL_CRA_PRIORITY,
			.cra_flags = CRYPTO_ALG_ASYNC |
				     CRYPTO_ALG_KERN_DRIVER_ONLY,
			/* CTR mode is a stream cipher: blocksize 1 */
			.cra_blocksize = 1,
			.cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
			.cra_alignmask = 0,
			.cra_init = safexcel_aead_sha1_ctr_cra_init,
			.cra_exit = safexcel_aead_cra_exit,
			.cra_module = THIS_MODULE,
		},
	},
};
2301
/* Init AEAD context for HMAC-SHA256 with AES-CTR (RFC3686) as the cipher. */
static int safexcel_aead_sha256_ctr_cra_init(struct crypto_tfm *tfm)
{
	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);

	safexcel_aead_sha256_cra_init(tfm);
	ctx->mode = CONTEXT_CONTROL_CRYPTO_MODE_CTR_LOAD; /* override default */
	return 0;
}

/* Template for authenc(hmac(sha256),rfc3686(ctr(aes))) */
struct safexcel_alg_template safexcel_alg_authenc_hmac_sha256_ctr_aes = {
	.type = SAFEXCEL_ALG_TYPE_AEAD,
	.algo_mask = SAFEXCEL_ALG_AES | SAFEXCEL_ALG_SHA2_256,
	.alg.aead = {
		.setkey = safexcel_aead_setkey,
		.encrypt = safexcel_aead_encrypt,
		.decrypt = safexcel_aead_decrypt,
		.ivsize = CTR_RFC3686_IV_SIZE,
		.maxauthsize = SHA256_DIGEST_SIZE,
		.base = {
			.cra_name = "authenc(hmac(sha256),rfc3686(ctr(aes)))",
			.cra_driver_name = "safexcel-authenc-hmac-sha256-ctr-aes",
			.cra_priority = SAFEXCEL_CRA_PRIORITY,
			.cra_flags = CRYPTO_ALG_ASYNC |
				     CRYPTO_ALG_KERN_DRIVER_ONLY,
			/* CTR mode is a stream cipher: blocksize 1 */
			.cra_blocksize = 1,
			.cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
			.cra_alignmask = 0,
			.cra_init = safexcel_aead_sha256_ctr_cra_init,
			.cra_exit = safexcel_aead_cra_exit,
			.cra_module = THIS_MODULE,
		},
	},
};
2335
/* Init AEAD context for HMAC-SHA224 with AES-CTR (RFC3686) as the cipher. */
static int safexcel_aead_sha224_ctr_cra_init(struct crypto_tfm *tfm)
{
	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);

	safexcel_aead_sha224_cra_init(tfm);
	ctx->mode = CONTEXT_CONTROL_CRYPTO_MODE_CTR_LOAD; /* override default */
	return 0;
}

/* Template for authenc(hmac(sha224),rfc3686(ctr(aes))) */
struct safexcel_alg_template safexcel_alg_authenc_hmac_sha224_ctr_aes = {
	.type = SAFEXCEL_ALG_TYPE_AEAD,
	.algo_mask = SAFEXCEL_ALG_AES | SAFEXCEL_ALG_SHA2_256,
	.alg.aead = {
		.setkey = safexcel_aead_setkey,
		.encrypt = safexcel_aead_encrypt,
		.decrypt = safexcel_aead_decrypt,
		.ivsize = CTR_RFC3686_IV_SIZE,
		.maxauthsize = SHA224_DIGEST_SIZE,
		.base = {
			.cra_name = "authenc(hmac(sha224),rfc3686(ctr(aes)))",
			.cra_driver_name = "safexcel-authenc-hmac-sha224-ctr-aes",
			.cra_priority = SAFEXCEL_CRA_PRIORITY,
			.cra_flags = CRYPTO_ALG_ASYNC |
				     CRYPTO_ALG_KERN_DRIVER_ONLY,
			/* CTR mode is a stream cipher: blocksize 1 */
			.cra_blocksize = 1,
			.cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
			.cra_alignmask = 0,
			.cra_init = safexcel_aead_sha224_ctr_cra_init,
			.cra_exit = safexcel_aead_cra_exit,
			.cra_module = THIS_MODULE,
		},
	},
};
2369
/* Init AEAD context for HMAC-SHA512 with AES-CTR (RFC3686) as the cipher. */
static int safexcel_aead_sha512_ctr_cra_init(struct crypto_tfm *tfm)
{
	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);

	safexcel_aead_sha512_cra_init(tfm);
	ctx->mode = CONTEXT_CONTROL_CRYPTO_MODE_CTR_LOAD; /* override default */
	return 0;
}

/* Template for authenc(hmac(sha512),rfc3686(ctr(aes))) */
struct safexcel_alg_template safexcel_alg_authenc_hmac_sha512_ctr_aes = {
	.type = SAFEXCEL_ALG_TYPE_AEAD,
	.algo_mask = SAFEXCEL_ALG_AES | SAFEXCEL_ALG_SHA2_512,
	.alg.aead = {
		.setkey = safexcel_aead_setkey,
		.encrypt = safexcel_aead_encrypt,
		.decrypt = safexcel_aead_decrypt,
		.ivsize = CTR_RFC3686_IV_SIZE,
		.maxauthsize = SHA512_DIGEST_SIZE,
		.base = {
			.cra_name = "authenc(hmac(sha512),rfc3686(ctr(aes)))",
			.cra_driver_name = "safexcel-authenc-hmac-sha512-ctr-aes",
			.cra_priority = SAFEXCEL_CRA_PRIORITY,
			.cra_flags = CRYPTO_ALG_ASYNC |
				     CRYPTO_ALG_KERN_DRIVER_ONLY,
			/* CTR mode is a stream cipher: blocksize 1 */
			.cra_blocksize = 1,
			.cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
			.cra_alignmask = 0,
			.cra_init = safexcel_aead_sha512_ctr_cra_init,
			.cra_exit = safexcel_aead_cra_exit,
			.cra_module = THIS_MODULE,
		},
	},
};
2403
/* Init AEAD context for HMAC-SHA384 with AES-CTR (RFC3686) as the cipher. */
static int safexcel_aead_sha384_ctr_cra_init(struct crypto_tfm *tfm)
{
	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);

	safexcel_aead_sha384_cra_init(tfm);
	ctx->mode = CONTEXT_CONTROL_CRYPTO_MODE_CTR_LOAD; /* override default */
	return 0;
}

/* Template for authenc(hmac(sha384),rfc3686(ctr(aes))) */
struct safexcel_alg_template safexcel_alg_authenc_hmac_sha384_ctr_aes = {
	.type = SAFEXCEL_ALG_TYPE_AEAD,
	.algo_mask = SAFEXCEL_ALG_AES | SAFEXCEL_ALG_SHA2_512,
	.alg.aead = {
		.setkey = safexcel_aead_setkey,
		.encrypt = safexcel_aead_encrypt,
		.decrypt = safexcel_aead_decrypt,
		.ivsize = CTR_RFC3686_IV_SIZE,
		.maxauthsize = SHA384_DIGEST_SIZE,
		.base = {
			.cra_name = "authenc(hmac(sha384),rfc3686(ctr(aes)))",
			.cra_driver_name = "safexcel-authenc-hmac-sha384-ctr-aes",
			.cra_priority = SAFEXCEL_CRA_PRIORITY,
			.cra_flags = CRYPTO_ALG_ASYNC |
				     CRYPTO_ALG_KERN_DRIVER_ONLY,
			/* CTR mode is a stream cipher: blocksize 1 */
			.cra_blocksize = 1,
			.cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
			.cra_alignmask = 0,
			.cra_init = safexcel_aead_sha384_ctr_cra_init,
			.cra_exit = safexcel_aead_cra_exit,
			.cra_module = THIS_MODULE,
		},
	},
};
c7da38a7
PL
2437
/*
 * Set the AES-XTS key: the first half of @key is the cipher key, the
 * second half the tweak key.  If the engine's transform record cache is
 * in use and the key changed, flag the old record for invalidation.
 */
static int safexcel_skcipher_aesxts_setkey(struct crypto_skcipher *ctfm,
					   const u8 *key, unsigned int len)
{
	struct crypto_tfm *tfm = crypto_skcipher_tfm(ctfm);
	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
	struct safexcel_crypto_priv *priv = ctx->priv;
	struct crypto_aes_ctx aes;
	int ret, i;
	unsigned int keylen;

	/* Check for illegal XTS keys */
	ret = xts_verify_key(ctfm, key, len);
	if (ret)
		return ret;

	/* Only half of the key data is cipher key */
	keylen = (len >> 1);
	ret = aes_expandkey(&aes, key, keylen);
	if (ret)
		return ret;

	/* Compare against the cached key BEFORE overwriting it */
	if (priv->flags & EIP197_TRC_CACHE && ctx->base.ctxr_dma) {
		for (i = 0; i < keylen / sizeof(u32); i++) {
			if (le32_to_cpu(ctx->key[i]) != aes.key_enc[i]) {
				ctx->base.needs_inv = true;
				break;
			}
		}
	}

	for (i = 0; i < keylen / sizeof(u32); i++)
		ctx->key[i] = cpu_to_le32(aes.key_enc[i]);

	/* The other half is the tweak key */
	ret = aes_expandkey(&aes, (u8 *)(key + keylen), keylen);
	if (ret)
		return ret;

	if (priv->flags & EIP197_TRC_CACHE && ctx->base.ctxr_dma) {
		for (i = 0; i < keylen / sizeof(u32); i++) {
			if (le32_to_cpu(ctx->key[i + keylen / sizeof(u32)]) !=
			    aes.key_enc[i]) {
				ctx->base.needs_inv = true;
				break;
			}
		}
	}

	/* Tweak key is stored directly after the cipher key */
	for (i = 0; i < keylen / sizeof(u32); i++)
		ctx->key[i + keylen / sizeof(u32)] =
			cpu_to_le32(aes.key_enc[i]);

	ctx->key_len = keylen << 1;

	/* Wipe expanded key material from the stack */
	memzero_explicit(&aes, sizeof(aes));
	return 0;
}
2495
/* Init skcipher context for AES-XTS mode. */
static int safexcel_skcipher_aes_xts_cra_init(struct crypto_tfm *tfm)
{
	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);

	safexcel_skcipher_cra_init(tfm);
	ctx->alg = SAFEXCEL_AES;
	ctx->blocksz = AES_BLOCK_SIZE;
	ctx->xts = 1;
	ctx->mode = CONTEXT_CONTROL_CRYPTO_MODE_XTS;
	return 0;
}
2507
/* XTS encrypt: reject requests shorter than one XTS block, then queue. */
static int safexcel_encrypt_xts(struct skcipher_request *req)
{
	if (req->cryptlen < XTS_BLOCK_SIZE)
		return -EINVAL;
	return safexcel_queue_req(&req->base, skcipher_request_ctx(req),
				  SAFEXCEL_ENCRYPT);
}

/* XTS decrypt: reject requests shorter than one XTS block, then queue. */
static int safexcel_decrypt_xts(struct skcipher_request *req)
{
	if (req->cryptlen < XTS_BLOCK_SIZE)
		return -EINVAL;
	return safexcel_queue_req(&req->base, skcipher_request_ctx(req),
				  SAFEXCEL_DECRYPT);
}
2523
/* Template for xts(aes) */
struct safexcel_alg_template safexcel_alg_xts_aes = {
	.type = SAFEXCEL_ALG_TYPE_SKCIPHER,
	.algo_mask = SAFEXCEL_ALG_AES | SAFEXCEL_ALG_AES_XTS,
	.alg.skcipher = {
		.setkey = safexcel_skcipher_aesxts_setkey,
		.encrypt = safexcel_encrypt_xts,
		.decrypt = safexcel_decrypt_xts,
		/* XTS actually uses 2 AES keys glued together */
		.min_keysize = AES_MIN_KEY_SIZE * 2,
		.max_keysize = AES_MAX_KEY_SIZE * 2,
		.ivsize = XTS_BLOCK_SIZE,
		.base = {
			.cra_name = "xts(aes)",
			.cra_driver_name = "safexcel-xts-aes",
			.cra_priority = SAFEXCEL_CRA_PRIORITY,
			.cra_flags = CRYPTO_ALG_ASYNC |
				     CRYPTO_ALG_KERN_DRIVER_ONLY,
			.cra_blocksize = XTS_BLOCK_SIZE,
			.cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
			.cra_alignmask = 0,
			.cra_init = safexcel_skcipher_aes_xts_cra_init,
			.cra_exit = safexcel_skcipher_cra_exit,
			.cra_module = THIS_MODULE,
		},
	},
};
3e450886
PL
2550
2551static int safexcel_aead_gcm_setkey(struct crypto_aead *ctfm, const u8 *key,
2552 unsigned int len)
2553{
2554 struct crypto_tfm *tfm = crypto_aead_tfm(ctfm);
2555 struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
2556 struct safexcel_crypto_priv *priv = ctx->priv;
2557 struct crypto_aes_ctx aes;
2558 u32 hashkey[AES_BLOCK_SIZE >> 2];
2559 int ret, i;
2560
2561 ret = aes_expandkey(&aes, key, len);
2562 if (ret) {
3e450886
PL
2563 memzero_explicit(&aes, sizeof(aes));
2564 return ret;
2565 }
2566
2567 if (priv->flags & EIP197_TRC_CACHE && ctx->base.ctxr_dma) {
2568 for (i = 0; i < len / sizeof(u32); i++) {
13a1bb93 2569 if (le32_to_cpu(ctx->key[i]) != aes.key_enc[i]) {
3e450886
PL
2570 ctx->base.needs_inv = true;
2571 break;
2572 }
2573 }
2574 }
2575
2576 for (i = 0; i < len / sizeof(u32); i++)
2577 ctx->key[i] = cpu_to_le32(aes.key_enc[i]);
2578
2579 ctx->key_len = len;
2580
2581 /* Compute hash key by encrypting zeroes with cipher key */
2582 crypto_cipher_clear_flags(ctx->hkaes, CRYPTO_TFM_REQ_MASK);
2583 crypto_cipher_set_flags(ctx->hkaes, crypto_aead_get_flags(ctfm) &
2584 CRYPTO_TFM_REQ_MASK);
2585 ret = crypto_cipher_setkey(ctx->hkaes, key, len);
2586 crypto_aead_set_flags(ctfm, crypto_cipher_get_flags(ctx->hkaes) &
2587 CRYPTO_TFM_RES_MASK);
2588 if (ret)
2589 return ret;
2590
2591 memset(hashkey, 0, AES_BLOCK_SIZE);
2592 crypto_cipher_encrypt_one(ctx->hkaes, (u8 *)hashkey, (u8 *)hashkey);
2593
2594 if (priv->flags & EIP197_TRC_CACHE && ctx->base.ctxr_dma) {
2595 for (i = 0; i < AES_BLOCK_SIZE / sizeof(u32); i++) {
13a1bb93 2596 if (be32_to_cpu(ctx->ipad[i]) != hashkey[i]) {
3e450886
PL
2597 ctx->base.needs_inv = true;
2598 break;
2599 }
2600 }
2601 }
2602
2603 for (i = 0; i < AES_BLOCK_SIZE / sizeof(u32); i++)
2604 ctx->ipad[i] = cpu_to_be32(hashkey[i]);
2605
2606 memzero_explicit(hashkey, AES_BLOCK_SIZE);
2607 memzero_explicit(&aes, sizeof(aes));
2608 return 0;
2609}
2610
/*
 * Init AEAD context for AES-GCM.  Allocates a software AES cipher used
 * to derive the GHASH hash key at setkey time.
 */
static int safexcel_aead_gcm_cra_init(struct crypto_tfm *tfm)
{
	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);

	safexcel_aead_cra_init(tfm);
	ctx->hash_alg = CONTEXT_CONTROL_CRYPTO_ALG_GHASH;
	ctx->state_sz = GHASH_BLOCK_SIZE;
	ctx->xcm = EIP197_XCM_MODE_GCM;
	ctx->mode = CONTEXT_CONTROL_CRYPTO_MODE_XCM; /* override default */

	ctx->hkaes = crypto_alloc_cipher("aes", 0, 0);
	return PTR_ERR_OR_ZERO(ctx->hkaes);
}
2624
/* Tear down the GCM context: free the hash-key helper cipher, then the base. */
static void safexcel_aead_gcm_cra_exit(struct crypto_tfm *tfm)
{
	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);

	crypto_free_cipher(ctx->hkaes);
	safexcel_aead_cra_exit(tfm);
}
2632
/* Validate GCM auth tag size via the generic helper. */
static int safexcel_aead_gcm_setauthsize(struct crypto_aead *tfm,
					 unsigned int authsize)
{
	return crypto_gcm_check_authsize(authsize);
}
2638
/* Template for gcm(aes) */
struct safexcel_alg_template safexcel_alg_gcm = {
	.type = SAFEXCEL_ALG_TYPE_AEAD,
	.algo_mask = SAFEXCEL_ALG_AES | SAFEXCEL_ALG_GHASH,
	.alg.aead = {
		.setkey = safexcel_aead_gcm_setkey,
		.setauthsize = safexcel_aead_gcm_setauthsize,
		.encrypt = safexcel_aead_encrypt,
		.decrypt = safexcel_aead_decrypt,
		.ivsize = GCM_AES_IV_SIZE,
		.maxauthsize = GHASH_DIGEST_SIZE,
		.base = {
			.cra_name = "gcm(aes)",
			.cra_driver_name = "safexcel-gcm-aes",
			.cra_priority = SAFEXCEL_CRA_PRIORITY,
			.cra_flags = CRYPTO_ALG_ASYNC |
				     CRYPTO_ALG_KERN_DRIVER_ONLY,
			.cra_blocksize = 1,
			.cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
			.cra_alignmask = 0,
			.cra_init = safexcel_aead_gcm_cra_init,
			.cra_exit = safexcel_aead_gcm_cra_exit,
			.cra_module = THIS_MODULE,
		},
	},
};
4eb76faf
PL
2664
/*
 * Set the AES-CCM key.  The key is stored both as the cipher key and,
 * big-endian, in the ipad area (after two AES blocks of CBC-MAC state)
 * for the engine's XCBC/CBC-MAC computation.  The XCBC hash algorithm
 * variant is selected from the key length.
 */
static int safexcel_aead_ccm_setkey(struct crypto_aead *ctfm, const u8 *key,
				    unsigned int len)
{
	struct crypto_tfm *tfm = crypto_aead_tfm(ctfm);
	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
	struct safexcel_crypto_priv *priv = ctx->priv;
	struct crypto_aes_ctx aes;
	int ret, i;

	ret = aes_expandkey(&aes, key, len);
	if (ret) {
		memzero_explicit(&aes, sizeof(aes));
		return ret;
	}

	/* Compare against the cached key BEFORE overwriting it */
	if (priv->flags & EIP197_TRC_CACHE && ctx->base.ctxr_dma) {
		for (i = 0; i < len / sizeof(u32); i++) {
			if (le32_to_cpu(ctx->key[i]) != aes.key_enc[i]) {
				ctx->base.needs_inv = true;
				break;
			}
		}
	}

	for (i = 0; i < len / sizeof(u32); i++) {
		ctx->key[i] = cpu_to_le32(aes.key_enc[i]);
		/* Key also goes into the ipad, past 2 blocks of MAC state */
		ctx->ipad[i + 2 * AES_BLOCK_SIZE / sizeof(u32)] =
			cpu_to_be32(aes.key_enc[i]);
	}

	ctx->key_len = len;
	ctx->state_sz = 2 * AES_BLOCK_SIZE + len;

	if (len == AES_KEYSIZE_192)
		ctx->hash_alg = CONTEXT_CONTROL_CRYPTO_ALG_XCBC192;
	else if (len == AES_KEYSIZE_256)
		ctx->hash_alg = CONTEXT_CONTROL_CRYPTO_ALG_XCBC256;
	else
		ctx->hash_alg = CONTEXT_CONTROL_CRYPTO_ALG_XCBC128;

	memzero_explicit(&aes, sizeof(aes));
	return 0;
}
2708
/* Init AEAD context for AES-CCM (XCBC/CBC-MAC + CTR in XCM mode). */
static int safexcel_aead_ccm_cra_init(struct crypto_tfm *tfm)
{
	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);

	safexcel_aead_cra_init(tfm);
	ctx->hash_alg = CONTEXT_CONTROL_CRYPTO_ALG_XCBC128;
	ctx->state_sz = 3 * AES_BLOCK_SIZE;
	ctx->xcm = EIP197_XCM_MODE_CCM;
	ctx->mode = CONTEXT_CONTROL_CRYPTO_MODE_XCM; /* override default */
	ctx->ctrinit = 0;
	return 0;
}
2721
2722static int safexcel_aead_ccm_setauthsize(struct crypto_aead *tfm,
2723 unsigned int authsize)
2724{
2725 /* Borrowed from crypto/ccm.c */
2726 switch (authsize) {
2727 case 4:
2728 case 6:
2729 case 8:
2730 case 10:
2731 case 12:
2732 case 14:
2733 case 16:
2734 break;
2735 default:
2736 return -EINVAL;
2737 }
2738
2739 return 0;
2740}
2741
/*
 * CCM encrypt: iv[0] encodes L' = L - 1 (nonce length parameter);
 * only values 1..7 are valid per the CCM specification.
 */
static int safexcel_ccm_encrypt(struct aead_request *req)
{
	struct safexcel_cipher_req *creq = aead_request_ctx(req);

	if (req->iv[0] < 1 || req->iv[0] > 7)
		return -EINVAL;

	return safexcel_queue_req(&req->base, creq, SAFEXCEL_ENCRYPT);
}

/* CCM decrypt: same iv[0] validity check as encrypt. */
static int safexcel_ccm_decrypt(struct aead_request *req)
{
	struct safexcel_cipher_req *creq = aead_request_ctx(req);

	if (req->iv[0] < 1 || req->iv[0] > 7)
		return -EINVAL;

	return safexcel_queue_req(&req->base, creq, SAFEXCEL_DECRYPT);
}
2761
/* Template for ccm(aes) */
struct safexcel_alg_template safexcel_alg_ccm = {
	.type = SAFEXCEL_ALG_TYPE_AEAD,
	.algo_mask = SAFEXCEL_ALG_AES | SAFEXCEL_ALG_CBC_MAC_ALL,
	.alg.aead = {
		.setkey = safexcel_aead_ccm_setkey,
		.setauthsize = safexcel_aead_ccm_setauthsize,
		.encrypt = safexcel_ccm_encrypt,
		.decrypt = safexcel_ccm_decrypt,
		.ivsize = AES_BLOCK_SIZE,
		.maxauthsize = AES_BLOCK_SIZE,
		.base = {
			.cra_name = "ccm(aes)",
			.cra_driver_name = "safexcel-ccm-aes",
			.cra_priority = SAFEXCEL_CRA_PRIORITY,
			.cra_flags = CRYPTO_ALG_ASYNC |
				     CRYPTO_ALG_KERN_DRIVER_ONLY,
			.cra_blocksize = 1,
			.cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
			.cra_alignmask = 0,
			.cra_init = safexcel_aead_ccm_cra_init,
			.cra_exit = safexcel_aead_cra_exit,
			.cra_module = THIS_MODULE,
		},
	},
};
4a593fb3 2787
a6061921
PL
2788static void safexcel_chacha20_setkey(struct safexcel_cipher_ctx *ctx,
2789 const u8 *key)
4a593fb3 2790{
4a593fb3 2791 struct safexcel_crypto_priv *priv = ctx->priv;
4a593fb3 2792
13a1bb93
PL
2793 if (priv->flags & EIP197_TRC_CACHE && ctx->base.ctxr_dma)
2794 if (memcmp(ctx->key, key, CHACHA_KEY_SIZE))
2795 ctx->base.needs_inv = true;
4a593fb3 2796
13a1bb93 2797 memcpy(ctx->key, key, CHACHA_KEY_SIZE);
4a593fb3 2798 ctx->key_len = CHACHA_KEY_SIZE;
a6061921
PL
2799}
2800
2801static int safexcel_skcipher_chacha20_setkey(struct crypto_skcipher *ctfm,
2802 const u8 *key, unsigned int len)
2803{
2804 struct safexcel_cipher_ctx *ctx = crypto_skcipher_ctx(ctfm);
2805
674f368a 2806 if (len != CHACHA_KEY_SIZE)
a6061921 2807 return -EINVAL;
674f368a 2808
a6061921 2809 safexcel_chacha20_setkey(ctx, key);
4a593fb3
PL
2810
2811 return 0;
2812}
2813
2814static int safexcel_skcipher_chacha20_cra_init(struct crypto_tfm *tfm)
2815{
2816 struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
2817
2818 safexcel_skcipher_cra_init(tfm);
2819 ctx->alg = SAFEXCEL_CHACHA20;
098e51e5 2820 ctx->ctrinit = 0;
4a593fb3
PL
2821 ctx->mode = CONTEXT_CONTROL_CHACHA20_MODE_256_32;
2822 return 0;
2823}
2824
/* chacha20: plain ChaCha20 stream cipher on engines with ChaCha support */
struct safexcel_alg_template safexcel_alg_chacha20 = {
	.type = SAFEXCEL_ALG_TYPE_SKCIPHER,
	.algo_mask = SAFEXCEL_ALG_CHACHA20,
	.alg.skcipher = {
		.setkey = safexcel_skcipher_chacha20_setkey,
		.encrypt = safexcel_encrypt,
		.decrypt = safexcel_decrypt,
		.min_keysize = CHACHA_KEY_SIZE,
		.max_keysize = CHACHA_KEY_SIZE,
		.ivsize = CHACHA_IV_SIZE,
		.base = {
			.cra_name = "chacha20",
			.cra_driver_name = "safexcel-chacha20",
			.cra_priority = SAFEXCEL_CRA_PRIORITY,
			.cra_flags = CRYPTO_ALG_ASYNC |
				     CRYPTO_ALG_KERN_DRIVER_ONLY,
			.cra_blocksize = 1, /* stream cipher */
			.cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
			.cra_alignmask = 0,
			.cra_init = safexcel_skcipher_chacha20_cra_init,
			.cra_exit = safexcel_skcipher_cra_exit,
			.cra_module = THIS_MODULE,
		},
	},
};
a6061921
PL
2850
2851static int safexcel_aead_chachapoly_setkey(struct crypto_aead *ctfm,
2852 const u8 *key, unsigned int len)
2853{
2854 struct safexcel_cipher_ctx *ctx = crypto_aead_ctx(ctfm);
2855
2856 if (ctx->aead == EIP197_AEAD_TYPE_IPSEC_ESP &&
2857 len > EIP197_AEAD_IPSEC_NONCE_SIZE) {
2858 /* ESP variant has nonce appended to key */
2859 len -= EIP197_AEAD_IPSEC_NONCE_SIZE;
2860 ctx->nonce = *(u32 *)(key + len);
2861 }
674f368a 2862 if (len != CHACHA_KEY_SIZE)
a6061921 2863 return -EINVAL;
674f368a 2864
a6061921
PL
2865 safexcel_chacha20_setkey(ctx, key);
2866
2867 return 0;
2868}
2869
2870static int safexcel_aead_chachapoly_setauthsize(struct crypto_aead *tfm,
2871 unsigned int authsize)
2872{
2873 if (authsize != POLY1305_DIGEST_SIZE)
2874 return -EINVAL;
2875 return 0;
2876}
2877
/*
 * Run a chacha20-poly1305 request, routing "small" or HW-unfriendly
 * requests to the software fallback AEAD.
 *
 * NOTE: creq and subreq deliberately alias the same request-context
 * memory; only one of the two is used per invocation (creq on the HW
 * path, subreq on the fallback path).
 */
static int safexcel_aead_chachapoly_crypt(struct aead_request *req,
					  enum safexcel_cipher_direction dir)
{
	struct safexcel_cipher_req *creq = aead_request_ctx(req);
	struct crypto_aead *aead = crypto_aead_reqtfm(req);
	struct crypto_tfm *tfm = crypto_aead_tfm(aead);
	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
	struct aead_request *subreq = aead_request_ctx(req);
	/* +1 word for the appended nonce of the ESP variant */
	u32 key[CHACHA_KEY_SIZE / sizeof(u32) + 1];
	int ret = 0;

	/*
	 * Instead of wasting time detecting umpteen silly corner cases,
	 * just dump all "small" requests to the fallback implementation.
	 * HW would not be faster on such small requests anyway.
	 */
	if (likely((ctx->aead != EIP197_AEAD_TYPE_IPSEC_ESP ||
		    req->assoclen >= EIP197_AEAD_IPSEC_IV_SIZE) &&
		   req->cryptlen > POLY1305_DIGEST_SIZE)) {
		return safexcel_queue_req(&req->base, creq, dir);
	}

	/* HW cannot do full (AAD+payload) zero length, use fallback */
	memcpy(key, ctx->key, CHACHA_KEY_SIZE);
	if (ctx->aead == EIP197_AEAD_TYPE_IPSEC_ESP) {
		/* ESP variant has nonce appended to the key */
		key[CHACHA_KEY_SIZE / sizeof(u32)] = ctx->nonce;
		ret = crypto_aead_setkey(ctx->fback, (u8 *)key,
					 CHACHA_KEY_SIZE +
					 EIP197_AEAD_IPSEC_NONCE_SIZE);
	} else {
		ret = crypto_aead_setkey(ctx->fback, (u8 *)key,
					 CHACHA_KEY_SIZE);
	}
	if (ret) {
		/* Mirror the fallback's request flags back to the caller */
		crypto_aead_clear_flags(aead, CRYPTO_TFM_REQ_MASK);
		crypto_aead_set_flags(aead, crypto_aead_get_flags(ctx->fback) &
					    CRYPTO_TFM_REQ_MASK);
		return ret;
	}

	/* Hand the whole request, unchanged, to the fallback cipher */
	aead_request_set_tfm(subreq, ctx->fback);
	aead_request_set_callback(subreq, req->base.flags, req->base.complete,
				  req->base.data);
	aead_request_set_crypt(subreq, req->src, req->dst, req->cryptlen,
			       req->iv);
	aead_request_set_ad(subreq, req->assoclen);

	return (dir == SAFEXCEL_ENCRYPT) ?
		crypto_aead_encrypt(subreq) :
		crypto_aead_decrypt(subreq);
}
2930
/* Encrypt entry point: defer to the common chacha20-poly1305 handler. */
static int safexcel_aead_chachapoly_encrypt(struct aead_request *req)
{
	return safexcel_aead_chachapoly_crypt(req, SAFEXCEL_ENCRYPT);
}
2935
/* Decrypt entry point: defer to the common chacha20-poly1305 handler. */
static int safexcel_aead_chachapoly_decrypt(struct aead_request *req)
{
	return safexcel_aead_chachapoly_crypt(req, SAFEXCEL_DECRYPT);
}
2940
1769f704 2941static int safexcel_aead_fallback_cra_init(struct crypto_tfm *tfm)
a6061921
PL
2942{
2943 struct crypto_aead *aead = __crypto_aead_cast(tfm);
2944 struct aead_alg *alg = crypto_aead_alg(aead);
2945 struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
2946
2947 safexcel_aead_cra_init(tfm);
a6061921
PL
2948
2949 /* Allocate fallback implementation */
2950 ctx->fback = crypto_alloc_aead(alg->base.cra_name, 0,
2951 CRYPTO_ALG_ASYNC |
2952 CRYPTO_ALG_NEED_FALLBACK);
2953 if (IS_ERR(ctx->fback))
2954 return PTR_ERR(ctx->fback);
2955
2956 crypto_aead_set_reqsize(aead, max(sizeof(struct safexcel_cipher_req),
2957 sizeof(struct aead_request) +
2958 crypto_aead_reqsize(ctx->fback)));
2959
2960 return 0;
2961}
2962
1769f704
PL
2963static int safexcel_aead_chachapoly_cra_init(struct crypto_tfm *tfm)
2964{
2965 struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
2966
2967 safexcel_aead_fallback_cra_init(tfm);
2968 ctx->alg = SAFEXCEL_CHACHA20;
2969 ctx->mode = CONTEXT_CONTROL_CHACHA20_MODE_256_32 |
2970 CONTEXT_CONTROL_CHACHA20_MODE_CALC_OTK;
098e51e5 2971 ctx->ctrinit = 0;
1769f704
PL
2972 ctx->hash_alg = CONTEXT_CONTROL_CRYPTO_ALG_POLY1305;
2973 ctx->state_sz = 0; /* Precomputed by HW */
2974 return 0;
2975}
2976
/* Release the software fallback AEAD, then run the common exit path. */
static void safexcel_aead_fallback_cra_exit(struct crypto_tfm *tfm)
{
	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);

	crypto_free_aead(ctx->fback);
	safexcel_aead_cra_exit(tfm);
}
2984
/* rfc7539(chacha20,poly1305): full ChaCha20-Poly1305 AEAD in HW */
struct safexcel_alg_template safexcel_alg_chachapoly = {
	.type = SAFEXCEL_ALG_TYPE_AEAD,
	.algo_mask = SAFEXCEL_ALG_CHACHA20 | SAFEXCEL_ALG_POLY1305,
	.alg.aead = {
		.setkey = safexcel_aead_chachapoly_setkey,
		.setauthsize = safexcel_aead_chachapoly_setauthsize,
		.encrypt = safexcel_aead_chachapoly_encrypt,
		.decrypt = safexcel_aead_chachapoly_decrypt,
		.ivsize = CHACHAPOLY_IV_SIZE,
		.maxauthsize = POLY1305_DIGEST_SIZE,
		.base = {
			.cra_name = "rfc7539(chacha20,poly1305)",
			.cra_driver_name = "safexcel-chacha20-poly1305",
			/* +1 to put it above HW chacha + SW poly */
			.cra_priority = SAFEXCEL_CRA_PRIORITY + 1,
			.cra_flags = CRYPTO_ALG_ASYNC |
				     CRYPTO_ALG_KERN_DRIVER_ONLY |
				     CRYPTO_ALG_NEED_FALLBACK,
			.cra_blocksize = 1,
			.cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
			.cra_alignmask = 0,
			.cra_init = safexcel_aead_chachapoly_cra_init,
			.cra_exit = safexcel_aead_fallback_cra_exit,
			.cra_module = THIS_MODULE,
		},
	},
};
3012
3013static int safexcel_aead_chachapolyesp_cra_init(struct crypto_tfm *tfm)
3014{
3015 struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
3016 int ret;
3017
3018 ret = safexcel_aead_chachapoly_cra_init(tfm);
3019 ctx->aead = EIP197_AEAD_TYPE_IPSEC_ESP;
098e51e5 3020 ctx->aadskip = EIP197_AEAD_IPSEC_IV_SIZE;
a6061921
PL
3021 return ret;
3022}
3023
/* rfc7539esp: ESP variant — nonce comes with the key, shorter IV */
struct safexcel_alg_template safexcel_alg_chachapoly_esp = {
	.type = SAFEXCEL_ALG_TYPE_AEAD,
	.algo_mask = SAFEXCEL_ALG_CHACHA20 | SAFEXCEL_ALG_POLY1305,
	.alg.aead = {
		.setkey = safexcel_aead_chachapoly_setkey,
		.setauthsize = safexcel_aead_chachapoly_setauthsize,
		.encrypt = safexcel_aead_chachapoly_encrypt,
		.decrypt = safexcel_aead_chachapoly_decrypt,
		/* IV is only the explicit part; nonce is keyed in */
		.ivsize = CHACHAPOLY_IV_SIZE - EIP197_AEAD_IPSEC_NONCE_SIZE,
		.maxauthsize = POLY1305_DIGEST_SIZE,
		.base = {
			.cra_name = "rfc7539esp(chacha20,poly1305)",
			.cra_driver_name = "safexcel-chacha20-poly1305-esp",
			/* +1 to put it above HW chacha + SW poly */
			.cra_priority = SAFEXCEL_CRA_PRIORITY + 1,
			.cra_flags = CRYPTO_ALG_ASYNC |
				     CRYPTO_ALG_KERN_DRIVER_ONLY |
				     CRYPTO_ALG_NEED_FALLBACK,
			.cra_blocksize = 1,
			.cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
			.cra_alignmask = 0,
			.cra_init = safexcel_aead_chachapolyesp_cra_init,
			.cra_exit = safexcel_aead_fallback_cra_exit,
			.cra_module = THIS_MODULE,
		},
	},
};
fcca797d
PL
3051
3052static int safexcel_skcipher_sm4_setkey(struct crypto_skcipher *ctfm,
3053 const u8 *key, unsigned int len)
3054{
3055 struct crypto_tfm *tfm = crypto_skcipher_tfm(ctfm);
3056 struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
3057 struct safexcel_crypto_priv *priv = ctx->priv;
fcca797d 3058
674f368a 3059 if (len != SM4_KEY_SIZE)
fcca797d 3060 return -EINVAL;
fcca797d 3061
13a1bb93
PL
3062 if (priv->flags & EIP197_TRC_CACHE && ctx->base.ctxr_dma)
3063 if (memcmp(ctx->key, key, SM4_KEY_SIZE))
3064 ctx->base.needs_inv = true;
fcca797d 3065
13a1bb93 3066 memcpy(ctx->key, key, SM4_KEY_SIZE);
fcca797d
PL
3067 ctx->key_len = SM4_KEY_SIZE;
3068
3069 return 0;
3070}
3071
3072static int safexcel_sm4_blk_encrypt(struct skcipher_request *req)
3073{
3074 /* Workaround for HW bug: EIP96 4.3 does not report blocksize error */
3075 if (req->cryptlen & (SM4_BLOCK_SIZE - 1))
3076 return -EINVAL;
3077 else
3078 return safexcel_queue_req(&req->base, skcipher_request_ctx(req),
3079 SAFEXCEL_ENCRYPT);
3080}
3081
3082static int safexcel_sm4_blk_decrypt(struct skcipher_request *req)
3083{
3084 /* Workaround for HW bug: EIP96 4.3 does not report blocksize error */
3085 if (req->cryptlen & (SM4_BLOCK_SIZE - 1))
3086 return -EINVAL;
3087 else
3088 return safexcel_queue_req(&req->base, skcipher_request_ctx(req),
3089 SAFEXCEL_DECRYPT);
3090}
3091
3092static int safexcel_skcipher_sm4_ecb_cra_init(struct crypto_tfm *tfm)
3093{
3094 struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
3095
3096 safexcel_skcipher_cra_init(tfm);
3097 ctx->alg = SAFEXCEL_SM4;
3098 ctx->mode = CONTEXT_CONTROL_CRYPTO_MODE_ECB;
098e51e5
PL
3099 ctx->blocksz = 0;
3100 ctx->ivmask = EIP197_OPTION_2_TOKEN_IV_CMD;
fcca797d
PL
3101 return 0;
3102}
3103
/* ecb(sm4): SM4 in ECB mode; blk_* entry points enforce blocksize */
struct safexcel_alg_template safexcel_alg_ecb_sm4 = {
	.type = SAFEXCEL_ALG_TYPE_SKCIPHER,
	.algo_mask = SAFEXCEL_ALG_SM4,
	.alg.skcipher = {
		.setkey = safexcel_skcipher_sm4_setkey,
		.encrypt = safexcel_sm4_blk_encrypt,
		.decrypt = safexcel_sm4_blk_decrypt,
		.min_keysize = SM4_KEY_SIZE,
		.max_keysize = SM4_KEY_SIZE,
		.base = {
			.cra_name = "ecb(sm4)",
			.cra_driver_name = "safexcel-ecb-sm4",
			.cra_priority = SAFEXCEL_CRA_PRIORITY,
			.cra_flags = CRYPTO_ALG_ASYNC |
				     CRYPTO_ALG_KERN_DRIVER_ONLY,
			.cra_blocksize = SM4_BLOCK_SIZE,
			.cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
			.cra_alignmask = 0,
			.cra_init = safexcel_skcipher_sm4_ecb_cra_init,
			.cra_exit = safexcel_skcipher_cra_exit,
			.cra_module = THIS_MODULE,
		},
	},
};
6f2d1428
PL
3128
3129static int safexcel_skcipher_sm4_cbc_cra_init(struct crypto_tfm *tfm)
3130{
3131 struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
3132
3133 safexcel_skcipher_cra_init(tfm);
3134 ctx->alg = SAFEXCEL_SM4;
098e51e5 3135 ctx->blocksz = SM4_BLOCK_SIZE;
6f2d1428
PL
3136 ctx->mode = CONTEXT_CONTROL_CRYPTO_MODE_CBC;
3137 return 0;
3138}
3139
/* cbc(sm4): SM4 in CBC mode; blk_* entry points enforce blocksize */
struct safexcel_alg_template safexcel_alg_cbc_sm4 = {
	.type = SAFEXCEL_ALG_TYPE_SKCIPHER,
	.algo_mask = SAFEXCEL_ALG_SM4,
	.alg.skcipher = {
		.setkey = safexcel_skcipher_sm4_setkey,
		.encrypt = safexcel_sm4_blk_encrypt,
		.decrypt = safexcel_sm4_blk_decrypt,
		.min_keysize = SM4_KEY_SIZE,
		.max_keysize = SM4_KEY_SIZE,
		.ivsize = SM4_BLOCK_SIZE,
		.base = {
			.cra_name = "cbc(sm4)",
			.cra_driver_name = "safexcel-cbc-sm4",
			.cra_priority = SAFEXCEL_CRA_PRIORITY,
			.cra_flags = CRYPTO_ALG_ASYNC |
				     CRYPTO_ALG_KERN_DRIVER_ONLY,
			.cra_blocksize = SM4_BLOCK_SIZE,
			.cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
			.cra_alignmask = 0,
			.cra_init = safexcel_skcipher_sm4_cbc_cra_init,
			.cra_exit = safexcel_skcipher_cra_exit,
			.cra_module = THIS_MODULE,
		},
	},
};
03a6cfb9
PL
3165
3166static int safexcel_skcipher_sm4_ofb_cra_init(struct crypto_tfm *tfm)
3167{
3168 struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
3169
3170 safexcel_skcipher_cra_init(tfm);
3171 ctx->alg = SAFEXCEL_SM4;
098e51e5 3172 ctx->blocksz = SM4_BLOCK_SIZE;
03a6cfb9
PL
3173 ctx->mode = CONTEXT_CONTROL_CRYPTO_MODE_OFB;
3174 return 0;
3175}
3176
/* ofb(sm4): SM4 in OFB mode; stream-like, so no blocksize enforcement */
struct safexcel_alg_template safexcel_alg_ofb_sm4 = {
	.type = SAFEXCEL_ALG_TYPE_SKCIPHER,
	.algo_mask = SAFEXCEL_ALG_SM4 | SAFEXCEL_ALG_AES_XFB,
	.alg.skcipher = {
		.setkey = safexcel_skcipher_sm4_setkey,
		.encrypt = safexcel_encrypt,
		.decrypt = safexcel_decrypt,
		.min_keysize = SM4_KEY_SIZE,
		.max_keysize = SM4_KEY_SIZE,
		.ivsize = SM4_BLOCK_SIZE,
		.base = {
			.cra_name = "ofb(sm4)",
			.cra_driver_name = "safexcel-ofb-sm4",
			.cra_priority = SAFEXCEL_CRA_PRIORITY,
			.cra_flags = CRYPTO_ALG_ASYNC |
				     CRYPTO_ALG_KERN_DRIVER_ONLY,
			.cra_blocksize = 1,
			.cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
			.cra_alignmask = 0,
			.cra_init = safexcel_skcipher_sm4_ofb_cra_init,
			.cra_exit = safexcel_skcipher_cra_exit,
			.cra_module = THIS_MODULE,
		},
	},
};
7468ab22
PL
3202
3203static int safexcel_skcipher_sm4_cfb_cra_init(struct crypto_tfm *tfm)
3204{
3205 struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
3206
3207 safexcel_skcipher_cra_init(tfm);
3208 ctx->alg = SAFEXCEL_SM4;
098e51e5 3209 ctx->blocksz = SM4_BLOCK_SIZE;
7468ab22
PL
3210 ctx->mode = CONTEXT_CONTROL_CRYPTO_MODE_CFB;
3211 return 0;
3212}
3213
/* cfb(sm4): SM4 in CFB mode; stream-like, so no blocksize enforcement */
struct safexcel_alg_template safexcel_alg_cfb_sm4 = {
	.type = SAFEXCEL_ALG_TYPE_SKCIPHER,
	.algo_mask = SAFEXCEL_ALG_SM4 | SAFEXCEL_ALG_AES_XFB,
	.alg.skcipher = {
		.setkey = safexcel_skcipher_sm4_setkey,
		.encrypt = safexcel_encrypt,
		.decrypt = safexcel_decrypt,
		.min_keysize = SM4_KEY_SIZE,
		.max_keysize = SM4_KEY_SIZE,
		.ivsize = SM4_BLOCK_SIZE,
		.base = {
			.cra_name = "cfb(sm4)",
			.cra_driver_name = "safexcel-cfb-sm4",
			.cra_priority = SAFEXCEL_CRA_PRIORITY,
			.cra_flags = CRYPTO_ALG_ASYNC |
				     CRYPTO_ALG_KERN_DRIVER_ONLY,
			.cra_blocksize = 1,
			.cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
			.cra_alignmask = 0,
			.cra_init = safexcel_skcipher_sm4_cfb_cra_init,
			.cra_exit = safexcel_skcipher_cra_exit,
			.cra_module = THIS_MODULE,
		},
	},
};
f77e5dc0
PL
3239
3240static int safexcel_skcipher_sm4ctr_setkey(struct crypto_skcipher *ctfm,
3241 const u8 *key, unsigned int len)
3242{
3243 struct crypto_tfm *tfm = crypto_skcipher_tfm(ctfm);
3244 struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
3245
3246 /* last 4 bytes of key are the nonce! */
3247 ctx->nonce = *(u32 *)(key + len - CTR_RFC3686_NONCE_SIZE);
3248 /* exclude the nonce here */
3249 len -= CTR_RFC3686_NONCE_SIZE;
3250
3251 return safexcel_skcipher_sm4_setkey(ctfm, key, len);
3252}
3253
3254static int safexcel_skcipher_sm4_ctr_cra_init(struct crypto_tfm *tfm)
3255{
3256 struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
3257
3258 safexcel_skcipher_cra_init(tfm);
3259 ctx->alg = SAFEXCEL_SM4;
098e51e5 3260 ctx->blocksz = SM4_BLOCK_SIZE;
f77e5dc0
PL
3261 ctx->mode = CONTEXT_CONTROL_CRYPTO_MODE_CTR_LOAD;
3262 return 0;
3263}
3264
/* rfc3686(ctr(sm4)): SM4-CTR with keyed-in nonce per RFC 3686 */
struct safexcel_alg_template safexcel_alg_ctr_sm4 = {
	.type = SAFEXCEL_ALG_TYPE_SKCIPHER,
	.algo_mask = SAFEXCEL_ALG_SM4,
	.alg.skcipher = {
		.setkey = safexcel_skcipher_sm4ctr_setkey,
		.encrypt = safexcel_encrypt,
		.decrypt = safexcel_decrypt,
		/* Add nonce size */
		.min_keysize = SM4_KEY_SIZE + CTR_RFC3686_NONCE_SIZE,
		.max_keysize = SM4_KEY_SIZE + CTR_RFC3686_NONCE_SIZE,
		.ivsize = CTR_RFC3686_IV_SIZE,
		.base = {
			.cra_name = "rfc3686(ctr(sm4))",
			.cra_driver_name = "safexcel-ctr-sm4",
			.cra_priority = SAFEXCEL_CRA_PRIORITY,
			.cra_flags = CRYPTO_ALG_ASYNC |
				     CRYPTO_ALG_KERN_DRIVER_ONLY,
			.cra_blocksize = 1,
			.cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
			.cra_alignmask = 0,
			.cra_init = safexcel_skcipher_sm4_ctr_cra_init,
			.cra_exit = safexcel_skcipher_cra_exit,
			.cra_module = THIS_MODULE,
		},
	},
};
1769f704
PL
3291
3292static int safexcel_aead_sm4_blk_encrypt(struct aead_request *req)
3293{
3294 /* Workaround for HW bug: EIP96 4.3 does not report blocksize error */
3295 if (req->cryptlen & (SM4_BLOCK_SIZE - 1))
3296 return -EINVAL;
3297
3298 return safexcel_queue_req(&req->base, aead_request_ctx(req),
3299 SAFEXCEL_ENCRYPT);
3300}
3301
3302static int safexcel_aead_sm4_blk_decrypt(struct aead_request *req)
3303{
3304 struct crypto_aead *tfm = crypto_aead_reqtfm(req);
3305
3306 /* Workaround for HW bug: EIP96 4.3 does not report blocksize error */
3307 if ((req->cryptlen - crypto_aead_authsize(tfm)) & (SM4_BLOCK_SIZE - 1))
3308 return -EINVAL;
3309
3310 return safexcel_queue_req(&req->base, aead_request_ctx(req),
3311 SAFEXCEL_DECRYPT);
3312}
3313
3314static int safexcel_aead_sm4cbc_sha1_cra_init(struct crypto_tfm *tfm)
3315{
3316 struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
3317
3318 safexcel_aead_cra_init(tfm);
3319 ctx->alg = SAFEXCEL_SM4;
098e51e5 3320 ctx->blocksz = SM4_BLOCK_SIZE;
1769f704
PL
3321 ctx->hash_alg = CONTEXT_CONTROL_CRYPTO_ALG_SHA1;
3322 ctx->state_sz = SHA1_DIGEST_SIZE;
3323 return 0;
3324}
3325
/* authenc(hmac(sha1),cbc(sm4)): classic encrypt-then-MAC AEAD */
struct safexcel_alg_template safexcel_alg_authenc_hmac_sha1_cbc_sm4 = {
	.type = SAFEXCEL_ALG_TYPE_AEAD,
	.algo_mask = SAFEXCEL_ALG_SM4 | SAFEXCEL_ALG_SHA1,
	.alg.aead = {
		.setkey = safexcel_aead_setkey,
		.encrypt = safexcel_aead_sm4_blk_encrypt,
		.decrypt = safexcel_aead_sm4_blk_decrypt,
		.ivsize = SM4_BLOCK_SIZE,
		.maxauthsize = SHA1_DIGEST_SIZE,
		.base = {
			.cra_name = "authenc(hmac(sha1),cbc(sm4))",
			.cra_driver_name = "safexcel-authenc-hmac-sha1-cbc-sm4",
			.cra_priority = SAFEXCEL_CRA_PRIORITY,
			.cra_flags = CRYPTO_ALG_ASYNC |
				     CRYPTO_ALG_KERN_DRIVER_ONLY,
			.cra_blocksize = SM4_BLOCK_SIZE,
			.cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
			.cra_alignmask = 0,
			.cra_init = safexcel_aead_sm4cbc_sha1_cra_init,
			.cra_exit = safexcel_aead_cra_exit,
			.cra_module = THIS_MODULE,
		},
	},
};
3350
3351static int safexcel_aead_fallback_setkey(struct crypto_aead *ctfm,
3352 const u8 *key, unsigned int len)
3353{
3354 struct crypto_tfm *tfm = crypto_aead_tfm(ctfm);
3355 struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
3356
3357 /* Keep fallback cipher synchronized */
3358 return crypto_aead_setkey(ctx->fback, (u8 *)key, len) ?:
3359 safexcel_aead_setkey(ctfm, key, len);
3360}
3361
/* Mirror the requested tag size onto the software fallback cipher. */
static int safexcel_aead_fallback_setauthsize(struct crypto_aead *ctfm,
					      unsigned int authsize)
{
	struct crypto_tfm *tfm = crypto_aead_tfm(ctfm);
	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);

	/* Keep fallback cipher synchronized */
	return crypto_aead_setauthsize(ctx->fback, authsize);
}
3371
/*
 * Forward an AEAD request unchanged to the software fallback cipher.
 * The fallback subrequest lives inside the driver's request context
 * (sized for this in safexcel_aead_fallback_cra_init).
 */
static int safexcel_aead_fallback_crypt(struct aead_request *req,
					enum safexcel_cipher_direction dir)
{
	struct crypto_aead *aead = crypto_aead_reqtfm(req);
	struct crypto_tfm *tfm = crypto_aead_tfm(aead);
	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
	struct aead_request *subreq = aead_request_ctx(req);

	aead_request_set_tfm(subreq, ctx->fback);
	aead_request_set_callback(subreq, req->base.flags, req->base.complete,
				  req->base.data);
	aead_request_set_crypt(subreq, req->src, req->dst, req->cryptlen,
			       req->iv);
	aead_request_set_ad(subreq, req->assoclen);

	return (dir == SAFEXCEL_ENCRYPT) ?
		crypto_aead_encrypt(subreq) :
		crypto_aead_decrypt(subreq);
}
3391
3392static int safexcel_aead_sm4cbc_sm3_encrypt(struct aead_request *req)
3393{
3394 struct safexcel_cipher_req *creq = aead_request_ctx(req);
3395
3396 /* Workaround for HW bug: EIP96 4.3 does not report blocksize error */
3397 if (req->cryptlen & (SM4_BLOCK_SIZE - 1))
3398 return -EINVAL;
3399 else if (req->cryptlen || req->assoclen) /* If input length > 0 only */
3400 return safexcel_queue_req(&req->base, creq, SAFEXCEL_ENCRYPT);
3401
3402 /* HW cannot do full (AAD+payload) zero length, use fallback */
3403 return safexcel_aead_fallback_crypt(req, SAFEXCEL_ENCRYPT);
3404}
3405
3406static int safexcel_aead_sm4cbc_sm3_decrypt(struct aead_request *req)
3407{
3408 struct safexcel_cipher_req *creq = aead_request_ctx(req);
3409 struct crypto_aead *tfm = crypto_aead_reqtfm(req);
3410
3411 /* Workaround for HW bug: EIP96 4.3 does not report blocksize error */
3412 if ((req->cryptlen - crypto_aead_authsize(tfm)) & (SM4_BLOCK_SIZE - 1))
3413 return -EINVAL;
3414 else if (req->cryptlen > crypto_aead_authsize(tfm) || req->assoclen)
3415 /* If input length > 0 only */
3416 return safexcel_queue_req(&req->base, creq, SAFEXCEL_DECRYPT);
3417
3418 /* HW cannot do full (AAD+payload) zero length, use fallback */
3419 return safexcel_aead_fallback_crypt(req, SAFEXCEL_DECRYPT);
3420}
3421
3422static int safexcel_aead_sm4cbc_sm3_cra_init(struct crypto_tfm *tfm)
3423{
3424 struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
3425
3426 safexcel_aead_fallback_cra_init(tfm);
3427 ctx->alg = SAFEXCEL_SM4;
098e51e5 3428 ctx->blocksz = SM4_BLOCK_SIZE;
1769f704
PL
3429 ctx->hash_alg = CONTEXT_CONTROL_CRYPTO_ALG_SM3;
3430 ctx->state_sz = SM3_DIGEST_SIZE;
3431 return 0;
3432}
3433
/* authenc(hmac(sm3),cbc(sm4)): SM4-CBC + HMAC-SM3, with SW fallback */
struct safexcel_alg_template safexcel_alg_authenc_hmac_sm3_cbc_sm4 = {
	.type = SAFEXCEL_ALG_TYPE_AEAD,
	.algo_mask = SAFEXCEL_ALG_SM4 | SAFEXCEL_ALG_SM3,
	.alg.aead = {
		.setkey = safexcel_aead_fallback_setkey,
		.setauthsize = safexcel_aead_fallback_setauthsize,
		.encrypt = safexcel_aead_sm4cbc_sm3_encrypt,
		.decrypt = safexcel_aead_sm4cbc_sm3_decrypt,
		.ivsize = SM4_BLOCK_SIZE,
		.maxauthsize = SM3_DIGEST_SIZE,
		.base = {
			.cra_name = "authenc(hmac(sm3),cbc(sm4))",
			.cra_driver_name = "safexcel-authenc-hmac-sm3-cbc-sm4",
			.cra_priority = SAFEXCEL_CRA_PRIORITY,
			.cra_flags = CRYPTO_ALG_ASYNC |
				     CRYPTO_ALG_KERN_DRIVER_ONLY |
				     CRYPTO_ALG_NEED_FALLBACK,
			.cra_blocksize = SM4_BLOCK_SIZE,
			.cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
			.cra_alignmask = 0,
			.cra_init = safexcel_aead_sm4cbc_sm3_cra_init,
			.cra_exit = safexcel_aead_fallback_cra_exit,
			.cra_module = THIS_MODULE,
		},
	},
};
3460
3461static int safexcel_aead_sm4ctr_sha1_cra_init(struct crypto_tfm *tfm)
3462{
3463 struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
3464
3465 safexcel_aead_sm4cbc_sha1_cra_init(tfm);
3466 ctx->mode = CONTEXT_CONTROL_CRYPTO_MODE_CTR_LOAD;
3467 return 0;
3468}
3469
/* authenc(hmac(sha1),rfc3686(ctr(sm4))): SM4-CTR + HMAC-SHA1 */
struct safexcel_alg_template safexcel_alg_authenc_hmac_sha1_ctr_sm4 = {
	.type = SAFEXCEL_ALG_TYPE_AEAD,
	.algo_mask = SAFEXCEL_ALG_SM4 | SAFEXCEL_ALG_SHA1,
	.alg.aead = {
		.setkey = safexcel_aead_setkey,
		.encrypt = safexcel_aead_encrypt,
		.decrypt = safexcel_aead_decrypt,
		.ivsize = CTR_RFC3686_IV_SIZE,
		.maxauthsize = SHA1_DIGEST_SIZE,
		.base = {
			.cra_name = "authenc(hmac(sha1),rfc3686(ctr(sm4)))",
			.cra_driver_name = "safexcel-authenc-hmac-sha1-ctr-sm4",
			.cra_priority = SAFEXCEL_CRA_PRIORITY,
			.cra_flags = CRYPTO_ALG_ASYNC |
				     CRYPTO_ALG_KERN_DRIVER_ONLY,
			.cra_blocksize = 1,
			.cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
			.cra_alignmask = 0,
			.cra_init = safexcel_aead_sm4ctr_sha1_cra_init,
			.cra_exit = safexcel_aead_cra_exit,
			.cra_module = THIS_MODULE,
		},
	},
};
3494
3495static int safexcel_aead_sm4ctr_sm3_cra_init(struct crypto_tfm *tfm)
3496{
3497 struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
3498
3499 safexcel_aead_sm4cbc_sm3_cra_init(tfm);
3500 ctx->mode = CONTEXT_CONTROL_CRYPTO_MODE_CTR_LOAD;
3501 return 0;
3502}
3503
/* authenc(hmac(sm3),rfc3686(ctr(sm4))): SM4-CTR + HMAC-SM3 */
struct safexcel_alg_template safexcel_alg_authenc_hmac_sm3_ctr_sm4 = {
	.type = SAFEXCEL_ALG_TYPE_AEAD,
	.algo_mask = SAFEXCEL_ALG_SM4 | SAFEXCEL_ALG_SM3,
	.alg.aead = {
		.setkey = safexcel_aead_setkey,
		.encrypt = safexcel_aead_encrypt,
		.decrypt = safexcel_aead_decrypt,
		.ivsize = CTR_RFC3686_IV_SIZE,
		.maxauthsize = SM3_DIGEST_SIZE,
		.base = {
			.cra_name = "authenc(hmac(sm3),rfc3686(ctr(sm4)))",
			.cra_driver_name = "safexcel-authenc-hmac-sm3-ctr-sm4",
			.cra_priority = SAFEXCEL_CRA_PRIORITY,
			.cra_flags = CRYPTO_ALG_ASYNC |
				     CRYPTO_ALG_KERN_DRIVER_ONLY,
			.cra_blocksize = 1,
			.cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
			.cra_alignmask = 0,
			.cra_init = safexcel_aead_sm4ctr_sm3_cra_init,
			.cra_exit = safexcel_aead_cra_exit,
			.cra_module = THIS_MODULE,
		},
	},
};
a19052d4
PL
3528
3529static int safexcel_rfc4106_gcm_setkey(struct crypto_aead *ctfm, const u8 *key,
3530 unsigned int len)
3531{
3532 struct crypto_tfm *tfm = crypto_aead_tfm(ctfm);
3533 struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
3534
3535 /* last 4 bytes of key are the nonce! */
3536 ctx->nonce = *(u32 *)(key + len - CTR_RFC3686_NONCE_SIZE);
3537
3538 len -= CTR_RFC3686_NONCE_SIZE;
3539 return safexcel_aead_gcm_setkey(ctfm, key, len);
3540}
3541
/* Validate the ICV length against the RFC4106-permitted set. */
static int safexcel_rfc4106_gcm_setauthsize(struct crypto_aead *tfm,
					    unsigned int authsize)
{
	return crypto_rfc4106_check_authsize(authsize);
}
3547
/* Encrypt after validating the IPsec-legal assoclen values. */
static int safexcel_rfc4106_encrypt(struct aead_request *req)
{
	return crypto_ipsec_check_assoclen(req->assoclen) ?:
	       safexcel_aead_encrypt(req);
}
3553
/* Decrypt after validating the IPsec-legal assoclen values. */
static int safexcel_rfc4106_decrypt(struct aead_request *req)
{
	return crypto_ipsec_check_assoclen(req->assoclen) ?:
	       safexcel_aead_decrypt(req);
}
3559
3560static int safexcel_rfc4106_gcm_cra_init(struct crypto_tfm *tfm)
3561{
3562 struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
3563 int ret;
3564
3565 ret = safexcel_aead_gcm_cra_init(tfm);
3566 ctx->aead = EIP197_AEAD_TYPE_IPSEC_ESP;
098e51e5 3567 ctx->aadskip = EIP197_AEAD_IPSEC_IV_SIZE;
a19052d4
PL
3568 return ret;
3569}
3570
3571struct safexcel_alg_template safexcel_alg_rfc4106_gcm = {
3572 .type = SAFEXCEL_ALG_TYPE_AEAD,
3573 .algo_mask = SAFEXCEL_ALG_AES | SAFEXCEL_ALG_GHASH,
3574 .alg.aead = {
3575 .setkey = safexcel_rfc4106_gcm_setkey,
3576 .setauthsize = safexcel_rfc4106_gcm_setauthsize,
3577 .encrypt = safexcel_rfc4106_encrypt,
3578 .decrypt = safexcel_rfc4106_decrypt,
3579 .ivsize = GCM_RFC4106_IV_SIZE,
3580 .maxauthsize = GHASH_DIGEST_SIZE,
3581 .base = {
3582 .cra_name = "rfc4106(gcm(aes))",
3583 .cra_driver_name = "safexcel-rfc4106-gcm-aes",
3584 .cra_priority = SAFEXCEL_CRA_PRIORITY,
3585 .cra_flags = CRYPTO_ALG_ASYNC |
3586 CRYPTO_ALG_KERN_DRIVER_ONLY,
3587 .cra_blocksize = 1,
3588 .cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
3589 .cra_alignmask = 0,
3590 .cra_init = safexcel_rfc4106_gcm_cra_init,
3591 .cra_exit = safexcel_aead_gcm_cra_exit,
3592 },
3593 },
3594};
92c60cef
PL
3595
3596static int safexcel_rfc4543_gcm_setauthsize(struct crypto_aead *tfm,
3597 unsigned int authsize)
3598{
3599 if (authsize != GHASH_DIGEST_SIZE)
3600 return -EINVAL;
3601
3602 return 0;
3603}
3604
3605static int safexcel_rfc4543_gcm_cra_init(struct crypto_tfm *tfm)
3606{
3607 struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
3608 int ret;
3609
3610 ret = safexcel_aead_gcm_cra_init(tfm);
3611 ctx->aead = EIP197_AEAD_TYPE_IPSEC_ESP_GMAC;
3612 return ret;
3613}
3614
3615struct safexcel_alg_template safexcel_alg_rfc4543_gcm = {
3616 .type = SAFEXCEL_ALG_TYPE_AEAD,
3617 .algo_mask = SAFEXCEL_ALG_AES | SAFEXCEL_ALG_GHASH,
3618 .alg.aead = {
3619 .setkey = safexcel_rfc4106_gcm_setkey,
3620 .setauthsize = safexcel_rfc4543_gcm_setauthsize,
3621 .encrypt = safexcel_rfc4106_encrypt,
3622 .decrypt = safexcel_rfc4106_decrypt,
3623 .ivsize = GCM_RFC4543_IV_SIZE,
3624 .maxauthsize = GHASH_DIGEST_SIZE,
3625 .base = {
3626 .cra_name = "rfc4543(gcm(aes))",
3627 .cra_driver_name = "safexcel-rfc4543-gcm-aes",
3628 .cra_priority = SAFEXCEL_CRA_PRIORITY,
3629 .cra_flags = CRYPTO_ALG_ASYNC |
3630 CRYPTO_ALG_KERN_DRIVER_ONLY,
3631 .cra_blocksize = 1,
3632 .cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
3633 .cra_alignmask = 0,
3634 .cra_init = safexcel_rfc4543_gcm_cra_init,
3635 .cra_exit = safexcel_aead_gcm_cra_exit,
3636 },
3637 },
3638};
a9a89624
PL
3639
3640static int safexcel_rfc4309_ccm_setkey(struct crypto_aead *ctfm, const u8 *key,
3641 unsigned int len)
3642{
3643 struct crypto_tfm *tfm = crypto_aead_tfm(ctfm);
3644 struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
3645
3646 /* First byte of the nonce = L = always 3 for RFC4309 (4 byte ctr) */
3647 *(u8 *)&ctx->nonce = EIP197_AEAD_IPSEC_COUNTER_SIZE - 1;
3648 /* last 3 bytes of key are the nonce! */
3649 memcpy((u8 *)&ctx->nonce + 1, key + len -
3650 EIP197_AEAD_IPSEC_CCM_NONCE_SIZE,
3651 EIP197_AEAD_IPSEC_CCM_NONCE_SIZE);
3652
3653 len -= EIP197_AEAD_IPSEC_CCM_NONCE_SIZE;
3654 return safexcel_aead_ccm_setkey(ctfm, key, len);
3655}
3656
static int safexcel_rfc4309_ccm_setauthsize(struct crypto_aead *tfm,
					    unsigned int authsize)
{
	/* RFC4309 permits only 8, 12 or 16 byte ICVs (as in crypto/ccm.c) */
	if (authsize != 8 && authsize != 12 && authsize != 16)
		return -EINVAL;

	return 0;
}
3672
3673static int safexcel_rfc4309_ccm_encrypt(struct aead_request *req)
3674{
3675 struct safexcel_cipher_req *creq = aead_request_ctx(req);
3676
3677 /* Borrowed from crypto/ccm.c */
3678 if (req->assoclen != 16 && req->assoclen != 20)
3679 return -EINVAL;
3680
3681 return safexcel_queue_req(&req->base, creq, SAFEXCEL_ENCRYPT);
3682}
3683
3684static int safexcel_rfc4309_ccm_decrypt(struct aead_request *req)
3685{
3686 struct safexcel_cipher_req *creq = aead_request_ctx(req);
3687
3688 /* Borrowed from crypto/ccm.c */
3689 if (req->assoclen != 16 && req->assoclen != 20)
3690 return -EINVAL;
3691
3692 return safexcel_queue_req(&req->base, creq, SAFEXCEL_DECRYPT);
3693}
3694
3695static int safexcel_rfc4309_ccm_cra_init(struct crypto_tfm *tfm)
3696{
3697 struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
3698 int ret;
3699
3700 ret = safexcel_aead_ccm_cra_init(tfm);
3701 ctx->aead = EIP197_AEAD_TYPE_IPSEC_ESP;
098e51e5 3702 ctx->aadskip = EIP197_AEAD_IPSEC_IV_SIZE;
a9a89624
PL
3703 return ret;
3704}
3705
/* rfc4309(ccm(aes)): AES-CCM for IPsec ESP, 3 byte nonce keyed in */
struct safexcel_alg_template safexcel_alg_rfc4309_ccm = {
	.type = SAFEXCEL_ALG_TYPE_AEAD,
	.algo_mask = SAFEXCEL_ALG_AES | SAFEXCEL_ALG_CBC_MAC_ALL,
	.alg.aead = {
		.setkey = safexcel_rfc4309_ccm_setkey,
		.setauthsize = safexcel_rfc4309_ccm_setauthsize,
		.encrypt = safexcel_rfc4309_ccm_encrypt,
		.decrypt = safexcel_rfc4309_ccm_decrypt,
		.ivsize = EIP197_AEAD_IPSEC_IV_SIZE,
		.maxauthsize = AES_BLOCK_SIZE,
		.base = {
			.cra_name = "rfc4309(ccm(aes))",
			.cra_driver_name = "safexcel-rfc4309-ccm-aes",
			.cra_priority = SAFEXCEL_CRA_PRIORITY,
			.cra_flags = CRYPTO_ALG_ASYNC |
				     CRYPTO_ALG_KERN_DRIVER_ONLY,
			.cra_blocksize = 1,
			.cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
			.cra_alignmask = 0,
			.cra_init = safexcel_rfc4309_ccm_cra_init,
			.cra_exit = safexcel_aead_cra_exit,
			.cra_module = THIS_MODULE,
		},
	},
};