crypto: remove CRYPTO_TFM_RES_BAD_KEY_LEN
drivers/crypto/inside-secure/safexcel_hash.c

// SPDX-License-Identifier: GPL-2.0
/*
 * Copyright (C) 2017 Marvell
 *
 * Antoine Tenart <antoine.tenart@free-electrons.com>
 */

#include <crypto/aes.h>
#include <crypto/hmac.h>
#include <crypto/md5.h>
#include <crypto/sha.h>
#include <crypto/sha3.h>
#include <crypto/skcipher.h>
#include <crypto/sm3.h>
#include <linux/device.h>
#include <linux/dma-mapping.h>
#include <linux/dmapool.h>

#include "safexcel.h"

struct safexcel_ahash_ctx {
	struct safexcel_context base;
	struct safexcel_crypto_priv *priv;

	u32 alg;
	u8 key_sz;
	bool cbcmac;
	bool do_fallback;
	bool fb_init_done;
	bool fb_do_setkey;

	__le32 ipad[SHA3_512_BLOCK_SIZE / sizeof(__le32)];
	__le32 opad[SHA3_512_BLOCK_SIZE / sizeof(__le32)];

	struct crypto_cipher *kaes;
	struct crypto_ahash *fback;
	struct crypto_shash *shpre;
	struct shash_desc *shdesc;
};

struct safexcel_ahash_req {
	bool last_req;
	bool finish;
	bool hmac;
	bool needs_inv;
	bool hmac_zlen;
	bool len_is_le;
	bool not_first;
	bool xcbcmac;

	int nents;
	dma_addr_t result_dma;

	u32 digest;

	u8 state_sz;    /* expected state size, only set once */
	u8 block_sz;    /* block size, only set once */
	u8 digest_sz;   /* output digest size, only set once */
	__le32 state[SHA3_512_BLOCK_SIZE /
		     sizeof(__le32)] __aligned(sizeof(__le32));

	u64 len;
	u64 processed;

	u8 cache[HASH_CACHE_SIZE] __aligned(sizeof(u32));
	dma_addr_t cache_dma;
	unsigned int cache_sz;

	u8 cache_next[HASH_CACHE_SIZE] __aligned(sizeof(u32));
};

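/*
 * Amount of data accepted by the driver (via update()) but not yet
 * submitted to the engine, i.e. still sitting in the cache buffers.
 */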
static inline u64 safexcel_queued_len(struct safexcel_ahash_req *req)
{
	return req->len - req->processed;
}

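/*
 * Build the token program for a hash operation: instruction 0 feeds
 * input_length bytes to the hash engine, instruction 1 pads the final
 * partial block out to a full 16 bytes for CBC-MAC (and is a no-op
 * otherwise), and instruction 2 inserts result_length bytes of digest
 * into the result.
 */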
static void safexcel_hash_token(struct safexcel_command_desc *cdesc,
				u32 input_length, u32 result_length,
				bool cbcmac)
{
	struct safexcel_token *token =
		(struct safexcel_token *)cdesc->control_data.token;

	token[0].opcode = EIP197_TOKEN_OPCODE_DIRECTION;
	token[0].packet_length = input_length;
	token[0].instructions = EIP197_TOKEN_INS_TYPE_HASH;

	input_length &= 15;
	if (unlikely(cbcmac && input_length)) {
		token[0].stat = 0;
		token[1].opcode = EIP197_TOKEN_OPCODE_INSERT;
		token[1].packet_length = 16 - input_length;
		token[1].stat = EIP197_TOKEN_STAT_LAST_HASH;
		token[1].instructions = EIP197_TOKEN_INS_TYPE_HASH;
	} else {
		token[0].stat = EIP197_TOKEN_STAT_LAST_HASH;
		eip197_noop_token(&token[1]);
	}

	token[2].opcode = EIP197_TOKEN_OPCODE_INSERT;
	token[2].stat = EIP197_TOKEN_STAT_LAST_HASH |
			EIP197_TOKEN_STAT_LAST_PACKET;
	token[2].packet_length = result_length;
	token[2].instructions = EIP197_TOKEN_INS_TYPE_OUTPUT |
				EIP197_TOKEN_INS_INSERT_HASH_DIGEST;

	eip197_noop_token(&token[3]);
}

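/*
 * Program the context record and control words for this request: load
 * the (intermediate) digest into the context, select the digest type,
 * and set the context size and digest count as required for first
 * blocks, continuations and HMAC finish operations.
 */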
static void safexcel_context_control(struct safexcel_ahash_ctx *ctx,
				     struct safexcel_ahash_req *req,
				     struct safexcel_command_desc *cdesc)
{
	struct safexcel_crypto_priv *priv = ctx->priv;
	u64 count = 0;

	cdesc->control_data.control0 = ctx->alg;
	cdesc->control_data.control1 = 0;

	/*
	 * Copy the input digest if needed, and set up the context
	 * fields. Do this now as we need it to set up the first command
	 * descriptor.
	 */
	if (unlikely(req->digest == CONTEXT_CONTROL_DIGEST_XCM)) {
		if (req->xcbcmac)
			memcpy(ctx->base.ctxr->data, ctx->ipad, ctx->key_sz);
		else
			memcpy(ctx->base.ctxr->data, req->state, req->state_sz);

		if (!req->finish && req->xcbcmac)
			cdesc->control_data.control0 |=
				CONTEXT_CONTROL_DIGEST_XCM |
				CONTEXT_CONTROL_TYPE_HASH_OUT |
				CONTEXT_CONTROL_NO_FINISH_HASH |
				CONTEXT_CONTROL_SIZE(req->state_sz /
						     sizeof(u32));
		else
			cdesc->control_data.control0 |=
				CONTEXT_CONTROL_DIGEST_XCM |
				CONTEXT_CONTROL_TYPE_HASH_OUT |
				CONTEXT_CONTROL_SIZE(req->state_sz /
						     sizeof(u32));
		return;
	} else if (!req->processed) {
		/* First - and possibly only - block of basic hash only */
		if (req->finish)
			cdesc->control_data.control0 |= req->digest |
				CONTEXT_CONTROL_TYPE_HASH_OUT |
				CONTEXT_CONTROL_RESTART_HASH |
				/* ensure it's not 0! */
				CONTEXT_CONTROL_SIZE(1);
		else
			cdesc->control_data.control0 |= req->digest |
				CONTEXT_CONTROL_TYPE_HASH_OUT |
				CONTEXT_CONTROL_RESTART_HASH |
				CONTEXT_CONTROL_NO_FINISH_HASH |
				/* ensure it's not 0! */
				CONTEXT_CONTROL_SIZE(1);
		return;
	}

	/* Hash continuation or HMAC, set up (inner) digest from state */
	memcpy(ctx->base.ctxr->data, req->state, req->state_sz);

	if (req->finish) {
		/* Compute digest count for hash/HMAC finish operations */
		if ((req->digest == CONTEXT_CONTROL_DIGEST_PRECOMPUTED) ||
		    req->hmac_zlen || (req->processed != req->block_sz)) {
			count = req->processed / EIP197_COUNTER_BLOCK_SIZE;

			/* This is a hardware limitation, as the
			 * counter must fit into a u32. This represents
			 * a fairly big amount of input data, so we
			 * shouldn't see this.
			 */
			if (unlikely(count & 0xffffffff00000000ULL)) {
				dev_warn(priv->dev,
					 "Input data is too big\n");
				return;
			}
		}

		if ((req->digest == CONTEXT_CONTROL_DIGEST_PRECOMPUTED) ||
		    /* Special case: zero length HMAC */
		    req->hmac_zlen ||
		    /* PE HW < 4.4 cannot do HMAC continue, fake using hash */
		    (req->processed != req->block_sz)) {
			/* Basic hash continue operation, need digest + cnt */
			cdesc->control_data.control0 |=
				CONTEXT_CONTROL_SIZE((req->state_sz >> 2) + 1) |
				CONTEXT_CONTROL_TYPE_HASH_OUT |
				CONTEXT_CONTROL_DIGEST_PRECOMPUTED;
			/* For zero-len HMAC, don't finalize, already padded! */
			if (req->hmac_zlen)
				cdesc->control_data.control0 |=
					CONTEXT_CONTROL_NO_FINISH_HASH;
			cdesc->control_data.control1 |=
				CONTEXT_CONTROL_DIGEST_CNT;
			ctx->base.ctxr->data[req->state_sz >> 2] =
				cpu_to_le32(count);
			req->digest = CONTEXT_CONTROL_DIGEST_PRECOMPUTED;

			/* Clear zero-length HMAC flag for next operation! */
			req->hmac_zlen = false;
		} else { /* HMAC */
			/* Need outer digest for HMAC finalization */
			memcpy(ctx->base.ctxr->data + (req->state_sz >> 2),
			       ctx->opad, req->state_sz);

			/* Single pass HMAC - no digest count */
			cdesc->control_data.control0 |=
				CONTEXT_CONTROL_SIZE(req->state_sz >> 1) |
				CONTEXT_CONTROL_TYPE_HASH_OUT |
				CONTEXT_CONTROL_DIGEST_HMAC;
		}
	} else { /* Hash continuation, do not finish yet */
		cdesc->control_data.control0 |=
			CONTEXT_CONTROL_SIZE(req->state_sz >> 2) |
			CONTEXT_CONTROL_DIGEST_PRECOMPUTED |
			CONTEXT_CONTROL_TYPE_HASH_OUT |
			CONTEXT_CONTROL_NO_FINISH_HASH;
	}
}

static int safexcel_ahash_enqueue(struct ahash_request *areq);

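/*
 * Completion handler for a regular hash request: unmap all DMA
 * buffers, copy out the digest (or launch the outer hash pass when
 * faking HMAC on top of a plain hash), and preserve any cached data
 * for the next operation.
 */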
static int safexcel_handle_req_result(struct safexcel_crypto_priv *priv,
				      int ring,
				      struct crypto_async_request *async,
				      bool *should_complete, int *ret)
{
	struct safexcel_result_desc *rdesc;
	struct ahash_request *areq = ahash_request_cast(async);
	struct crypto_ahash *ahash = crypto_ahash_reqtfm(areq);
	struct safexcel_ahash_req *sreq = ahash_request_ctx(areq);
	struct safexcel_ahash_ctx *ctx = crypto_ahash_ctx(ahash);
	u64 cache_len;

	*ret = 0;

	rdesc = safexcel_ring_next_rptr(priv, &priv->ring[ring].rdr);
	if (IS_ERR(rdesc)) {
		dev_err(priv->dev,
			"hash: result: could not retrieve the result descriptor\n");
		*ret = PTR_ERR(rdesc);
	} else {
		*ret = safexcel_rdesc_check_errors(priv, rdesc);
	}

	safexcel_complete(priv, ring);

	if (sreq->nents) {
		dma_unmap_sg(priv->dev, areq->src, sreq->nents, DMA_TO_DEVICE);
		sreq->nents = 0;
	}

	if (sreq->result_dma) {
		dma_unmap_single(priv->dev, sreq->result_dma, sreq->digest_sz,
				 DMA_FROM_DEVICE);
		sreq->result_dma = 0;
	}

	if (sreq->cache_dma) {
		dma_unmap_single(priv->dev, sreq->cache_dma, sreq->cache_sz,
				 DMA_TO_DEVICE);
		sreq->cache_dma = 0;
		sreq->cache_sz = 0;
	}

	if (sreq->finish) {
		if (sreq->hmac &&
		    (sreq->digest != CONTEXT_CONTROL_DIGEST_HMAC)) {
			/* Faking HMAC using hash - need to do outer hash */
			memcpy(sreq->cache, sreq->state,
			       crypto_ahash_digestsize(ahash));

			memcpy(sreq->state, ctx->opad, sreq->digest_sz);

			sreq->len = sreq->block_sz +
				    crypto_ahash_digestsize(ahash);
			sreq->processed = sreq->block_sz;
			sreq->hmac = 0;

			if (priv->flags & EIP197_TRC_CACHE)
				ctx->base.needs_inv = true;
			areq->nbytes = 0;
			safexcel_ahash_enqueue(areq);

			*should_complete = false; /* Not done yet */
			return 1;
		}

		if (unlikely(sreq->digest == CONTEXT_CONTROL_DIGEST_XCM &&
			     ctx->alg == CONTEXT_CONTROL_CRYPTO_ALG_CRC32)) {
			/* Undo final XOR with 0xffffffff ... */
			*(__le32 *)areq->result = ~sreq->state[0];
		} else {
			memcpy(areq->result, sreq->state,
			       crypto_ahash_digestsize(ahash));
		}
	}

	cache_len = safexcel_queued_len(sreq);
	if (cache_len)
		memcpy(sreq->cache, sreq->cache_next, cache_len);

	*should_complete = true;

	return 1;
}

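/*
 * Build and queue the command and result descriptors for one hash
 * request: flush cached data first, then map and chain the request's
 * scatterlist, handling the XCBC/CMAC final-block padding as needed.
 */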
static int safexcel_ahash_send_req(struct crypto_async_request *async, int ring,
				   int *commands, int *results)
{
	struct ahash_request *areq = ahash_request_cast(async);
	struct safexcel_ahash_req *req = ahash_request_ctx(areq);
	struct safexcel_ahash_ctx *ctx = crypto_ahash_ctx(crypto_ahash_reqtfm(areq));
	struct safexcel_crypto_priv *priv = ctx->priv;
	struct safexcel_command_desc *cdesc, *first_cdesc = NULL;
	struct safexcel_result_desc *rdesc;
	struct scatterlist *sg;
	struct safexcel_token *dmmy;
	int i, extra = 0, n_cdesc = 0, ret = 0, cache_len, skip = 0;
	u64 queued, len;

	queued = safexcel_queued_len(req);
	if (queued <= HASH_CACHE_SIZE)
		cache_len = queued;
	else
		cache_len = queued - areq->nbytes;

	if (!req->finish && !req->last_req) {
		/* If this is not the last request and the queued data does not
		 * fit into full cache blocks, cache it for the next send call.
		 */
		extra = queued & (HASH_CACHE_SIZE - 1);

		/* If this is not the last request and the queued data
		 * is a multiple of a block, cache the last one for now.
		 */
		if (!extra)
			extra = HASH_CACHE_SIZE;

		sg_pcopy_to_buffer(areq->src, sg_nents(areq->src),
				   req->cache_next, extra,
				   areq->nbytes - extra);

		queued -= extra;

		if (!queued) {
			*commands = 0;
			*results = 0;
			return 0;
		}

		extra = 0;
	}

	if (unlikely(req->xcbcmac && req->processed > AES_BLOCK_SIZE)) {
		if (unlikely(cache_len < AES_BLOCK_SIZE)) {
			/*
			 * Cache contains less than 1 full block, complete.
			 */
			extra = AES_BLOCK_SIZE - cache_len;
			if (queued > cache_len) {
				/* More data follows: borrow bytes */
				u64 tmp = queued - cache_len;

				skip = min_t(u64, tmp, extra);
				sg_pcopy_to_buffer(areq->src,
						   sg_nents(areq->src),
						   req->cache + cache_len,
						   skip, 0);
			}
			extra -= skip;
			memset(req->cache + cache_len + skip, 0, extra);
			if (!ctx->cbcmac && extra) {
				/* 10- padding for XCBCMAC & CMAC */
				req->cache[cache_len + skip] = 0x80;
				/* HW will use K2 instead of K3 - compensate! */
				for (i = 0; i < AES_BLOCK_SIZE / sizeof(u32); i++)
					((__be32 *)req->cache)[i] ^=
						cpu_to_be32(le32_to_cpu(
							ctx->ipad[i] ^ ctx->ipad[i + 4]));
			}
			cache_len = AES_BLOCK_SIZE;
			queued = queued + extra;
		}

		/* XCBC continue: XOR previous result into 1st word */
		crypto_xor(req->cache, (const u8 *)req->state, AES_BLOCK_SIZE);
	}

	len = queued;
	/* Add a command descriptor for the cached data, if any */
	if (cache_len) {
		req->cache_dma = dma_map_single(priv->dev, req->cache,
						cache_len, DMA_TO_DEVICE);
		if (dma_mapping_error(priv->dev, req->cache_dma))
			return -EINVAL;

		req->cache_sz = cache_len;
		first_cdesc = safexcel_add_cdesc(priv, ring, 1,
						 (cache_len == len),
						 req->cache_dma, cache_len,
						 len, ctx->base.ctxr_dma,
						 &dmmy);
		if (IS_ERR(first_cdesc)) {
			ret = PTR_ERR(first_cdesc);
			goto unmap_cache;
		}
		n_cdesc++;

		queued -= cache_len;
		if (!queued)
			goto send_command;
	}

	/* Now handle the current ahash request buffer(s) */
	req->nents = dma_map_sg(priv->dev, areq->src,
				sg_nents_for_len(areq->src,
						 areq->nbytes),
				DMA_TO_DEVICE);
	if (!req->nents) {
		ret = -ENOMEM;
		goto cdesc_rollback;
	}

	for_each_sg(areq->src, sg, req->nents, i) {
		int sglen = sg_dma_len(sg);

		if (unlikely(sglen <= skip)) {
			skip -= sglen;
			continue;
		}

		/* Do not overflow the request */
		if ((queued + skip) <= sglen)
			sglen = queued;
		else
			sglen -= skip;

		cdesc = safexcel_add_cdesc(priv, ring, !n_cdesc,
					   !(queued - sglen),
					   sg_dma_address(sg) + skip, sglen,
					   len, ctx->base.ctxr_dma, &dmmy);
		if (IS_ERR(cdesc)) {
			ret = PTR_ERR(cdesc);
			goto unmap_sg;
		}

		if (!n_cdesc)
			first_cdesc = cdesc;
		n_cdesc++;

		queued -= sglen;
		if (!queued)
			break;
		skip = 0;
	}

send_command:
	/* Setup the context options */
	safexcel_context_control(ctx, req, first_cdesc);

	/* Add the token */
	safexcel_hash_token(first_cdesc, len, req->digest_sz, ctx->cbcmac);

	req->result_dma = dma_map_single(priv->dev, req->state, req->digest_sz,
					 DMA_FROM_DEVICE);
	if (dma_mapping_error(priv->dev, req->result_dma)) {
		ret = -EINVAL;
		goto unmap_sg;
	}

	/* Add a result descriptor */
	rdesc = safexcel_add_rdesc(priv, ring, 1, 1, req->result_dma,
				   req->digest_sz);
	if (IS_ERR(rdesc)) {
		ret = PTR_ERR(rdesc);
		goto unmap_result;
	}

	safexcel_rdr_req_set(priv, ring, rdesc, &areq->base);

	req->processed += len - extra;

	*commands = n_cdesc;
	*results = 1;
	return 0;

unmap_result:
	dma_unmap_single(priv->dev, req->result_dma, req->digest_sz,
			 DMA_FROM_DEVICE);
unmap_sg:
	if (req->nents) {
		dma_unmap_sg(priv->dev, areq->src, req->nents, DMA_TO_DEVICE);
		req->nents = 0;
	}
cdesc_rollback:
	for (i = 0; i < n_cdesc; i++)
		safexcel_ring_rollback_wptr(priv, &priv->ring[ring].cdr);
unmap_cache:
	if (req->cache_dma) {
		dma_unmap_single(priv->dev, req->cache_dma, req->cache_sz,
				 DMA_TO_DEVICE);
		req->cache_dma = 0;
		req->cache_sz = 0;
	}

	return ret;
}

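/*
 * Completion handler for a context invalidation request: free the
 * context record on teardown, or re-enqueue the original request on a
 * (possibly new) ring once the old context has been invalidated.
 */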
static int safexcel_handle_inv_result(struct safexcel_crypto_priv *priv,
				      int ring,
				      struct crypto_async_request *async,
				      bool *should_complete, int *ret)
{
	struct safexcel_result_desc *rdesc;
	struct ahash_request *areq = ahash_request_cast(async);
	struct crypto_ahash *ahash = crypto_ahash_reqtfm(areq);
	struct safexcel_ahash_ctx *ctx = crypto_ahash_ctx(ahash);
	int enq_ret;

	*ret = 0;

	rdesc = safexcel_ring_next_rptr(priv, &priv->ring[ring].rdr);
	if (IS_ERR(rdesc)) {
		dev_err(priv->dev,
			"hash: invalidate: could not retrieve the result descriptor\n");
		*ret = PTR_ERR(rdesc);
	} else {
		*ret = safexcel_rdesc_check_errors(priv, rdesc);
	}

	safexcel_complete(priv, ring);

	if (ctx->base.exit_inv) {
		dma_pool_free(priv->context_pool, ctx->base.ctxr,
			      ctx->base.ctxr_dma);

		*should_complete = true;
		return 1;
	}

	ring = safexcel_select_ring(priv);
	ctx->base.ring = ring;

	spin_lock_bh(&priv->ring[ring].queue_lock);
	enq_ret = crypto_enqueue_request(&priv->ring[ring].queue, async);
	spin_unlock_bh(&priv->ring[ring].queue_lock);

	if (enq_ret != -EINPROGRESS)
		*ret = enq_ret;

	queue_work(priv->ring[ring].workqueue,
		   &priv->ring[ring].work_data.work);

	*should_complete = false;

	return 1;
}

static int safexcel_handle_result(struct safexcel_crypto_priv *priv, int ring,
				  struct crypto_async_request *async,
				  bool *should_complete, int *ret)
{
	struct ahash_request *areq = ahash_request_cast(async);
	struct safexcel_ahash_req *req = ahash_request_ctx(areq);
	int err;

	BUG_ON(!(priv->flags & EIP197_TRC_CACHE) && req->needs_inv);

	if (req->needs_inv) {
		req->needs_inv = false;
		err = safexcel_handle_inv_result(priv, ring, async,
						 should_complete, ret);
	} else {
		err = safexcel_handle_req_result(priv, ring, async,
						 should_complete, ret);
	}

	return err;
}

static int safexcel_ahash_send_inv(struct crypto_async_request *async,
				   int ring, int *commands, int *results)
{
	struct ahash_request *areq = ahash_request_cast(async);
	struct safexcel_ahash_ctx *ctx = crypto_ahash_ctx(crypto_ahash_reqtfm(areq));
	int ret;

	ret = safexcel_invalidate_cache(async, ctx->priv,
					ctx->base.ctxr_dma, ring);
	if (unlikely(ret))
		return ret;

	*commands = 1;
	*results = 1;

	return 0;
}

static int safexcel_ahash_send(struct crypto_async_request *async,
			       int ring, int *commands, int *results)
{
	struct ahash_request *areq = ahash_request_cast(async);
	struct safexcel_ahash_req *req = ahash_request_ctx(areq);
	int ret;

	if (req->needs_inv)
		ret = safexcel_ahash_send_inv(async, ring, commands, results);
	else
		ret = safexcel_ahash_send_req(async, ring, commands, results);

	return ret;
}

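/*
 * Synchronously invalidate the context record in the engine's record
 * cache: build an invalidation request on the stack, queue it, and
 * wait for its completion before the transform is torn down.
 */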
static int safexcel_ahash_exit_inv(struct crypto_tfm *tfm)
{
	struct safexcel_ahash_ctx *ctx = crypto_tfm_ctx(tfm);
	struct safexcel_crypto_priv *priv = ctx->priv;
	EIP197_REQUEST_ON_STACK(req, ahash, EIP197_AHASH_REQ_SIZE);
	struct safexcel_ahash_req *rctx = ahash_request_ctx(req);
	struct safexcel_inv_result result = {};
	int ring = ctx->base.ring;

	memset(req, 0, EIP197_AHASH_REQ_SIZE);

	/* create invalidation request */
	init_completion(&result.completion);
	ahash_request_set_callback(req, CRYPTO_TFM_REQ_MAY_BACKLOG,
				   safexcel_inv_complete, &result);

	ahash_request_set_tfm(req, __crypto_ahash_cast(tfm));
	ctx = crypto_tfm_ctx(req->base.tfm);
	ctx->base.exit_inv = true;
	rctx->needs_inv = true;

	spin_lock_bh(&priv->ring[ring].queue_lock);
	crypto_enqueue_request(&priv->ring[ring].queue, &req->base);
	spin_unlock_bh(&priv->ring[ring].queue_lock);

	queue_work(priv->ring[ring].workqueue,
		   &priv->ring[ring].work_data.work);

	wait_for_completion(&result.completion);

	if (result.error) {
		dev_warn(priv->dev, "hash: completion error (%d)\n",
			 result.error);
		return result.error;
	}

	return 0;
}

/* safexcel_ahash_cache: cache data until at least one request can be sent to
 * the engine, i.e. when there is at least one block size worth of data in
 * the pipe.
 */
static int safexcel_ahash_cache(struct ahash_request *areq)
{
	struct safexcel_ahash_req *req = ahash_request_ctx(areq);
	u64 cache_len;

	/* cache_len: everything accepted by the driver but not sent yet,
	 * i.e. the total size handled by update() minus the last request
	 * size minus the total size already handled by send().
	 */
	cache_len = safexcel_queued_len(req);

	/*
	 * In case there aren't enough bytes to proceed (less than a
	 * block size), cache the data until we have enough.
	 */
	if (cache_len + areq->nbytes <= HASH_CACHE_SIZE) {
		sg_pcopy_to_buffer(areq->src, sg_nents(areq->src),
				   req->cache + cache_len,
				   areq->nbytes, 0);
		return 0;
	}

	/* We couldn't cache all the data */
	return -E2BIG;
}

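/*
 * Queue a request on its assigned ring, allocating a context record on
 * first use and scheduling a cache invalidation first whenever the
 * engine's cached context may no longer match this request's state.
 */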
static int safexcel_ahash_enqueue(struct ahash_request *areq)
{
	struct safexcel_ahash_ctx *ctx = crypto_ahash_ctx(crypto_ahash_reqtfm(areq));
	struct safexcel_ahash_req *req = ahash_request_ctx(areq);
	struct safexcel_crypto_priv *priv = ctx->priv;
	int ret, ring;

	req->needs_inv = false;

	if (ctx->base.ctxr) {
		if (priv->flags & EIP197_TRC_CACHE && !ctx->base.needs_inv &&
		    /* invalidate for *any* non-XCBC continuation */
		    ((req->not_first && !req->xcbcmac) ||
		     /* invalidate if (i)digest changed */
		     memcmp(ctx->base.ctxr->data, req->state, req->state_sz) ||
		     /* invalidate for HMAC finish with odigest changed */
		     (req->finish && req->hmac &&
		      memcmp(ctx->base.ctxr->data + (req->state_sz >> 2),
			     ctx->opad, req->state_sz))))
			/*
			 * We're still setting needs_inv here, even though it is
			 * cleared right away, because the needs_inv flag can be
			 * set in other functions and we want to keep the same
			 * logic.
			 */
			ctx->base.needs_inv = true;

		if (ctx->base.needs_inv) {
			ctx->base.needs_inv = false;
			req->needs_inv = true;
		}
	} else {
		ctx->base.ring = safexcel_select_ring(priv);
		ctx->base.ctxr = dma_pool_zalloc(priv->context_pool,
						 EIP197_GFP_FLAGS(areq->base),
						 &ctx->base.ctxr_dma);
		if (!ctx->base.ctxr)
			return -ENOMEM;
	}
	req->not_first = true;

	ring = ctx->base.ring;

	spin_lock_bh(&priv->ring[ring].queue_lock);
	ret = crypto_enqueue_request(&priv->ring[ring].queue, &areq->base);
	spin_unlock_bh(&priv->ring[ring].queue_lock);

	queue_work(priv->ring[ring].workqueue,
		   &priv->ring[ring].work_data.work);

	return ret;
}

static int safexcel_ahash_update(struct ahash_request *areq)
{
	struct safexcel_ahash_req *req = ahash_request_ctx(areq);
	int ret;

	/* If the request is 0 length, do nothing */
	if (!areq->nbytes)
		return 0;

	/* Add request to the cache if it fits */
	ret = safexcel_ahash_cache(areq);

	/* Update total request length */
	req->len += areq->nbytes;

	/* If not all data could fit into the cache, go process the excess.
	 * Also go process immediately for an HMAC IV precompute, which
	 * will never be finished at all, but needs to be processed anyway.
	 */
	if ((ret && !req->finish) || req->last_req)
		return safexcel_ahash_enqueue(areq);

	return 0;
}

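/*
 * Finalize a hash. Zero-length messages cannot be run through the
 * engine, so the well-known constant digests (and the CRC32, CBC-MAC
 * and (X)CBC/CMAC counterparts) are returned directly below, while the
 * zero-length HMAC case is handled by padding the inner block in
 * software and finishing it in hardware.
 */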
static int safexcel_ahash_final(struct ahash_request *areq)
{
	struct safexcel_ahash_req *req = ahash_request_ctx(areq);
	struct safexcel_ahash_ctx *ctx = crypto_ahash_ctx(crypto_ahash_reqtfm(areq));

	req->finish = true;

	if (unlikely(!req->len && !areq->nbytes)) {
		/*
		 * If we have an overall 0 length *hash* request:
		 * The HW cannot do 0 length hash, so we provide the correct
		 * result directly here.
		 */
		if (ctx->alg == CONTEXT_CONTROL_CRYPTO_ALG_MD5)
			memcpy(areq->result, md5_zero_message_hash,
			       MD5_DIGEST_SIZE);
		else if (ctx->alg == CONTEXT_CONTROL_CRYPTO_ALG_SHA1)
			memcpy(areq->result, sha1_zero_message_hash,
			       SHA1_DIGEST_SIZE);
		else if (ctx->alg == CONTEXT_CONTROL_CRYPTO_ALG_SHA224)
			memcpy(areq->result, sha224_zero_message_hash,
			       SHA224_DIGEST_SIZE);
		else if (ctx->alg == CONTEXT_CONTROL_CRYPTO_ALG_SHA256)
			memcpy(areq->result, sha256_zero_message_hash,
			       SHA256_DIGEST_SIZE);
		else if (ctx->alg == CONTEXT_CONTROL_CRYPTO_ALG_SHA384)
			memcpy(areq->result, sha384_zero_message_hash,
			       SHA384_DIGEST_SIZE);
		else if (ctx->alg == CONTEXT_CONTROL_CRYPTO_ALG_SHA512)
			memcpy(areq->result, sha512_zero_message_hash,
			       SHA512_DIGEST_SIZE);
		else if (ctx->alg == CONTEXT_CONTROL_CRYPTO_ALG_SM3) {
			memcpy(areq->result,
			       EIP197_SM3_ZEROM_HASH, SM3_DIGEST_SIZE);
		}

		return 0;
	} else if (unlikely(req->digest == CONTEXT_CONTROL_DIGEST_XCM &&
			    ctx->alg == CONTEXT_CONTROL_CRYPTO_ALG_MD5 &&
			    req->len == sizeof(u32) && !areq->nbytes)) {
		/* Zero length CRC32 */
		memcpy(areq->result, ctx->ipad, sizeof(u32));
		return 0;
	} else if (unlikely(ctx->cbcmac && req->len == AES_BLOCK_SIZE &&
			    !areq->nbytes)) {
		/* Zero length CBC MAC */
		memset(areq->result, 0, AES_BLOCK_SIZE);
		return 0;
	} else if (unlikely(req->xcbcmac && req->len == AES_BLOCK_SIZE &&
			    !areq->nbytes)) {
		/* Zero length (X)CBC/CMAC */
		int i;

		for (i = 0; i < AES_BLOCK_SIZE / sizeof(u32); i++)
			((__be32 *)areq->result)[i] =
				cpu_to_be32(le32_to_cpu(ctx->ipad[i + 4])); /* K3 */
		areq->result[0] ^= 0x80; /* 10- padding */
		crypto_cipher_encrypt_one(ctx->kaes, areq->result, areq->result);
		return 0;
	} else if (unlikely(req->hmac &&
			    (req->len == req->block_sz) &&
			    !areq->nbytes)) {
		/*
		 * If we have an overall 0 length *HMAC* request:
		 * For HMAC, we need to finalize the inner digest
		 * and then perform the outer hash.
		 */

		/* generate pad block in the cache */
		/* start with a hash block of all zeroes */
		memset(req->cache, 0, req->block_sz);
		/* set the first byte to 0x80 to 'append a 1 bit' */
		req->cache[0] = 0x80;
		/* add the length in bits in the last 2 bytes */
		if (req->len_is_le) {
			/* Little endian length word (e.g. MD5) */
			req->cache[req->block_sz - 8] = (req->block_sz << 3) &
							255;
			req->cache[req->block_sz - 7] = (req->block_sz >> 5);
		} else {
			/* Big endian length word (e.g. any SHA) */
			req->cache[req->block_sz - 2] = (req->block_sz >> 5);
			req->cache[req->block_sz - 1] = (req->block_sz << 3) &
							255;
		}

		req->len += req->block_sz; /* plus 1 hash block */

		/* Set special zero-length HMAC flag */
		req->hmac_zlen = true;

		/* Finalize HMAC */
		req->digest = CONTEXT_CONTROL_DIGEST_HMAC;
	} else if (req->hmac) {
		/* Finalize HMAC */
		req->digest = CONTEXT_CONTROL_DIGEST_HMAC;
	}

	return safexcel_ahash_enqueue(areq);
}

static int safexcel_ahash_finup(struct ahash_request *areq)
{
	struct safexcel_ahash_req *req = ahash_request_ctx(areq);

	req->finish = true;

	safexcel_ahash_update(areq);
	return safexcel_ahash_final(areq);
}

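/*
 * export()/import() snapshot and restore the software request state
 * (lengths, digest type, intermediate state and cached data), allowing
 * a hash operation to be suspended and resumed on another request.
 */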
static int safexcel_ahash_export(struct ahash_request *areq, void *out)
{
	struct safexcel_ahash_req *req = ahash_request_ctx(areq);
	struct safexcel_ahash_export_state *export = out;

	export->len = req->len;
	export->processed = req->processed;

	export->digest = req->digest;

	memcpy(export->state, req->state, req->state_sz);
	memcpy(export->cache, req->cache, HASH_CACHE_SIZE);

	return 0;
}

static int safexcel_ahash_import(struct ahash_request *areq, const void *in)
{
	struct safexcel_ahash_req *req = ahash_request_ctx(areq);
	const struct safexcel_ahash_export_state *export = in;
	int ret;

	ret = crypto_ahash_init(areq);
	if (ret)
		return ret;

	req->len = export->len;
	req->processed = export->processed;

	req->digest = export->digest;

	memcpy(req->cache, export->cache, HASH_CACHE_SIZE);
	memcpy(req->state, export->state, req->state_sz);

	return 0;
}

static int safexcel_ahash_cra_init(struct crypto_tfm *tfm)
{
	struct safexcel_ahash_ctx *ctx = crypto_tfm_ctx(tfm);
	struct safexcel_alg_template *tmpl =
		container_of(__crypto_ahash_alg(tfm->__crt_alg),
			     struct safexcel_alg_template, alg.ahash);

	ctx->priv = tmpl->priv;
	ctx->base.send = safexcel_ahash_send;
	ctx->base.handle_result = safexcel_handle_result;
	ctx->fb_do_setkey = false;

	crypto_ahash_set_reqsize(__crypto_ahash_cast(tfm),
				 sizeof(struct safexcel_ahash_req));
	return 0;
}

static int safexcel_sha1_init(struct ahash_request *areq)
{
	struct safexcel_ahash_ctx *ctx = crypto_ahash_ctx(crypto_ahash_reqtfm(areq));
	struct safexcel_ahash_req *req = ahash_request_ctx(areq);

	memset(req, 0, sizeof(*req));

	ctx->alg = CONTEXT_CONTROL_CRYPTO_ALG_SHA1;
	req->digest = CONTEXT_CONTROL_DIGEST_PRECOMPUTED;
	req->state_sz = SHA1_DIGEST_SIZE;
	req->digest_sz = SHA1_DIGEST_SIZE;
	req->block_sz = SHA1_BLOCK_SIZE;

	return 0;
}

static int safexcel_sha1_digest(struct ahash_request *areq)
{
	int ret = safexcel_sha1_init(areq);

	if (ret)
		return ret;

	return safexcel_ahash_finup(areq);
}

static void safexcel_ahash_cra_exit(struct crypto_tfm *tfm)
{
	struct safexcel_ahash_ctx *ctx = crypto_tfm_ctx(tfm);
	struct safexcel_crypto_priv *priv = ctx->priv;
	int ret;

	/* context not allocated, skip invalidation */
	if (!ctx->base.ctxr)
		return;

	if (priv->flags & EIP197_TRC_CACHE) {
		ret = safexcel_ahash_exit_inv(tfm);
		if (ret)
			dev_warn(priv->dev, "hash: invalidation error %d\n", ret);
	} else {
		dma_pool_free(priv->context_pool, ctx->base.ctxr,
			      ctx->base.ctxr_dma);
	}
}

struct safexcel_alg_template safexcel_alg_sha1 = {
	.type = SAFEXCEL_ALG_TYPE_AHASH,
	.algo_mask = SAFEXCEL_ALG_SHA1,
	.alg.ahash = {
		.init = safexcel_sha1_init,
		.update = safexcel_ahash_update,
		.final = safexcel_ahash_final,
		.finup = safexcel_ahash_finup,
		.digest = safexcel_sha1_digest,
		.export = safexcel_ahash_export,
		.import = safexcel_ahash_import,
		.halg = {
			.digestsize = SHA1_DIGEST_SIZE,
			.statesize = sizeof(struct safexcel_ahash_export_state),
			.base = {
				.cra_name = "sha1",
				.cra_driver_name = "safexcel-sha1",
				.cra_priority = SAFEXCEL_CRA_PRIORITY,
				.cra_flags = CRYPTO_ALG_ASYNC |
					     CRYPTO_ALG_KERN_DRIVER_ONLY,
				.cra_blocksize = SHA1_BLOCK_SIZE,
				.cra_ctxsize = sizeof(struct safexcel_ahash_ctx),
				.cra_init = safexcel_ahash_cra_init,
				.cra_exit = safexcel_ahash_cra_exit,
				.cra_module = THIS_MODULE,
			},
		},
	},
};

static int safexcel_hmac_sha1_init(struct ahash_request *areq)
{
	struct safexcel_ahash_ctx *ctx = crypto_ahash_ctx(crypto_ahash_reqtfm(areq));
	struct safexcel_ahash_req *req = ahash_request_ctx(areq);

	memset(req, 0, sizeof(*req));

	/* Start from ipad precompute */
	memcpy(req->state, ctx->ipad, SHA1_DIGEST_SIZE);
	/* Already processed the key^ipad part now! */
	req->len = SHA1_BLOCK_SIZE;
	req->processed = SHA1_BLOCK_SIZE;

	ctx->alg = CONTEXT_CONTROL_CRYPTO_ALG_SHA1;
	req->digest = CONTEXT_CONTROL_DIGEST_PRECOMPUTED;
	req->state_sz = SHA1_DIGEST_SIZE;
	req->digest_sz = SHA1_DIGEST_SIZE;
	req->block_sz = SHA1_BLOCK_SIZE;
	req->hmac = true;

	return 0;
}

static int safexcel_hmac_sha1_digest(struct ahash_request *areq)
{
	int ret = safexcel_hmac_sha1_init(areq);

	if (ret)
		return ret;

	return safexcel_ahash_finup(areq);
}

struct safexcel_ahash_result {
	struct completion completion;
	int error;
};

static void safexcel_ahash_complete(struct crypto_async_request *req, int error)
{
	struct safexcel_ahash_result *result = req->data;

	if (error == -EINPROGRESS)
		return;

	result->error = error;
	complete(&result->completion);
}

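/*
 * Compute the HMAC inner and outer pad blocks for the given key: keys
 * longer than a block are first digested down to one, then the
 * (zero-padded) key is XORed with the ipad/opad constants.
 */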
static int safexcel_hmac_init_pad(struct ahash_request *areq,
				  unsigned int blocksize, const u8 *key,
				  unsigned int keylen, u8 *ipad, u8 *opad)
{
	struct safexcel_ahash_result result;
	struct scatterlist sg;
	int ret, i;
	u8 *keydup;

	if (keylen <= blocksize) {
		memcpy(ipad, key, keylen);
	} else {
		keydup = kmemdup(key, keylen, GFP_KERNEL);
		if (!keydup)
			return -ENOMEM;

		ahash_request_set_callback(areq, CRYPTO_TFM_REQ_MAY_BACKLOG,
					   safexcel_ahash_complete, &result);
		sg_init_one(&sg, keydup, keylen);
		ahash_request_set_crypt(areq, &sg, ipad, keylen);
		init_completion(&result.completion);

		ret = crypto_ahash_digest(areq);
		if (ret == -EINPROGRESS || ret == -EBUSY) {
			wait_for_completion_interruptible(&result.completion);
			ret = result.error;
		}

		/* Avoid leaking */
		memzero_explicit(keydup, keylen);
		kfree(keydup);

		if (ret)
			return ret;

		keylen = crypto_ahash_digestsize(crypto_ahash_reqtfm(areq));
	}

	memset(ipad + keylen, 0, blocksize - keylen);
	memcpy(opad, ipad, blocksize);

	for (i = 0; i < blocksize; i++) {
		ipad[i] ^= HMAC_IPAD_VALUE;
		opad[i] ^= HMAC_OPAD_VALUE;
	}

	return 0;
}

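/*
 * Run a single pad block through the hash (forced out to the engine
 * via last_req) and export the resulting intermediate state, which
 * becomes the HMAC inner or outer digest precompute.
 */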
static int safexcel_hmac_init_iv(struct ahash_request *areq,
				 unsigned int blocksize, u8 *pad, void *state)
{
	struct safexcel_ahash_result result;
	struct safexcel_ahash_req *req;
	struct scatterlist sg;
	int ret;

	ahash_request_set_callback(areq, CRYPTO_TFM_REQ_MAY_BACKLOG,
				   safexcel_ahash_complete, &result);
	sg_init_one(&sg, pad, blocksize);
	ahash_request_set_crypt(areq, &sg, pad, blocksize);
	init_completion(&result.completion);

	ret = crypto_ahash_init(areq);
	if (ret)
		return ret;

	req = ahash_request_ctx(areq);
	req->hmac = true;
	req->last_req = true;

	ret = crypto_ahash_update(areq);
	if (ret && ret != -EINPROGRESS && ret != -EBUSY)
		return ret;

	wait_for_completion_interruptible(&result.completion);
	if (result.error)
		return result.error;

	return crypto_ahash_export(areq, state);
}

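/*
 * Precompute the HMAC inner and outer intermediate states for the
 * given key: the ahash registered under "alg" is allocated by name,
 * the two pad blocks are hashed through it, and the exported states
 * are returned in istate/ostate.
 */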
int safexcel_hmac_setkey(const char *alg, const u8 *key, unsigned int keylen,
			 void *istate, void *ostate)
{
	struct ahash_request *areq;
	struct crypto_ahash *tfm;
	unsigned int blocksize;
	u8 *ipad, *opad;
	int ret;

	tfm = crypto_alloc_ahash(alg, 0, 0);
	if (IS_ERR(tfm))
		return PTR_ERR(tfm);

	areq = ahash_request_alloc(tfm, GFP_KERNEL);
	if (!areq) {
		ret = -ENOMEM;
		goto free_ahash;
	}

	crypto_ahash_clear_flags(tfm, ~0);
	blocksize = crypto_tfm_alg_blocksize(crypto_ahash_tfm(tfm));

	ipad = kcalloc(2, blocksize, GFP_KERNEL);
	if (!ipad) {
		ret = -ENOMEM;
		goto free_request;
	}

	opad = ipad + blocksize;

	ret = safexcel_hmac_init_pad(areq, blocksize, key, keylen, ipad, opad);
	if (ret)
		goto free_ipad;

	ret = safexcel_hmac_init_iv(areq, blocksize, ipad, istate);
	if (ret)
		goto free_ipad;

	ret = safexcel_hmac_init_iv(areq, blocksize, opad, ostate);

free_ipad:
	kfree(ipad);
free_request:
	ahash_request_free(areq);
free_ahash:
	crypto_free_ahash(tfm);

	return ret;
}

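/*
 * Common HMAC setkey: precompute the inner/outer digest states for the
 * new key and flag the engine context for invalidation if they differ
 * from the previously cached ones.
 */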
static int safexcel_hmac_alg_setkey(struct crypto_ahash *tfm, const u8 *key,
				    unsigned int keylen, const char *alg,
				    unsigned int state_sz)
{
	struct safexcel_ahash_ctx *ctx = crypto_tfm_ctx(crypto_ahash_tfm(tfm));
	struct safexcel_crypto_priv *priv = ctx->priv;
	struct safexcel_ahash_export_state istate, ostate;
	int ret;

	ret = safexcel_hmac_setkey(alg, key, keylen, &istate, &ostate);
	if (ret)
		return ret;

	if (priv->flags & EIP197_TRC_CACHE && ctx->base.ctxr &&
	    (memcmp(ctx->ipad, istate.state, state_sz) ||
	     memcmp(ctx->opad, ostate.state, state_sz)))
		ctx->base.needs_inv = true;

	memcpy(ctx->ipad, &istate.state, state_sz);
	memcpy(ctx->opad, &ostate.state, state_sz);

	return 0;
}

static int safexcel_hmac_sha1_setkey(struct crypto_ahash *tfm, const u8 *key,
				     unsigned int keylen)
{
	return safexcel_hmac_alg_setkey(tfm, key, keylen, "safexcel-sha1",
					SHA1_DIGEST_SIZE);
}

struct safexcel_alg_template safexcel_alg_hmac_sha1 = {
	.type = SAFEXCEL_ALG_TYPE_AHASH,
	.algo_mask = SAFEXCEL_ALG_SHA1,
	.alg.ahash = {
		.init = safexcel_hmac_sha1_init,
		.update = safexcel_ahash_update,
		.final = safexcel_ahash_final,
		.finup = safexcel_ahash_finup,
		.digest = safexcel_hmac_sha1_digest,
		.setkey = safexcel_hmac_sha1_setkey,
		.export = safexcel_ahash_export,
		.import = safexcel_ahash_import,
		.halg = {
			.digestsize = SHA1_DIGEST_SIZE,
			.statesize = sizeof(struct safexcel_ahash_export_state),
			.base = {
				.cra_name = "hmac(sha1)",
				.cra_driver_name = "safexcel-hmac-sha1",
				.cra_priority = SAFEXCEL_CRA_PRIORITY,
				.cra_flags = CRYPTO_ALG_ASYNC |
					     CRYPTO_ALG_KERN_DRIVER_ONLY,
				.cra_blocksize = SHA1_BLOCK_SIZE,
				.cra_ctxsize = sizeof(struct safexcel_ahash_ctx),
				.cra_init = safexcel_ahash_cra_init,
				.cra_exit = safexcel_ahash_cra_exit,
				.cra_module = THIS_MODULE,
			},
		},
	},
};

static int safexcel_sha256_init(struct ahash_request *areq)
{
	struct safexcel_ahash_ctx *ctx = crypto_ahash_ctx(crypto_ahash_reqtfm(areq));
	struct safexcel_ahash_req *req = ahash_request_ctx(areq);

	memset(req, 0, sizeof(*req));

	ctx->alg = CONTEXT_CONTROL_CRYPTO_ALG_SHA256;
	req->digest = CONTEXT_CONTROL_DIGEST_PRECOMPUTED;
	req->state_sz = SHA256_DIGEST_SIZE;
	req->digest_sz = SHA256_DIGEST_SIZE;
	req->block_sz = SHA256_BLOCK_SIZE;

	return 0;
}

static int safexcel_sha256_digest(struct ahash_request *areq)
{
	int ret = safexcel_sha256_init(areq);

	if (ret)
		return ret;

	return safexcel_ahash_finup(areq);
}

struct safexcel_alg_template safexcel_alg_sha256 = {
	.type = SAFEXCEL_ALG_TYPE_AHASH,
	.algo_mask = SAFEXCEL_ALG_SHA2_256,
	.alg.ahash = {
		.init = safexcel_sha256_init,
		.update = safexcel_ahash_update,
		.final = safexcel_ahash_final,
		.finup = safexcel_ahash_finup,
		.digest = safexcel_sha256_digest,
		.export = safexcel_ahash_export,
		.import = safexcel_ahash_import,
		.halg = {
			.digestsize = SHA256_DIGEST_SIZE,
			.statesize = sizeof(struct safexcel_ahash_export_state),
			.base = {
				.cra_name = "sha256",
				.cra_driver_name = "safexcel-sha256",
				.cra_priority = SAFEXCEL_CRA_PRIORITY,
				.cra_flags = CRYPTO_ALG_ASYNC |
					     CRYPTO_ALG_KERN_DRIVER_ONLY,
				.cra_blocksize = SHA256_BLOCK_SIZE,
				.cra_ctxsize = sizeof(struct safexcel_ahash_ctx),
				.cra_init = safexcel_ahash_cra_init,
				.cra_exit = safexcel_ahash_cra_exit,
				.cra_module = THIS_MODULE,
			},
		},
	},
};

static int safexcel_sha224_init(struct ahash_request *areq)
{
	struct safexcel_ahash_ctx *ctx = crypto_ahash_ctx(crypto_ahash_reqtfm(areq));
	struct safexcel_ahash_req *req = ahash_request_ctx(areq);

	memset(req, 0, sizeof(*req));

	ctx->alg = CONTEXT_CONTROL_CRYPTO_ALG_SHA224;
	req->digest = CONTEXT_CONTROL_DIGEST_PRECOMPUTED;
	req->state_sz = SHA256_DIGEST_SIZE;
	req->digest_sz = SHA256_DIGEST_SIZE;
	req->block_sz = SHA256_BLOCK_SIZE;

	return 0;
}

static int safexcel_sha224_digest(struct ahash_request *areq)
{
	int ret = safexcel_sha224_init(areq);

	if (ret)
		return ret;

	return safexcel_ahash_finup(areq);
}

struct safexcel_alg_template safexcel_alg_sha224 = {
	.type = SAFEXCEL_ALG_TYPE_AHASH,
	.algo_mask = SAFEXCEL_ALG_SHA2_256,
	.alg.ahash = {
		.init = safexcel_sha224_init,
		.update = safexcel_ahash_update,
		.final = safexcel_ahash_final,
		.finup = safexcel_ahash_finup,
		.digest = safexcel_sha224_digest,
		.export = safexcel_ahash_export,
		.import = safexcel_ahash_import,
		.halg = {
			.digestsize = SHA224_DIGEST_SIZE,
			.statesize = sizeof(struct safexcel_ahash_export_state),
			.base = {
				.cra_name = "sha224",
				.cra_driver_name = "safexcel-sha224",
				.cra_priority = SAFEXCEL_CRA_PRIORITY,
				.cra_flags = CRYPTO_ALG_ASYNC |
					     CRYPTO_ALG_KERN_DRIVER_ONLY,
				.cra_blocksize = SHA224_BLOCK_SIZE,
				.cra_ctxsize = sizeof(struct safexcel_ahash_ctx),
				.cra_init = safexcel_ahash_cra_init,
				.cra_exit = safexcel_ahash_cra_exit,
				.cra_module = THIS_MODULE,
			},
		},
	},
};

static int safexcel_hmac_sha224_setkey(struct crypto_ahash *tfm, const u8 *key,
				       unsigned int keylen)
{
	return safexcel_hmac_alg_setkey(tfm, key, keylen, "safexcel-sha224",
					SHA256_DIGEST_SIZE);
}

static int safexcel_hmac_sha224_init(struct ahash_request *areq)
{
	struct safexcel_ahash_ctx *ctx = crypto_ahash_ctx(crypto_ahash_reqtfm(areq));
	struct safexcel_ahash_req *req = ahash_request_ctx(areq);

	memset(req, 0, sizeof(*req));

	/* Start from ipad precompute */
	memcpy(req->state, ctx->ipad, SHA256_DIGEST_SIZE);
	/* Already processed the key^ipad part now! */
	req->len = SHA256_BLOCK_SIZE;
	req->processed = SHA256_BLOCK_SIZE;

	ctx->alg = CONTEXT_CONTROL_CRYPTO_ALG_SHA224;
	req->digest = CONTEXT_CONTROL_DIGEST_PRECOMPUTED;
	req->state_sz = SHA256_DIGEST_SIZE;
	req->digest_sz = SHA256_DIGEST_SIZE;
	req->block_sz = SHA256_BLOCK_SIZE;
	req->hmac = true;

	return 0;
}

static int safexcel_hmac_sha224_digest(struct ahash_request *areq)
{
	int ret = safexcel_hmac_sha224_init(areq);

	if (ret)
		return ret;

	return safexcel_ahash_finup(areq);
}

struct safexcel_alg_template safexcel_alg_hmac_sha224 = {
	.type = SAFEXCEL_ALG_TYPE_AHASH,
	.algo_mask = SAFEXCEL_ALG_SHA2_256,
	.alg.ahash = {
		.init = safexcel_hmac_sha224_init,
		.update = safexcel_ahash_update,
		.final = safexcel_ahash_final,
		.finup = safexcel_ahash_finup,
		.digest = safexcel_hmac_sha224_digest,
		.setkey = safexcel_hmac_sha224_setkey,
		.export = safexcel_ahash_export,
		.import = safexcel_ahash_import,
		.halg = {
			.digestsize = SHA224_DIGEST_SIZE,
			.statesize = sizeof(struct safexcel_ahash_export_state),
			.base = {
				.cra_name = "hmac(sha224)",
				.cra_driver_name = "safexcel-hmac-sha224",
				.cra_priority = SAFEXCEL_CRA_PRIORITY,
				.cra_flags = CRYPTO_ALG_ASYNC |
					     CRYPTO_ALG_KERN_DRIVER_ONLY,
				.cra_blocksize = SHA224_BLOCK_SIZE,
				.cra_ctxsize = sizeof(struct safexcel_ahash_ctx),
				.cra_init = safexcel_ahash_cra_init,
				.cra_exit = safexcel_ahash_cra_exit,
				.cra_module = THIS_MODULE,
			},
		},
	},
};

static int safexcel_hmac_sha256_setkey(struct crypto_ahash *tfm, const u8 *key,
				       unsigned int keylen)
{
	return safexcel_hmac_alg_setkey(tfm, key, keylen, "safexcel-sha256",
					SHA256_DIGEST_SIZE);
}

static int safexcel_hmac_sha256_init(struct ahash_request *areq)
{
	struct safexcel_ahash_ctx *ctx = crypto_ahash_ctx(crypto_ahash_reqtfm(areq));
	struct safexcel_ahash_req *req = ahash_request_ctx(areq);

	memset(req, 0, sizeof(*req));

	/* Start from ipad precompute */
	memcpy(req->state, ctx->ipad, SHA256_DIGEST_SIZE);
	/* Already processed the key^ipad part now! */
	req->len = SHA256_BLOCK_SIZE;
	req->processed = SHA256_BLOCK_SIZE;

	ctx->alg = CONTEXT_CONTROL_CRYPTO_ALG_SHA256;
	req->digest = CONTEXT_CONTROL_DIGEST_PRECOMPUTED;
	req->state_sz = SHA256_DIGEST_SIZE;
	req->digest_sz = SHA256_DIGEST_SIZE;
	req->block_sz = SHA256_BLOCK_SIZE;
	req->hmac = true;

	return 0;
}

static int safexcel_hmac_sha256_digest(struct ahash_request *areq)
{
	int ret = safexcel_hmac_sha256_init(areq);

	if (ret)
		return ret;

	return safexcel_ahash_finup(areq);
}

struct safexcel_alg_template safexcel_alg_hmac_sha256 = {
	.type = SAFEXCEL_ALG_TYPE_AHASH,
	.algo_mask = SAFEXCEL_ALG_SHA2_256,
	.alg.ahash = {
		.init = safexcel_hmac_sha256_init,
		.update = safexcel_ahash_update,
		.final = safexcel_ahash_final,
		.finup = safexcel_ahash_finup,
		.digest = safexcel_hmac_sha256_digest,
		.setkey = safexcel_hmac_sha256_setkey,
		.export = safexcel_ahash_export,
		.import = safexcel_ahash_import,
		.halg = {
			.digestsize = SHA256_DIGEST_SIZE,
			.statesize = sizeof(struct safexcel_ahash_export_state),
			.base = {
				.cra_name = "hmac(sha256)",
				.cra_driver_name = "safexcel-hmac-sha256",
				.cra_priority = SAFEXCEL_CRA_PRIORITY,
				.cra_flags = CRYPTO_ALG_ASYNC |
					     CRYPTO_ALG_KERN_DRIVER_ONLY,
				.cra_blocksize = SHA256_BLOCK_SIZE,
				.cra_ctxsize = sizeof(struct safexcel_ahash_ctx),
				.cra_init = safexcel_ahash_cra_init,
				.cra_exit = safexcel_ahash_cra_exit,
				.cra_module = THIS_MODULE,
			},
		},
	},
};

static int safexcel_sha512_init(struct ahash_request *areq)
{
	struct safexcel_ahash_ctx *ctx = crypto_ahash_ctx(crypto_ahash_reqtfm(areq));
	struct safexcel_ahash_req *req = ahash_request_ctx(areq);

	memset(req, 0, sizeof(*req));

	ctx->alg = CONTEXT_CONTROL_CRYPTO_ALG_SHA512;
	req->digest = CONTEXT_CONTROL_DIGEST_PRECOMPUTED;
	req->state_sz = SHA512_DIGEST_SIZE;
	req->digest_sz = SHA512_DIGEST_SIZE;
	req->block_sz = SHA512_BLOCK_SIZE;

	return 0;
}

static int safexcel_sha512_digest(struct ahash_request *areq)
{
	int ret = safexcel_sha512_init(areq);

	if (ret)
		return ret;

	return safexcel_ahash_finup(areq);
}

struct safexcel_alg_template safexcel_alg_sha512 = {
	.type = SAFEXCEL_ALG_TYPE_AHASH,
	.algo_mask = SAFEXCEL_ALG_SHA2_512,
	.alg.ahash = {
		.init = safexcel_sha512_init,
		.update = safexcel_ahash_update,
		.final = safexcel_ahash_final,
		.finup = safexcel_ahash_finup,
		.digest = safexcel_sha512_digest,
		.export = safexcel_ahash_export,
		.import = safexcel_ahash_import,
		.halg = {
			.digestsize = SHA512_DIGEST_SIZE,
			.statesize = sizeof(struct safexcel_ahash_export_state),
			.base = {
				.cra_name = "sha512",
				.cra_driver_name = "safexcel-sha512",
				.cra_priority = SAFEXCEL_CRA_PRIORITY,
				.cra_flags = CRYPTO_ALG_ASYNC |
					     CRYPTO_ALG_KERN_DRIVER_ONLY,
				.cra_blocksize = SHA512_BLOCK_SIZE,
				.cra_ctxsize = sizeof(struct safexcel_ahash_ctx),
				.cra_init = safexcel_ahash_cra_init,
				.cra_exit = safexcel_ahash_cra_exit,
				.cra_module = THIS_MODULE,
			},
		},
	},
};

static int safexcel_sha384_init(struct ahash_request *areq)
{
	struct safexcel_ahash_ctx *ctx = crypto_ahash_ctx(crypto_ahash_reqtfm(areq));
	struct safexcel_ahash_req *req = ahash_request_ctx(areq);

	memset(req, 0, sizeof(*req));

	ctx->alg = CONTEXT_CONTROL_CRYPTO_ALG_SHA384;
	req->digest = CONTEXT_CONTROL_DIGEST_PRECOMPUTED;
	req->state_sz = SHA512_DIGEST_SIZE;
	req->digest_sz = SHA512_DIGEST_SIZE;
	req->block_sz = SHA512_BLOCK_SIZE;

	return 0;
}

static int safexcel_sha384_digest(struct ahash_request *areq)
{
	int ret = safexcel_sha384_init(areq);

	if (ret)
		return ret;

	return safexcel_ahash_finup(areq);
}

struct safexcel_alg_template safexcel_alg_sha384 = {
	.type = SAFEXCEL_ALG_TYPE_AHASH,
	.algo_mask = SAFEXCEL_ALG_SHA2_512,
	.alg.ahash = {
		.init = safexcel_sha384_init,
		.update = safexcel_ahash_update,
		.final = safexcel_ahash_final,
		.finup = safexcel_ahash_finup,
		.digest = safexcel_sha384_digest,
		.export = safexcel_ahash_export,
		.import = safexcel_ahash_import,
		.halg = {
			.digestsize = SHA384_DIGEST_SIZE,
			.statesize = sizeof(struct safexcel_ahash_export_state),
			.base = {
				.cra_name = "sha384",
				.cra_driver_name = "safexcel-sha384",
				.cra_priority = SAFEXCEL_CRA_PRIORITY,
				.cra_flags = CRYPTO_ALG_ASYNC |
					     CRYPTO_ALG_KERN_DRIVER_ONLY,
				.cra_blocksize = SHA384_BLOCK_SIZE,
				.cra_ctxsize = sizeof(struct safexcel_ahash_ctx),
				.cra_init = safexcel_ahash_cra_init,
				.cra_exit = safexcel_ahash_cra_exit,
				.cra_module = THIS_MODULE,
			},
		},
	},
};

static int safexcel_hmac_sha512_setkey(struct crypto_ahash *tfm, const u8 *key,
				       unsigned int keylen)
{
	return safexcel_hmac_alg_setkey(tfm, key, keylen, "safexcel-sha512",
					SHA512_DIGEST_SIZE);
}

static int safexcel_hmac_sha512_init(struct ahash_request *areq)
{
	struct safexcel_ahash_ctx *ctx = crypto_ahash_ctx(crypto_ahash_reqtfm(areq));
	struct safexcel_ahash_req *req = ahash_request_ctx(areq);

	memset(req, 0, sizeof(*req));

	/* Start from ipad precompute */
	memcpy(req->state, ctx->ipad, SHA512_DIGEST_SIZE);
	/* Already processed the key^ipad part now! */
	req->len = SHA512_BLOCK_SIZE;
	req->processed = SHA512_BLOCK_SIZE;

	ctx->alg = CONTEXT_CONTROL_CRYPTO_ALG_SHA512;
	req->digest = CONTEXT_CONTROL_DIGEST_PRECOMPUTED;
	req->state_sz = SHA512_DIGEST_SIZE;
	req->digest_sz = SHA512_DIGEST_SIZE;
	req->block_sz = SHA512_BLOCK_SIZE;
	req->hmac = true;

	return 0;
}

static int safexcel_hmac_sha512_digest(struct ahash_request *areq)
{
	int ret = safexcel_hmac_sha512_init(areq);

	if (ret)
		return ret;

	return safexcel_ahash_finup(areq);
}

struct safexcel_alg_template safexcel_alg_hmac_sha512 = {
	.type = SAFEXCEL_ALG_TYPE_AHASH,
	.algo_mask = SAFEXCEL_ALG_SHA2_512,
	.alg.ahash = {
		.init = safexcel_hmac_sha512_init,
		.update = safexcel_ahash_update,
		.final = safexcel_ahash_final,
		.finup = safexcel_ahash_finup,
		.digest = safexcel_hmac_sha512_digest,
		.setkey = safexcel_hmac_sha512_setkey,
		.export = safexcel_ahash_export,
		.import = safexcel_ahash_import,
		.halg = {
			.digestsize = SHA512_DIGEST_SIZE,
			.statesize = sizeof(struct safexcel_ahash_export_state),
			.base = {
				.cra_name = "hmac(sha512)",
				.cra_driver_name = "safexcel-hmac-sha512",
				.cra_priority = SAFEXCEL_CRA_PRIORITY,
				.cra_flags = CRYPTO_ALG_ASYNC |
					     CRYPTO_ALG_KERN_DRIVER_ONLY,
				.cra_blocksize = SHA512_BLOCK_SIZE,
				.cra_ctxsize = sizeof(struct safexcel_ahash_ctx),
				.cra_init = safexcel_ahash_cra_init,
				.cra_exit = safexcel_ahash_cra_exit,
				.cra_module = THIS_MODULE,
			},
		},
	},
};

static int safexcel_hmac_sha384_setkey(struct crypto_ahash *tfm, const u8 *key,
				       unsigned int keylen)
{
	return safexcel_hmac_alg_setkey(tfm, key, keylen, "safexcel-sha384",
					SHA512_DIGEST_SIZE);
}

static int safexcel_hmac_sha384_init(struct ahash_request *areq)
{
	struct safexcel_ahash_ctx *ctx = crypto_ahash_ctx(crypto_ahash_reqtfm(areq));
	struct safexcel_ahash_req *req = ahash_request_ctx(areq);

	memset(req, 0, sizeof(*req));

	/* Start from ipad precompute */
	memcpy(req->state, ctx->ipad, SHA512_DIGEST_SIZE);
	/* Already processed the key^ipad part now! */
	req->len = SHA512_BLOCK_SIZE;
	req->processed = SHA512_BLOCK_SIZE;

	ctx->alg = CONTEXT_CONTROL_CRYPTO_ALG_SHA384;
	req->digest = CONTEXT_CONTROL_DIGEST_PRECOMPUTED;
	req->state_sz = SHA512_DIGEST_SIZE;
	req->digest_sz = SHA512_DIGEST_SIZE;
	req->block_sz = SHA512_BLOCK_SIZE;
	req->hmac = true;

	return 0;
}

static int safexcel_hmac_sha384_digest(struct ahash_request *areq)
{
	int ret = safexcel_hmac_sha384_init(areq);

	if (ret)
		return ret;

	return safexcel_ahash_finup(areq);
}

struct safexcel_alg_template safexcel_alg_hmac_sha384 = {
	.type = SAFEXCEL_ALG_TYPE_AHASH,
	.algo_mask = SAFEXCEL_ALG_SHA2_512,
	.alg.ahash = {
		.init = safexcel_hmac_sha384_init,
		.update = safexcel_ahash_update,
		.final = safexcel_ahash_final,
		.finup = safexcel_ahash_finup,
		.digest = safexcel_hmac_sha384_digest,
		.setkey = safexcel_hmac_sha384_setkey,
		.export = safexcel_ahash_export,
		.import = safexcel_ahash_import,
		.halg = {
			.digestsize = SHA384_DIGEST_SIZE,
			.statesize = sizeof(struct safexcel_ahash_export_state),
			.base = {
				.cra_name = "hmac(sha384)",
				.cra_driver_name = "safexcel-hmac-sha384",
				.cra_priority = SAFEXCEL_CRA_PRIORITY,
				.cra_flags = CRYPTO_ALG_ASYNC |
					     CRYPTO_ALG_KERN_DRIVER_ONLY,
				.cra_blocksize = SHA384_BLOCK_SIZE,
				.cra_ctxsize = sizeof(struct safexcel_ahash_ctx),
				.cra_init = safexcel_ahash_cra_init,
				.cra_exit = safexcel_ahash_cra_exit,
				.cra_module = THIS_MODULE,
			},
		},
	},
};

1757static int safexcel_md5_init(struct ahash_request *areq)
1758{
1759 struct safexcel_ahash_ctx *ctx = crypto_ahash_ctx(crypto_ahash_reqtfm(areq));
1760 struct safexcel_ahash_req *req = ahash_request_ctx(areq);
1761
1762 memset(req, 0, sizeof(*req));
1763
293f89cf
OH
1764 ctx->alg = CONTEXT_CONTROL_CRYPTO_ALG_MD5;
1765 req->digest = CONTEXT_CONTROL_DIGEST_PRECOMPUTED;
1766 req->state_sz = MD5_DIGEST_SIZE;
6c1c09b3 1767 req->digest_sz = MD5_DIGEST_SIZE;
41abed7d 1768 req->block_sz = MD5_HMAC_BLOCK_SIZE;
293f89cf
OH
1769
1770 return 0;
1771}
1772
1773static int safexcel_md5_digest(struct ahash_request *areq)
1774{
1775 int ret = safexcel_md5_init(areq);
1776
1777 if (ret)
1778 return ret;
1779
1780 return safexcel_ahash_finup(areq);
1781}
1782
1783struct safexcel_alg_template safexcel_alg_md5 = {
1784 .type = SAFEXCEL_ALG_TYPE_AHASH,
062b64ca 1785 .algo_mask = SAFEXCEL_ALG_MD5,
293f89cf
OH
1786 .alg.ahash = {
1787 .init = safexcel_md5_init,
1788 .update = safexcel_ahash_update,
1789 .final = safexcel_ahash_final,
1790 .finup = safexcel_ahash_finup,
1791 .digest = safexcel_md5_digest,
1792 .export = safexcel_ahash_export,
1793 .import = safexcel_ahash_import,
1794 .halg = {
1795 .digestsize = MD5_DIGEST_SIZE,
1796 .statesize = sizeof(struct safexcel_ahash_export_state),
1797 .base = {
1798 .cra_name = "md5",
1799 .cra_driver_name = "safexcel-md5",
aa88f331 1800 .cra_priority = SAFEXCEL_CRA_PRIORITY,
293f89cf
OH
1801 .cra_flags = CRYPTO_ALG_ASYNC |
1802 CRYPTO_ALG_KERN_DRIVER_ONLY,
1803 .cra_blocksize = MD5_HMAC_BLOCK_SIZE,
1804 .cra_ctxsize = sizeof(struct safexcel_ahash_ctx),
1805 .cra_init = safexcel_ahash_cra_init,
1806 .cra_exit = safexcel_ahash_cra_exit,
1807 .cra_module = THIS_MODULE,
1808 },
1809 },
1810 },
1811};
b471e4b9
OH
1812
1813static int safexcel_hmac_md5_init(struct ahash_request *areq)
1814{
41abed7d 1815 struct safexcel_ahash_ctx *ctx = crypto_ahash_ctx(crypto_ahash_reqtfm(areq));
b471e4b9
OH
1816 struct safexcel_ahash_req *req = ahash_request_ctx(areq);
1817
41abed7d
PL
1818 memset(req, 0, sizeof(*req));
1819
1820 /* Start from ipad precompute */
1821 memcpy(req->state, ctx->ipad, MD5_DIGEST_SIZE);
1822 /* Already processed the key^ipad part now! */
31fb084c
PL
1823 req->len = MD5_HMAC_BLOCK_SIZE;
1824 req->processed = MD5_HMAC_BLOCK_SIZE;
41abed7d
PL
1825
1826 ctx->alg = CONTEXT_CONTROL_CRYPTO_ALG_MD5;
1827 req->digest = CONTEXT_CONTROL_DIGEST_PRECOMPUTED;
1828 req->state_sz = MD5_DIGEST_SIZE;
6c1c09b3 1829 req->digest_sz = MD5_DIGEST_SIZE;
41abed7d 1830 req->block_sz = MD5_HMAC_BLOCK_SIZE;
85b36ee8 1831 req->len_is_le = true; /* MD5 is little endian! ... */
41abed7d
PL
1832 req->hmac = true;
1833
b471e4b9
OH
1834 return 0;
1835}
1836
1837static int safexcel_hmac_md5_setkey(struct crypto_ahash *tfm, const u8 *key,
1838 unsigned int keylen)
1839{
1840 return safexcel_hmac_alg_setkey(tfm, key, keylen, "safexcel-md5",
1841 MD5_DIGEST_SIZE);
1842}
1843
1844static int safexcel_hmac_md5_digest(struct ahash_request *areq)
1845{
1846 int ret = safexcel_hmac_md5_init(areq);
1847
1848 if (ret)
1849 return ret;
1850
1851 return safexcel_ahash_finup(areq);
1852}
1853
1854struct safexcel_alg_template safexcel_alg_hmac_md5 = {
1855 .type = SAFEXCEL_ALG_TYPE_AHASH,
062b64ca 1856 .algo_mask = SAFEXCEL_ALG_MD5,
b471e4b9
OH
1857 .alg.ahash = {
1858 .init = safexcel_hmac_md5_init,
1859 .update = safexcel_ahash_update,
1860 .final = safexcel_ahash_final,
1861 .finup = safexcel_ahash_finup,
1862 .digest = safexcel_hmac_md5_digest,
1863 .setkey = safexcel_hmac_md5_setkey,
1864 .export = safexcel_ahash_export,
1865 .import = safexcel_ahash_import,
1866 .halg = {
1867 .digestsize = MD5_DIGEST_SIZE,
1868 .statesize = sizeof(struct safexcel_ahash_export_state),
1869 .base = {
1870 .cra_name = "hmac(md5)",
1871 .cra_driver_name = "safexcel-hmac-md5",
aa88f331 1872 .cra_priority = SAFEXCEL_CRA_PRIORITY,
b471e4b9
OH
1873 .cra_flags = CRYPTO_ALG_ASYNC |
1874 CRYPTO_ALG_KERN_DRIVER_ONLY,
1875 .cra_blocksize = MD5_HMAC_BLOCK_SIZE,
1876 .cra_ctxsize = sizeof(struct safexcel_ahash_ctx),
1877 .cra_init = safexcel_ahash_cra_init,
1878 .cra_exit = safexcel_ahash_cra_exit,
1879 .cra_module = THIS_MODULE,
1880 },
1881 },
1882 },
1883};
a7cf8658
PL
1884
1885static int safexcel_crc32_cra_init(struct crypto_tfm *tfm)
1886{
1887 struct safexcel_ahash_ctx *ctx = crypto_tfm_ctx(tfm);
1888 int ret = safexcel_ahash_cra_init(tfm);
1889
1890 /* Default 'key' is all zeroes */
1891 memset(ctx->ipad, 0, sizeof(u32));
1892 return ret;
1893}
1894
1895static int safexcel_crc32_init(struct ahash_request *areq)
1896{
1897 struct safexcel_ahash_ctx *ctx = crypto_ahash_ctx(crypto_ahash_reqtfm(areq));
1898 struct safexcel_ahash_req *req = ahash_request_ctx(areq);
1899
1900 memset(req, 0, sizeof(*req));
1901
1902 /* Start from loaded key */
13a1bb93 1903 req->state[0] = (__force __le32)le32_to_cpu(~ctx->ipad[0]);
a7cf8658
PL
1904 /* Set processed to non-zero to enable invalidation detection */
1905 req->len = sizeof(u32);
1906 req->processed = sizeof(u32);
1907
1908 ctx->alg = CONTEXT_CONTROL_CRYPTO_ALG_CRC32;
1909 req->digest = CONTEXT_CONTROL_DIGEST_XCM;
1910 req->state_sz = sizeof(u32);
6c1c09b3 1911 req->digest_sz = sizeof(u32);
a7cf8658
PL
1912 req->block_sz = sizeof(u32);
1913
1914 return 0;
1915}
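
/*
 * Note: the 32-bit CRC32 "key" is simply the initial CRC value; the
 * driver loads its bitwise complement into the engine state, so the
 * all-zeroes default key corresponds to the conventional CRC-32 preset
 * of 0xffffffff.
 */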

static int safexcel_crc32_setkey(struct crypto_ahash *tfm, const u8 *key,
				 unsigned int keylen)
{
	struct safexcel_ahash_ctx *ctx = crypto_tfm_ctx(crypto_ahash_tfm(tfm));

	if (keylen != sizeof(u32))
		return -EINVAL;

	memcpy(ctx->ipad, key, sizeof(u32));
	return 0;
}

static int safexcel_crc32_digest(struct ahash_request *areq)
{
	return safexcel_crc32_init(areq) ?: safexcel_ahash_finup(areq);
}

struct safexcel_alg_template safexcel_alg_crc32 = {
	.type = SAFEXCEL_ALG_TYPE_AHASH,
	.algo_mask = 0,
	.alg.ahash = {
		.init = safexcel_crc32_init,
		.update = safexcel_ahash_update,
		.final = safexcel_ahash_final,
		.finup = safexcel_ahash_finup,
		.digest = safexcel_crc32_digest,
		.setkey = safexcel_crc32_setkey,
		.export = safexcel_ahash_export,
		.import = safexcel_ahash_import,
		.halg = {
			.digestsize = sizeof(u32),
			.statesize = sizeof(struct safexcel_ahash_export_state),
			.base = {
				.cra_name = "crc32",
				.cra_driver_name = "safexcel-crc32",
				.cra_priority = SAFEXCEL_CRA_PRIORITY,
				.cra_flags = CRYPTO_ALG_OPTIONAL_KEY |
					     CRYPTO_ALG_ASYNC |
					     CRYPTO_ALG_KERN_DRIVER_ONLY,
				.cra_blocksize = 1,
				.cra_ctxsize = sizeof(struct safexcel_ahash_ctx),
				.cra_init = safexcel_crc32_cra_init,
				.cra_exit = safexcel_ahash_cra_exit,
				.cra_module = THIS_MODULE,
			},
		},
	},
};
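
/*
 * CRYPTO_ALG_OPTIONAL_KEY marks the key as optional: crc32 may be used
 * without a prior setkey, in which case the all-zeroes default set up
 * in safexcel_crc32_cra_init() applies (which, after the inversion in
 * safexcel_crc32_init(), gives the standard 0xffffffff preset).
 */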

static int safexcel_cbcmac_init(struct ahash_request *areq)
{
	struct safexcel_ahash_ctx *ctx = crypto_ahash_ctx(crypto_ahash_reqtfm(areq));
	struct safexcel_ahash_req *req = ahash_request_ctx(areq);

	memset(req, 0, sizeof(*req));

	/* Start from loaded keys */
	memcpy(req->state, ctx->ipad, ctx->key_sz);
	/* Set processed to non-zero to enable invalidation detection */
	req->len = AES_BLOCK_SIZE;
	req->processed = AES_BLOCK_SIZE;

	req->digest = CONTEXT_CONTROL_DIGEST_XCM;
	req->state_sz = ctx->key_sz;
	req->digest_sz = AES_BLOCK_SIZE;
	req->block_sz = AES_BLOCK_SIZE;
	req->xcbcmac = true;

	return 0;
}

static int safexcel_cbcmac_setkey(struct crypto_ahash *tfm, const u8 *key,
				  unsigned int len)
{
	struct safexcel_ahash_ctx *ctx = crypto_tfm_ctx(crypto_ahash_tfm(tfm));
	struct crypto_aes_ctx aes;
	int ret, i;

	ret = aes_expandkey(&aes, key, len);
	if (ret)
		return ret;

	memset(ctx->ipad, 0, 2 * AES_BLOCK_SIZE);
	for (i = 0; i < len / sizeof(u32); i++)
		ctx->ipad[i + 8] = (__force __le32)cpu_to_be32(aes.key_enc[i]);

	if (len == AES_KEYSIZE_192) {
		ctx->alg = CONTEXT_CONTROL_CRYPTO_ALG_XCBC192;
		ctx->key_sz = AES_MAX_KEY_SIZE + 2 * AES_BLOCK_SIZE;
	} else if (len == AES_KEYSIZE_256) {
		ctx->alg = CONTEXT_CONTROL_CRYPTO_ALG_XCBC256;
		ctx->key_sz = AES_MAX_KEY_SIZE + 2 * AES_BLOCK_SIZE;
	} else {
		ctx->alg = CONTEXT_CONTROL_CRYPTO_ALG_XCBC128;
		ctx->key_sz = AES_MIN_KEY_SIZE + 2 * AES_BLOCK_SIZE;
	}
	ctx->cbcmac = true;

	memzero_explicit(&aes, sizeof(aes));
	return 0;
}
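
/*
 * Key layout note: ctx->ipad holds 2 AES blocks of XCBC K2/K3 subkey
 * material followed by the AES key itself at byte offset 32 (word 8).
 * For plain CBC-MAC the subkey slots are zeroed above, so the engine's
 * XCBC machinery degenerates to straight CBC-MAC over the padded input.
 */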

static int safexcel_cbcmac_digest(struct ahash_request *areq)
{
	return safexcel_cbcmac_init(areq) ?: safexcel_ahash_finup(areq);
}

struct safexcel_alg_template safexcel_alg_cbcmac = {
	.type = SAFEXCEL_ALG_TYPE_AHASH,
	.algo_mask = 0,
	.alg.ahash = {
		.init = safexcel_cbcmac_init,
		.update = safexcel_ahash_update,
		.final = safexcel_ahash_final,
		.finup = safexcel_ahash_finup,
		.digest = safexcel_cbcmac_digest,
		.setkey = safexcel_cbcmac_setkey,
		.export = safexcel_ahash_export,
		.import = safexcel_ahash_import,
		.halg = {
			.digestsize = AES_BLOCK_SIZE,
			.statesize = sizeof(struct safexcel_ahash_export_state),
			.base = {
				.cra_name = "cbcmac(aes)",
				.cra_driver_name = "safexcel-cbcmac-aes",
				.cra_priority = SAFEXCEL_CRA_PRIORITY,
				.cra_flags = CRYPTO_ALG_ASYNC |
					     CRYPTO_ALG_KERN_DRIVER_ONLY,
				.cra_blocksize = 1,
				.cra_ctxsize = sizeof(struct safexcel_ahash_ctx),
				.cra_init = safexcel_ahash_cra_init,
				.cra_exit = safexcel_ahash_cra_exit,
				.cra_module = THIS_MODULE,
			},
		},
	},
};

static int safexcel_xcbcmac_setkey(struct crypto_ahash *tfm, const u8 *key,
				   unsigned int len)
{
	struct safexcel_ahash_ctx *ctx = crypto_tfm_ctx(crypto_ahash_tfm(tfm));
	struct crypto_aes_ctx aes;
	u32 key_tmp[3 * AES_BLOCK_SIZE / sizeof(u32)];
	int ret, i;

	ret = aes_expandkey(&aes, key, len);
	if (ret)
		return ret;

	/* precompute the XCBC key material */
	crypto_cipher_clear_flags(ctx->kaes, CRYPTO_TFM_REQ_MASK);
	crypto_cipher_set_flags(ctx->kaes, crypto_ahash_get_flags(tfm) &
				CRYPTO_TFM_REQ_MASK);
	ret = crypto_cipher_setkey(ctx->kaes, key, len);
	crypto_ahash_set_flags(tfm, crypto_cipher_get_flags(ctx->kaes) &
			       CRYPTO_TFM_RES_MASK);
	if (ret)
		return ret;

	crypto_cipher_encrypt_one(ctx->kaes, (u8 *)key_tmp + 2 * AES_BLOCK_SIZE,
		"\x1\x1\x1\x1\x1\x1\x1\x1\x1\x1\x1\x1\x1\x1\x1\x1");
	crypto_cipher_encrypt_one(ctx->kaes, (u8 *)key_tmp,
		"\x2\x2\x2\x2\x2\x2\x2\x2\x2\x2\x2\x2\x2\x2\x2\x2");
	crypto_cipher_encrypt_one(ctx->kaes, (u8 *)key_tmp + AES_BLOCK_SIZE,
		"\x3\x3\x3\x3\x3\x3\x3\x3\x3\x3\x3\x3\x3\x3\x3\x3");
	for (i = 0; i < 3 * AES_BLOCK_SIZE / sizeof(u32); i++)
		ctx->ipad[i] =
			cpu_to_le32((__force u32)cpu_to_be32(key_tmp[i]));

	crypto_cipher_clear_flags(ctx->kaes, CRYPTO_TFM_REQ_MASK);
	crypto_cipher_set_flags(ctx->kaes, crypto_ahash_get_flags(tfm) &
				CRYPTO_TFM_REQ_MASK);
	ret = crypto_cipher_setkey(ctx->kaes,
				   (u8 *)key_tmp + 2 * AES_BLOCK_SIZE,
				   AES_MIN_KEY_SIZE);
	crypto_ahash_set_flags(tfm, crypto_cipher_get_flags(ctx->kaes) &
			       CRYPTO_TFM_RES_MASK);
	if (ret)
		return ret;

	ctx->alg = CONTEXT_CONTROL_CRYPTO_ALG_XCBC128;
	ctx->key_sz = AES_MIN_KEY_SIZE + 2 * AES_BLOCK_SIZE;
	ctx->cbcmac = false;

	memzero_explicit(&aes, sizeof(aes));
	return 0;
}
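
/*
 * The above implements the RFC 3566 XCBC-MAC key derivation:
 * K1 = E(K, 0x01^16), K2 = E(K, 0x02^16), K3 = E(K, 0x03^16).
 * K1 replaces the original key for the CBC chaining itself (hence the
 * second crypto_cipher_setkey() with AES_MIN_KEY_SIZE, K1 being a
 * single 128-bit block), while K2/K3 are handed to the engine for the
 * final-block whitening.
 */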

static int safexcel_xcbcmac_cra_init(struct crypto_tfm *tfm)
{
	struct safexcel_ahash_ctx *ctx = crypto_tfm_ctx(tfm);

	safexcel_ahash_cra_init(tfm);
	ctx->kaes = crypto_alloc_cipher("aes", 0, 0);
	return PTR_ERR_OR_ZERO(ctx->kaes);
}

static void safexcel_xcbcmac_cra_exit(struct crypto_tfm *tfm)
{
	struct safexcel_ahash_ctx *ctx = crypto_tfm_ctx(tfm);

	crypto_free_cipher(ctx->kaes);
	safexcel_ahash_cra_exit(tfm);
}

struct safexcel_alg_template safexcel_alg_xcbcmac = {
	.type = SAFEXCEL_ALG_TYPE_AHASH,
	.algo_mask = 0,
	.alg.ahash = {
		.init = safexcel_cbcmac_init,
		.update = safexcel_ahash_update,
		.final = safexcel_ahash_final,
		.finup = safexcel_ahash_finup,
		.digest = safexcel_cbcmac_digest,
		.setkey = safexcel_xcbcmac_setkey,
		.export = safexcel_ahash_export,
		.import = safexcel_ahash_import,
		.halg = {
			.digestsize = AES_BLOCK_SIZE,
			.statesize = sizeof(struct safexcel_ahash_export_state),
			.base = {
				.cra_name = "xcbc(aes)",
				.cra_driver_name = "safexcel-xcbc-aes",
				.cra_priority = SAFEXCEL_CRA_PRIORITY,
				.cra_flags = CRYPTO_ALG_ASYNC |
					     CRYPTO_ALG_KERN_DRIVER_ONLY,
				.cra_blocksize = AES_BLOCK_SIZE,
				.cra_ctxsize = sizeof(struct safexcel_ahash_ctx),
				.cra_init = safexcel_xcbcmac_cra_init,
				.cra_exit = safexcel_xcbcmac_cra_exit,
				.cra_module = THIS_MODULE,
			},
		},
	},
};

static int safexcel_cmac_setkey(struct crypto_ahash *tfm, const u8 *key,
				unsigned int len)
{
	struct safexcel_ahash_ctx *ctx = crypto_tfm_ctx(crypto_ahash_tfm(tfm));
	struct crypto_aes_ctx aes;
	__be64 consts[4];
	u64 _const[2];
	u8 msb_mask, gfmask;
	int ret, i;

	ret = aes_expandkey(&aes, key, len);
	if (ret)
		return ret;

	for (i = 0; i < len / sizeof(u32); i++)
		ctx->ipad[i + 8] =
			cpu_to_le32((__force u32)cpu_to_be32(aes.key_enc[i]));

	/* precompute the CMAC key material */
	crypto_cipher_clear_flags(ctx->kaes, CRYPTO_TFM_REQ_MASK);
	crypto_cipher_set_flags(ctx->kaes, crypto_ahash_get_flags(tfm) &
				CRYPTO_TFM_REQ_MASK);
	ret = crypto_cipher_setkey(ctx->kaes, key, len);
	crypto_ahash_set_flags(tfm, crypto_cipher_get_flags(ctx->kaes) &
			       CRYPTO_TFM_RES_MASK);
	if (ret)
		return ret;

	/* code below borrowed from crypto/cmac.c */
	/* encrypt the zero block */
	memset(consts, 0, AES_BLOCK_SIZE);
	crypto_cipher_encrypt_one(ctx->kaes, (u8 *)consts, (u8 *)consts);

	gfmask = 0x87;
	_const[0] = be64_to_cpu(consts[1]);
	_const[1] = be64_to_cpu(consts[0]);

	/* gf(2^128) multiply zero-ciphertext with u and u^2 */
	for (i = 0; i < 4; i += 2) {
		msb_mask = ((s64)_const[1] >> 63) & gfmask;
		_const[1] = (_const[1] << 1) | (_const[0] >> 63);
		_const[0] = (_const[0] << 1) ^ msb_mask;

		consts[i + 0] = cpu_to_be64(_const[1]);
		consts[i + 1] = cpu_to_be64(_const[0]);
	}
	/* end of code borrowed from crypto/cmac.c */

	for (i = 0; i < 2 * AES_BLOCK_SIZE / sizeof(u32); i++)
		ctx->ipad[i] = (__force __le32)cpu_to_be32(((u32 *)consts)[i]);

	if (len == AES_KEYSIZE_192) {
		ctx->alg = CONTEXT_CONTROL_CRYPTO_ALG_XCBC192;
		ctx->key_sz = AES_MAX_KEY_SIZE + 2 * AES_BLOCK_SIZE;
	} else if (len == AES_KEYSIZE_256) {
		ctx->alg = CONTEXT_CONTROL_CRYPTO_ALG_XCBC256;
		ctx->key_sz = AES_MAX_KEY_SIZE + 2 * AES_BLOCK_SIZE;
	} else {
		ctx->alg = CONTEXT_CONTROL_CRYPTO_ALG_XCBC128;
		ctx->key_sz = AES_MIN_KEY_SIZE + 2 * AES_BLOCK_SIZE;
	}
	ctx->cbcmac = false;

	memzero_explicit(&aes, sizeof(aes));
	return 0;
}
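
/*
 * The subkey derivation above is the NIST SP 800-38B scheme reused from
 * crypto/cmac.c: L = E(K, 0^128), then K1 = L*u and K2 = L*u^2 in
 * GF(2^128) with reduction polynomial x^128 + x^7 + x^2 + x + 1, which
 * is where the 0x87 mask applied on a carry out of the MSB comes from.
 */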

struct safexcel_alg_template safexcel_alg_cmac = {
	.type = SAFEXCEL_ALG_TYPE_AHASH,
	.algo_mask = 0,
	.alg.ahash = {
		.init = safexcel_cbcmac_init,
		.update = safexcel_ahash_update,
		.final = safexcel_ahash_final,
		.finup = safexcel_ahash_finup,
		.digest = safexcel_cbcmac_digest,
		.setkey = safexcel_cmac_setkey,
		.export = safexcel_ahash_export,
		.import = safexcel_ahash_import,
		.halg = {
			.digestsize = AES_BLOCK_SIZE,
			.statesize = sizeof(struct safexcel_ahash_export_state),
			.base = {
				.cra_name = "cmac(aes)",
				.cra_driver_name = "safexcel-cmac-aes",
				.cra_priority = SAFEXCEL_CRA_PRIORITY,
				.cra_flags = CRYPTO_ALG_ASYNC |
					     CRYPTO_ALG_KERN_DRIVER_ONLY,
				.cra_blocksize = AES_BLOCK_SIZE,
				.cra_ctxsize = sizeof(struct safexcel_ahash_ctx),
				.cra_init = safexcel_xcbcmac_cra_init,
				.cra_exit = safexcel_xcbcmac_cra_exit,
				.cra_module = THIS_MODULE,
			},
		},
	},
};
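
/*
 * Illustrative consumer sketch (an assumption about typical usage, not
 * part of this driver): kernel users reach these templates through the
 * generic ahash API by cra_name, and the high cra_priority makes the
 * crypto core prefer the hardware implementation when it is registered.
 *
 *	struct crypto_ahash *tfm = crypto_alloc_ahash("cmac(aes)", 0, 0);
 *
 *	if (!IS_ERR(tfm)) {
 *		crypto_ahash_setkey(tfm, key, AES_KEYSIZE_128);
 *		// ... build an ahash_request, then crypto_ahash_digest()
 *		crypto_free_ahash(tfm);
 *	}
 */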

static int safexcel_sm3_init(struct ahash_request *areq)
{
	struct safexcel_ahash_ctx *ctx = crypto_ahash_ctx(crypto_ahash_reqtfm(areq));
	struct safexcel_ahash_req *req = ahash_request_ctx(areq);

	memset(req, 0, sizeof(*req));

	ctx->alg = CONTEXT_CONTROL_CRYPTO_ALG_SM3;
	req->digest = CONTEXT_CONTROL_DIGEST_PRECOMPUTED;
	req->state_sz = SM3_DIGEST_SIZE;
	req->digest_sz = SM3_DIGEST_SIZE;
	req->block_sz = SM3_BLOCK_SIZE;

	return 0;
}

static int safexcel_sm3_digest(struct ahash_request *areq)
{
	int ret = safexcel_sm3_init(areq);

	if (ret)
		return ret;

	return safexcel_ahash_finup(areq);
}

struct safexcel_alg_template safexcel_alg_sm3 = {
	.type = SAFEXCEL_ALG_TYPE_AHASH,
	.algo_mask = SAFEXCEL_ALG_SM3,
	.alg.ahash = {
		.init = safexcel_sm3_init,
		.update = safexcel_ahash_update,
		.final = safexcel_ahash_final,
		.finup = safexcel_ahash_finup,
		.digest = safexcel_sm3_digest,
		.export = safexcel_ahash_export,
		.import = safexcel_ahash_import,
		.halg = {
			.digestsize = SM3_DIGEST_SIZE,
			.statesize = sizeof(struct safexcel_ahash_export_state),
			.base = {
				.cra_name = "sm3",
				.cra_driver_name = "safexcel-sm3",
				.cra_priority = SAFEXCEL_CRA_PRIORITY,
				.cra_flags = CRYPTO_ALG_ASYNC |
					     CRYPTO_ALG_KERN_DRIVER_ONLY,
				.cra_blocksize = SM3_BLOCK_SIZE,
				.cra_ctxsize = sizeof(struct safexcel_ahash_ctx),
				.cra_init = safexcel_ahash_cra_init,
				.cra_exit = safexcel_ahash_cra_exit,
				.cra_module = THIS_MODULE,
			},
		},
	},
};

static int safexcel_hmac_sm3_setkey(struct crypto_ahash *tfm, const u8 *key,
				    unsigned int keylen)
{
	return safexcel_hmac_alg_setkey(tfm, key, keylen, "safexcel-sm3",
					SM3_DIGEST_SIZE);
}

static int safexcel_hmac_sm3_init(struct ahash_request *areq)
{
	struct safexcel_ahash_ctx *ctx = crypto_ahash_ctx(crypto_ahash_reqtfm(areq));
	struct safexcel_ahash_req *req = ahash_request_ctx(areq);

	memset(req, 0, sizeof(*req));

	/* Start from ipad precompute */
	memcpy(req->state, ctx->ipad, SM3_DIGEST_SIZE);
	/* Already processed the key^ipad part now! */
	req->len = SM3_BLOCK_SIZE;
	req->processed = SM3_BLOCK_SIZE;

	ctx->alg = CONTEXT_CONTROL_CRYPTO_ALG_SM3;
	req->digest = CONTEXT_CONTROL_DIGEST_PRECOMPUTED;
	req->state_sz = SM3_DIGEST_SIZE;
	req->digest_sz = SM3_DIGEST_SIZE;
	req->block_sz = SM3_BLOCK_SIZE;
	req->hmac = true;

	return 0;
}

static int safexcel_hmac_sm3_digest(struct ahash_request *areq)
{
	int ret = safexcel_hmac_sm3_init(areq);

	if (ret)
		return ret;

	return safexcel_ahash_finup(areq);
}

struct safexcel_alg_template safexcel_alg_hmac_sm3 = {
	.type = SAFEXCEL_ALG_TYPE_AHASH,
	.algo_mask = SAFEXCEL_ALG_SM3,
	.alg.ahash = {
		.init = safexcel_hmac_sm3_init,
		.update = safexcel_ahash_update,
		.final = safexcel_ahash_final,
		.finup = safexcel_ahash_finup,
		.digest = safexcel_hmac_sm3_digest,
		.setkey = safexcel_hmac_sm3_setkey,
		.export = safexcel_ahash_export,
		.import = safexcel_ahash_import,
		.halg = {
			.digestsize = SM3_DIGEST_SIZE,
			.statesize = sizeof(struct safexcel_ahash_export_state),
			.base = {
				.cra_name = "hmac(sm3)",
				.cra_driver_name = "safexcel-hmac-sm3",
				.cra_priority = SAFEXCEL_CRA_PRIORITY,
				.cra_flags = CRYPTO_ALG_ASYNC |
					     CRYPTO_ALG_KERN_DRIVER_ONLY,
				.cra_blocksize = SM3_BLOCK_SIZE,
				.cra_ctxsize = sizeof(struct safexcel_ahash_ctx),
				.cra_init = safexcel_ahash_cra_init,
				.cra_exit = safexcel_ahash_cra_exit,
				.cra_module = THIS_MODULE,
			},
		},
	},
};

static int safexcel_sha3_224_init(struct ahash_request *areq)
{
	struct crypto_ahash *tfm = crypto_ahash_reqtfm(areq);
	struct safexcel_ahash_ctx *ctx = crypto_ahash_ctx(tfm);
	struct safexcel_ahash_req *req = ahash_request_ctx(areq);

	memset(req, 0, sizeof(*req));

	ctx->alg = CONTEXT_CONTROL_CRYPTO_ALG_SHA3_224;
	req->digest = CONTEXT_CONTROL_DIGEST_INITIAL;
	req->state_sz = SHA3_224_DIGEST_SIZE;
	req->digest_sz = SHA3_224_DIGEST_SIZE;
	req->block_sz = SHA3_224_BLOCK_SIZE;
	ctx->do_fallback = false;
	ctx->fb_init_done = false;
	return 0;
}

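/*
 * SHA3 fallback strategy: the engine can only produce a SHA3 digest in
 * a single pass; it cannot export/import a partial hash state and
 * cannot hash zero-length input. Any update(), export()/import() or
 * zero-length request is therefore routed in its entirety to the
 * software fallback allocated in safexcel_sha3_cra_init(); only a
 * one-shot digest of nonzero length stays on the hardware.
 */
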
static int safexcel_sha3_fbcheck(struct ahash_request *req)
{
	struct crypto_ahash *tfm = crypto_ahash_reqtfm(req);
	struct safexcel_ahash_ctx *ctx = crypto_ahash_ctx(tfm);
	struct ahash_request *subreq = ahash_request_ctx(req);
	int ret = 0;

	if (ctx->do_fallback) {
		ahash_request_set_tfm(subreq, ctx->fback);
		ahash_request_set_callback(subreq, req->base.flags,
					   req->base.complete, req->base.data);
		ahash_request_set_crypt(subreq, req->src, req->result,
					req->nbytes);
		if (!ctx->fb_init_done) {
			if (ctx->fb_do_setkey) {
				/* Set fallback cipher HMAC key */
				u8 key[SHA3_224_BLOCK_SIZE];

				memcpy(key, ctx->ipad,
				       crypto_ahash_blocksize(ctx->fback) / 2);
				memcpy(key +
				       crypto_ahash_blocksize(ctx->fback) / 2,
				       ctx->opad,
				       crypto_ahash_blocksize(ctx->fback) / 2);
				ret = crypto_ahash_setkey(ctx->fback, key,
					crypto_ahash_blocksize(ctx->fback));
				memzero_explicit(key,
					crypto_ahash_blocksize(ctx->fback));
				ctx->fb_do_setkey = false;
			}
			ret = ret ?: crypto_ahash_init(subreq);
			ctx->fb_init_done = true;
		}
	}
	return ret;
}

static int safexcel_sha3_update(struct ahash_request *req)
{
	struct crypto_ahash *tfm = crypto_ahash_reqtfm(req);
	struct safexcel_ahash_ctx *ctx = crypto_ahash_ctx(tfm);
	struct ahash_request *subreq = ahash_request_ctx(req);

	ctx->do_fallback = true;
	return safexcel_sha3_fbcheck(req) ?: crypto_ahash_update(subreq);
}

static int safexcel_sha3_final(struct ahash_request *req)
{
	struct crypto_ahash *tfm = crypto_ahash_reqtfm(req);
	struct safexcel_ahash_ctx *ctx = crypto_ahash_ctx(tfm);
	struct ahash_request *subreq = ahash_request_ctx(req);

	ctx->do_fallback = true;
	return safexcel_sha3_fbcheck(req) ?: crypto_ahash_final(subreq);
}

static int safexcel_sha3_finup(struct ahash_request *req)
{
	struct crypto_ahash *tfm = crypto_ahash_reqtfm(req);
	struct safexcel_ahash_ctx *ctx = crypto_ahash_ctx(tfm);
	struct ahash_request *subreq = ahash_request_ctx(req);

	ctx->do_fallback |= !req->nbytes;
	if (ctx->do_fallback)
		/* Update or ex/import happened or len 0, cannot use the HW */
		return safexcel_sha3_fbcheck(req) ?:
		       crypto_ahash_finup(subreq);
	else
		return safexcel_ahash_finup(req);
}

static int safexcel_sha3_digest_fallback(struct ahash_request *req)
{
	struct crypto_ahash *tfm = crypto_ahash_reqtfm(req);
	struct safexcel_ahash_ctx *ctx = crypto_ahash_ctx(tfm);
	struct ahash_request *subreq = ahash_request_ctx(req);

	ctx->do_fallback = true;
	ctx->fb_init_done = false;
	return safexcel_sha3_fbcheck(req) ?: crypto_ahash_finup(subreq);
}

static int safexcel_sha3_224_digest(struct ahash_request *req)
{
	if (req->nbytes)
		return safexcel_sha3_224_init(req) ?: safexcel_ahash_finup(req);

	/* HW cannot do zero length hash, use fallback instead */
	return safexcel_sha3_digest_fallback(req);
}

static int safexcel_sha3_export(struct ahash_request *req, void *out)
{
	struct crypto_ahash *tfm = crypto_ahash_reqtfm(req);
	struct safexcel_ahash_ctx *ctx = crypto_ahash_ctx(tfm);
	struct ahash_request *subreq = ahash_request_ctx(req);

	ctx->do_fallback = true;
	return safexcel_sha3_fbcheck(req) ?: crypto_ahash_export(subreq, out);
}

static int safexcel_sha3_import(struct ahash_request *req, const void *in)
{
	struct crypto_ahash *tfm = crypto_ahash_reqtfm(req);
	struct safexcel_ahash_ctx *ctx = crypto_ahash_ctx(tfm);
	struct ahash_request *subreq = ahash_request_ctx(req);

	ctx->do_fallback = true;
	return safexcel_sha3_fbcheck(req) ?: crypto_ahash_import(subreq, in);
	// return safexcel_ahash_import(req, in);
}

static int safexcel_sha3_cra_init(struct crypto_tfm *tfm)
{
	struct crypto_ahash *ahash = __crypto_ahash_cast(tfm);
	struct safexcel_ahash_ctx *ctx = crypto_tfm_ctx(tfm);

	safexcel_ahash_cra_init(tfm);

	/* Allocate fallback implementation */
	ctx->fback = crypto_alloc_ahash(crypto_tfm_alg_name(tfm), 0,
					CRYPTO_ALG_ASYNC |
					CRYPTO_ALG_NEED_FALLBACK);
	if (IS_ERR(ctx->fback))
		return PTR_ERR(ctx->fback);

	/* Update statesize from fallback algorithm! */
	crypto_hash_alg_common(ahash)->statesize =
		crypto_ahash_statesize(ctx->fback);
	crypto_ahash_set_reqsize(ahash, max(sizeof(struct safexcel_ahash_req),
					    sizeof(struct ahash_request) +
					    crypto_ahash_reqsize(ctx->fback)));
	return 0;
}
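
/*
 * The request context must be large enough to hold either this driver's
 * own safexcel_ahash_req or a nested ahash_request for the fallback tfm
 * (ahash_request_ctx() doubles as the subrequest above), hence the
 * max() in the reqsize calculation.
 */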

static void safexcel_sha3_cra_exit(struct crypto_tfm *tfm)
{
	struct safexcel_ahash_ctx *ctx = crypto_tfm_ctx(tfm);

	crypto_free_ahash(ctx->fback);
	safexcel_ahash_cra_exit(tfm);
}

struct safexcel_alg_template safexcel_alg_sha3_224 = {
	.type = SAFEXCEL_ALG_TYPE_AHASH,
	.algo_mask = SAFEXCEL_ALG_SHA3,
	.alg.ahash = {
		.init = safexcel_sha3_224_init,
		.update = safexcel_sha3_update,
		.final = safexcel_sha3_final,
		.finup = safexcel_sha3_finup,
		.digest = safexcel_sha3_224_digest,
		.export = safexcel_sha3_export,
		.import = safexcel_sha3_import,
		.halg = {
			.digestsize = SHA3_224_DIGEST_SIZE,
			.statesize = sizeof(struct safexcel_ahash_export_state),
			.base = {
				.cra_name = "sha3-224",
				.cra_driver_name = "safexcel-sha3-224",
				.cra_priority = SAFEXCEL_CRA_PRIORITY,
				.cra_flags = CRYPTO_ALG_ASYNC |
					     CRYPTO_ALG_KERN_DRIVER_ONLY |
					     CRYPTO_ALG_NEED_FALLBACK,
				.cra_blocksize = SHA3_224_BLOCK_SIZE,
				.cra_ctxsize = sizeof(struct safexcel_ahash_ctx),
				.cra_init = safexcel_sha3_cra_init,
				.cra_exit = safexcel_sha3_cra_exit,
				.cra_module = THIS_MODULE,
			},
		},
	},
};

static int safexcel_sha3_256_init(struct ahash_request *areq)
{
	struct crypto_ahash *tfm = crypto_ahash_reqtfm(areq);
	struct safexcel_ahash_ctx *ctx = crypto_ahash_ctx(tfm);
	struct safexcel_ahash_req *req = ahash_request_ctx(areq);

	memset(req, 0, sizeof(*req));

	ctx->alg = CONTEXT_CONTROL_CRYPTO_ALG_SHA3_256;
	req->digest = CONTEXT_CONTROL_DIGEST_INITIAL;
	req->state_sz = SHA3_256_DIGEST_SIZE;
	req->digest_sz = SHA3_256_DIGEST_SIZE;
	req->block_sz = SHA3_256_BLOCK_SIZE;
	ctx->do_fallback = false;
	ctx->fb_init_done = false;
	return 0;
}

static int safexcel_sha3_256_digest(struct ahash_request *req)
{
	if (req->nbytes)
		return safexcel_sha3_256_init(req) ?: safexcel_ahash_finup(req);

	/* HW cannot do zero length hash, use fallback instead */
	return safexcel_sha3_digest_fallback(req);
}

struct safexcel_alg_template safexcel_alg_sha3_256 = {
	.type = SAFEXCEL_ALG_TYPE_AHASH,
	.algo_mask = SAFEXCEL_ALG_SHA3,
	.alg.ahash = {
		.init = safexcel_sha3_256_init,
		.update = safexcel_sha3_update,
		.final = safexcel_sha3_final,
		.finup = safexcel_sha3_finup,
		.digest = safexcel_sha3_256_digest,
		.export = safexcel_sha3_export,
		.import = safexcel_sha3_import,
		.halg = {
			.digestsize = SHA3_256_DIGEST_SIZE,
			.statesize = sizeof(struct safexcel_ahash_export_state),
			.base = {
				.cra_name = "sha3-256",
				.cra_driver_name = "safexcel-sha3-256",
				.cra_priority = SAFEXCEL_CRA_PRIORITY,
				.cra_flags = CRYPTO_ALG_ASYNC |
					     CRYPTO_ALG_KERN_DRIVER_ONLY |
					     CRYPTO_ALG_NEED_FALLBACK,
				.cra_blocksize = SHA3_256_BLOCK_SIZE,
				.cra_ctxsize = sizeof(struct safexcel_ahash_ctx),
				.cra_init = safexcel_sha3_cra_init,
				.cra_exit = safexcel_sha3_cra_exit,
				.cra_module = THIS_MODULE,
			},
		},
	},
};

static int safexcel_sha3_384_init(struct ahash_request *areq)
{
	struct crypto_ahash *tfm = crypto_ahash_reqtfm(areq);
	struct safexcel_ahash_ctx *ctx = crypto_ahash_ctx(tfm);
	struct safexcel_ahash_req *req = ahash_request_ctx(areq);

	memset(req, 0, sizeof(*req));

	ctx->alg = CONTEXT_CONTROL_CRYPTO_ALG_SHA3_384;
	req->digest = CONTEXT_CONTROL_DIGEST_INITIAL;
	req->state_sz = SHA3_384_DIGEST_SIZE;
	req->digest_sz = SHA3_384_DIGEST_SIZE;
	req->block_sz = SHA3_384_BLOCK_SIZE;
	ctx->do_fallback = false;
	ctx->fb_init_done = false;
	return 0;
}

static int safexcel_sha3_384_digest(struct ahash_request *req)
{
	if (req->nbytes)
		return safexcel_sha3_384_init(req) ?: safexcel_ahash_finup(req);

	/* HW cannot do zero length hash, use fallback instead */
	return safexcel_sha3_digest_fallback(req);
}

struct safexcel_alg_template safexcel_alg_sha3_384 = {
	.type = SAFEXCEL_ALG_TYPE_AHASH,
	.algo_mask = SAFEXCEL_ALG_SHA3,
	.alg.ahash = {
		.init = safexcel_sha3_384_init,
		.update = safexcel_sha3_update,
		.final = safexcel_sha3_final,
		.finup = safexcel_sha3_finup,
		.digest = safexcel_sha3_384_digest,
		.export = safexcel_sha3_export,
		.import = safexcel_sha3_import,
		.halg = {
			.digestsize = SHA3_384_DIGEST_SIZE,
			.statesize = sizeof(struct safexcel_ahash_export_state),
			.base = {
				.cra_name = "sha3-384",
				.cra_driver_name = "safexcel-sha3-384",
				.cra_priority = SAFEXCEL_CRA_PRIORITY,
				.cra_flags = CRYPTO_ALG_ASYNC |
					     CRYPTO_ALG_KERN_DRIVER_ONLY |
					     CRYPTO_ALG_NEED_FALLBACK,
				.cra_blocksize = SHA3_384_BLOCK_SIZE,
				.cra_ctxsize = sizeof(struct safexcel_ahash_ctx),
				.cra_init = safexcel_sha3_cra_init,
				.cra_exit = safexcel_sha3_cra_exit,
				.cra_module = THIS_MODULE,
			},
		},
	},
};

static int safexcel_sha3_512_init(struct ahash_request *areq)
{
	struct crypto_ahash *tfm = crypto_ahash_reqtfm(areq);
	struct safexcel_ahash_ctx *ctx = crypto_ahash_ctx(tfm);
	struct safexcel_ahash_req *req = ahash_request_ctx(areq);

	memset(req, 0, sizeof(*req));

	ctx->alg = CONTEXT_CONTROL_CRYPTO_ALG_SHA3_512;
	req->digest = CONTEXT_CONTROL_DIGEST_INITIAL;
	req->state_sz = SHA3_512_DIGEST_SIZE;
	req->digest_sz = SHA3_512_DIGEST_SIZE;
	req->block_sz = SHA3_512_BLOCK_SIZE;
	ctx->do_fallback = false;
	ctx->fb_init_done = false;
	return 0;
}

static int safexcel_sha3_512_digest(struct ahash_request *req)
{
	if (req->nbytes)
		return safexcel_sha3_512_init(req) ?: safexcel_ahash_finup(req);

	/* HW cannot do zero length hash, use fallback instead */
	return safexcel_sha3_digest_fallback(req);
}

struct safexcel_alg_template safexcel_alg_sha3_512 = {
	.type = SAFEXCEL_ALG_TYPE_AHASH,
	.algo_mask = SAFEXCEL_ALG_SHA3,
	.alg.ahash = {
		.init = safexcel_sha3_512_init,
		.update = safexcel_sha3_update,
		.final = safexcel_sha3_final,
		.finup = safexcel_sha3_finup,
		.digest = safexcel_sha3_512_digest,
		.export = safexcel_sha3_export,
		.import = safexcel_sha3_import,
		.halg = {
			.digestsize = SHA3_512_DIGEST_SIZE,
			.statesize = sizeof(struct safexcel_ahash_export_state),
			.base = {
				.cra_name = "sha3-512",
				.cra_driver_name = "safexcel-sha3-512",
				.cra_priority = SAFEXCEL_CRA_PRIORITY,
				.cra_flags = CRYPTO_ALG_ASYNC |
					     CRYPTO_ALG_KERN_DRIVER_ONLY |
					     CRYPTO_ALG_NEED_FALLBACK,
				.cra_blocksize = SHA3_512_BLOCK_SIZE,
				.cra_ctxsize = sizeof(struct safexcel_ahash_ctx),
				.cra_init = safexcel_sha3_cra_init,
				.cra_exit = safexcel_sha3_cra_exit,
				.cra_module = THIS_MODULE,
			},
		},
	},
};

static int safexcel_hmac_sha3_cra_init(struct crypto_tfm *tfm, const char *alg)
{
	struct safexcel_ahash_ctx *ctx = crypto_tfm_ctx(tfm);
	int ret;

	ret = safexcel_sha3_cra_init(tfm);
	if (ret)
		return ret;

	/* Allocate precalc basic digest implementation */
	ctx->shpre = crypto_alloc_shash(alg, 0, CRYPTO_ALG_NEED_FALLBACK);
	if (IS_ERR(ctx->shpre))
		return PTR_ERR(ctx->shpre);

	ctx->shdesc = kmalloc(sizeof(*ctx->shdesc) +
			      crypto_shash_descsize(ctx->shpre), GFP_KERNEL);
	if (!ctx->shdesc) {
		crypto_free_shash(ctx->shpre);
		return -ENOMEM;
	}
	ctx->shdesc->tfm = ctx->shpre;
	return 0;
}

static void safexcel_hmac_sha3_cra_exit(struct crypto_tfm *tfm)
{
	struct safexcel_ahash_ctx *ctx = crypto_tfm_ctx(tfm);

	crypto_free_ahash(ctx->fback);
	crypto_free_shash(ctx->shpre);
	kfree(ctx->shdesc);
	safexcel_ahash_cra_exit(tfm);
}

static int safexcel_hmac_sha3_setkey(struct crypto_ahash *tfm, const u8 *key,
				     unsigned int keylen)
{
	struct safexcel_ahash_ctx *ctx = crypto_ahash_ctx(tfm);
	int ret = 0;

	if (keylen > crypto_ahash_blocksize(tfm)) {
		/*
		 * If the key is larger than the blocksize, then hash it
		 * first using our precalc digest shash
		 */
		ret = crypto_shash_digest(ctx->shdesc, key, keylen,
					  (u8 *)ctx->ipad);
		keylen = crypto_shash_digestsize(ctx->shpre);

		/*
		 * If the digest is larger than half the blocksize, we need to
		 * move the rest to opad due to the way our HMAC infra works.
		 */
		if (keylen > crypto_ahash_blocksize(tfm) / 2)
			/* Buffers overlap, need memmove instead of memcpy! */
			memmove(ctx->opad,
				(u8 *)ctx->ipad +
					crypto_ahash_blocksize(tfm) / 2,
				keylen - crypto_ahash_blocksize(tfm) / 2);
	} else {
		/*
		 * Copy the key to our ipad & opad buffers
		 * Note that ipad and opad each contain one half of the key,
		 * to match the existing HMAC driver infrastructure.
		 */
		if (keylen <= crypto_ahash_blocksize(tfm) / 2) {
			memcpy(ctx->ipad, key, keylen);
		} else {
			memcpy(ctx->ipad, key,
			       crypto_ahash_blocksize(tfm) / 2);
			memcpy(ctx->opad,
			       key + crypto_ahash_blocksize(tfm) / 2,
			       keylen - crypto_ahash_blocksize(tfm) / 2);
		}
	}

	/* Pad key with zeroes */
	if (keylen <= crypto_ahash_blocksize(tfm) / 2) {
		memset((u8 *)ctx->ipad + keylen, 0,
		       crypto_ahash_blocksize(tfm) / 2 - keylen);
		memset(ctx->opad, 0, crypto_ahash_blocksize(tfm) / 2);
	} else {
		memset((u8 *)ctx->opad + keylen -
		       crypto_ahash_blocksize(tfm) / 2, 0,
		       crypto_ahash_blocksize(tfm) - keylen);
	}

	/* If doing fallback, still need to set the new key! */
	ctx->fb_do_setkey = true;
	return ret;
}
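
/*
 * Note: setkey only stages the two key halves in ipad/opad here; when a
 * fallback operation actually runs, safexcel_sha3_fbcheck() notices
 * fb_do_setkey and programs the reassembled key into the fallback tfm.
 */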

static int safexcel_hmac_sha3_224_init(struct ahash_request *areq)
{
	struct crypto_ahash *tfm = crypto_ahash_reqtfm(areq);
	struct safexcel_ahash_ctx *ctx = crypto_ahash_ctx(tfm);
	struct safexcel_ahash_req *req = ahash_request_ctx(areq);

	memset(req, 0, sizeof(*req));

	/* Copy (half of) the key */
	memcpy(req->state, ctx->ipad, SHA3_224_BLOCK_SIZE / 2);
	/* Start of HMAC should have len == processed == blocksize */
	req->len = SHA3_224_BLOCK_SIZE;
	req->processed = SHA3_224_BLOCK_SIZE;
	ctx->alg = CONTEXT_CONTROL_CRYPTO_ALG_SHA3_224;
	req->digest = CONTEXT_CONTROL_DIGEST_HMAC;
	req->state_sz = SHA3_224_BLOCK_SIZE / 2;
	req->digest_sz = SHA3_224_DIGEST_SIZE;
	req->block_sz = SHA3_224_BLOCK_SIZE;
	req->hmac = true;
	ctx->do_fallback = false;
	ctx->fb_init_done = false;
	return 0;
}

static int safexcel_hmac_sha3_224_digest(struct ahash_request *req)
{
	if (req->nbytes)
		return safexcel_hmac_sha3_224_init(req) ?:
		       safexcel_ahash_finup(req);

	/* HW cannot do zero length HMAC, use fallback instead */
	return safexcel_sha3_digest_fallback(req);
}

static int safexcel_hmac_sha3_224_cra_init(struct crypto_tfm *tfm)
{
	return safexcel_hmac_sha3_cra_init(tfm, "sha3-224");
}

struct safexcel_alg_template safexcel_alg_hmac_sha3_224 = {
	.type = SAFEXCEL_ALG_TYPE_AHASH,
	.algo_mask = SAFEXCEL_ALG_SHA3,
	.alg.ahash = {
		.init = safexcel_hmac_sha3_224_init,
		.update = safexcel_sha3_update,
		.final = safexcel_sha3_final,
		.finup = safexcel_sha3_finup,
		.digest = safexcel_hmac_sha3_224_digest,
		.setkey = safexcel_hmac_sha3_setkey,
		.export = safexcel_sha3_export,
		.import = safexcel_sha3_import,
		.halg = {
			.digestsize = SHA3_224_DIGEST_SIZE,
			.statesize = sizeof(struct safexcel_ahash_export_state),
			.base = {
				.cra_name = "hmac(sha3-224)",
				.cra_driver_name = "safexcel-hmac-sha3-224",
				.cra_priority = SAFEXCEL_CRA_PRIORITY,
				.cra_flags = CRYPTO_ALG_ASYNC |
					     CRYPTO_ALG_KERN_DRIVER_ONLY |
					     CRYPTO_ALG_NEED_FALLBACK,
				.cra_blocksize = SHA3_224_BLOCK_SIZE,
				.cra_ctxsize = sizeof(struct safexcel_ahash_ctx),
				.cra_init = safexcel_hmac_sha3_224_cra_init,
				.cra_exit = safexcel_hmac_sha3_cra_exit,
				.cra_module = THIS_MODULE,
			},
		},
	},
};

static int safexcel_hmac_sha3_256_init(struct ahash_request *areq)
{
	struct crypto_ahash *tfm = crypto_ahash_reqtfm(areq);
	struct safexcel_ahash_ctx *ctx = crypto_ahash_ctx(tfm);
	struct safexcel_ahash_req *req = ahash_request_ctx(areq);

	memset(req, 0, sizeof(*req));

	/* Copy (half of) the key */
	memcpy(req->state, ctx->ipad, SHA3_256_BLOCK_SIZE / 2);
	/* Start of HMAC should have len == processed == blocksize */
	req->len = SHA3_256_BLOCK_SIZE;
	req->processed = SHA3_256_BLOCK_SIZE;
	ctx->alg = CONTEXT_CONTROL_CRYPTO_ALG_SHA3_256;
	req->digest = CONTEXT_CONTROL_DIGEST_HMAC;
	req->state_sz = SHA3_256_BLOCK_SIZE / 2;
	req->digest_sz = SHA3_256_DIGEST_SIZE;
	req->block_sz = SHA3_256_BLOCK_SIZE;
	req->hmac = true;
	ctx->do_fallback = false;
	ctx->fb_init_done = false;
	return 0;
}

static int safexcel_hmac_sha3_256_digest(struct ahash_request *req)
{
	if (req->nbytes)
		return safexcel_hmac_sha3_256_init(req) ?:
		       safexcel_ahash_finup(req);

	/* HW cannot do zero length HMAC, use fallback instead */
	return safexcel_sha3_digest_fallback(req);
}

static int safexcel_hmac_sha3_256_cra_init(struct crypto_tfm *tfm)
{
	return safexcel_hmac_sha3_cra_init(tfm, "sha3-256");
}

struct safexcel_alg_template safexcel_alg_hmac_sha3_256 = {
	.type = SAFEXCEL_ALG_TYPE_AHASH,
	.algo_mask = SAFEXCEL_ALG_SHA3,
	.alg.ahash = {
		.init = safexcel_hmac_sha3_256_init,
		.update = safexcel_sha3_update,
		.final = safexcel_sha3_final,
		.finup = safexcel_sha3_finup,
		.digest = safexcel_hmac_sha3_256_digest,
		.setkey = safexcel_hmac_sha3_setkey,
		.export = safexcel_sha3_export,
		.import = safexcel_sha3_import,
		.halg = {
			.digestsize = SHA3_256_DIGEST_SIZE,
			.statesize = sizeof(struct safexcel_ahash_export_state),
			.base = {
				.cra_name = "hmac(sha3-256)",
				.cra_driver_name = "safexcel-hmac-sha3-256",
				.cra_priority = SAFEXCEL_CRA_PRIORITY,
				.cra_flags = CRYPTO_ALG_ASYNC |
					     CRYPTO_ALG_KERN_DRIVER_ONLY |
					     CRYPTO_ALG_NEED_FALLBACK,
				.cra_blocksize = SHA3_256_BLOCK_SIZE,
				.cra_ctxsize = sizeof(struct safexcel_ahash_ctx),
				.cra_init = safexcel_hmac_sha3_256_cra_init,
				.cra_exit = safexcel_hmac_sha3_cra_exit,
				.cra_module = THIS_MODULE,
			},
		},
	},
};

static int safexcel_hmac_sha3_384_init(struct ahash_request *areq)
{
	struct crypto_ahash *tfm = crypto_ahash_reqtfm(areq);
	struct safexcel_ahash_ctx *ctx = crypto_ahash_ctx(tfm);
	struct safexcel_ahash_req *req = ahash_request_ctx(areq);

	memset(req, 0, sizeof(*req));

	/* Copy (half of) the key */
	memcpy(req->state, ctx->ipad, SHA3_384_BLOCK_SIZE / 2);
	/* Start of HMAC should have len == processed == blocksize */
	req->len = SHA3_384_BLOCK_SIZE;
	req->processed = SHA3_384_BLOCK_SIZE;
	ctx->alg = CONTEXT_CONTROL_CRYPTO_ALG_SHA3_384;
	req->digest = CONTEXT_CONTROL_DIGEST_HMAC;
	req->state_sz = SHA3_384_BLOCK_SIZE / 2;
	req->digest_sz = SHA3_384_DIGEST_SIZE;
	req->block_sz = SHA3_384_BLOCK_SIZE;
	req->hmac = true;
	ctx->do_fallback = false;
	ctx->fb_init_done = false;
	return 0;
}

static int safexcel_hmac_sha3_384_digest(struct ahash_request *req)
{
	if (req->nbytes)
		return safexcel_hmac_sha3_384_init(req) ?:
		       safexcel_ahash_finup(req);

	/* HW cannot do zero length HMAC, use fallback instead */
	return safexcel_sha3_digest_fallback(req);
}

static int safexcel_hmac_sha3_384_cra_init(struct crypto_tfm *tfm)
{
	return safexcel_hmac_sha3_cra_init(tfm, "sha3-384");
}

struct safexcel_alg_template safexcel_alg_hmac_sha3_384 = {
	.type = SAFEXCEL_ALG_TYPE_AHASH,
	.algo_mask = SAFEXCEL_ALG_SHA3,
	.alg.ahash = {
		.init = safexcel_hmac_sha3_384_init,
		.update = safexcel_sha3_update,
		.final = safexcel_sha3_final,
		.finup = safexcel_sha3_finup,
		.digest = safexcel_hmac_sha3_384_digest,
		.setkey = safexcel_hmac_sha3_setkey,
		.export = safexcel_sha3_export,
		.import = safexcel_sha3_import,
		.halg = {
			.digestsize = SHA3_384_DIGEST_SIZE,
			.statesize = sizeof(struct safexcel_ahash_export_state),
			.base = {
				.cra_name = "hmac(sha3-384)",
				.cra_driver_name = "safexcel-hmac-sha3-384",
				.cra_priority = SAFEXCEL_CRA_PRIORITY,
				.cra_flags = CRYPTO_ALG_ASYNC |
					     CRYPTO_ALG_KERN_DRIVER_ONLY |
					     CRYPTO_ALG_NEED_FALLBACK,
				.cra_blocksize = SHA3_384_BLOCK_SIZE,
				.cra_ctxsize = sizeof(struct safexcel_ahash_ctx),
				.cra_init = safexcel_hmac_sha3_384_cra_init,
				.cra_exit = safexcel_hmac_sha3_cra_exit,
				.cra_module = THIS_MODULE,
			},
		},
	},
};

static int safexcel_hmac_sha3_512_init(struct ahash_request *areq)
{
	struct crypto_ahash *tfm = crypto_ahash_reqtfm(areq);
	struct safexcel_ahash_ctx *ctx = crypto_ahash_ctx(tfm);
	struct safexcel_ahash_req *req = ahash_request_ctx(areq);

	memset(req, 0, sizeof(*req));

	/* Copy (half of) the key */
	memcpy(req->state, ctx->ipad, SHA3_512_BLOCK_SIZE / 2);
	/* Start of HMAC should have len == processed == blocksize */
	req->len = SHA3_512_BLOCK_SIZE;
	req->processed = SHA3_512_BLOCK_SIZE;
	ctx->alg = CONTEXT_CONTROL_CRYPTO_ALG_SHA3_512;
	req->digest = CONTEXT_CONTROL_DIGEST_HMAC;
	req->state_sz = SHA3_512_BLOCK_SIZE / 2;
	req->digest_sz = SHA3_512_DIGEST_SIZE;
	req->block_sz = SHA3_512_BLOCK_SIZE;
	req->hmac = true;
	ctx->do_fallback = false;
	ctx->fb_init_done = false;
	return 0;
}

static int safexcel_hmac_sha3_512_digest(struct ahash_request *req)
{
	if (req->nbytes)
		return safexcel_hmac_sha3_512_init(req) ?:
		       safexcel_ahash_finup(req);

	/* HW cannot do zero length HMAC, use fallback instead */
	return safexcel_sha3_digest_fallback(req);
}

static int safexcel_hmac_sha3_512_cra_init(struct crypto_tfm *tfm)
{
	return safexcel_hmac_sha3_cra_init(tfm, "sha3-512");
}

struct safexcel_alg_template safexcel_alg_hmac_sha3_512 = {
	.type = SAFEXCEL_ALG_TYPE_AHASH,
	.algo_mask = SAFEXCEL_ALG_SHA3,
	.alg.ahash = {
		.init = safexcel_hmac_sha3_512_init,
		.update = safexcel_sha3_update,
		.final = safexcel_sha3_final,
		.finup = safexcel_sha3_finup,
		.digest = safexcel_hmac_sha3_512_digest,
		.setkey = safexcel_hmac_sha3_setkey,
		.export = safexcel_sha3_export,
		.import = safexcel_sha3_import,
		.halg = {
			.digestsize = SHA3_512_DIGEST_SIZE,
			.statesize = sizeof(struct safexcel_ahash_export_state),
			.base = {
				.cra_name = "hmac(sha3-512)",
				.cra_driver_name = "safexcel-hmac-sha3-512",
				.cra_priority = SAFEXCEL_CRA_PRIORITY,
				.cra_flags = CRYPTO_ALG_ASYNC |
					     CRYPTO_ALG_KERN_DRIVER_ONLY |
					     CRYPTO_ALG_NEED_FALLBACK,
				.cra_blocksize = SHA3_512_BLOCK_SIZE,
				.cra_ctxsize = sizeof(struct safexcel_ahash_ctx),
				.cra_init = safexcel_hmac_sha3_512_cra_init,
				.cra_exit = safexcel_hmac_sha3_cra_exit,
				.cra_module = THIS_MODULE,
			},
		},
	},
};