// SPDX-License-Identifier: GPL-2.0-or-later
/*
 * AMCC SoC PPC4xx Crypto Driver
 *
 * Copyright (c) 2008 Applied Micro Circuits Corporation.
 * All rights reserved. James Hsiao <jhsiao@amcc.com>
 *
 * This file implements the Linux crypto algorithms.
 */

#include <linux/kernel.h>
#include <linux/interrupt.h>
#include <linux/spinlock_types.h>
#include <linux/scatterlist.h>
#include <linux/crypto.h>
#include <linux/hash.h>
#include <crypto/internal/hash.h>
#include <linux/dma-mapping.h>
#include <crypto/algapi.h>
#include <crypto/aead.h>
#include <crypto/aes.h>
#include <crypto/gcm.h>
#include <crypto/sha.h>
#include <crypto/ctr.h>
#include <crypto/skcipher.h>
#include "crypto4xx_reg_def.h"
#include "crypto4xx_core.h"
#include "crypto4xx_sa.h"
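
/*
 * Each transform context keeps two copies of its hardware Security
 * Association (SA): ctx->sa_in for the inbound (decrypt) direction and
 * ctx->sa_out for the outbound (encrypt) direction.  The two helpers
 * below fill in the SA's two command words field by field.
 */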

static void set_dynamic_sa_command_0(struct dynamic_sa_ctl *sa, u32 save_h,
				     u32 save_iv, u32 ld_h, u32 ld_iv,
				     u32 hdr_proc, u32 h, u32 c, u32 pad_type,
				     u32 op_grp, u32 op, u32 dir)
{
	sa->sa_command_0.w = 0;
	sa->sa_command_0.bf.save_hash_state = save_h;
	sa->sa_command_0.bf.save_iv = save_iv;
	sa->sa_command_0.bf.load_hash_state = ld_h;
	sa->sa_command_0.bf.load_iv = ld_iv;
	sa->sa_command_0.bf.hdr_proc = hdr_proc;
	sa->sa_command_0.bf.hash_alg = h;
	sa->sa_command_0.bf.cipher_alg = c;
	sa->sa_command_0.bf.pad_type = pad_type & 3;
	sa->sa_command_0.bf.extend_pad = pad_type >> 2;
	sa->sa_command_0.bf.op_group = op_grp;
	sa->sa_command_0.bf.opcode = op;
	sa->sa_command_0.bf.dir = dir;
}

static void set_dynamic_sa_command_1(struct dynamic_sa_ctl *sa, u32 cm,
				     u32 hmac_mc, u32 cfb, u32 esn,
				     u32 sn_mask, u32 mute, u32 cp_pad,
				     u32 cp_pay, u32 cp_hdr)
{
	sa->sa_command_1.w = 0;
	sa->sa_command_1.bf.crypto_mode31 = (cm & 4) >> 2;
	sa->sa_command_1.bf.crypto_mode9_8 = cm & 3;
	sa->sa_command_1.bf.feedback_mode = cfb;
	sa->sa_command_1.bf.sa_rev = 1;
	sa->sa_command_1.bf.hmac_muting = hmac_mc;
	sa->sa_command_1.bf.extended_seq_num = esn;
	sa->sa_command_1.bf.seq_num_mask = sn_mask;
	sa->sa_command_1.bf.mutable_bit_proc = mute;
	sa->sa_command_1.bf.copy_pad = cp_pad;
	sa->sa_command_1.bf.copy_payload = cp_pay;
	sa->sa_command_1.bf.copy_hdr = cp_hdr;
}

static inline int crypto4xx_crypt(struct skcipher_request *req,
				  const unsigned int ivlen, bool decrypt,
				  bool check_blocksize)
{
	struct crypto_skcipher *cipher = crypto_skcipher_reqtfm(req);
	struct crypto4xx_ctx *ctx = crypto_skcipher_ctx(cipher);
	__le32 iv[AES_IV_SIZE];

	if (check_blocksize && !IS_ALIGNED(req->cryptlen, AES_BLOCK_SIZE))
		return -EINVAL;

	if (ivlen)
		crypto4xx_memcpy_to_le32(iv, req->iv, ivlen);

	return crypto4xx_build_pd(&req->base, ctx, req->src, req->dst,
		req->cryptlen, iv, ivlen, decrypt ? ctx->sa_in : ctx->sa_out,
		ctx->sa_len, 0, NULL);
}
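
/*
 * Thin entry points over crypto4xx_crypt().  The "noiv"/"iv" suffix
 * reflects the IV length passed down; "block" variants enforce that
 * req->cryptlen is a multiple of AES_BLOCK_SIZE, "stream" variants
 * do not.
 */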

int crypto4xx_encrypt_noiv_block(struct skcipher_request *req)
{
	return crypto4xx_crypt(req, 0, false, true);
}

int crypto4xx_encrypt_iv_stream(struct skcipher_request *req)
{
	return crypto4xx_crypt(req, AES_IV_SIZE, false, false);
}

int crypto4xx_decrypt_noiv_block(struct skcipher_request *req)
{
	return crypto4xx_crypt(req, 0, true, true);
}

int crypto4xx_decrypt_iv_stream(struct skcipher_request *req)
{
	return crypto4xx_crypt(req, AES_IV_SIZE, true, false);
}

int crypto4xx_encrypt_iv_block(struct skcipher_request *req)
{
	return crypto4xx_crypt(req, AES_IV_SIZE, false, true);
}

int crypto4xx_decrypt_iv_block(struct skcipher_request *req)
{
	return crypto4xx_crypt(req, AES_IV_SIZE, true, true);
}

/**
 * AES Functions
 */
static int crypto4xx_setkey_aes(struct crypto_skcipher *cipher,
				const u8 *key,
				unsigned int keylen,
				unsigned char cm,
				u8 fb)
{
	struct crypto4xx_ctx *ctx = crypto_skcipher_ctx(cipher);
	struct dynamic_sa_ctl *sa;
	int rc;

	if (keylen != AES_KEYSIZE_256 && keylen != AES_KEYSIZE_192 &&
	    keylen != AES_KEYSIZE_128)
		return -EINVAL;

	/* Create SA */
	if (ctx->sa_in || ctx->sa_out)
		crypto4xx_free_sa(ctx);

	rc = crypto4xx_alloc_sa(ctx, SA_AES128_LEN + (keylen - 16) / 4);
	if (rc)
		return rc;

	/* Setup SA */
	sa = ctx->sa_in;

	set_dynamic_sa_command_0(sa, SA_NOT_SAVE_HASH, (cm == CRYPTO_MODE_ECB ?
				 SA_NOT_SAVE_IV : SA_SAVE_IV),
				 SA_NOT_LOAD_HASH, (cm == CRYPTO_MODE_ECB ?
				 SA_LOAD_IV_FROM_SA : SA_LOAD_IV_FROM_STATE),
				 SA_NO_HEADER_PROC, SA_HASH_ALG_NULL,
				 SA_CIPHER_ALG_AES, SA_PAD_TYPE_ZERO,
				 SA_OP_GROUP_BASIC, SA_OPCODE_DECRYPT,
				 DIR_INBOUND);

	set_dynamic_sa_command_1(sa, cm, SA_HASH_MODE_HASH,
				 fb, SA_EXTENDED_SN_OFF,
				 SA_SEQ_MASK_OFF, SA_MC_ENABLE,
				 SA_NOT_COPY_PAD, SA_NOT_COPY_PAYLOAD,
				 SA_NOT_COPY_HDR);
	crypto4xx_memcpy_to_le32(get_dynamic_sa_key_field(sa),
				 key, keylen);
	sa->sa_contents.w = SA_AES_CONTENTS | (keylen << 2);
	sa->sa_command_1.bf.key_len = keylen >> 3;

	memcpy(ctx->sa_out, ctx->sa_in, ctx->sa_len * 4);
	sa = ctx->sa_out;
	sa->sa_command_0.bf.dir = DIR_OUTBOUND;
	/*
	 * SA_OPCODE_ENCRYPT is the same value as SA_OPCODE_DECRYPT;
	 * it's the DIR_(IN|OUT)BOUND that matters.
	 */
	sa->sa_command_0.bf.opcode = SA_OPCODE_ENCRYPT;

	return 0;
}
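
/*
 * Worked example of the key-size encoding above (illustrative): for
 * AES-192, keylen = 24, so the SA is allocated with
 * SA_AES128_LEN + (24 - 16) / 4 = SA_AES128_LEN + 2 extra words, and
 * sa_command_1.bf.key_len = 24 >> 3 = 3, i.e. the key length counted
 * in 8-byte units.
 */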

int crypto4xx_setkey_aes_cbc(struct crypto_skcipher *cipher,
			     const u8 *key, unsigned int keylen)
{
	return crypto4xx_setkey_aes(cipher, key, keylen, CRYPTO_MODE_CBC,
				    CRYPTO_FEEDBACK_MODE_NO_FB);
}

int crypto4xx_setkey_aes_cfb(struct crypto_skcipher *cipher,
			     const u8 *key, unsigned int keylen)
{
	return crypto4xx_setkey_aes(cipher, key, keylen, CRYPTO_MODE_CFB,
				    CRYPTO_FEEDBACK_MODE_128BIT_CFB);
}

int crypto4xx_setkey_aes_ecb(struct crypto_skcipher *cipher,
			     const u8 *key, unsigned int keylen)
{
	return crypto4xx_setkey_aes(cipher, key, keylen, CRYPTO_MODE_ECB,
				    CRYPTO_FEEDBACK_MODE_NO_FB);
}

int crypto4xx_setkey_aes_ofb(struct crypto_skcipher *cipher,
			     const u8 *key, unsigned int keylen)
{
	return crypto4xx_setkey_aes(cipher, key, keylen, CRYPTO_MODE_OFB,
				    CRYPTO_FEEDBACK_MODE_64BIT_OFB);
}

int crypto4xx_setkey_rfc3686(struct crypto_skcipher *cipher,
			     const u8 *key, unsigned int keylen)
{
	struct crypto4xx_ctx *ctx = crypto_skcipher_ctx(cipher);
	int rc;

	rc = crypto4xx_setkey_aes(cipher, key, keylen - CTR_RFC3686_NONCE_SIZE,
		CRYPTO_MODE_CTR, CRYPTO_FEEDBACK_MODE_NO_FB);
	if (rc)
		return rc;

	ctx->iv_nonce = cpu_to_le32p((u32 *)&key[keylen -
						 CTR_RFC3686_NONCE_SIZE]);

	return 0;
}

int crypto4xx_rfc3686_encrypt(struct skcipher_request *req)
{
	struct crypto_skcipher *cipher = crypto_skcipher_reqtfm(req);
	struct crypto4xx_ctx *ctx = crypto_skcipher_ctx(cipher);
	__le32 iv[AES_IV_SIZE / 4] = {
		ctx->iv_nonce,
		cpu_to_le32p((u32 *) req->iv),
		cpu_to_le32p((u32 *) (req->iv + 4)),
		cpu_to_le32(1) };

	return crypto4xx_build_pd(&req->base, ctx, req->src, req->dst,
				  req->cryptlen, iv, AES_IV_SIZE,
				  ctx->sa_out, ctx->sa_len, 0, NULL);
}

int crypto4xx_rfc3686_decrypt(struct skcipher_request *req)
{
	struct crypto_skcipher *cipher = crypto_skcipher_reqtfm(req);
	struct crypto4xx_ctx *ctx = crypto_skcipher_ctx(cipher);
	__le32 iv[AES_IV_SIZE / 4] = {
		ctx->iv_nonce,
		cpu_to_le32p((u32 *) req->iv),
		cpu_to_le32p((u32 *) (req->iv + 4)),
		cpu_to_le32(1) };

	return crypto4xx_build_pd(&req->base, ctx, req->src, req->dst,
				  req->cryptlen, iv, AES_IV_SIZE,
				  ctx->sa_out, ctx->sa_len, 0, NULL);
}
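
/*
 * RFC 3686 counter block as assembled above: iv[0] is the key nonce
 * saved at setkey time, iv[1..2] carry the per-request IV, and iv[3]
 * is the block counter, which RFC 3686 requires to start at 1.
 */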

static int
crypto4xx_ctr_crypt(struct skcipher_request *req, bool encrypt)
{
	struct crypto_skcipher *cipher = crypto_skcipher_reqtfm(req);
	struct crypto4xx_ctx *ctx = crypto_skcipher_ctx(cipher);
	size_t iv_len = crypto_skcipher_ivsize(cipher);
	unsigned int counter = be32_to_cpup((__be32 *)(req->iv + iv_len - 4));
	unsigned int nblks = ALIGN(req->cryptlen, AES_BLOCK_SIZE) /
			AES_BLOCK_SIZE;

	/*
	 * The hardware uses only the last 32 bits as the counter while the
	 * kernel tests (aes_ctr_enc_tv_template[4] for example) expect that
	 * the whole IV is a counter. So fall back if the counter is going to
	 * overflow.
	 */
	if (counter + nblks < counter) {
		SYNC_SKCIPHER_REQUEST_ON_STACK(subreq, ctx->sw_cipher.cipher);
		int ret;

		skcipher_request_set_sync_tfm(subreq, ctx->sw_cipher.cipher);
		skcipher_request_set_callback(subreq, req->base.flags,
			NULL, NULL);
		skcipher_request_set_crypt(subreq, req->src, req->dst,
			req->cryptlen, req->iv);
		ret = encrypt ? crypto_skcipher_encrypt(subreq)
			: crypto_skcipher_decrypt(subreq);
		skcipher_request_zero(subreq);
		return ret;
	}

	return encrypt ? crypto4xx_encrypt_iv_stream(req)
		       : crypto4xx_decrypt_iv_stream(req);
}
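
/*
 * Overflow-check example: with the last IV word at 0xffffffff and a
 * two-block request, counter + nblks wraps to 1 < counter, so the
 * request is handed to the software fallback, which carries into the
 * upper 96 bits of the IV as the templates expect.
 */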

static int crypto4xx_sk_setup_fallback(struct crypto4xx_ctx *ctx,
				       struct crypto_skcipher *cipher,
				       const u8 *key,
				       unsigned int keylen)
{
	int rc;

	crypto_sync_skcipher_clear_flags(ctx->sw_cipher.cipher,
					 CRYPTO_TFM_REQ_MASK);
	crypto_sync_skcipher_set_flags(ctx->sw_cipher.cipher,
		crypto_skcipher_get_flags(cipher) & CRYPTO_TFM_REQ_MASK);
	rc = crypto_sync_skcipher_setkey(ctx->sw_cipher.cipher, key, keylen);
	crypto_skcipher_clear_flags(cipher, CRYPTO_TFM_RES_MASK);
	crypto_skcipher_set_flags(cipher,
		crypto_sync_skcipher_get_flags(ctx->sw_cipher.cipher) &
			CRYPTO_TFM_RES_MASK);

	return rc;
}
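
/*
 * The flag shuffling above is the usual fallback pattern: REQ flags
 * from the caller are propagated into the fallback tfm before setkey,
 * and the RES flags it reports are copied back so the caller sees the
 * real failure reason.
 */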

int crypto4xx_setkey_aes_ctr(struct crypto_skcipher *cipher,
			     const u8 *key, unsigned int keylen)
{
	struct crypto4xx_ctx *ctx = crypto_skcipher_ctx(cipher);
	int rc;

	rc = crypto4xx_sk_setup_fallback(ctx, cipher, key, keylen);
	if (rc)
		return rc;

	return crypto4xx_setkey_aes(cipher, key, keylen,
		CRYPTO_MODE_CTR, CRYPTO_FEEDBACK_MODE_NO_FB);
}

int crypto4xx_encrypt_ctr(struct skcipher_request *req)
{
	return crypto4xx_ctr_crypt(req, true);
}

int crypto4xx_decrypt_ctr(struct skcipher_request *req)
{
	return crypto4xx_ctr_crypt(req, false);
}

static inline bool crypto4xx_aead_need_fallback(struct aead_request *req,
						unsigned int len,
						bool is_ccm, bool decrypt)
{
	struct crypto_aead *aead = crypto_aead_reqtfm(req);

	/* authsize has to be a multiple of 4 */
	if (aead->authsize & 3)
		return true;

	/*
	 * hardware does not handle cases where plaintext
	 * is less than a block.
	 */
	if (len < AES_BLOCK_SIZE)
		return true;

	/* assoc len needs to be a multiple of 4 and <= 1020 */
	if (req->assoclen & 0x3 || req->assoclen > 1020)
		return true;

	/* CCM supports only counter field lengths of 2 and 4 bytes */
	if (is_ccm && !(req->iv[0] == 1 || req->iv[0] == 3))
		return true;

	return false;
}
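
/*
 * On the CCM check above: req->iv[0] encodes L - 1, where L is the
 * byte size of the counter/length field (RFC 3610), so iv[0] == 1
 * selects L == 2 and iv[0] == 3 selects L == 4.
 */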

static int crypto4xx_aead_fallback(struct aead_request *req,
	struct crypto4xx_ctx *ctx, bool do_decrypt)
{
	struct aead_request *subreq = aead_request_ctx(req);

	aead_request_set_tfm(subreq, ctx->sw_cipher.aead);
	aead_request_set_callback(subreq, req->base.flags,
				  req->base.complete, req->base.data);
	aead_request_set_crypt(subreq, req->src, req->dst, req->cryptlen,
			       req->iv);
	aead_request_set_ad(subreq, req->assoclen);
	return do_decrypt ? crypto_aead_decrypt(subreq) :
			    crypto_aead_encrypt(subreq);
}

static int crypto4xx_aead_setup_fallback(struct crypto4xx_ctx *ctx,
					 struct crypto_aead *cipher,
					 const u8 *key,
					 unsigned int keylen)
{
	int rc;

	crypto_aead_clear_flags(ctx->sw_cipher.aead, CRYPTO_TFM_REQ_MASK);
	crypto_aead_set_flags(ctx->sw_cipher.aead,
		crypto_aead_get_flags(cipher) & CRYPTO_TFM_REQ_MASK);
	rc = crypto_aead_setkey(ctx->sw_cipher.aead, key, keylen);
	crypto_aead_clear_flags(cipher, CRYPTO_TFM_RES_MASK);
	crypto_aead_set_flags(cipher,
		crypto_aead_get_flags(ctx->sw_cipher.aead) &
			CRYPTO_TFM_RES_MASK);

	return rc;
}

/**
 * AES-CCM Functions
 */

int crypto4xx_setkey_aes_ccm(struct crypto_aead *cipher, const u8 *key,
			     unsigned int keylen)
{
	struct crypto_tfm *tfm = crypto_aead_tfm(cipher);
	struct crypto4xx_ctx *ctx = crypto_tfm_ctx(tfm);
	struct dynamic_sa_ctl *sa;
	int rc = 0;

	rc = crypto4xx_aead_setup_fallback(ctx, cipher, key, keylen);
	if (rc)
		return rc;

	if (ctx->sa_in || ctx->sa_out)
		crypto4xx_free_sa(ctx);

	rc = crypto4xx_alloc_sa(ctx, SA_AES128_CCM_LEN + (keylen - 16) / 4);
	if (rc)
		return rc;

	/* Setup SA */
	sa = (struct dynamic_sa_ctl *) ctx->sa_in;
	sa->sa_contents.w = SA_AES_CCM_CONTENTS | (keylen << 2);

	set_dynamic_sa_command_0(sa, SA_SAVE_HASH, SA_NOT_SAVE_IV,
				 SA_LOAD_HASH_FROM_SA, SA_LOAD_IV_FROM_STATE,
				 SA_NO_HEADER_PROC, SA_HASH_ALG_CBC_MAC,
				 SA_CIPHER_ALG_AES,
				 SA_PAD_TYPE_ZERO, SA_OP_GROUP_BASIC,
				 SA_OPCODE_HASH_DECRYPT, DIR_INBOUND);

	set_dynamic_sa_command_1(sa, CRYPTO_MODE_CTR, SA_HASH_MODE_HASH,
				 CRYPTO_FEEDBACK_MODE_NO_FB, SA_EXTENDED_SN_OFF,
				 SA_SEQ_MASK_OFF, SA_MC_ENABLE,
				 SA_NOT_COPY_PAD, SA_COPY_PAYLOAD,
				 SA_NOT_COPY_HDR);

	sa->sa_command_1.bf.key_len = keylen >> 3;

	crypto4xx_memcpy_to_le32(get_dynamic_sa_key_field(sa), key, keylen);

	memcpy(ctx->sa_out, ctx->sa_in, ctx->sa_len * 4);
	sa = (struct dynamic_sa_ctl *) ctx->sa_out;

	set_dynamic_sa_command_0(sa, SA_SAVE_HASH, SA_NOT_SAVE_IV,
				 SA_LOAD_HASH_FROM_SA, SA_LOAD_IV_FROM_STATE,
				 SA_NO_HEADER_PROC, SA_HASH_ALG_CBC_MAC,
				 SA_CIPHER_ALG_AES,
				 SA_PAD_TYPE_ZERO, SA_OP_GROUP_BASIC,
				 SA_OPCODE_ENCRYPT_HASH, DIR_OUTBOUND);

	set_dynamic_sa_command_1(sa, CRYPTO_MODE_CTR, SA_HASH_MODE_HASH,
				 CRYPTO_FEEDBACK_MODE_NO_FB, SA_EXTENDED_SN_OFF,
				 SA_SEQ_MASK_OFF, SA_MC_ENABLE,
				 SA_COPY_PAD, SA_COPY_PAYLOAD,
				 SA_NOT_COPY_HDR);

	sa->sa_command_1.bf.key_len = keylen >> 3;
	return 0;
}

static int crypto4xx_crypt_aes_ccm(struct aead_request *req, bool decrypt)
{
	struct crypto4xx_ctx *ctx = crypto_tfm_ctx(req->base.tfm);
	struct crypto4xx_aead_reqctx *rctx = aead_request_ctx(req);
	struct crypto_aead *aead = crypto_aead_reqtfm(req);
	__le32 iv[16];
	u32 tmp_sa[SA_AES128_CCM_LEN + 4];
	struct dynamic_sa_ctl *sa = (struct dynamic_sa_ctl *)tmp_sa;
	unsigned int len = req->cryptlen;

	if (decrypt)
		len -= crypto_aead_authsize(aead);

	if (crypto4xx_aead_need_fallback(req, len, true, decrypt))
		return crypto4xx_aead_fallback(req, ctx, decrypt);

	memcpy(tmp_sa, decrypt ? ctx->sa_in : ctx->sa_out, ctx->sa_len * 4);
	sa->sa_command_0.bf.digest_len = crypto_aead_authsize(aead) >> 2;

	if (req->iv[0] == 1) {
		/* CRYPTO_MODE_AES_ICM */
		sa->sa_command_1.bf.crypto_mode9_8 = 1;
	}

	iv[3] = cpu_to_le32(0);
	crypto4xx_memcpy_to_le32(iv, req->iv, 16 - (req->iv[0] + 1));

	return crypto4xx_build_pd(&req->base, ctx, req->src, req->dst,
				  len, iv, sizeof(iv),
				  sa, ctx->sa_len, req->assoclen, rctx->dst);
}
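
/*
 * IV assembly above: iv[3] (the counter word) is zeroed first, then
 * only the leading 16 - L bytes of the counter block (the flags byte
 * plus the nonce) are copied from req->iv, so the hardware starts the
 * CCM counter portion at zero.
 */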

int crypto4xx_encrypt_aes_ccm(struct aead_request *req)
{
	return crypto4xx_crypt_aes_ccm(req, false);
}

int crypto4xx_decrypt_aes_ccm(struct aead_request *req)
{
	return crypto4xx_crypt_aes_ccm(req, true);
}

int crypto4xx_setauthsize_aead(struct crypto_aead *cipher,
			       unsigned int authsize)
{
	struct crypto_tfm *tfm = crypto_aead_tfm(cipher);
	struct crypto4xx_ctx *ctx = crypto_tfm_ctx(tfm);

	return crypto_aead_setauthsize(ctx->sw_cipher.aead, authsize);
}

/**
 * AES-GCM Functions
 */

static int crypto4xx_aes_gcm_validate_keylen(unsigned int keylen)
{
	switch (keylen) {
	case 16:
	case 24:
	case 32:
		return 0;
	default:
		return -EINVAL;
	}
}

static int crypto4xx_compute_gcm_hash_key_sw(__le32 *hash_start, const u8 *key,
					     unsigned int keylen)
{
	struct crypto_aes_ctx ctx;
	uint8_t src[16] = { 0 };
	int rc;

	rc = aes_expandkey(&ctx, key, keylen);
	if (rc) {
		pr_err("aes_expandkey() failed: %d\n", rc);
		return rc;
	}

	aes_encrypt(&ctx, src, src);
	crypto4xx_memcpy_to_le32(hash_start, src, 16);
	memzero_explicit(&ctx, sizeof(ctx));
	return rc;
}
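
/*
 * Background for the helper above: GCM's GHASH hash key H is the
 * encryption of the all-zero 128-bit block under the raw AES key.
 * crypto4xx_setkey_aes_gcm() below computes it in software and loads
 * it into the SA's inner digest for the packet engine to use.
 */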

int crypto4xx_setkey_aes_gcm(struct crypto_aead *cipher,
			     const u8 *key, unsigned int keylen)
{
	struct crypto_tfm *tfm = crypto_aead_tfm(cipher);
	struct crypto4xx_ctx *ctx = crypto_tfm_ctx(tfm);
	struct dynamic_sa_ctl *sa;
	int rc = 0;

	if (crypto4xx_aes_gcm_validate_keylen(keylen) != 0)
		return -EINVAL;

	rc = crypto4xx_aead_setup_fallback(ctx, cipher, key, keylen);
	if (rc)
		return rc;

	if (ctx->sa_in || ctx->sa_out)
		crypto4xx_free_sa(ctx);

	rc = crypto4xx_alloc_sa(ctx, SA_AES128_GCM_LEN + (keylen - 16) / 4);
	if (rc)
		return rc;

	sa = (struct dynamic_sa_ctl *) ctx->sa_in;

	sa->sa_contents.w = SA_AES_GCM_CONTENTS | (keylen << 2);
	set_dynamic_sa_command_0(sa, SA_SAVE_HASH, SA_NOT_SAVE_IV,
				 SA_LOAD_HASH_FROM_SA, SA_LOAD_IV_FROM_STATE,
				 SA_NO_HEADER_PROC, SA_HASH_ALG_GHASH,
				 SA_CIPHER_ALG_AES, SA_PAD_TYPE_ZERO,
				 SA_OP_GROUP_BASIC, SA_OPCODE_HASH_DECRYPT,
				 DIR_INBOUND);
	set_dynamic_sa_command_1(sa, CRYPTO_MODE_CTR, SA_HASH_MODE_HASH,
				 CRYPTO_FEEDBACK_MODE_NO_FB, SA_EXTENDED_SN_OFF,
				 SA_SEQ_MASK_ON, SA_MC_DISABLE,
				 SA_NOT_COPY_PAD, SA_COPY_PAYLOAD,
				 SA_NOT_COPY_HDR);

	sa->sa_command_1.bf.key_len = keylen >> 3;

	crypto4xx_memcpy_to_le32(get_dynamic_sa_key_field(sa),
				 key, keylen);

	rc = crypto4xx_compute_gcm_hash_key_sw(get_dynamic_sa_inner_digest(sa),
		key, keylen);
	if (rc) {
		pr_err("GCM hash key setting failed = %d\n", rc);
		goto err;
	}

	memcpy(ctx->sa_out, ctx->sa_in, ctx->sa_len * 4);
	sa = (struct dynamic_sa_ctl *) ctx->sa_out;
	sa->sa_command_0.bf.dir = DIR_OUTBOUND;
	sa->sa_command_0.bf.opcode = SA_OPCODE_ENCRYPT_HASH;

	return 0;
err:
	crypto4xx_free_sa(ctx);
	return rc;
}

static inline int crypto4xx_crypt_aes_gcm(struct aead_request *req,
					  bool decrypt)
{
	struct crypto4xx_ctx *ctx = crypto_tfm_ctx(req->base.tfm);
	struct crypto4xx_aead_reqctx *rctx = aead_request_ctx(req);
	__le32 iv[4];
	unsigned int len = req->cryptlen;

	if (decrypt)
		len -= crypto_aead_authsize(crypto_aead_reqtfm(req));

	if (crypto4xx_aead_need_fallback(req, len, false, decrypt))
		return crypto4xx_aead_fallback(req, ctx, decrypt);

	crypto4xx_memcpy_to_le32(iv, req->iv, GCM_AES_IV_SIZE);
	iv[3] = cpu_to_le32(1);

	return crypto4xx_build_pd(&req->base, ctx, req->src, req->dst,
				  len, iv, sizeof(iv),
				  decrypt ? ctx->sa_in : ctx->sa_out,
				  ctx->sa_len, req->assoclen, rctx->dst);
}
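
/*
 * With a 96-bit IV, GCM defines the initial counter block J0 as
 * IV || 0x00000001, which is why iv[3] is set to cpu_to_le32(1) above.
 */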

int crypto4xx_encrypt_aes_gcm(struct aead_request *req)
{
	return crypto4xx_crypt_aes_gcm(req, false);
}

int crypto4xx_decrypt_aes_gcm(struct aead_request *req)
{
	return crypto4xx_crypt_aes_gcm(req, true);
}

/**
 * HASH SHA1 Functions
 */
static int crypto4xx_hash_alg_init(struct crypto_tfm *tfm,
				   unsigned int sa_len,
				   unsigned char ha,
				   unsigned char hm)
{
	struct crypto_alg *alg = tfm->__crt_alg;
	struct crypto4xx_alg *my_alg;
	struct crypto4xx_ctx *ctx = crypto_tfm_ctx(tfm);
	struct dynamic_sa_hash160 *sa;
	int rc;

	my_alg = container_of(__crypto_ahash_alg(alg), struct crypto4xx_alg,
			      alg.u.hash);
	ctx->dev = my_alg->dev;

	/* Create SA */
	if (ctx->sa_in || ctx->sa_out)
		crypto4xx_free_sa(ctx);

	rc = crypto4xx_alloc_sa(ctx, sa_len);
	if (rc)
		return rc;

	crypto_ahash_set_reqsize(__crypto_ahash_cast(tfm),
				 sizeof(struct crypto4xx_ctx));
	sa = (struct dynamic_sa_hash160 *)ctx->sa_in;
	set_dynamic_sa_command_0(&sa->ctrl, SA_SAVE_HASH, SA_NOT_SAVE_IV,
				 SA_NOT_LOAD_HASH, SA_LOAD_IV_FROM_SA,
				 SA_NO_HEADER_PROC, ha, SA_CIPHER_ALG_NULL,
				 SA_PAD_TYPE_ZERO, SA_OP_GROUP_BASIC,
				 SA_OPCODE_HASH, DIR_INBOUND);
	set_dynamic_sa_command_1(&sa->ctrl, 0, SA_HASH_MODE_HASH,
				 CRYPTO_FEEDBACK_MODE_NO_FB, SA_EXTENDED_SN_OFF,
				 SA_SEQ_MASK_OFF, SA_MC_ENABLE,
				 SA_NOT_COPY_PAD, SA_NOT_COPY_PAYLOAD,
				 SA_NOT_COPY_HDR);
	/* Need to zero hash digest in SA */
	memset(sa->inner_digest, 0, sizeof(sa->inner_digest));
	memset(sa->outer_digest, 0, sizeof(sa->outer_digest));

	return 0;
}

int crypto4xx_hash_init(struct ahash_request *req)
{
	struct crypto4xx_ctx *ctx = crypto_tfm_ctx(req->base.tfm);
	int ds;
	struct dynamic_sa_ctl *sa;

	sa = ctx->sa_in;
	ds = crypto_ahash_digestsize(
			__crypto_ahash_cast(req->base.tfm));
	sa->sa_command_0.bf.digest_len = ds >> 2;
	sa->sa_command_0.bf.load_hash_state = SA_LOAD_HASH_FROM_SA;

	return 0;
}

int crypto4xx_hash_update(struct ahash_request *req)
{
	struct crypto_ahash *ahash = crypto_ahash_reqtfm(req);
	struct crypto4xx_ctx *ctx = crypto_tfm_ctx(req->base.tfm);
	struct scatterlist dst;
	unsigned int ds = crypto_ahash_digestsize(ahash);

	sg_init_one(&dst, req->result, ds);

	return crypto4xx_build_pd(&req->base, ctx, req->src, &dst,
				  req->nbytes, NULL, 0, ctx->sa_in,
				  ctx->sa_len, 0, NULL);
}

int crypto4xx_hash_final(struct ahash_request *req)
{
	return 0;
}

int crypto4xx_hash_digest(struct ahash_request *req)
{
	struct crypto_ahash *ahash = crypto_ahash_reqtfm(req);
	struct crypto4xx_ctx *ctx = crypto_tfm_ctx(req->base.tfm);
	struct scatterlist dst;
	unsigned int ds = crypto_ahash_digestsize(ahash);

	sg_init_one(&dst, req->result, ds);

	return crypto4xx_build_pd(&req->base, ctx, req->src, &dst,
				  req->nbytes, NULL, 0, ctx->sa_in,
				  ctx->sa_len, 0, NULL);
}
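
/*
 * Note that crypto4xx_hash_update() and crypto4xx_hash_digest() issue
 * the same single hardware pass and crypto4xx_hash_final() is a no-op:
 * the driver does not keep partial-hash state between calls.
 */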

/**
 * SHA1 Algorithm
 */
int crypto4xx_sha1_alg_init(struct crypto_tfm *tfm)
{
	return crypto4xx_hash_alg_init(tfm, SA_HASH160_LEN, SA_HASH_ALG_SHA1,
				       SA_HASH_MODE_HASH);
}