crypto: riscv/sha256 - Use API partial block handling
author		Herbert Xu <herbert@gondor.apana.org.au>
		Fri, 18 Apr 2025 02:59:45 +0000 (10:59 +0800)
committer	Herbert Xu <herbert@gondor.apana.org.au>
		Wed, 23 Apr 2025 07:52:45 +0000 (15:52 +0800)
Use the Crypto API partial block handling.

The update and finup paths now pass only whole blocks to the assembly
routine, via sha256_base_do_update_blocks() and sha256_base_do_finup(),
and let the crypto API buffer any partial block.  The descriptor shrinks
to struct crypto_sha256_state, the final callback is dropped, and
CRYPTO_AHASH_ALG_BLOCK_ONLY | CRYPTO_AHASH_ALG_FINUP_MAX is set on both
algorithms.  The generic sha256_transform_blocks() helper is exported
from lib/crypto/sha256.c so it can serve as the fallback when SIMD is
not usable.
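For reference, a minimal sketch of the contract the driver now relies on.
This is an assumption about how sha256_base_do_update_blocks() behaves
(it uses the definitions from include/crypto/sha256_base.h); it is not
code from this patch:

/*
 * Sketch only (assumed behaviour): feed the block function whole
 * blocks and return the number of trailing bytes, which the shash
 * core buffers and replays on the next update/finup call.
 */
static inline int sha256_base_do_update_blocks(struct shash_desc *desc,
					       const u8 *data,
					       unsigned int len,
					       sha256_block_fn *block_fn)
{
	unsigned int remain = len % SHA256_BLOCK_SIZE;
	struct crypto_sha256_state *sctx = shash_desc_ctx(desc);

	sctx->count += len - remain;
	if (len - remain)
		block_fn(sctx, data, (len - remain) / SHA256_BLOCK_SIZE);
	return remain;
}

Under that contract riscv64_sha256_update() can simply return the
helper's result, and the driver no longer needs its own buffering or a
final callback.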

Signed-off-by: Herbert Xu <herbert@gondor.apana.org.au>
arch/riscv/crypto/sha256-riscv64-glue.c
include/crypto/sha256_base.h
lib/crypto/sha256.c

index 71e051e40a64f4cbc8702f724b20e202c045a866..c998300ab84354a4c97d6e52ce2c922a7b962fd7 100644
@@ -14,7 +14,7 @@
 #include <crypto/internal/hash.h>
 #include <crypto/internal/simd.h>
 #include <crypto/sha256_base.h>
-#include <linux/linkage.h>
+#include <linux/kernel.h>
 #include <linux/module.h>
 
 /*
  * It is assumed to be the first field.
  */
 asmlinkage void sha256_transform_zvknha_or_zvknhb_zvkb(
-       struct sha256_state *state, const u8 *data, int num_blocks);
+       struct crypto_sha256_state *state, const u8 *data, int num_blocks);
 
-static int riscv64_sha256_update(struct shash_desc *desc, const u8 *data,
-                                unsigned int len)
+static void sha256_block(struct crypto_sha256_state *state, const u8 *data,
+                        int num_blocks)
 {
        /*
-        * Ensure struct sha256_state begins directly with the SHA-256
+        * Ensure struct crypto_sha256_state begins directly with the SHA-256
         * 256-bit internal state, as this is what the asm function expects.
         */
-       BUILD_BUG_ON(offsetof(struct sha256_state, state) != 0);
+       BUILD_BUG_ON(offsetof(struct crypto_sha256_state, state) != 0);
 
        if (crypto_simd_usable()) {
                kernel_vector_begin();
-               sha256_base_do_update(desc, data, len,
-                                     sha256_transform_zvknha_or_zvknhb_zvkb);
+               sha256_transform_zvknha_or_zvknhb_zvkb(state, data, num_blocks);
                kernel_vector_end();
-       } else {
-               crypto_sha256_update(desc, data, len);
-       }
-       return 0;
+       } else
+               sha256_transform_blocks(state, data, num_blocks);
 }
 
-static int riscv64_sha256_finup(struct shash_desc *desc, const u8 *data,
-                               unsigned int len, u8 *out)
+static int riscv64_sha256_update(struct shash_desc *desc, const u8 *data,
+                                unsigned int len)
 {
-       if (crypto_simd_usable()) {
-               kernel_vector_begin();
-               if (len)
-                       sha256_base_do_update(
-                               desc, data, len,
-                               sha256_transform_zvknha_or_zvknhb_zvkb);
-               sha256_base_do_finalize(
-                       desc, sha256_transform_zvknha_or_zvknhb_zvkb);
-               kernel_vector_end();
-
-               return sha256_base_finish(desc, out);
-       }
-
-       return crypto_sha256_finup(desc, data, len, out);
+       return sha256_base_do_update_blocks(desc, data, len, sha256_block);
 }
 
-static int riscv64_sha256_final(struct shash_desc *desc, u8 *out)
+static int riscv64_sha256_finup(struct shash_desc *desc, const u8 *data,
+                               unsigned int len, u8 *out)
 {
-       return riscv64_sha256_finup(desc, NULL, 0, out);
+       sha256_base_do_finup(desc, data, len, sha256_block);
+       return sha256_base_finish(desc, out);
 }
 
 static int riscv64_sha256_digest(struct shash_desc *desc, const u8 *data,
@@ -79,13 +65,14 @@ static struct shash_alg riscv64_sha256_algs[] = {
        {
                .init = sha256_base_init,
                .update = riscv64_sha256_update,
-               .final = riscv64_sha256_final,
                .finup = riscv64_sha256_finup,
                .digest = riscv64_sha256_digest,
-               .descsize = sizeof(struct sha256_state),
+               .descsize = sizeof(struct crypto_sha256_state),
                .digestsize = SHA256_DIGEST_SIZE,
                .base = {
                        .cra_blocksize = SHA256_BLOCK_SIZE,
+                       .cra_flags = CRYPTO_AHASH_ALG_BLOCK_ONLY |
+                                    CRYPTO_AHASH_ALG_FINUP_MAX,
                        .cra_priority = 300,
                        .cra_name = "sha256",
                        .cra_driver_name = "sha256-riscv64-zvknha_or_zvknhb-zvkb",
@@ -94,12 +81,13 @@ static struct shash_alg riscv64_sha256_algs[] = {
        }, {
                .init = sha224_base_init,
                .update = riscv64_sha256_update,
-               .final = riscv64_sha256_final,
                .finup = riscv64_sha256_finup,
-               .descsize = sizeof(struct sha256_state),
+               .descsize = sizeof(struct crypto_sha256_state),
                .digestsize = SHA224_DIGEST_SIZE,
                .base = {
                        .cra_blocksize = SHA224_BLOCK_SIZE,
+                       .cra_flags = CRYPTO_AHASH_ALG_BLOCK_ONLY |
+                                    CRYPTO_AHASH_ALG_FINUP_MAX,
                        .cra_priority = 300,
                        .cra_name = "sha224",
                        .cra_driver_name = "sha224-riscv64-zvknha_or_zvknhb-zvkb",
index 727a1b63e1e976c0ada22a4b90df398da1541a66..b9d3583b625639c02f7cc928a23e19c16883a01f 100644
@@ -198,4 +198,7 @@ static inline int sha256_base_finish(struct shash_desc *desc, u8 *out)
        return __sha256_base_finish(sctx->state, out, digest_size);
 }
 
+void sha256_transform_blocks(struct crypto_sha256_state *sst,
+                            const u8 *input, int blocks);
+
 #endif /* _CRYPTO_SHA256_BASE_H */
index 04c1f2557e6c2752b9998e2eee9a2df471521428..39ead0222937679c8747ce5919bf25129968cb1d 100644
@@ -118,28 +118,36 @@ static void sha256_transform(u32 *state, const u8 *input, u32 *W)
        state[4] += e; state[5] += f; state[6] += g; state[7] += h;
 }
 
-static void sha256_transform_blocks(struct sha256_state *sctx,
-                                   const u8 *input, int blocks)
+void sha256_transform_blocks(struct crypto_sha256_state *sst,
+                            const u8 *input, int blocks)
 {
        u32 W[64];
 
        do {
-               sha256_transform(sctx->state, input, W);
+               sha256_transform(sst->state, input, W);
                input += SHA256_BLOCK_SIZE;
        } while (--blocks);
 
        memzero_explicit(W, sizeof(W));
 }
+EXPORT_SYMBOL_GPL(sha256_transform_blocks);
+
+static void lib_sha256_transform_blocks(struct sha256_state *sctx,
+                                       const u8 *input, int blocks)
+{
+       sha256_transform_blocks((struct crypto_sha256_state *)sctx, input,
+                               blocks);
+}
 
 void sha256_update(struct sha256_state *sctx, const u8 *data, unsigned int len)
 {
-       lib_sha256_base_do_update(sctx, data, len, sha256_transform_blocks);
+       lib_sha256_base_do_update(sctx, data, len, lib_sha256_transform_blocks);
 }
 EXPORT_SYMBOL(sha256_update);
 
 static void __sha256_final(struct sha256_state *sctx, u8 *out, int digest_size)
 {
-       lib_sha256_base_do_finalize(sctx, sha256_transform_blocks);
+       lib_sha256_base_do_finalize(sctx, lib_sha256_transform_blocks);
        lib_sha256_base_finish(sctx, out, digest_size);
 }