crypto: x86/chacha - Remove SIMD fallback path
author: Herbert Xu <herbert@gondor.apana.org.au>
Thu, 3 Apr 2025 04:14:50 +0000 (12:14 +0800)
committer: Herbert Xu <herbert@gondor.apana.org.au>
Mon, 7 Apr 2025 05:22:27 +0000 (13:22 +0800)
Get rid of the fallback path as SIMD is now always usable in softirq
context.

Signed-off-by: Herbert Xu <herbert@gondor.apana.org.au>
arch/x86/crypto/chacha_glue.c

index 8bb74a2728798b04fe1a4890ac02a905509d670a..6a3d60cf31925b456036b43abf7ee0c4c78124cd 100644 (file)
@@ -6,9 +6,7 @@
  * Copyright (C) 2015 Martin Willi
  */
 
-#include <crypto/algapi.h>
 #include <crypto/internal/chacha.h>
-#include <crypto/internal/simd.h>
 #include <crypto/internal/skcipher.h>
 #include <linux/kernel.h>
 #include <linux/module.h>
@@ -35,7 +33,6 @@ asmlinkage void chacha_4block_xor_avx512vl(u32 *state, u8 *dst, const u8 *src,
 asmlinkage void chacha_8block_xor_avx512vl(u32 *state, u8 *dst, const u8 *src,
                                           unsigned int len, int nrounds);
 
-static __ro_after_init DEFINE_STATIC_KEY_FALSE(chacha_use_simd);
 static __ro_after_init DEFINE_STATIC_KEY_FALSE(chacha_use_avx2);
 static __ro_after_init DEFINE_STATIC_KEY_FALSE(chacha_use_avx512vl);
 
@@ -123,23 +120,15 @@ static void chacha_dosimd(u32 *state, u8 *dst, const u8 *src,
 
 void hchacha_block_arch(const u32 *state, u32 *stream, int nrounds)
 {
-       if (!static_branch_likely(&chacha_use_simd) || !crypto_simd_usable()) {
-               hchacha_block_generic(state, stream, nrounds);
-       } else {
-               kernel_fpu_begin();
-               hchacha_block_ssse3(state, stream, nrounds);
-               kernel_fpu_end();
-       }
+       kernel_fpu_begin();
+       hchacha_block_ssse3(state, stream, nrounds);
+       kernel_fpu_end();
 }
 EXPORT_SYMBOL(hchacha_block_arch);
 
 void chacha_crypt_arch(u32 *state, u8 *dst, const u8 *src, unsigned int bytes,
                       int nrounds)
 {
-       if (!static_branch_likely(&chacha_use_simd) || !crypto_simd_usable() ||
-           bytes <= CHACHA_BLOCK_SIZE)
-               return chacha_crypt_generic(state, dst, src, bytes, nrounds);
-
        do {
                unsigned int todo = min_t(unsigned int, bytes, SZ_4K);
 
@@ -171,18 +160,11 @@ static int chacha_simd_stream_xor(struct skcipher_request *req,
                if (nbytes < walk.total)
                        nbytes = round_down(nbytes, walk.stride);
 
-               if (!static_branch_likely(&chacha_use_simd) ||
-                   !crypto_simd_usable()) {
-                       chacha_crypt_generic(state, walk.dst.virt.addr,
-                                            walk.src.virt.addr, nbytes,
-                                            ctx->nrounds);
-               } else {
-                       kernel_fpu_begin();
-                       chacha_dosimd(state, walk.dst.virt.addr,
-                                     walk.src.virt.addr, nbytes,
-                                     ctx->nrounds);
-                       kernel_fpu_end();
-               }
+               kernel_fpu_begin();
+               chacha_dosimd(state, walk.dst.virt.addr,
+                             walk.src.virt.addr, nbytes,
+                             ctx->nrounds);
+               kernel_fpu_end();
                err = skcipher_walk_done(&walk, walk.nbytes - nbytes);
        }
 
@@ -207,13 +189,9 @@ static int xchacha_simd(struct skcipher_request *req)
 
        chacha_init(state, ctx->key, req->iv);
 
-       if (req->cryptlen > CHACHA_BLOCK_SIZE && crypto_simd_usable()) {
-               kernel_fpu_begin();
-               hchacha_block_ssse3(state, subctx.key, ctx->nrounds);
-               kernel_fpu_end();
-       } else {
-               hchacha_block_generic(state, subctx.key, ctx->nrounds);
-       }
+       kernel_fpu_begin();
+       hchacha_block_ssse3(state, subctx.key, ctx->nrounds);
+       kernel_fpu_end();
        subctx.nrounds = ctx->nrounds;
 
        memcpy(&real_iv[0], req->iv + 24, 8);
@@ -275,8 +253,6 @@ static int __init chacha_simd_mod_init(void)
        if (!boot_cpu_has(X86_FEATURE_SSSE3))
                return 0;
 
-       static_branch_enable(&chacha_use_simd);
-
        if (boot_cpu_has(X86_FEATURE_AVX) &&
            boot_cpu_has(X86_FEATURE_AVX2) &&
            cpu_has_xfeatures(XFEATURE_MASK_SSE | XFEATURE_MASK_YMM, NULL)) {