From 9b4400215e0e0b3edad010398069fcc51659a8cc Mon Sep 17 00:00:00 2001
From: Herbert Xu <herbert@gondor.apana.org.au>
Date: Thu, 3 Apr 2025 12:14:50 +0800
Subject: [PATCH] crypto: x86/chacha - Remove SIMD fallback path

Get rid of the fallback path as SIMD is now always usable in softirq
context.

Signed-off-by: Herbert Xu <herbert@gondor.apana.org.au>
---
 arch/x86/crypto/chacha_glue.c | 46 +++++++++++-----------------------------------
 1 file changed, 11 insertions(+), 35 deletions(-)

diff --git a/arch/x86/crypto/chacha_glue.c b/arch/x86/crypto/chacha_glue.c
index 8bb74a272879..6a3d60cf3192 100644
--- a/arch/x86/crypto/chacha_glue.c
+++ b/arch/x86/crypto/chacha_glue.c
@@ -6,9 +6,7 @@
  * Copyright (C) 2015 Martin Willi
  */
 
-#include <asm/simd.h>
 #include <crypto/chacha.h>
-#include <crypto/internal/simd.h>
 #include <crypto/internal/skcipher.h>
 #include <linux/kernel.h>
 #include <linux/module.h>
@@ -35,7 +33,6 @@ asmlinkage void chacha_4block_xor_avx512vl(u32 *state, u8 *dst, const u8 *src,
 asmlinkage void chacha_8block_xor_avx512vl(u32 *state, u8 *dst, const u8 *src,
 					   unsigned int len, int nrounds);
 
-static __ro_after_init DEFINE_STATIC_KEY_FALSE(chacha_use_simd);
 static __ro_after_init DEFINE_STATIC_KEY_FALSE(chacha_use_avx2);
 static __ro_after_init DEFINE_STATIC_KEY_FALSE(chacha_use_avx512vl);
 
@@ -123,23 +120,15 @@ static void chacha_dosimd(u32 *state, u8 *dst, const u8 *src,
 
 void hchacha_block_arch(const u32 *state, u32 *stream, int nrounds)
 {
-	if (!static_branch_likely(&chacha_use_simd) || !crypto_simd_usable()) {
-		hchacha_block_generic(state, stream, nrounds);
-	} else {
-		kernel_fpu_begin();
-		hchacha_block_ssse3(state, stream, nrounds);
-		kernel_fpu_end();
-	}
+	kernel_fpu_begin();
+	hchacha_block_ssse3(state, stream, nrounds);
+	kernel_fpu_end();
 }
 EXPORT_SYMBOL(hchacha_block_arch);
 
 void chacha_crypt_arch(u32 *state, u8 *dst, const u8 *src, unsigned int bytes,
 		       int nrounds)
 {
-	if (!static_branch_likely(&chacha_use_simd) || !crypto_simd_usable() ||
-	    bytes <= CHACHA_BLOCK_SIZE)
-		return chacha_crypt_generic(state, dst, src, bytes, nrounds);
-
 	do {
 		unsigned int todo = min_t(unsigned int, bytes, SZ_4K);
 
@@ -171,18 +160,11 @@ static int chacha_simd_stream_xor(struct skcipher_request *req,
 		if (nbytes < walk.total)
 			nbytes = round_down(nbytes, walk.stride);
 
-		if (!static_branch_likely(&chacha_use_simd) ||
-		    !crypto_simd_usable()) {
-			chacha_crypt_generic(state, walk.dst.virt.addr,
-					     walk.src.virt.addr, nbytes,
-					     ctx->nrounds);
-		} else {
-			kernel_fpu_begin();
-			chacha_dosimd(state, walk.dst.virt.addr,
-				      walk.src.virt.addr, nbytes,
-				      ctx->nrounds);
-			kernel_fpu_end();
-		}
+		kernel_fpu_begin();
+		chacha_dosimd(state, walk.dst.virt.addr,
+			      walk.src.virt.addr, nbytes,
+			      ctx->nrounds);
+		kernel_fpu_end();
 
 		err = skcipher_walk_done(&walk, walk.nbytes - nbytes);
 	}
@@ -207,13 +189,9 @@ static int xchacha_simd(struct skcipher_request *req)
 
 	chacha_init(state, ctx->key, req->iv);
 
-	if (req->cryptlen > CHACHA_BLOCK_SIZE && crypto_simd_usable()) {
-		kernel_fpu_begin();
-		hchacha_block_ssse3(state, subctx.key, ctx->nrounds);
-		kernel_fpu_end();
-	} else {
-		hchacha_block_generic(state, subctx.key, ctx->nrounds);
-	}
+	kernel_fpu_begin();
+	hchacha_block_ssse3(state, subctx.key, ctx->nrounds);
+	kernel_fpu_end();
 
 	subctx.nrounds = ctx->nrounds;
 	memcpy(&real_iv[0], req->iv + 24, 8);
@@ -275,8 +253,6 @@ static int __init chacha_simd_mod_init(void)
 	if (!boot_cpu_has(X86_FEATURE_SSSE3))
 		return 0;
 
-	static_branch_enable(&chacha_use_simd);
-
 	if (boot_cpu_has(X86_FEATURE_AVX) &&
 	    boot_cpu_has(X86_FEATURE_AVX2) &&
 	    cpu_has_xfeatures(XFEATURE_MASK_SSE | XFEATURE_MASK_YMM, NULL)) {
-- 
2.25.1
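
For readers outside the crypto tree, the reasoning behind the deletion can be
sketched as follows: kernel_fpu_begin()/kernel_fpu_end() sections are now
permitted in softirq context as well as task context, so crypto_simd_usable()
can no longer return false anywhere these library routines run, making the
generic branch dead code. Below is a minimal, self-contained userspace sketch
of that argument; simd_usable(), chacha_simd() and chacha_generic() are
illustrative stand-ins, not the kernel's real API.

#include <stdbool.h>
#include <stdio.h>

/* Stand-in for crypto_simd_usable(): after the prerequisite series it
 * cannot return false in any context the chacha library code runs in. */
static bool simd_usable(void)
{
	return true;
}

static void chacha_generic(void)
{
	puts("scalar fallback path");
}

static void chacha_simd(void)
{
	puts("SIMD path");
}

static void chacha_crypt(void)
{
	/* Shape of the code before this patch: because simd_usable() is
	 * now constant-true, the fallback branch is unreachable, which
	 * is exactly what lets the patch delete it outright. */
	if (!simd_usable())
		chacha_generic();	/* dead code after the series */
	else
		chacha_simd();
}

int main(void)
{
	chacha_crypt();
	return 0;
}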