lib/crypto: sha256: Remove sha256_is_arch_optimized()

Remove sha256_is_arch_optimized(), since it is no longer used.
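
For reference, the interface being removed looked like this: each
architecture exported its own sha256_is_arch_optimized(), and the common
header fell back to a stub returning false when no architecture
implementation was built in (copied verbatim from the header hunk below):

    #if IS_ENABLED(CONFIG_CRYPTO_ARCH_HAVE_LIB_SHA256)
    bool sha256_is_arch_optimized(void);
    #else
    static inline bool sha256_is_arch_optimized(void)
    {
    	return false;
    }
    #endif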

Acked-by: Ard Biesheuvel <ardb@kernel.org>
Link: https://lore.kernel.org/r/20250630160645.3198-12-ebiggers@kernel.org
Signed-off-by: Eric Biggers <ebiggers@kernel.org>
Author: Eric Biggers <ebiggers@kernel.org>
Date:   2025-06-30 09:06:42 -07:00
Commit: 9f9846a72e (parent 07f090959b)

9 changed files with 0 additions and 58 deletions

@@ -61,12 +61,6 @@ void sha256_blocks_arch(struct sha256_block_state *state,
 }
 EXPORT_SYMBOL_GPL(sha256_blocks_arch);
 
-bool sha256_is_arch_optimized(void)
-{
-	return octeon_has_crypto();
-}
-EXPORT_SYMBOL_GPL(sha256_is_arch_optimized);
-
 MODULE_LICENSE("GPL");
 MODULE_DESCRIPTION("SHA-256 Secure Hash Algorithm (OCTEON)");
 MODULE_AUTHOR("Aaro Koskinen <aaro.koskinen@iki.fi>");

@@ -9,14 +9,6 @@
 #include <linux/types.h>
 #include <linux/unaligned.h>
 
-#if IS_ENABLED(CONFIG_CRYPTO_ARCH_HAVE_LIB_SHA256)
-bool sha256_is_arch_optimized(void);
-#else
-static inline bool sha256_is_arch_optimized(void)
-{
-	return false;
-}
-#endif
 void sha256_blocks_generic(struct sha256_block_state *state,
 			   const u8 *data, size_t nblocks);
 void sha256_blocks_arch(struct sha256_block_state *state,

@@ -37,13 +37,6 @@ void sha256_blocks_arch(struct sha256_block_state *state,
 }
 EXPORT_SYMBOL_GPL(sha256_blocks_arch);
 
-bool sha256_is_arch_optimized(void)
-{
-	/* We always can use at least the ARM scalar implementation. */
-	return true;
-}
-EXPORT_SYMBOL_GPL(sha256_is_arch_optimized);
-
 static int __init sha256_arm_mod_init(void)
 {
 	if (IS_ENABLED(CONFIG_KERNEL_MODE_NEON) && (elf_hwcap & HWCAP_NEON)) {

@@ -47,13 +47,6 @@ void sha256_blocks_arch(struct sha256_block_state *state,
 }
 EXPORT_SYMBOL_GPL(sha256_blocks_arch);
 
-bool sha256_is_arch_optimized(void)
-{
-	/* We always can use at least the ARM64 scalar implementation. */
-	return true;
-}
-EXPORT_SYMBOL_GPL(sha256_is_arch_optimized);
-
 static int __init sha256_arm64_mod_init(void)
 {
 	if (IS_ENABLED(CONFIG_KERNEL_MODE_NEON) &&

@@ -61,11 +61,5 @@ void sha256_blocks_arch(struct sha256_block_state *state,
 }
 EXPORT_SYMBOL_GPL(sha256_blocks_arch);
 
-bool sha256_is_arch_optimized(void)
-{
-	return true;
-}
-EXPORT_SYMBOL_GPL(sha256_is_arch_optimized);
-
 MODULE_LICENSE("GPL");
 MODULE_DESCRIPTION("SHA-256 Secure Hash Algorithm, SPE optimized");

@@ -34,12 +34,6 @@ void sha256_blocks_arch(struct sha256_block_state *state,
 }
 EXPORT_SYMBOL_GPL(sha256_blocks_arch);
 
-bool sha256_is_arch_optimized(void)
-{
-	return static_key_enabled(&have_extensions);
-}
-EXPORT_SYMBOL_GPL(sha256_is_arch_optimized);
-
 static int __init riscv64_sha256_mod_init(void)
 {
 	/* Both zvknha and zvknhb provide the SHA-256 instructions. */

@@ -23,12 +23,6 @@ void sha256_blocks_arch(struct sha256_block_state *state,
 }
 EXPORT_SYMBOL_GPL(sha256_blocks_arch);
 
-bool sha256_is_arch_optimized(void)
-{
-	return static_key_enabled(&have_cpacf_sha256);
-}
-EXPORT_SYMBOL_GPL(sha256_is_arch_optimized);
-
 static int __init sha256_s390_mod_init(void)
 {
 	if (cpu_have_feature(S390_CPU_FEATURE_MSA) &&

@@ -32,12 +32,6 @@ void sha256_blocks_arch(struct sha256_block_state *state,
 }
 EXPORT_SYMBOL_GPL(sha256_blocks_arch);
 
-bool sha256_is_arch_optimized(void)
-{
-	return static_key_enabled(&have_sha256_opcodes);
-}
-EXPORT_SYMBOL_GPL(sha256_is_arch_optimized);
-
 static int __init sha256_sparc64_mod_init(void)
 {
 	unsigned long cfr;

@@ -37,12 +37,6 @@ void sha256_blocks_arch(struct sha256_block_state *state,
 }
 EXPORT_SYMBOL_GPL(sha256_blocks_arch);
 
-bool sha256_is_arch_optimized(void)
-{
-	return static_key_enabled(&have_sha256_x86);
-}
-EXPORT_SYMBOL_GPL(sha256_is_arch_optimized);
-
 static int __init sha256_x86_mod_init(void)
 {
 	if (boot_cpu_has(X86_FEATURE_SHA_NI)) {