Mirror of git://git.kernel.org/pub/scm/linux/kernel/git/torvalds/linux.git, last synced 2025-08-05 16:54:27 +00:00.

Consolidate the CPU-based SHA-256 code into a single module, following what I did with SHA-512:

- Each arch now provides a header file lib/crypto/$(SRCARCH)/sha256.h, replacing lib/crypto/$(SRCARCH)/sha256.c. The header defines sha256_blocks() and optionally sha256_mod_init_arch(). It is included by lib/crypto/sha256.c, and thus the code gets built into the single libsha256 module, with proper inlining and dead code elimination.

- sha256_blocks_generic() is moved from lib/crypto/sha256-generic.c into lib/crypto/sha256.c. It's now a static function marked with __maybe_unused, so the compiler automatically eliminates it in any cases where it's not used.

- Whether arch-optimized SHA-256 is buildable is now controlled centrally by lib/crypto/Kconfig instead of by lib/crypto/$(SRCARCH)/Kconfig. The conditions for enabling it remain the same as before, and it remains enabled by default.

- Any additional arch-specific translation units for the optimized SHA-256 code (such as assembly files) are now compiled by lib/crypto/Makefile instead of lib/crypto/$(SRCARCH)/Makefile.

Acked-by: Ard Biesheuvel <ardb@kernel.org>
Link: https://lore.kernel.org/r/20250630160645.3198-13-ebiggers@kernel.org
Signed-off-by: Eric Biggers <ebiggers@kernel.org>
46 lines · 1.4 KiB · C
/* SPDX-License-Identifier: GPL-2.0-or-later */
|
|
/*
|
|
* SHA-256 optimized for ARM
|
|
*
|
|
* Copyright 2025 Google LLC
|
|
*/
|
|
#include <asm/neon.h>
|
|
#include <crypto/internal/simd.h>
|
|
|
|
/*
 * Assembly SHA-256 block functions.  Each consumes 'nblocks' message
 * blocks from 'data' and updates the hash state in place.
 */

/* Scalar implementation; fallback used when NEON is unavailable or unusable. */
asmlinkage void sha256_block_data_order(struct sha256_block_state *state,
					const u8 *data, size_t nblocks);
/* NEON implementation; callers must bracket it with kernel_neon_begin()/end(). */
asmlinkage void sha256_block_data_order_neon(struct sha256_block_state *state,
					     const u8 *data, size_t nblocks);
/* Implementation using the SHA-2 crypto extension instructions (HWCAP2_SHA2). */
asmlinkage void sha256_ce_transform(struct sha256_block_state *state,
				    const u8 *data, size_t nblocks);

/* CPU feature flags, set once at init by sha256_mod_init_arch() below. */
static __ro_after_init DEFINE_STATIC_KEY_FALSE(have_neon);
static __ro_after_init DEFINE_STATIC_KEY_FALSE(have_ce);
|
|
|
|
/*
 * Process 'nblocks' SHA-256 message blocks from 'data', updating 'state'.
 *
 * Dispatches to the fastest implementation available on this CPU:
 *  1. sha256_ce_transform() when the SHA-2 crypto extensions are present
 *     (have_ce, probed from HWCAP2_SHA2 at init),
 *  2. the NEON implementation when NEON is present and SIMD may be used
 *     in the current context (crypto_simd_usable()),
 *  3. the scalar assembly implementation otherwise.
 *
 * The NEON paths must run between kernel_neon_begin() and
 * kernel_neon_end(); the static branches make the common-case dispatch
 * essentially free.
 */
static void sha256_blocks(struct sha256_block_state *state,
			  const u8 *data, size_t nblocks)
{
	if (IS_ENABLED(CONFIG_KERNEL_MODE_NEON) &&
	    static_branch_likely(&have_neon) && crypto_simd_usable()) {
		kernel_neon_begin();
		if (static_branch_likely(&have_ce))
			sha256_ce_transform(state, data, nblocks);
		else
			sha256_block_data_order_neon(state, data, nblocks);
		kernel_neon_end();
	} else {
		sha256_block_data_order(state, data, nblocks);
	}
}
|
|
|
|
#ifdef CONFIG_KERNEL_MODE_NEON
/*
 * One-time arch init hook, picked up by lib/crypto/sha256.c via the
 * #define below.  Probes the ELF hwcaps and enables the static keys
 * that sha256_blocks() dispatches on: have_neon when the CPU has NEON,
 * and additionally have_ce when it has the SHA-2 crypto extensions.
 */
#define sha256_mod_init_arch sha256_mod_init_arch
static inline void sha256_mod_init_arch(void)
{
	if (!(elf_hwcap & HWCAP_NEON))
		return;

	static_branch_enable(&have_neon);
	if (elf_hwcap2 & HWCAP2_SHA2)
		static_branch_enable(&have_ce);
}
#endif /* CONFIG_KERNEL_MODE_NEON */
|