lib/crc: sparc: Migrate optimized CRC code into lib/crc/

Move the sparc-optimized CRC code from arch/sparc/lib/crc* into its new
location in lib/crc/sparc/, and wire it up in the new way.  This new way
of organizing the CRC code eliminates the need to artificially split the
code for each CRC variant into separate arch and generic modules,
enabling better inlining and dead code elimination.  For more details,
see "lib/crc: Prepare for arch-optimized code in subdirs of lib/crc/".

Reviewed-by: "Martin K. Petersen" <martin.petersen@oracle.com>
Acked-by: Ingo Molnar <mingo@kernel.org>
Acked-by: "Jason A. Donenfeld" <Jason@zx2c4.com>
Link: https://lore.kernel.org/r/20250607200454.73587-11-ebiggers@kernel.org
Signed-off-by: Eric Biggers <ebiggers@kernel.org>
Author: Eric Biggers <ebiggers@kernel.org>
Date:   2025-06-07 13:04:52 -07:00
Parent: 2374bf2386
Commit: 9b2d720e8a
6 changed files with 10 additions and 37 deletions
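The heart of the reorganization is visible in the last diff below: crc32c_arch() and the other hooks stop being EXPORT_SYMBOL'ed functions in a standalone crc32-sparc module and become static inlines (or #define fallbacks to the *_base functions) in a header under lib/crc/sparc/, which the generic CRC32 code can compile into its own translation unit. The following standalone sketch illustrates why that enables inlining and dead code elimination; apart from the *_base/*_arch naming taken from the diff, every name here is hypothetical scaffolding, not the kernel sources.

/*
 * Standalone sketch (not the kernel sources) of the lib/crc/ pattern:
 * the per-arch header contributes static inline hooks, the generic CRC
 * code includes them into its own translation unit, and the compiler
 * can inline the hook and discard unused fallbacks.
 */
#include <stddef.h>
#include <stdint.h>

typedef uint32_t u32;
typedef uint8_t u8;

/* Stand-in for the generic bitwise CRC-32C (Castagnoli, reflected). */
static u32 crc32c_base(u32 crc, const u8 *data, size_t len)
{
        while (len--) {
                crc ^= *data++;
                for (int i = 0; i < 8; i++)
                        crc = (crc >> 1) ^ (0x82f63b78 & -(crc & 1));
        }
        return crc;
}

/* What a per-arch header like lib/crc/sparc/crc32.h contributes: a hook
 * the generic code sees as a static inline, not an exported symbol. */
static inline u32 crc32c_arch(u32 crc, const u8 *data, size_t len)
{
        /* a real arch header would dispatch to the optimized opcode here */
        return crc32c_base(crc, data, len);
}

/* What the generic code can then do in the same translation unit: no
 * cross-module call, no separate arch module to load at runtime. */
u32 crc32c(u32 crc, const void *p, size_t len)
{
        return crc32c_arch(crc, p, len);
}

Under the old split, crc32_le_arch() and crc32_be_arch() had to exist as exported wrapper functions even though sparc never accelerates those variants; as the #define fallbacks in the new header they disappear entirely at compile time.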

arch/sparc/Kconfig

@@ -110,7 +110,6 @@ config SPARC64
         select HAVE_SETUP_PER_CPU_AREA
         select NEED_PER_CPU_EMBED_FIRST_CHUNK
         select NEED_PER_CPU_PAGE_FIRST_CHUNK
-        select ARCH_HAS_CRC32
 
 config ARCH_PROC_KCORE_TEXT
         def_bool y

arch/sparc/lib/Makefile

@@ -54,5 +54,3 @@ lib-$(CONFIG_SPARC64) += mcount.o ipcsum.o xor.o hweight.o ffs.o
 obj-$(CONFIG_SPARC64) += iomap.o
 obj-$(CONFIG_SPARC32) += atomic32.o
 obj-$(CONFIG_SPARC64) += PeeCeeI.o
-obj-$(CONFIG_CRC32_ARCH) += crc32-sparc.o
-crc32-sparc-y := crc32.o crc32c_asm.o

lib/crc/Kconfig

@@ -75,6 +75,7 @@ config CRC32_ARCH
         default y if PPC64 && ALTIVEC
         default y if RISCV && RISCV_ISA_ZBC
         default y if S390
+        default y if SPARC64
 
 config CRC64
         tristate

lib/crc/Makefile

@@ -28,6 +28,7 @@ crc32-$(CONFIG_ARM64) += arm64/crc32-core.o
 crc32-$(CONFIG_PPC) += powerpc/crc32c-vpmsum_asm.o
 crc32-$(CONFIG_RISCV) += riscv/crc32_lsb.o riscv/crc32_msb.o
 crc32-$(CONFIG_S390) += s390/crc32le-vx.o s390/crc32be-vx.o
+crc32-$(CONFIG_SPARC) += sparc/crc32c_asm.o
 endif
 
 obj-$(CONFIG_CRC64) += crc64.o

arch/sparc/lib/crc32.c → lib/crc/sparc/crc32.h

@@ -8,26 +8,17 @@
  * Kent Liu <kent.liu@intel.com>
  */
 
-#define pr_fmt(fmt) KBUILD_MODNAME ": " fmt
-
-#include <linux/init.h>
-#include <linux/module.h>
-#include <linux/kernel.h>
-#include <linux/crc32.h>
-
 #include <asm/pstate.h>
 #include <asm/elf.h>
 
 static __ro_after_init DEFINE_STATIC_KEY_FALSE(have_crc32c_opcode);
 
-u32 crc32_le_arch(u32 crc, const u8 *data, size_t len)
-{
-        return crc32_le_base(crc, data, len);
-}
-EXPORT_SYMBOL(crc32_le_arch);
+#define crc32_le_arch crc32_le_base /* not implemented on this arch */
+#define crc32_be_arch crc32_be_base /* not implemented on this arch */
 
 void crc32c_sparc64(u32 *crcp, const u64 *data, size_t len);
 
-u32 crc32c_arch(u32 crc, const u8 *data, size_t len)
+static inline u32 crc32c_arch(u32 crc, const u8 *data, size_t len)
 {
         size_t n = -(uintptr_t)data & 7;
@@ -51,43 +42,26 @@ u32 crc32c_arch(u32 crc, const u8 *data, size_t len)
                 crc = crc32c_base(crc, data, len);
         return crc;
 }
-EXPORT_SYMBOL(crc32c_arch);
-
-u32 crc32_be_arch(u32 crc, const u8 *data, size_t len)
-{
-        return crc32_be_base(crc, data, len);
-}
-EXPORT_SYMBOL(crc32_be_arch);
 
-static int __init crc32_sparc_init(void)
+#define crc32_mod_init_arch crc32_mod_init_arch
+static inline void crc32_mod_init_arch(void)
 {
         unsigned long cfr;
 
         if (!(sparc64_elf_hwcap & HWCAP_SPARC_CRYPTO))
-                return 0;
+                return;
 
         __asm__ __volatile__("rd %%asr26, %0" : "=r" (cfr));
         if (!(cfr & CFR_CRC32C))
-                return 0;
+                return;
 
         static_branch_enable(&have_crc32c_opcode);
         pr_info("Using sparc64 crc32c opcode optimized CRC32C implementation\n");
-        return 0;
 }
-subsys_initcall(crc32_sparc_init);
 
-static void __exit crc32_sparc_exit(void)
-{
-}
-module_exit(crc32_sparc_exit);
-
-u32 crc32_optimizations(void)
+static inline u32 crc32_optimizations_arch(void)
 {
         if (static_key_enabled(&have_crc32c_opcode))
                 return CRC32C_OPTIMIZATION;
         return 0;
 }
-EXPORT_SYMBOL(crc32_optimizations);
-
-MODULE_LICENSE("GPL");
-MODULE_DESCRIPTION("CRC32c (Castagnoli), sparc64 crc32c opcode accelerated");
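
A note on the alignment prologue kept in crc32c_arch() above: size_t n = -(uintptr_t)data & 7 computes how many leading bytes are needed to bring data to an 8-byte boundary (those are handled separately before the bulk of the buffer is passed to crc32c_sparc64(), whose prototype takes const u64 *), and any sub-8-byte tail again falls back to crc32c_base(), as the second hunk's context lines show. Below is a small self-contained check of that idiom; bytes_to_align() is a name made up here for illustration, not a kernel helper.

#include <assert.h>
#include <stddef.h>
#include <stdint.h>
#include <stdio.h>

/* The `-(uintptr_t)p & 7` idiom: with unsigned wraparound, -addr mod 8
 * equals (8 - addr % 8) % 8, i.e. the distance to the next 8-byte boundary. */
static size_t bytes_to_align(const void *p)
{
        return -(uintptr_t)p & 7;
}

int main(void)
{
        char buf[32];

        for (size_t off = 0; off < 16; off++) {
                const char *p = buf + off;
                size_t n = bytes_to_align(p);

                assert(((uintptr_t)(p + n) & 7) == 0); /* now 8-byte aligned */
                assert(n < 8);                         /* never skips a whole word */
                printf("offset %zu -> advance %zu byte(s)\n", off, n);
        }
        return 0;
}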