// SPDX-License-Identifier: GPL-2.0-only
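/*
 * CRC32C for powerpc, accelerated with the vector polynomial multiply-sum
 * (vpmsum) instructions from the POWER8 vector crypto facility.  Short,
 * misaligned, or non-SIMD-safe requests fall back to the generic
 * crc32c_base() implementation.
 */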
#include <asm/switch_to.h>
#include <crypto/internal/simd.h>
#include <linux/cpufeature.h>
#include <linux/jump_label.h>
#include <linux/preempt.h>
#include <linux/uaccess.h>

#define VMX_ALIGN		16
#define VMX_ALIGN_MASK		(VMX_ALIGN-1)
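
/*
 * Below roughly this many bytes the whole buffer is handled by crc32c_base();
 * the vector routine only pays off on larger inputs.
 */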
#define VECTOR_BREAKPOINT	512
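
/* Enabled at init time when the CPU supports the vector crypto instructions. */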
static __ro_after_init DEFINE_STATIC_KEY_FALSE(have_vec_crypto);

#define crc32_le_arch crc32_le_base /* not implemented on this arch */
#define crc32_be_arch crc32_be_base /* not implemented on this arch */
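
/*
 * Vector-accelerated inner loop, implemented in assembly.  The glue code below
 * only calls it with a VMX_ALIGN-aligned pointer and a multiple of VMX_ALIGN
 * bytes.
 */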
u32 __crc32c_vpmsum(u32 crc, const u8 *p, size_t len);
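
/*
 * Checksum an unaligned head and a short tail with crc32c_base(), and hand the
 * 16-byte-aligned middle of the buffer to __crc32c_vpmsum().  Small buffers,
 * and contexts where the vector unit is unavailable, use crc32c_base() alone.
 */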
static inline u32 crc32c_arch(u32 crc, const u8 *p, size_t len)
{
	unsigned int prealign;
	unsigned int tail;

	if (len < (VECTOR_BREAKPOINT + VMX_ALIGN) ||
	    !static_branch_likely(&have_vec_crypto) || !crypto_simd_usable())
		return crc32c_base(crc, p, len);
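
	/* Scalar pass over the unaligned head, up to the next 16-byte boundary. */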
	if ((unsigned long)p & VMX_ALIGN_MASK) {
		prealign = VMX_ALIGN - ((unsigned long)p & VMX_ALIGN_MASK);
		crc = crc32c_base(crc, p, prealign);
		len -= prealign;
		p += prealign;
	}
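
	/*
	 * Vector pass over the aligned bulk.  Preemption and page faults are
	 * disabled while the kernel borrows the AltiVec/VMX unit between
	 * enable_kernel_altivec() and disable_kernel_altivec().
	 */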
	if (len & ~VMX_ALIGN_MASK) {
		preempt_disable();
		pagefault_disable();
		enable_kernel_altivec();
		crc = __crc32c_vpmsum(crc, p, len & ~VMX_ALIGN_MASK);
		disable_kernel_altivec();
		pagefault_enable();
		preempt_enable();
	}
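
	/* Scalar pass over the remaining tail of fewer than VMX_ALIGN bytes. */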
	tail = len & VMX_ALIGN_MASK;
	if (tail) {
		p += len & ~VMX_ALIGN_MASK;
		crc = crc32c_base(crc, p, tail);
	}

	return crc;
}
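
/*
 * Enable the vector path at init time if the CPU implements ISA 2.07 and
 * advertises the vector crypto facility.
 */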
#define crc32_mod_init_arch crc32_mod_init_arch
static inline void crc32_mod_init_arch(void)
{
	if (cpu_has_feature(CPU_FTR_ARCH_207S) &&
	    (cur_cpu_spec->cpu_user_features2 & PPC_FEATURE2_VEC_CRYPTO))
		static_branch_enable(&have_vec_crypto);
}
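
/* Report which optional CRC implementations are usable on this CPU. */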
static inline u32 crc32_optimizations_arch(void)
{
	if (static_key_enabled(&have_vec_crypto))
		return CRC32C_OPTIMIZATION;
	return 0;
}