2024-12-01 17:08:31 -08:00
|
|
|
// SPDX-License-Identifier: GPL-2.0
|
|
|
|
/*
|
|
|
|
* CRC32 and CRC32C using LoongArch crc* instructions
|
|
|
|
*
|
|
|
|
* Module based on mips/crypto/crc32-mips.c
|
|
|
|
*
|
|
|
|
* Copyright (C) 2014 Linaro Ltd <yazen.ghannam@linaro.org>
|
|
|
|
* Copyright (C) 2018 MIPS Tech, LLC
|
|
|
|
* Copyright (C) 2020-2023 Loongson Technology Corporation Limited
|
|
|
|
*/
|
|
|
|
|
|
|
|
#include <asm/cpu-features.h>
|
|
|
|
#include <linux/unaligned.h>
|
|
|
|
|
|
|
|
/*
 * Emit one hardware CRC folding step.
 *
 * @crc:   accumulator (u32, read and written via the "+r" constraint)
 * @value: data to fold in; @size selects its width
 * @size:  operand-width suffix: b (8-bit), h (16-bit), w (32-bit), d (64-bit)
 * @type:  instruction family: "crc" (CRC32) or "crcc" (CRC32C)
 *
 * The mnemonic is built by stringization, e.g. type=crc, size=d yields
 * "crc.w.d.w %0, %1, %0".  Wrapped in do/while (0) so it behaves as a
 * single statement.
 */
#define _CRC32(crc, value, size, type) \
do { \
__asm__ __volatile__( \
#type ".w." #size ".w" " %0, %1, %0\n\t"\
: "+r" (crc) \
: "r" (value) \
: "memory"); \
} while (0)

/* Fold @value into @crc with the CRC32 (crc.*) instruction of width @size. */
#define CRC32(crc, value, size) _CRC32(crc, value, size, crc)
/* Fold @value into @crc with the CRC32C (crcc.*) instruction of width @size. */
#define CRC32C(crc, value, size) _CRC32(crc, value, size, crcc)
|
|
|
|
|
2025-04-13 08:43:50 -07:00
|
|
|
/* Enabled by crc32_mod_init_arch() once cpu_has_crc32 is confirmed. */
static __ro_after_init DEFINE_STATIC_KEY_FALSE(have_crc32);
|
2024-12-01 17:08:31 -08:00
|
|
|
|
2025-06-07 13:04:47 -07:00
|
|
|
/*
 * Little-endian CRC32 using the LoongArch crc.w.*.w instructions.
 *
 * Falls back to the generic table-driven crc32_le_base() when the CPU
 * lacks the instructions.  Otherwise the buffer is folded eight bytes
 * at a time; the remaining 0-7 tail bytes are consumed by at most one
 * 4-, one 2-, and one 1-byte step, selected by the bits of @len.
 */
static inline u32 crc32_le_arch(u32 crc, const u8 *p, size_t len)
{
	if (!static_branch_likely(&have_crc32))
		return crc32_le_base(crc, p, len);

	for (; len >= sizeof(u64); p += sizeof(u64), len -= sizeof(u64)) {
		u64 chunk = get_unaligned_le64(p);

		CRC32(crc, chunk, d);
	}

	/* len is now 0-7; each set bit maps to exactly one tail step. */
	if (len & sizeof(u32)) {
		u32 chunk = get_unaligned_le32(p);

		CRC32(crc, chunk, w);
		p += sizeof(u32);
	}

	if (len & sizeof(u16)) {
		u16 chunk = get_unaligned_le16(p);

		CRC32(crc, chunk, h);
		p += sizeof(u16);
	}

	if (len & sizeof(u8)) {
		u8 chunk = *p++;

		CRC32(crc, chunk, b);
	}

	return crc;
}
|
|
|
|
|
2025-06-07 13:04:47 -07:00
|
|
|
/*
 * CRC32C (Castagnoli) using the LoongArch crcc.w.*.w instructions.
 *
 * Mirrors crc32_le_arch(): generic crc32c_base() fallback without
 * hardware support, otherwise an 8-byte folding loop followed by
 * size-4/2/1 tail steps chosen by the low bits of @len.
 */
static inline u32 crc32c_arch(u32 crc, const u8 *p, size_t len)
{
	if (!static_branch_likely(&have_crc32))
		return crc32c_base(crc, p, len);

	for (; len >= sizeof(u64); p += sizeof(u64), len -= sizeof(u64)) {
		u64 chunk = get_unaligned_le64(p);

		CRC32C(crc, chunk, d);
	}

	/* len is now 0-7; each set bit maps to exactly one tail step. */
	if (len & sizeof(u32)) {
		u32 chunk = get_unaligned_le32(p);

		CRC32C(crc, chunk, w);
		p += sizeof(u32);
	}

	if (len & sizeof(u16)) {
		u16 chunk = get_unaligned_le16(p);

		CRC32C(crc, chunk, h);
		p += sizeof(u16);
	}

	if (len & sizeof(u8)) {
		u8 chunk = *p++;

		CRC32C(crc, chunk, b);
	}

	return crc;
}
|
|
|
|
|
2025-06-07 13:04:47 -07:00
|
|
|
#define crc32_be_arch crc32_be_base /* not implemented on this arch */
|
2024-12-01 17:08:31 -08:00
|
|
|
|
2025-06-07 13:04:47 -07:00
|
|
|
#define crc32_mod_init_arch crc32_mod_init_arch
|
|
|
|
static inline void crc32_mod_init_arch(void)
|
2024-12-01 17:08:31 -08:00
|
|
|
{
|
|
|
|
if (cpu_has_crc32)
|
|
|
|
static_branch_enable(&have_crc32);
|
|
|
|
}
|
|
|
|
|
2025-06-07 13:04:47 -07:00
|
|
|
static inline u32 crc32_optimizations_arch(void)
|
2024-12-01 17:08:31 -08:00
|
|
|
{
|
|
|
|
if (static_key_enabled(&have_crc32))
|
|
|
|
return CRC32_LE_OPTIMIZATION | CRC32C_OPTIMIZATION;
|
|
|
|
return 0;
|
|
|
|
}
|