mirror of
git://git.kernel.org/pub/scm/linux/kernel/git/torvalds/linux.git
synced 2025-04-13 09:59:31 +00:00
sparc/crc32: expose CRC32 functions through lib
Move the sparc CRC32C assembly code into the lib directory and wire it up to the library interface. This allows it to be used without going through the crypto API. It remains usable via the crypto API too via the shash algorithms that use the library interface. Thus all the arch-specific "shash" code becomes unnecessary and is removed. Note: to see the diff from arch/sparc/crypto/crc32c_glue.c to arch/sparc/lib/crc32_glue.c, view this commit with 'git show -M10'. Reviewed-by: Ard Biesheuvel <ardb@kernel.org> Link: https://lore.kernel.org/r/20241202010844.144356-11-ebiggers@kernel.org Signed-off-by: Eric Biggers <ebiggers@google.com>
This commit is contained in:
parent
008071917d
commit
0f60a8ace5
7 changed files with 97 additions and 199 deletions
|
@ -110,6 +110,7 @@ config SPARC64
|
|||
select HAVE_SETUP_PER_CPU_AREA
|
||||
select NEED_PER_CPU_EMBED_FIRST_CHUNK
|
||||
select NEED_PER_CPU_PAGE_FIRST_CHUNK
|
||||
select ARCH_HAS_CRC32
|
||||
|
||||
config ARCH_PROC_KCORE_TEXT
|
||||
def_bool y
|
||||
|
|
|
@ -16,16 +16,6 @@ config CRYPTO_DES_SPARC64
|
|||
|
||||
Architecture: sparc64
|
||||
|
||||
config CRYPTO_CRC32C_SPARC64
|
||||
tristate "CRC32c"
|
||||
depends on SPARC64
|
||||
select CRYPTO_HASH
|
||||
select CRC32
|
||||
help
|
||||
CRC32c CRC algorithm with the iSCSI polynomial (RFC 3385 and RFC 3720)
|
||||
|
||||
Architecture: sparc64
|
||||
|
||||
config CRYPTO_MD5_SPARC64
|
||||
tristate "Digests: MD5"
|
||||
depends on SPARC64
|
||||
|
|
|
@ -12,8 +12,6 @@ obj-$(CONFIG_CRYPTO_AES_SPARC64) += aes-sparc64.o
|
|||
obj-$(CONFIG_CRYPTO_DES_SPARC64) += des-sparc64.o
|
||||
obj-$(CONFIG_CRYPTO_CAMELLIA_SPARC64) += camellia-sparc64.o
|
||||
|
||||
obj-$(CONFIG_CRYPTO_CRC32C_SPARC64) += crc32c-sparc64.o
|
||||
|
||||
sha1-sparc64-y := sha1_asm.o sha1_glue.o
|
||||
sha256-sparc64-y := sha256_asm.o sha256_glue.o
|
||||
sha512-sparc64-y := sha512_asm.o sha512_glue.o
|
||||
|
@ -22,5 +20,3 @@ md5-sparc64-y := md5_asm.o md5_glue.o
|
|||
aes-sparc64-y := aes_asm.o aes_glue.o
|
||||
des-sparc64-y := des_asm.o des_glue.o
|
||||
camellia-sparc64-y := camellia_asm.o camellia_glue.o
|
||||
|
||||
crc32c-sparc64-y := crc32c_asm.o crc32c_glue.o
|
||||
|
|
|
@ -1,184 +0,0 @@
|
|||
// SPDX-License-Identifier: GPL-2.0-only
|
||||
/* Glue code for CRC32C optimized for sparc64 crypto opcodes.
|
||||
*
|
||||
* This is based largely upon arch/x86/crypto/crc32c-intel.c
|
||||
*
|
||||
* Copyright (C) 2008 Intel Corporation
|
||||
* Authors: Austin Zhang <austin_zhang@linux.intel.com>
|
||||
* Kent Liu <kent.liu@intel.com>
|
||||
*/
|
||||
|
||||
#define pr_fmt(fmt) KBUILD_MODNAME ": " fmt
|
||||
|
||||
#include <linux/init.h>
|
||||
#include <linux/module.h>
|
||||
#include <linux/string.h>
|
||||
#include <linux/kernel.h>
|
||||
#include <linux/crc32.h>
|
||||
|
||||
#include <crypto/internal/hash.h>
|
||||
|
||||
#include <asm/pstate.h>
|
||||
#include <asm/elf.h>
|
||||
#include <linux/unaligned.h>
|
||||
|
||||
#include "opcodes.h"
|
||||
|
||||
/*
|
||||
* Setting the seed allows arbitrary accumulators and flexible XOR policy
|
||||
* If your algorithm starts with ~0, then XOR with ~0 before you set
|
||||
* the seed.
|
||||
*/
|
||||
static int crc32c_sparc64_setkey(struct crypto_shash *hash, const u8 *key,
|
||||
unsigned int keylen)
|
||||
{
|
||||
u32 *mctx = crypto_shash_ctx(hash);
|
||||
|
||||
if (keylen != sizeof(u32))
|
||||
return -EINVAL;
|
||||
*mctx = get_unaligned_le32(key);
|
||||
return 0;
|
||||
}
|
||||
|
||||
static int crc32c_sparc64_init(struct shash_desc *desc)
|
||||
{
|
||||
u32 *mctx = crypto_shash_ctx(desc->tfm);
|
||||
u32 *crcp = shash_desc_ctx(desc);
|
||||
|
||||
*crcp = *mctx;
|
||||
|
||||
return 0;
|
||||
}
|
||||
|
||||
extern void crc32c_sparc64(u32 *crcp, const u64 *data, unsigned int len);
|
||||
|
||||
static u32 crc32c_compute(u32 crc, const u8 *data, unsigned int len)
|
||||
{
|
||||
unsigned int n = -(uintptr_t)data & 7;
|
||||
|
||||
if (n) {
|
||||
/* Data isn't 8-byte aligned. Align it. */
|
||||
n = min(n, len);
|
||||
crc = __crc32c_le(crc, data, n);
|
||||
data += n;
|
||||
len -= n;
|
||||
}
|
||||
n = len & ~7U;
|
||||
if (n) {
|
||||
crc32c_sparc64(&crc, (const u64 *)data, n);
|
||||
data += n;
|
||||
len -= n;
|
||||
}
|
||||
if (len)
|
||||
crc = __crc32c_le(crc, data, len);
|
||||
return crc;
|
||||
}
|
||||
|
||||
static int crc32c_sparc64_update(struct shash_desc *desc, const u8 *data,
|
||||
unsigned int len)
|
||||
{
|
||||
u32 *crcp = shash_desc_ctx(desc);
|
||||
|
||||
*crcp = crc32c_compute(*crcp, data, len);
|
||||
return 0;
|
||||
}
|
||||
|
||||
/* Common tail for finup/digest: process data, invert, store little-endian. */
static int __crc32c_sparc64_finup(const u32 *crcp, const u8 *data,
				  unsigned int len, u8 *out)
{
	u32 crc = crc32c_compute(*crcp, data, len);

	put_unaligned_le32(~crc, out);
	return 0;
}
|
||||
|
||||
/* shash .finup: finish from the per-request state. */
static int crc32c_sparc64_finup(struct shash_desc *desc, const u8 *data,
				unsigned int len, u8 *out)
{
	const u32 *state = shash_desc_ctx(desc);

	return __crc32c_sparc64_finup(state, data, len, out);
}
|
||||
|
||||
/* shash .final: emit the inverted state as a little-endian 32-bit value. */
static int crc32c_sparc64_final(struct shash_desc *desc, u8 *out)
{
	const u32 *state = shash_desc_ctx(desc);

	put_unaligned_le32(~*state, out);
	return 0;
}
|
||||
|
||||
/* shash .digest: one-shot hash, starting from the per-tfm seed. */
static int crc32c_sparc64_digest(struct shash_desc *desc, const u8 *data,
				 unsigned int len, u8 *out)
{
	const u32 *seed = crypto_shash_ctx(desc->tfm);

	return __crc32c_sparc64_finup(seed, data, len, out);
}
|
||||
|
||||
static int crc32c_sparc64_cra_init(struct crypto_tfm *tfm)
|
||||
{
|
||||
u32 *key = crypto_tfm_ctx(tfm);
|
||||
|
||||
*key = ~0;
|
||||
|
||||
return 0;
|
||||
}
|
||||
|
||||
#define CHKSUM_BLOCK_SIZE 1
|
||||
#define CHKSUM_DIGEST_SIZE 4
|
||||
|
||||
static struct shash_alg alg = {
|
||||
.setkey = crc32c_sparc64_setkey,
|
||||
.init = crc32c_sparc64_init,
|
||||
.update = crc32c_sparc64_update,
|
||||
.final = crc32c_sparc64_final,
|
||||
.finup = crc32c_sparc64_finup,
|
||||
.digest = crc32c_sparc64_digest,
|
||||
.descsize = sizeof(u32),
|
||||
.digestsize = CHKSUM_DIGEST_SIZE,
|
||||
.base = {
|
||||
.cra_name = "crc32c",
|
||||
.cra_driver_name = "crc32c-sparc64",
|
||||
.cra_priority = SPARC_CR_OPCODE_PRIORITY,
|
||||
.cra_flags = CRYPTO_ALG_OPTIONAL_KEY,
|
||||
.cra_blocksize = CHKSUM_BLOCK_SIZE,
|
||||
.cra_ctxsize = sizeof(u32),
|
||||
.cra_module = THIS_MODULE,
|
||||
.cra_init = crc32c_sparc64_cra_init,
|
||||
}
|
||||
};
|
||||
|
||||
/*
 * Probe for the crc32c opcode.  %asr26 (the crypto feature register) may
 * only be read once the hwcap says the crypto extensions exist at all.
 */
static bool __init sparc64_has_crc32c_opcode(void)
{
	unsigned long cfr;

	if (sparc64_elf_hwcap & HWCAP_SPARC_CRYPTO) {
		__asm__ __volatile__("rd %%asr26, %0" : "=r" (cfr));
		return (cfr & CFR_CRC32C) != 0;
	}

	return false;
}
|
||||
|
||||
/* Register the shash only when the CPU actually has the crc32c opcode. */
static int __init crc32c_sparc64_mod_init(void)
{
	if (!sparc64_has_crc32c_opcode()) {
		pr_info("sparc64 crc32c opcode not available.\n");
		return -ENODEV;
	}

	pr_info("Using sparc64 crc32c opcode optimized CRC32C implementation\n");
	return crypto_register_shash(&alg);
}
|
||||
|
||||
static void __exit crc32c_sparc64_mod_fini(void)
|
||||
{
|
||||
crypto_unregister_shash(&alg);
|
||||
}
|
||||
|
||||
module_init(crc32c_sparc64_mod_init);
|
||||
module_exit(crc32c_sparc64_mod_fini);
|
||||
|
||||
MODULE_LICENSE("GPL");
|
||||
MODULE_DESCRIPTION("CRC32c (Castagnoli), sparc64 crc32c opcode accelerated");
|
||||
|
||||
MODULE_ALIAS_CRYPTO("crc32c");
|
||||
|
||||
#include "crop_devid.c"
|
|
@ -53,3 +53,5 @@ lib-$(CONFIG_SPARC64) += mcount.o ipcsum.o xor.o hweight.o ffs.o
|
|||
obj-$(CONFIG_SPARC64) += iomap.o
|
||||
obj-$(CONFIG_SPARC32) += atomic32.o
|
||||
obj-$(CONFIG_SPARC64) += PeeCeeI.o
|
||||
obj-$(CONFIG_CRC32_ARCH) += crc32-sparc.o
|
||||
crc32-sparc-y := crc32_glue.o crc32c_asm.o
|
||||
|
|
93
arch/sparc/lib/crc32_glue.c
Normal file
93
arch/sparc/lib/crc32_glue.c
Normal file
|
@ -0,0 +1,93 @@
|
|||
// SPDX-License-Identifier: GPL-2.0-only
|
||||
/* Glue code for CRC32C optimized for sparc64 crypto opcodes.
|
||||
*
|
||||
* This is based largely upon arch/x86/crypto/crc32c-intel.c
|
||||
*
|
||||
* Copyright (C) 2008 Intel Corporation
|
||||
* Authors: Austin Zhang <austin_zhang@linux.intel.com>
|
||||
* Kent Liu <kent.liu@intel.com>
|
||||
*/
|
||||
|
||||
#define pr_fmt(fmt) KBUILD_MODNAME ": " fmt
|
||||
|
||||
#include <linux/init.h>
|
||||
#include <linux/module.h>
|
||||
#include <linux/kernel.h>
|
||||
#include <linux/crc32.h>
|
||||
#include <asm/pstate.h>
|
||||
#include <asm/elf.h>
|
||||
|
||||
static DEFINE_STATIC_KEY_FALSE(have_crc32c_opcode);
|
||||
|
||||
/* No sparc acceleration for plain little-endian CRC32; use the generic code. */
u32 crc32_le_arch(u32 crc, const u8 *data, size_t len)
{
	return crc32_le_base(crc, data, len);
}
EXPORT_SYMBOL(crc32_le_arch);
|
||||
|
||||
void crc32c_sparc64(u32 *crcp, const u64 *data, size_t len);
|
||||
|
||||
u32 crc32c_le_arch(u32 crc, const u8 *data, size_t len)
|
||||
{
|
||||
size_t n = -(uintptr_t)data & 7;
|
||||
|
||||
if (!static_branch_likely(&have_crc32c_opcode))
|
||||
return crc32c_le_base(crc, data, len);
|
||||
|
||||
if (n) {
|
||||
/* Data isn't 8-byte aligned. Align it. */
|
||||
n = min(n, len);
|
||||
crc = crc32c_le_base(crc, data, n);
|
||||
data += n;
|
||||
len -= n;
|
||||
}
|
||||
n = len & ~7U;
|
||||
if (n) {
|
||||
crc32c_sparc64(&crc, (const u64 *)data, n);
|
||||
data += n;
|
||||
len -= n;
|
||||
}
|
||||
if (len)
|
||||
crc = crc32c_le_base(crc, data, len);
|
||||
return crc;
|
||||
}
|
||||
EXPORT_SYMBOL(crc32c_le_arch);
|
||||
|
||||
/* Big-endian CRC32 has no sparc acceleration; defer to the generic code. */
u32 crc32_be_arch(u32 crc, const u8 *data, size_t len)
{
	return crc32_be_base(crc, data, len);
}
EXPORT_SYMBOL(crc32_be_arch);
|
||||
|
||||
/*
 * Probe for the crc32c opcode and flip the static key if it is present.
 * %asr26 (the crypto feature register) may only be read once the hwcap
 * says the crypto extensions exist at all.
 */
static int __init crc32_sparc_init(void)
{
	unsigned long cfr;

	if (sparc64_elf_hwcap & HWCAP_SPARC_CRYPTO) {
		__asm__ __volatile__("rd %%asr26, %0" : "=r" (cfr));
		if (cfr & CFR_CRC32C) {
			static_branch_enable(&have_crc32c_opcode);
			pr_info("Using sparc64 crc32c opcode optimized CRC32C implementation\n");
		}
	}
	return 0;
}
arch_initcall(crc32_sparc_init);
|
||||
|
||||
static void __exit crc32_sparc_exit(void)
|
||||
{
|
||||
}
|
||||
module_exit(crc32_sparc_exit);
|
||||
|
||||
u32 crc32_optimizations(void)
|
||||
{
|
||||
if (static_key_enabled(&have_crc32c_opcode))
|
||||
return CRC32C_OPTIMIZATION;
|
||||
return 0;
|
||||
}
|
||||
EXPORT_SYMBOL(crc32_optimizations);
|
||||
|
||||
MODULE_LICENSE("GPL");
|
||||
MODULE_DESCRIPTION("CRC32c (Castagnoli), sparc64 crc32c opcode accelerated");
|
|
@ -3,7 +3,7 @@
|
|||
#include <asm/visasm.h>
|
||||
#include <asm/asi.h>
|
||||
|
||||
#include "opcodes.h"
|
||||
#include "../crypto/opcodes.h"
|
||||
|
||||
ENTRY(crc32c_sparc64)
|
||||
/* %o0=crc32p, %o1=data_ptr, %o2=len */
|
Loading…
Add table
Reference in a new issue