linux/lib/crypto/riscv/chacha-riscv64-glue.c
Eric Biggers daed4fcf04 lib/crypto: riscv: Move arch/riscv/lib/crypto/ into lib/crypto/
Move the contents of arch/riscv/lib/crypto/ into lib/crypto/riscv/.

The new code organization makes a lot more sense for how this code
actually works and is developed.  In particular, it makes it possible to
build each algorithm as a single module, with better inlining and dead
code elimination.  For a more detailed explanation, see the patchset
which did this for the CRC library code:
https://lore.kernel.org/r/20250607200454.73587-1-ebiggers@kernel.org/.
Also see the patchset which did this for SHA-512:
https://lore.kernel.org/linux-crypto/20250616014019.415791-1-ebiggers@kernel.org/

This is just a preparatory commit, which does the move to get the files
into their new location but keeps them building the same way as before.
Later commits will make the actual improvements to the way the
arch-optimized code is integrated for each algorithm.

Acked-by: Ard Biesheuvel <ardb@kernel.org>
Acked-by: Palmer Dabbelt <palmer@dabbelt.com>
Reviewed-by: Martin K. Petersen <martin.petersen@oracle.com>
Reviewed-by: Sohil Mehta <sohil.mehta@intel.com>
Link: https://lore.kernel.org/r/20250619191908.134235-6-ebiggers@kernel.org
Signed-off-by: Eric Biggers <ebiggers@kernel.org>
2025-06-30 09:26:20 -07:00

75 lines
2 KiB
C

// SPDX-License-Identifier: GPL-2.0-only
/*
* ChaCha stream cipher (RISC-V optimized)
*
* Copyright (C) 2023 SiFive, Inc.
* Author: Jerry Shih <jerry.shih@sifive.com>
*/
#include <asm/simd.h>
#include <asm/vector.h>
#include <crypto/chacha.h>
#include <crypto/internal/simd.h>
#include <linux/linkage.h>
#include <linux/module.h>
/*
 * Key that selects the vector-accelerated code path.  It starts out false
 * and is flipped on exactly once, at init time, when the CPU advertises the
 * Zvkb extension with a sufficient VLEN (see riscv64_chacha_mod_init()).
 * __ro_after_init: the key is never toggled again after boot.
 */
static __ro_after_init DEFINE_STATIC_KEY_FALSE(use_zvkb);

/*
 * Zvkb-accelerated ChaCha assembly routine.  Operates only on whole
 * CHACHA_BLOCK_SIZE blocks; callers must bracket it with
 * kernel_vector_begin()/kernel_vector_end() (see chacha_crypt_arch()).
 */
asmlinkage void chacha_zvkb(struct chacha_state *state, const u8 *in, u8 *out,
			    size_t nblocks, int nrounds);
/*
 * hchacha_block_arch - HChaCha block function, arch entry point
 *
 * No RISC-V-optimized HChaCha implementation is provided here; this simply
 * delegates to the generic C implementation.  The symbol exists so that the
 * common ChaCha library code can link against a per-arch entry point.
 */
void hchacha_block_arch(const struct chacha_state *state,
			u32 out[HCHACHA_OUT_WORDS], int nrounds)
{
	hchacha_block_generic(state, out, nrounds);
}
EXPORT_SYMBOL(hchacha_block_arch);
/*
 * chacha_crypt_arch - en/decrypt @bytes bytes from @src into @dst
 * @state:   ChaCha state (counter is advanced by the callee)
 * @dst:     output buffer
 * @src:     input buffer
 * @bytes:   number of bytes to process
 * @nrounds: number of ChaCha rounds (e.g. 20 for ChaCha20)
 *
 * Uses the Zvkb-accelerated assembly when the static key is enabled and the
 * vector unit is usable in the current context; otherwise falls back to the
 * generic C implementation.  chacha_zvkb() handles whole blocks only, so a
 * partial trailing block is bounced through an on-stack full-size buffer.
 */
void chacha_crypt_arch(struct chacha_state *state, u8 *dst, const u8 *src,
		       unsigned int bytes, int nrounds)
{
	unsigned int nblocks = bytes / CHACHA_BLOCK_SIZE;
	unsigned int tail = bytes % CHACHA_BLOCK_SIZE;
	u8 bounce[CHACHA_BLOCK_SIZE];

	if (!static_branch_likely(&use_zvkb) || !crypto_simd_usable())
		return chacha_crypt_generic(state, dst, src, bytes, nrounds);

	kernel_vector_begin();
	if (nblocks) {
		chacha_zvkb(state, src, dst, nblocks, nrounds);
		src += nblocks * CHACHA_BLOCK_SIZE;
		dst += nblocks * CHACHA_BLOCK_SIZE;
	}
	if (tail) {
		/* Widen the partial block to a full one for the asm routine. */
		memcpy(bounce, src, tail);
		chacha_zvkb(state, bounce, bounce, 1, nrounds);
		memcpy(dst, bounce, tail);
	}
	kernel_vector_end();
}
EXPORT_SYMBOL(chacha_crypt_arch);
/*
 * chacha_is_arch_optimized - report whether the accelerated path is active
 *
 * Returns true iff the use_zvkb static key was enabled at init time, i.e.
 * chacha_crypt_arch() will (context permitting) use the vector code.
 */
bool chacha_is_arch_optimized(void)
{
	return static_key_enabled(&use_zvkb);
}
EXPORT_SYMBOL(chacha_is_arch_optimized);
/*
 * riscv64_chacha_mod_init - enable the Zvkb code path when supported
 *
 * The assembly requires the Zvkb vector extension and a vector length of at
 * least 128 bits; flip the static key on only when both hold.  Always
 * returns 0 so the module loads regardless of hardware support (the generic
 * fallback remains available).
 */
static int __init riscv64_chacha_mod_init(void)
{
	bool hw_ok = riscv_isa_extension_available(NULL, ZVKB) &&
		     riscv_vector_vlen() >= 128;

	if (hw_ok)
		static_branch_enable(&use_zvkb);
	return 0;
}
subsys_initcall(riscv64_chacha_mod_init);
/*
 * riscv64_chacha_mod_exit - module teardown
 *
 * Intentionally empty: nothing was allocated or registered at init time
 * (only a static key was enabled).  The stub exists so the module is
 * unloadable via module_exit().
 */
static void __exit riscv64_chacha_mod_exit(void)
{
}
module_exit(riscv64_chacha_mod_exit);
/* Standard module metadata consumed by modinfo and the module loader. */
MODULE_DESCRIPTION("ChaCha stream cipher (RISC-V optimized)");
MODULE_AUTHOR("Jerry Shih <jerry.shih@sifive.com>");
MODULE_LICENSE("GPL");