mirror of git://git.kernel.org/pub/scm/linux/kernel/git/torvalds/linux.git (synced 2025-08-05 16:54:27 +00:00)

Inspired by commit 2e77a62cb3a6 ("arm64: extable: add a dedicated uaccess handler"), do the same for LoongArch: add a dedicated uaccess exception handler that updates registers in exception context and then returns to the function which faulted, so we remove the need for fixups specialized to each faulting instruction.

Add gpr-num.h here because we need to map the same GPR names to integer constants, so that we can use this to build meta-data for the exception fixups. The compiler treats GPR 0 as "zero" rather than "$r0", so set it separately to .L__gpr_num_zero; otherwise the following assembler errors occur:

{standard input}: Assembler messages:
{standard input}:1074: Error: invalid operands (*UND* and *ABS* sections) for `<<'
{standard input}:1160: Error: invalid operands (*UND* and *ABS* sections) for `<<'
make[1]: *** [scripts/Makefile.build:249: fs/fcntl.o] Error 1

Signed-off-by: Youling Tang <tangyouling@loongson.cn>
Signed-off-by: Huacai Chen <chenhuacai@loongson.cn>
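For context, a minimal sketch of the kind of mapping gpr-num.h provides, modeled on the arm64 file the commit cites; treat the exact register list and symbol names as assumptions rather than the authoritative LoongArch contents. The .L__gpr_num_zero special case is the one described above:

/* SPDX-License-Identifier: GPL-2.0 */
#ifndef __ASM_GPR_NUM_H
#define __ASM_GPR_NUM_H

#ifdef __ASSEMBLY__

	/* The compiler names GPR 0 "zero", not "$r0", so define it by hand. */
	.equ	.L__gpr_num_zero, 0
	.irp	num,1,2,3,4,5,6,7,8,9,10,11,12,13,14,15,16,17,18,19,20,21,22,23,24,25,26,27,28,29,30,31
	.equ	.L__gpr_num_$r\num, \num
	.endr

#else /* __ASSEMBLY__ */

/* Same mapping, emitted into inline-asm strings from C code. */
#define __DEFINE_ASM_GPR_NUMS						\
"	.equ	.L__gpr_num_zero, 0\n"					\
"	.irp	num,1,2,3,4,5,6,7,8,9,10,11,12,13,14,15,16,17,18,19,20,21,22,23,24,25,26,27,28,29,30,31\n" \
"	.equ	.L__gpr_num_$r\\num, \\num\n"				\
"	.endr\n"

#endif /* __ASSEMBLY__ */

#endif /* __ASM_GPR_NUM_H */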
94 lines
2.1 KiB
C
/* SPDX-License-Identifier: GPL-2.0 */
/*
 * Copyright (C) 2020-2022 Loongson Technology Corporation Limited
 */
#ifndef _ASM_FUTEX_H
#define _ASM_FUTEX_H

#include <linux/futex.h>
#include <linux/uaccess.h>
#include <asm/asm-extable.h>
#include <asm/barrier.h>
#include <asm/errno.h>

#define __futex_atomic_op(insn, ret, oldval, uaddr, oparg)		\
{									\
	__asm__ __volatile__(						\
	"1:	ll.w	%1, %4 # __futex_atomic_op\n"			\
	"	" insn	"				\n"		\
	"2:	sc.w	$t0, %2				\n"		\
	"	beqz	$t0, 1b				\n"		\
	"3:						\n"		\
	_ASM_EXTABLE_UACCESS_ERR(1b, 3b, %0)				\
	_ASM_EXTABLE_UACCESS_ERR(2b, 3b, %0)				\
	: "=r" (ret), "=&r" (oldval),					\
	  "=ZC" (*uaddr)						\
	: "0" (0), "ZC" (*uaddr), "Jr" (oparg)				\
	: "memory", "t0");						\
}

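/*
 * Editorial sketch, not part of the original header: the two
 * _ASM_EXTABLE_UACCESS_ERR() lines above emit the meta-data that
 * replaces per-instruction fixup stubs. Each entry records the
 * faulting instruction, its fixup label, and the GPR (%0, i.e. ret)
 * that the exception handler writes -EFAULT into. Following the
 * arm64-style layout cited in the commit message, the expansion
 * could look roughly like this (the real LoongArch definitions may
 * differ in detail):
 *
 *	#define __ASM_EXTABLE_RAW(insn, fixup, type, data)	\
 *		".pushsection	__ex_table, \"a\"\n"		\
 *		".balign	4\n"				\
 *		".long		((" insn ") - .)\n"		\
 *		".long		((" fixup ") - .)\n"		\
 *		".short		(" type ")\n"			\
 *		".short		(" data ")\n"			\
 *		".popsection\n"
 *
 * The data field packs the error-register number via the
 * .L__gpr_num_* symbols shifted into place -- the "<<" that the
 * assembler errors quoted in the commit message complain about when
 * a register symbol is undefined.
 */
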
static inline int
arch_futex_atomic_op_inuser(int op, int oparg, int *oval, u32 __user *uaddr)
{
	int oldval = 0, ret = 0;

	pagefault_disable();

	switch (op) {
	case FUTEX_OP_SET:
		__futex_atomic_op("move $t0, %z5", ret, oldval, uaddr, oparg);
		break;
	case FUTEX_OP_ADD:
		__futex_atomic_op("add.w $t0, %1, %z5", ret, oldval, uaddr, oparg);
		break;
	case FUTEX_OP_OR:
		__futex_atomic_op("or $t0, %1, %z5", ret, oldval, uaddr, oparg);
		break;
	case FUTEX_OP_ANDN:
		__futex_atomic_op("and $t0, %1, %z5", ret, oldval, uaddr, ~oparg);
		break;
	case FUTEX_OP_XOR:
		__futex_atomic_op("xor $t0, %1, %z5", ret, oldval, uaddr, oparg);
		break;
	default:
		ret = -ENOSYS;
	}

	pagefault_enable();

	if (!ret)
		*oval = oldval;

	return ret;
}

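/*
 * Editorial usage sketch, not part of the original header;
 * futex_set_bit_demo is a made-up name for illustration only. The
 * generic futex code calls the helper above with a decoded opcode and
 * operand, and on success reads the word's previous value back
 * through oval.
 */
static inline int futex_set_bit_demo(u32 __user *uaddr, int *old)
{
	/* Atomically OR bit 0 into the user word; *old gets the prior value. */
	return arch_futex_atomic_op_inuser(FUTEX_OP_OR, 0x1, old, uaddr);
}
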
static inline int
futex_atomic_cmpxchg_inatomic(u32 *uval, u32 __user *uaddr, u32 oldval, u32 newval)
{
	int ret = 0;
	u32 val = 0;

	if (!access_ok(uaddr, sizeof(u32)))
		return -EFAULT;

	__asm__ __volatile__(
	"# futex_atomic_cmpxchg_inatomic		\n"
	"1:	ll.w	%1, %3				\n"
	"	bne	%1, %z4, 3f			\n"
	"	move	$t0, %z5			\n"
	"2:	sc.w	$t0, %2				\n"
	"	beqz	$t0, 1b				\n"
	"3:						\n"
	__WEAK_LLSC_MB
	_ASM_EXTABLE_UACCESS_ERR(1b, 3b, %0)
	_ASM_EXTABLE_UACCESS_ERR(2b, 3b, %0)
	: "+r" (ret), "=&r" (val), "=ZC" (*uaddr)
	: "ZC" (*uaddr), "Jr" (oldval), "Jr" (newval)
	: "memory", "t0");

	*uval = val;

	return ret;
}

#endif /* _ASM_FUTEX_H */
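For completeness, a hedged usage sketch of the cmpxchg path (futex_trylock_demo and the 0/1 lock values are illustrative assumptions, not kernel API): a zero return with *old equal to the expected value means the swap succeeded, a zero return with a different *old means another task raced in, and -EFAULT means the user access faulted.

/* Illustrative only: try to move a user futex word from 0 (unlocked) to 1. */
static inline int futex_trylock_demo(u32 __user *uaddr, u32 *old)
{
	return futex_atomic_cmpxchg_inatomic(old, uaddr, 0, 1);
}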