/* SPDX-License-Identifier: GPL-2.0 */
/*
 * Copyright (c) 2006 Ralf Baechle (ralf@linux-mips.org)
 * Copyright (c) 2018 Jim Wilson (jimw@sifive.com)
 */

#ifndef _ASM_RISCV_FUTEX_H
#define _ASM_RISCV_FUTEX_H

#include <linux/futex.h>
#include <linux/uaccess.h>
#include <linux/errno.h>
#include <asm/asm.h>

/* We don't even really need the extable code, but for now keep it simple */
#ifndef CONFIG_MMU
#define __enable_user_access()		do { } while (0)
#define __disable_user_access()	do { } while (0)
#endif

/*
 * __futex_atomic_op() issues a single AMO instruction on the user futex
 * word with user memory access enabled.  A faulting access is redirected
 * by the __ex_table entry to the .fixup code at label 3, which sets 'ret'
 * to -EFAULT and jumps back to the instruction following the AMO.  The
 * "Jr" constraint together with the %z operand modifier lets a constant
 * zero argument use the x0 (zero) register directly.
 */
#define __futex_atomic_op(insn, ret, oldval, uaddr, oparg)	\
{								\
	uintptr_t tmp;						\
	__enable_user_access();					\
	__asm__ __volatile__ (					\
	"1:	" insn "				\n"	\
	"2:						\n"	\
	"	.section .fixup,\"ax\"			\n"	\
	"	.balign 4				\n"	\
	"3:	li %[r],%[e]				\n"	\
	"	jump 2b,%[t]				\n"	\
	"	.previous				\n"	\
	"	.section __ex_table,\"a\"		\n"	\
	"	.balign " RISCV_SZPTR "			\n"	\
	"	" RISCV_PTR " 1b, 3b			\n"	\
	"	.previous				\n"	\
	: [r] "+r" (ret), [ov] "=&r" (oldval),			\
	  [u] "+m" (*uaddr), [t] "=&r" (tmp)			\
	: [op] "Jr" (oparg), [e] "i" (-EFAULT)			\
	: "memory");						\
	__disable_user_access();				\
}

/*
 * Atomically perform the futex operation 'op' with argument 'oparg' on the
 * user word at 'uaddr', returning the previous value through 'oval'.
 * Unknown operations return -ENOSYS; a faulting user access returns -EFAULT.
 */
static inline int
arch_futex_atomic_op_inuser(int op, int oparg, int *oval, u32 __user *uaddr)
{
	int oldval = 0, ret = 0;

	if (!access_ok(uaddr, sizeof(u32)))
		return -EFAULT;

	switch (op) {
	case FUTEX_OP_SET:
		__futex_atomic_op("amoswap.w.aqrl %[ov],%z[op],%[u]",
				  ret, oldval, uaddr, oparg);
		break;
	case FUTEX_OP_ADD:
		__futex_atomic_op("amoadd.w.aqrl %[ov],%z[op],%[u]",
				  ret, oldval, uaddr, oparg);
		break;
	case FUTEX_OP_OR:
		__futex_atomic_op("amoor.w.aqrl %[ov],%z[op],%[u]",
				  ret, oldval, uaddr, oparg);
		break;
	case FUTEX_OP_ANDN:
		/* ANDN is implemented as an AND with the complemented argument. */
		__futex_atomic_op("amoand.w.aqrl %[ov],%z[op],%[u]",
				  ret, oldval, uaddr, ~oparg);
		break;
	case FUTEX_OP_XOR:
		__futex_atomic_op("amoxor.w.aqrl %[ov],%z[op],%[u]",
				  ret, oldval, uaddr, oparg);
		break;
	default:
		ret = -ENOSYS;
	}

	if (!ret)
		*oval = oldval;

	return ret;
}
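
/*
 * Illustrative sketch only: the example_futex_fetch_or() helper below is
 * hypothetical and not part of this header or of the futex API.  It simply
 * shows how a caller could use arch_futex_atomic_op_inuser() to atomically
 * OR a mask into a user futex word and retrieve the previous value.
 */
static inline int example_futex_fetch_or(u32 __user *uaddr, int mask, int *old)
{
	/* Returns 0 on success, -EFAULT if the user access faults. */
	return arch_futex_atomic_op_inuser(FUTEX_OP_OR, mask, old, uaddr);
}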

/*
 * Compare-and-exchange on the user futex word at 'uaddr': if the word still
 * equals 'oldval' it is replaced with 'newval'.  The value actually read is
 * returned through 'uval'.  The LR/SC loop retries until the store
 * conditional succeeds or the comparison fails; a faulting user access
 * returns -EFAULT via the fixup at label 4.
 */
static inline int
futex_atomic_cmpxchg_inatomic(u32 *uval, u32 __user *uaddr,
			      u32 oldval, u32 newval)
{
	int ret = 0;
	u32 val;
	uintptr_t tmp;

	if (!access_ok(uaddr, sizeof(u32)))
		return -EFAULT;

	__enable_user_access();
	__asm__ __volatile__ (
	"1:	lr.w.aqrl %[v],%[u]			\n"
	"	bne %[v],%z[ov],3f			\n"
	"2:	sc.w.aqrl %[t],%z[nv],%[u]		\n"
	"	bnez %[t],1b				\n"
	"3:						\n"
	"	.section .fixup,\"ax\"			\n"
	"	.balign 4				\n"
	"4:	li %[r],%[e]				\n"
	"	jump 3b,%[t]				\n"
	"	.previous				\n"
	"	.section __ex_table,\"a\"		\n"
	"	.balign " RISCV_SZPTR "			\n"
	"	" RISCV_PTR " 1b, 4b			\n"
	"	" RISCV_PTR " 2b, 4b			\n"
	"	.previous				\n"
	: [r] "+r" (ret), [v] "=&r" (val), [u] "+m" (*uaddr), [t] "=&r" (tmp)
	: [ov] "Jr" (oldval), [nv] "Jr" (newval), [e] "i" (-EFAULT)
	: "memory");
	__disable_user_access();

	*uval = val;
	return ret;
}
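
/*
 * Illustrative sketch only: example_futex_try_cmpxchg() is a hypothetical
 * helper, not part of this header.  It shows the intended use of
 * futex_atomic_cmpxchg_inatomic(): try to move the user futex word from
 * 'expected' to 'desired'; on return *seen holds the value that was read,
 * and the exchange took place iff the return value is 0 and *seen == expected.
 */
static inline int example_futex_try_cmpxchg(u32 __user *uaddr, u32 expected,
					    u32 desired, u32 *seen)
{
	return futex_atomic_cmpxchg_inatomic(seen, uaddr, expected, desired);
}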

#endif /* _ASM_RISCV_FUTEX_H */