^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1) /* SPDX-License-Identifier: GPL-2.0 */
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2) #ifndef _ASM_X86_FUTEX_H
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 3) #define _ASM_X86_FUTEX_H
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 4)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 5) #ifdef __KERNEL__
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 6)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 7) #include <linux/futex.h>
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 8) #include <linux/uaccess.h>
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 9)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 10) #include <asm/asm.h>
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 11) #include <asm/errno.h>
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 12) #include <asm/processor.h>
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 13) #include <asm/smap.h>
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 14)
/*
 * unsafe_atomic_op1 - run a single atomic instruction on a user-space word.
 *
 * @insn:  one asm instruction template; it must reference %0 (oldval, seeded
 *         with @oparg via the "0" matching constraint) and %2 (*@uaddr).
 *         Callers in this file pass "xchgl %0, %2" and LOCK_PREFIX "xaddl".
 * @oval:  int pointer; receives the previous value on success.
 * @uaddr: user-space address being operated on ("+m" operand).
 * @oparg: operand value fed into the instruction through %0.
 * @label: goto target taken when the access faults.
 *
 * On a fault, the .fixup stub at 3: stores -EFAULT (%3, an "i" immediate)
 * into ret (%1, seeded to 0 via the "1" matching constraint) and resumes at
 * 2:, after which the macro jumps to @label.  Must run between
 * user_access_begin()/user_access_end() (SMAP open).
 */
#define unsafe_atomic_op1(insn, oval, uaddr, oparg, label)	\
do {								\
	int oldval = 0, ret;					\
	asm volatile("1:\t" insn "\n"				\
		     "2:\n"					\
		     "\t.section .fixup,\"ax\"\n"		\
		     "3:\tmov\t%3, %1\n"			\
		     "\tjmp\t2b\n"				\
		     "\t.previous\n"				\
		     _ASM_EXTABLE_UA(1b, 3b)			\
		     : "=r" (oldval), "=r" (ret), "+m" (*uaddr)	\
		     : "i" (-EFAULT), "0" (oparg), "1" (0));	\
	if (ret)						\
		goto label;					\
	*oval = oldval;						\
} while(0)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 31)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 32)
/*
 * unsafe_atomic_op2 - atomic read-modify-write of a user-space word via a
 * LOCK cmpxchg retry loop, for operations that have no single atomic
 * instruction form (or, and, xor).
 *
 * @insn:  one asm instruction template applied to the scratch copy; it must
 *         reference %4 (@oparg) and %3 (tem), e.g. "orl %4, %3".
 * @oval:  int pointer; receives the value observed before the update.
 * @uaddr: user-space address being operated on ("+m" operand).
 * @oparg: second operand for @insn.
 * @label: goto target taken when either user access faults.
 *
 * Loop: 1: load *@uaddr into oldval (forced into %eax by "=&a", as cmpxchgl
 * requires), 2: copy it to tem, apply @insn to tem, 3: LOCK cmpxchgl tries
 * to publish tem; if another writer changed the word, cmpxchg reloads %eax
 * and we jnz back to 2:.  Faults at 1: or 3: are redirected by the extable
 * to the .fixup stub at 5:, which puts -EFAULT (%5) into ret (%1, seeded 0)
 * and resumes at 4:.  Must run inside user_access_begin()/user_access_end().
 */
#define unsafe_atomic_op2(insn, oval, uaddr, oparg, label)	\
do {								\
	int oldval = 0, ret, tem;				\
	asm volatile("1:\tmovl %2, %0\n"			\
		     "2:\tmovl\t%0, %3\n"			\
		     "\t" insn "\n"				\
		     "3:\t" LOCK_PREFIX "cmpxchgl %3, %2\n"	\
		     "\tjnz\t2b\n"				\
		     "4:\n"					\
		     "\t.section .fixup,\"ax\"\n"		\
		     "5:\tmov\t%5, %1\n"			\
		     "\tjmp\t4b\n"				\
		     "\t.previous\n"				\
		     _ASM_EXTABLE_UA(1b, 5b)			\
		     _ASM_EXTABLE_UA(3b, 5b)			\
		     : "=&a" (oldval), "=&r" (ret),		\
		       "+m" (*uaddr), "=&r" (tem)		\
		     : "r" (oparg), "i" (-EFAULT), "1" (0));	\
	if (ret)						\
		goto label;					\
	*oval = oldval;						\
} while(0)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 55)
/*
 * arch_futex_atomic_op_inuser - atomically apply a FUTEX_OP_* operation to
 * the user-space word at @uaddr and return the previous value via @oval.
 *
 * @op:    one of FUTEX_OP_{SET,ADD,OR,ANDN,XOR}.
 * @oparg: operand for the operation (inverted here for ANDN so that a plain
 *         andl implements "*uaddr &= ~oparg").
 * @oval:  out parameter for the value the word held before the update.
 * @uaddr: user-space address; must be at least u32-sized and accessible.
 *
 * Returns 0 on success, -EFAULT if the user access faults, -ENOSYS for an
 * unknown @op.  SET and ADD use single-instruction forms (xchgl, lock
 * xaddl) via unsafe_atomic_op1(); OR/ANDN/XOR need the cmpxchg loop in
 * unsafe_atomic_op2().  All paths close the user access window opened by
 * user_access_begin() before returning.
 */
static __always_inline int arch_futex_atomic_op_inuser(int op, int oparg, int *oval,
		u32 __user *uaddr)
{
	/* Validate @uaddr and open the SMAP window for the unsafe_* macros. */
	if (!user_access_begin(uaddr, sizeof(u32)))
		return -EFAULT;

	switch (op) {
	case FUTEX_OP_SET:
		/* xchgl is implicitly locked; no LOCK_PREFIX needed. */
		unsafe_atomic_op1("xchgl %0, %2", oval, uaddr, oparg, Efault);
		break;
	case FUTEX_OP_ADD:
		unsafe_atomic_op1(LOCK_PREFIX "xaddl %0, %2", oval,
				  uaddr, oparg, Efault);
		break;
	case FUTEX_OP_OR:
		unsafe_atomic_op2("orl %4, %3", oval, uaddr, oparg, Efault);
		break;
	case FUTEX_OP_ANDN:
		/* andl with ~oparg gives the ANDN (and-not) semantics. */
		unsafe_atomic_op2("andl %4, %3", oval, uaddr, ~oparg, Efault);
		break;
	case FUTEX_OP_XOR:
		unsafe_atomic_op2("xorl %4, %3", oval, uaddr, oparg, Efault);
		break;
	default:
		user_access_end();
		return -ENOSYS;
	}
	user_access_end();
	return 0;
Efault:	/* fault target for the unsafe_* macros above */
	user_access_end();
	return -EFAULT;
}
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 89)
/*
 * futex_atomic_cmpxchg_inatomic - compare-and-exchange on a user-space word.
 *
 * Atomically: if *@uaddr == @oldval, store @newval there.  The value
 * actually observed at @uaddr (whether or not the exchange happened) is
 * written to *@uval — cmpxchgl leaves it in %eax, which the "=a"/"1"
 * constraints tie to the local @oldval.
 *
 * Returns 0 on success or -EFAULT if the user access faults: the .fixup
 * stub at 3: moves -EFAULT (%3) into ret (%0) and resumes at 2:.  Note that
 * *@uval is stored unconditionally, even on the fault path, where @oldval
 * still holds the caller-supplied comparand; callers must check the return
 * value before trusting *@uval.
 */
static inline int futex_atomic_cmpxchg_inatomic(u32 *uval, u32 __user *uaddr,
						u32 oldval, u32 newval)
{
	int ret = 0;

	/* Validate @uaddr and open the SMAP window for the cmpxchg. */
	if (!user_access_begin(uaddr, sizeof(u32)))
		return -EFAULT;
	asm volatile("\n"
		"1:\t" LOCK_PREFIX "cmpxchgl %4, %2\n"
		"2:\n"
		"\t.section .fixup, \"ax\"\n"
		"3:\tmov     %3, %0\n"
		"\tjmp     2b\n"
		"\t.previous\n"
		_ASM_EXTABLE_UA(1b, 3b)
		: "+r" (ret), "=a" (oldval), "+m" (*uaddr)
		: "i" (-EFAULT), "r" (newval), "1" (oldval)
		: "memory"
	);
	user_access_end();
	*uval = oldval;
	return ret;
}
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 113)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 114) #endif
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 115) #endif /* _ASM_X86_FUTEX_H */