/* SPDX-License-Identifier: GPL-2.0 */
#ifndef ASM_X86_CMPXCHG_H
#define ASM_X86_CMPXCHG_H

#include <linux/compiler.h>
#include <asm/cpufeatures.h>
#include <asm/alternative.h> /* Provides LOCK_PREFIX */

/*
 * Non-existent functions to indicate usage errors at link time
 * (or compile-time if the compiler implements __compiletime_error()).
 */
extern void __xchg_wrong_size(void)
	__compiletime_error("Bad argument size for xchg");
extern void __cmpxchg_wrong_size(void)
	__compiletime_error("Bad argument size for cmpxchg");
extern void __xadd_wrong_size(void)
	__compiletime_error("Bad argument size for xadd");
extern void __add_wrong_size(void)
	__compiletime_error("Bad argument size for add");

/*
 * Constants for operation sizes. On 32-bit, the 64-bit size is set to
 * -1 because sizeof will never return -1, thereby making those switch
 * case statements guaranteed dead code which the compiler will
 * eliminate, and allowing the "missing symbol in the default case" to
 * indicate a usage error.
 */
#define __X86_CASE_B	1
#define __X86_CASE_W	2
#define __X86_CASE_L	4
#ifdef CONFIG_64BIT
#define __X86_CASE_Q	8
#else
#define __X86_CASE_Q	-1	/* sizeof will never return -1 */
#endif
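
/*
 * Purely illustrative sketch (not part of this header) of the dead-code
 * dispatch described above: on 32-bit, __X86_CASE_Q is -1, which sizeof
 * can never equal, so the compiler provably drops that case body; only
 * a genuinely unsupported size reaches the default case and references
 * the non-existent __*_wrong_size() symbol.
 *
 *	switch (sizeof(*ptr)) {
 *	case __X86_CASE_L:
 *		break;			// taken for a 4-byte object
 *	case __X86_CASE_Q:
 *		break;			// dead code when __X86_CASE_Q == -1
 *	default:
 *		__xchg_wrong_size();	// compile/link-time error path
 *	}
 */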

/*
 * An exchange-type operation, which takes a value and a pointer, and
 * returns the old value.
 */
#define __xchg_op(ptr, arg, op, lock)					\
	({								\
		__typeof__ (*(ptr)) __ret = (arg);			\
		switch (sizeof(*(ptr))) {				\
		case __X86_CASE_B:					\
			asm volatile (lock #op "b %b0, %1\n"		\
				      : "+q" (__ret), "+m" (*(ptr))	\
				      : : "memory", "cc");		\
			break;						\
		case __X86_CASE_W:					\
			asm volatile (lock #op "w %w0, %1\n"		\
				      : "+r" (__ret), "+m" (*(ptr))	\
				      : : "memory", "cc");		\
			break;						\
		case __X86_CASE_L:					\
			asm volatile (lock #op "l %0, %1\n"		\
				      : "+r" (__ret), "+m" (*(ptr))	\
				      : : "memory", "cc");		\
			break;						\
		case __X86_CASE_Q:					\
			asm volatile (lock #op "q %q0, %1\n"		\
				      : "+r" (__ret), "+m" (*(ptr))	\
				      : : "memory", "cc");		\
			break;						\
		default:						\
			__ ## op ## _wrong_size();			\
		}							\
		__ret;							\
	})

/*
 * Note: no "lock" prefix even on SMP: xchg always implies lock anyway.
 * Since this is generally used to protect other memory information, we
 * use "asm volatile" and "memory" clobbers to prevent gcc from moving
 * information around.
 */
#define arch_xchg(ptr, v)	__xchg_op((ptr), (v), xchg, "")
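
/*
 * Illustrative use of arch_xchg() (a sketch; "flag" and take_slow_path()
 * are hypothetical names, not part of this header): atomically set the
 * word and act on its previous value.
 *
 *	unsigned long old = arch_xchg(&flag, 1);
 *	if (old)
 *		take_slow_path();	// someone else already set the flag
 */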

/*
 * Atomic compare and exchange. Compare OLD with MEM, if identical,
 * store NEW in MEM. Return the initial value in MEM. Success is
 * indicated by comparing RETURN with OLD.
 */
#define __raw_cmpxchg(ptr, old, new, size, lock)			\
({									\
	__typeof__(*(ptr)) __ret;					\
	__typeof__(*(ptr)) __old = (old);				\
	__typeof__(*(ptr)) __new = (new);				\
	switch (size) {							\
	case __X86_CASE_B:						\
	{								\
		volatile u8 *__ptr = (volatile u8 *)(ptr);		\
		asm volatile(lock "cmpxchgb %2,%1"			\
			     : "=a" (__ret), "+m" (*__ptr)		\
			     : "q" (__new), "0" (__old)			\
			     : "memory");				\
		break;							\
	}								\
	case __X86_CASE_W:						\
	{								\
		volatile u16 *__ptr = (volatile u16 *)(ptr);		\
		asm volatile(lock "cmpxchgw %2,%1"			\
			     : "=a" (__ret), "+m" (*__ptr)		\
			     : "r" (__new), "0" (__old)			\
			     : "memory");				\
		break;							\
	}								\
	case __X86_CASE_L:						\
	{								\
		volatile u32 *__ptr = (volatile u32 *)(ptr);		\
		asm volatile(lock "cmpxchgl %2,%1"			\
			     : "=a" (__ret), "+m" (*__ptr)		\
			     : "r" (__new), "0" (__old)			\
			     : "memory");				\
		break;							\
	}								\
	case __X86_CASE_Q:						\
	{								\
		volatile u64 *__ptr = (volatile u64 *)(ptr);		\
		asm volatile(lock "cmpxchgq %2,%1"			\
			     : "=a" (__ret), "+m" (*__ptr)		\
			     : "r" (__new), "0" (__old)			\
			     : "memory");				\
		break;							\
	}								\
	default:							\
		__cmpxchg_wrong_size();					\
	}								\
	__ret;								\
})

#define __cmpxchg(ptr, old, new, size)					\
	__raw_cmpxchg((ptr), (old), (new), (size), LOCK_PREFIX)

#define __sync_cmpxchg(ptr, old, new, size)				\
	__raw_cmpxchg((ptr), (old), (new), (size), "lock; ")

#define __cmpxchg_local(ptr, old, new, size)				\
	__raw_cmpxchg((ptr), (old), (new), (size), "")

#ifdef CONFIG_X86_32
# include <asm/cmpxchg_32.h>
#else
# include <asm/cmpxchg_64.h>
#endif

#define arch_cmpxchg(ptr, old, new)					\
	__cmpxchg(ptr, old, new, sizeof(*(ptr)))

#define arch_sync_cmpxchg(ptr, old, new)				\
	__sync_cmpxchg(ptr, old, new, sizeof(*(ptr)))

#define arch_cmpxchg_local(ptr, old, new)				\
	__cmpxchg_local(ptr, old, new, sizeof(*(ptr)))
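
/*
 * Illustrative use of arch_cmpxchg() (a sketch; "counter" is a
 * hypothetical variable): the classic read/compute/CAS retry loop.
 * As documented above, success is detected by comparing the returned
 * value with the expected old value.
 *
 *	unsigned long old, new, cur = counter;
 *	do {
 *		old = cur;
 *		new = old + 1;
 *		cur = arch_cmpxchg(&counter, old, new);
 *	} while (cur != old);
 */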

#define __raw_try_cmpxchg(_ptr, _pold, _new, size, lock)		\
({									\
	bool success;							\
	__typeof__(_ptr) _old = (__typeof__(_ptr))(_pold);		\
	__typeof__(*(_ptr)) __old = *_old;				\
	__typeof__(*(_ptr)) __new = (_new);				\
	switch (size) {							\
	case __X86_CASE_B:						\
	{								\
		volatile u8 *__ptr = (volatile u8 *)(_ptr);		\
		asm volatile(lock "cmpxchgb %[new], %[ptr]"		\
			     CC_SET(z)					\
			     : CC_OUT(z) (success),			\
			       [ptr] "+m" (*__ptr),			\
			       [old] "+a" (__old)			\
			     : [new] "q" (__new)			\
			     : "memory");				\
		break;							\
	}								\
	case __X86_CASE_W:						\
	{								\
		volatile u16 *__ptr = (volatile u16 *)(_ptr);		\
		asm volatile(lock "cmpxchgw %[new], %[ptr]"		\
			     CC_SET(z)					\
			     : CC_OUT(z) (success),			\
			       [ptr] "+m" (*__ptr),			\
			       [old] "+a" (__old)			\
			     : [new] "r" (__new)			\
			     : "memory");				\
		break;							\
	}								\
	case __X86_CASE_L:						\
	{								\
		volatile u32 *__ptr = (volatile u32 *)(_ptr);		\
		asm volatile(lock "cmpxchgl %[new], %[ptr]"		\
			     CC_SET(z)					\
			     : CC_OUT(z) (success),			\
			       [ptr] "+m" (*__ptr),			\
			       [old] "+a" (__old)			\
			     : [new] "r" (__new)			\
			     : "memory");				\
		break;							\
	}								\
	case __X86_CASE_Q:						\
	{								\
		volatile u64 *__ptr = (volatile u64 *)(_ptr);		\
		asm volatile(lock "cmpxchgq %[new], %[ptr]"		\
			     CC_SET(z)					\
			     : CC_OUT(z) (success),			\
			       [ptr] "+m" (*__ptr),			\
			       [old] "+a" (__old)			\
			     : [new] "r" (__new)			\
			     : "memory");				\
		break;							\
	}								\
	default:							\
		__cmpxchg_wrong_size();					\
	}								\
	if (unlikely(!success))						\
		*_old = __old;						\
	likely(success);						\
})

#define __try_cmpxchg(ptr, pold, new, size)				\
	__raw_try_cmpxchg((ptr), (pold), (new), (size), LOCK_PREFIX)

#define try_cmpxchg(ptr, pold, new)					\
	__try_cmpxchg((ptr), (pold), (new), sizeof(*(ptr)))
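
/*
 * Illustrative use of try_cmpxchg() (a sketch; "counter" is a
 * hypothetical variable). Unlike cmpxchg(), the expected value is
 * passed by pointer and is rewritten in place on failure, so the retry
 * loop need not re-read *ptr itself:
 *
 *	unsigned long old = counter;
 *	do {
 *		// on failure, "old" already holds the fresh value
 *	} while (!try_cmpxchg(&counter, &old, old + 1));
 */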

/*
 * xadd() adds "inc" to "*ptr" and atomically returns the previous
 * value of "*ptr".
 *
 * xadd() is locked when multiple CPUs are online.
 */
#define __xadd(ptr, inc, lock)	__xchg_op((ptr), (inc), xadd, lock)
#define xadd(ptr, inc)		__xadd((ptr), (inc), LOCK_PREFIX)
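
/*
 * Illustrative use of xadd() (a sketch; "ticket_head" is a hypothetical
 * variable): fetch-and-add returning the pre-increment value, e.g. to
 * draw a ticket number.
 *
 *	unsigned int my_ticket = xadd(&ticket_head, 1);
 */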

#define __cmpxchg_double(pfx, p1, p2, o1, o2, n1, n2)			\
({									\
	bool __ret;							\
	__typeof__(*(p1)) __old1 = (o1), __new1 = (n1);			\
	__typeof__(*(p2)) __old2 = (o2), __new2 = (n2);			\
	BUILD_BUG_ON(sizeof(*(p1)) != sizeof(long));			\
	BUILD_BUG_ON(sizeof(*(p2)) != sizeof(long));			\
	VM_BUG_ON((unsigned long)(p1) % (2 * sizeof(long)));		\
	VM_BUG_ON((unsigned long)((p1) + 1) != (unsigned long)(p2));	\
	asm volatile(pfx "cmpxchg%c5b %1"				\
		     CC_SET(e)						\
		     : CC_OUT(e) (__ret),				\
		       "+m" (*(p1)), "+m" (*(p2)),			\
		       "+a" (__old1), "+d" (__old2)			\
		     : "i" (2 * sizeof(long)),				\
		       "b" (__new1), "c" (__new2));			\
	__ret;								\
})

#define arch_cmpxchg_double(p1, p2, o1, o2, n1, n2)			\
	__cmpxchg_double(LOCK_PREFIX, p1, p2, o1, o2, n1, n2)

#define arch_cmpxchg_double_local(p1, p2, o1, o2, n1, n2)		\
	__cmpxchg_double(, p1, p2, o1, o2, n1, n2)
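
/*
 * Illustrative use of arch_cmpxchg_double() (a sketch; "s", "freelist"
 * and "tid" are hypothetical names for a suitably aligned pair of
 * adjacent longs, with p2 == p1 + 1 as the VM_BUG_ON above requires):
 * both words are compared and replaced as one atomic unit, and the
 * return value reports success.
 *
 *	if (arch_cmpxchg_double(&s->freelist, &s->tid,
 *				old_list, old_tid,
 *				new_list, new_tid)) {
 *		// both words were updated atomically
 *	}
 */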

#endif /* ASM_X86_CMPXCHG_H */