/* SPDX-License-Identifier: GPL-2.0 */
#ifndef _ASM_POWERPC_CMPXCHG_H_
#define _ASM_POWERPC_CMPXCHG_H_

#ifdef __KERNEL__
#include <linux/compiler.h>
#include <asm/synch.h>
#include <linux/bug.h>

#ifdef __BIG_ENDIAN
#define BITOFF_CAL(size, off)	((sizeof(u32) - size - off) * BITS_PER_BYTE)
#else
#define BITOFF_CAL(size, off)	(off * BITS_PER_BYTE)
#endif
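
/*
 * Worked example (illustrative only, not used by the code): for a u8 at
 * byte offset 1 inside its aligned u32, big-endian gives
 * (4 - 1 - 1) * 8 = 16, i.e. the byte occupies bits 16..23 of the word,
 * while little-endian gives 1 * 8 = 8, i.e. bits 8..15.
 */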

#define XCHG_GEN(type, sfx, cl)					\
static inline u32 __xchg_##type##sfx(volatile void *p, u32 val)	\
{								\
	unsigned int prev, prev_mask, tmp, bitoff, off;		\
								\
	off = (unsigned long)p % sizeof(u32);			\
	bitoff = BITOFF_CAL(sizeof(type), off);			\
	p -= off;						\
	val <<= bitoff;						\
	prev_mask = (u32)(type)-1 << bitoff;			\
								\
	__asm__ __volatile__(					\
"1:	lwarx	%0,0,%3\n"					\
"	andc	%1,%0,%5\n"					\
"	or	%1,%1,%4\n"					\
"	stwcx.	%1,0,%3\n"					\
"	bne-	1b\n"						\
	: "=&r" (prev), "=&r" (tmp), "+m" (*(u32*)p)		\
	: "r" (p), "r" (val), "r" (prev_mask)			\
	: "cc", cl);						\
								\
	return prev >> bitoff;					\
}
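
/*
 * Sketch of what one instantiation expands to (hand-written, abridged):
 * XCHG_GEN(u8, _local, "memory") yields
 *
 *	static inline u32 __xchg_u8_local(volatile void *p, u32 val);
 *
 * with prev_mask == 0xff << bitoff. The lwarx/stwcx. loop rewrites only
 * the masked byte within the containing word and returns the old word
 * shifted down, which the xchg_local() cast then truncates to the byte.
 */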

#define CMPXCHG_GEN(type, sfx, br, br2, cl)			\
static inline							\
u32 __cmpxchg_##type##sfx(volatile void *p, u32 old, u32 new)	\
{								\
	unsigned int prev, prev_mask, tmp, bitoff, off;		\
								\
	off = (unsigned long)p % sizeof(u32);			\
	bitoff = BITOFF_CAL(sizeof(type), off);			\
	p -= off;						\
	old <<= bitoff;						\
	new <<= bitoff;						\
	prev_mask = (u32)(type)-1 << bitoff;			\
								\
	__asm__ __volatile__(					\
	br							\
"1:	lwarx	%0,0,%3\n"					\
"	and	%1,%0,%6\n"					\
"	cmpw	0,%1,%4\n"					\
"	bne-	2f\n"						\
"	andc	%1,%0,%6\n"					\
"	or	%1,%1,%5\n"					\
"	stwcx.	%1,0,%3\n"					\
"	bne-	1b\n"						\
	br2							\
	"\n"							\
"2:"								\
	: "=&r" (prev), "=&r" (tmp), "+m" (*(u32*)p)		\
	: "r" (p), "r" (old), "r" (new), "r" (prev_mask)	\
	: "cc", cl);						\
								\
	return prev >> bitoff;					\
}
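
/*
 * CMPXCHG_GEN parameters: 'br' is assembly placed before the ll/sc loop
 * (the entry barrier, if any), 'br2' is placed after a successful store
 * (the exit or acquire barrier, if any), and 'cl' is the extra asm
 * clobber: "memory" for the ordered and _local variants, "cc" for
 * _relaxed.
 */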

/*
 * Atomic exchange
 *
 * Changes the memory location '*p' to be val and returns
 * the previous value stored there.
 */

XCHG_GEN(u8, _local, "memory");
XCHG_GEN(u8, _relaxed, "cc");
XCHG_GEN(u16, _local, "memory");
XCHG_GEN(u16, _relaxed, "cc");

static __always_inline unsigned long
__xchg_u32_local(volatile void *p, unsigned long val)
{
	unsigned long prev;

	__asm__ __volatile__(
"1:	lwarx	%0,0,%2\n"
"	stwcx.	%3,0,%2\n"
"	bne-	1b"
	: "=&r" (prev), "+m" (*(volatile unsigned int *)p)
	: "r" (p), "r" (val)
	: "cc", "memory");

	return prev;
}

static __always_inline unsigned long
__xchg_u32_relaxed(u32 *p, unsigned long val)
{
	unsigned long prev;

	__asm__ __volatile__(
"1:	lwarx	%0,0,%2\n"
"	stwcx.	%3,0,%2\n"
"	bne-	1b"
	: "=&r" (prev), "+m" (*p)
	: "r" (p), "r" (val)
	: "cc");

	return prev;
}

#ifdef CONFIG_PPC64
static __always_inline unsigned long
__xchg_u64_local(volatile void *p, unsigned long val)
{
	unsigned long prev;

	__asm__ __volatile__(
"1:	ldarx	%0,0,%2\n"
"	stdcx.	%3,0,%2\n"
"	bne-	1b"
	: "=&r" (prev), "+m" (*(volatile unsigned long *)p)
	: "r" (p), "r" (val)
	: "cc", "memory");

	return prev;
}

static __always_inline unsigned long
__xchg_u64_relaxed(u64 *p, unsigned long val)
{
	unsigned long prev;

	__asm__ __volatile__(
"1:	ldarx	%0,0,%2\n"
"	stdcx.	%3,0,%2\n"
"	bne-	1b"
	: "=&r" (prev), "+m" (*p)
	: "r" (p), "r" (val)
	: "cc");

	return prev;
}
#endif

static __always_inline unsigned long
__xchg_local(void *ptr, unsigned long x, unsigned int size)
{
	switch (size) {
	case 1:
		return __xchg_u8_local(ptr, x);
	case 2:
		return __xchg_u16_local(ptr, x);
	case 4:
		return __xchg_u32_local(ptr, x);
#ifdef CONFIG_PPC64
	case 8:
		return __xchg_u64_local(ptr, x);
#endif
	}
	BUILD_BUG_ON_MSG(1, "Unsupported size for __xchg_local");
	return x;
}

static __always_inline unsigned long
__xchg_relaxed(void *ptr, unsigned long x, unsigned int size)
{
	switch (size) {
	case 1:
		return __xchg_u8_relaxed(ptr, x);
	case 2:
		return __xchg_u16_relaxed(ptr, x);
	case 4:
		return __xchg_u32_relaxed(ptr, x);
#ifdef CONFIG_PPC64
	case 8:
		return __xchg_u64_relaxed(ptr, x);
#endif
	}
	BUILD_BUG_ON_MSG(1, "Unsupported size for __xchg_relaxed");
	return x;
}
#define xchg_local(ptr,x)					\
({								\
	__typeof__(*(ptr)) _x_ = (x);				\
	(__typeof__(*(ptr))) __xchg_local((ptr),		\
			(unsigned long)_x_, sizeof(*(ptr)));	\
})

#define xchg_relaxed(ptr, x)					\
({								\
	__typeof__(*(ptr)) _x_ = (x);				\
	(__typeof__(*(ptr))) __xchg_relaxed((ptr),		\
			(unsigned long)_x_, sizeof(*(ptr)));	\
})
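
/*
 * Usage sketch (illustrative only; 'flags' is a hypothetical variable):
 *
 *	u32 flags = 0;
 *	u32 prev = xchg_local(&flags, 1);	// prev == 0, flags == 1
 *
 * xchg_relaxed() performs the same exchange with no ordering guarantees.
 */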
/*
 * Compare and exchange - if *p == old, set it to new,
 * and return the old value of *p.
 */

CMPXCHG_GEN(u8, , PPC_ATOMIC_ENTRY_BARRIER, PPC_ATOMIC_EXIT_BARRIER, "memory");
CMPXCHG_GEN(u8, _local, , , "memory");
CMPXCHG_GEN(u8, _acquire, , PPC_ACQUIRE_BARRIER, "memory");
CMPXCHG_GEN(u8, _relaxed, , , "cc");
CMPXCHG_GEN(u16, , PPC_ATOMIC_ENTRY_BARRIER, PPC_ATOMIC_EXIT_BARRIER, "memory");
CMPXCHG_GEN(u16, _local, , , "memory");
CMPXCHG_GEN(u16, _acquire, , PPC_ACQUIRE_BARRIER, "memory");
CMPXCHG_GEN(u16, _relaxed, , , "cc");
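
/*
 * Suffix to ordering map, as instantiated above: no suffix = fully
 * ordered (entry and exit barriers), _acquire = acquire barrier on
 * success only, _local and _relaxed = no barriers ("memory" is still
 * clobbered for _local so the compiler keeps it ordered against
 * surrounding accesses on this CPU).
 */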

static __always_inline unsigned long
__cmpxchg_u32(volatile unsigned int *p, unsigned long old, unsigned long new)
{
	unsigned int prev;

	__asm__ __volatile__ (
	PPC_ATOMIC_ENTRY_BARRIER
"1:	lwarx	%0,0,%2		# __cmpxchg_u32\n"
"	cmpw	0,%0,%3\n"
"	bne-	2f\n"
"	stwcx.	%4,0,%2\n"
"	bne-	1b"
	PPC_ATOMIC_EXIT_BARRIER
	"\n"
"2:"
	: "=&r" (prev), "+m" (*p)
	: "r" (p), "r" (old), "r" (new)
	: "cc", "memory");

	return prev;
}

static __always_inline unsigned long
__cmpxchg_u32_local(volatile unsigned int *p, unsigned long old,
			unsigned long new)
{
	unsigned int prev;

	__asm__ __volatile__ (
"1:	lwarx	%0,0,%2		# __cmpxchg_u32_local\n"
"	cmpw	0,%0,%3\n"
"	bne-	2f\n"
"	stwcx.	%4,0,%2\n"
"	bne-	1b"
	"\n"
"2:"
	: "=&r" (prev), "+m" (*p)
	: "r" (p), "r" (old), "r" (new)
	: "cc", "memory");

	return prev;
}

static __always_inline unsigned long
__cmpxchg_u32_relaxed(u32 *p, unsigned long old, unsigned long new)
{
	unsigned long prev;

	__asm__ __volatile__ (
"1:	lwarx	%0,0,%2		# __cmpxchg_u32_relaxed\n"
"	cmpw	0,%0,%3\n"
"	bne-	2f\n"
"	stwcx.	%4,0,%2\n"
"	bne-	1b\n"
"2:"
	: "=&r" (prev), "+m" (*p)
	: "r" (p), "r" (old), "r" (new)
	: "cc");

	return prev;
}

/*
 * The cmpxchg family provides no ordering guarantee when the compare part
 * fails, so implementing cmpxchg() and cmpxchg_acquire() in assembly lets
 * us skip the superfluous barrier on that path. We don't do the same for
 * cmpxchg_release(), because that would put a barrier in the middle of the
 * ll/sc loop, which is probably a bad idea: for example, it might make the
 * conditional store more likely to fail.
 */
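/*
 * Note the barrier placement in __cmpxchg_u32_acquire() below:
 * PPC_ACQUIRE_BARRIER sits after the "bne- 1b" but before the "2:" label,
 * so a failed compare branches straight to "2:" and skips the barrier.
 */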
static __always_inline unsigned long
__cmpxchg_u32_acquire(u32 *p, unsigned long old, unsigned long new)
{
	unsigned long prev;

	__asm__ __volatile__ (
"1:	lwarx	%0,0,%2		# __cmpxchg_u32_acquire\n"
"	cmpw	0,%0,%3\n"
"	bne-	2f\n"
"	stwcx.	%4,0,%2\n"
"	bne-	1b\n"
	PPC_ACQUIRE_BARRIER
	"\n"
"2:"
	: "=&r" (prev), "+m" (*p)
	: "r" (p), "r" (old), "r" (new)
	: "cc", "memory");

	return prev;
}

#ifdef CONFIG_PPC64
static __always_inline unsigned long
__cmpxchg_u64(volatile unsigned long *p, unsigned long old, unsigned long new)
{
	unsigned long prev;

	__asm__ __volatile__ (
	PPC_ATOMIC_ENTRY_BARRIER
"1:	ldarx	%0,0,%2		# __cmpxchg_u64\n"
"	cmpd	0,%0,%3\n"
"	bne-	2f\n"
"	stdcx.	%4,0,%2\n"
"	bne-	1b"
	PPC_ATOMIC_EXIT_BARRIER
	"\n"
"2:"
	: "=&r" (prev), "+m" (*p)
	: "r" (p), "r" (old), "r" (new)
	: "cc", "memory");

	return prev;
}

static __always_inline unsigned long
__cmpxchg_u64_local(volatile unsigned long *p, unsigned long old,
			unsigned long new)
{
	unsigned long prev;

	__asm__ __volatile__ (
"1:	ldarx	%0,0,%2		# __cmpxchg_u64_local\n"
"	cmpd	0,%0,%3\n"
"	bne-	2f\n"
"	stdcx.	%4,0,%2\n"
"	bne-	1b"
	"\n"
"2:"
	: "=&r" (prev), "+m" (*p)
	: "r" (p), "r" (old), "r" (new)
	: "cc", "memory");

	return prev;
}

static __always_inline unsigned long
__cmpxchg_u64_relaxed(u64 *p, unsigned long old, unsigned long new)
{
	unsigned long prev;

	__asm__ __volatile__ (
"1:	ldarx	%0,0,%2		# __cmpxchg_u64_relaxed\n"
"	cmpd	0,%0,%3\n"
"	bne-	2f\n"
"	stdcx.	%4,0,%2\n"
"	bne-	1b\n"
"2:"
	: "=&r" (prev), "+m" (*p)
	: "r" (p), "r" (old), "r" (new)
	: "cc");

	return prev;
}

static __always_inline unsigned long
__cmpxchg_u64_acquire(u64 *p, unsigned long old, unsigned long new)
{
	unsigned long prev;

	__asm__ __volatile__ (
"1:	ldarx	%0,0,%2		# __cmpxchg_u64_acquire\n"
"	cmpd	0,%0,%3\n"
"	bne-	2f\n"
"	stdcx.	%4,0,%2\n"
"	bne-	1b\n"
	PPC_ACQUIRE_BARRIER
	"\n"
"2:"
	: "=&r" (prev), "+m" (*p)
	: "r" (p), "r" (old), "r" (new)
	: "cc", "memory");

	return prev;
}
#endif

static __always_inline unsigned long
__cmpxchg(volatile void *ptr, unsigned long old, unsigned long new,
	  unsigned int size)
{
	switch (size) {
	case 1:
		return __cmpxchg_u8(ptr, old, new);
	case 2:
		return __cmpxchg_u16(ptr, old, new);
	case 4:
		return __cmpxchg_u32(ptr, old, new);
#ifdef CONFIG_PPC64
	case 8:
		return __cmpxchg_u64(ptr, old, new);
#endif
	}
	BUILD_BUG_ON_MSG(1, "Unsupported size for __cmpxchg");
	return old;
}

static __always_inline unsigned long
__cmpxchg_local(void *ptr, unsigned long old, unsigned long new,
		unsigned int size)
{
	switch (size) {
	case 1:
		return __cmpxchg_u8_local(ptr, old, new);
	case 2:
		return __cmpxchg_u16_local(ptr, old, new);
	case 4:
		return __cmpxchg_u32_local(ptr, old, new);
#ifdef CONFIG_PPC64
	case 8:
		return __cmpxchg_u64_local(ptr, old, new);
#endif
	}
	BUILD_BUG_ON_MSG(1, "Unsupported size for __cmpxchg_local");
	return old;
}

static __always_inline unsigned long
__cmpxchg_relaxed(void *ptr, unsigned long old, unsigned long new,
		  unsigned int size)
{
	switch (size) {
	case 1:
		return __cmpxchg_u8_relaxed(ptr, old, new);
	case 2:
		return __cmpxchg_u16_relaxed(ptr, old, new);
	case 4:
		return __cmpxchg_u32_relaxed(ptr, old, new);
#ifdef CONFIG_PPC64
	case 8:
		return __cmpxchg_u64_relaxed(ptr, old, new);
#endif
	}
	BUILD_BUG_ON_MSG(1, "Unsupported size for __cmpxchg_relaxed");
	return old;
}

static __always_inline unsigned long
__cmpxchg_acquire(void *ptr, unsigned long old, unsigned long new,
		  unsigned int size)
{
	switch (size) {
	case 1:
		return __cmpxchg_u8_acquire(ptr, old, new);
	case 2:
		return __cmpxchg_u16_acquire(ptr, old, new);
	case 4:
		return __cmpxchg_u32_acquire(ptr, old, new);
#ifdef CONFIG_PPC64
	case 8:
		return __cmpxchg_u64_acquire(ptr, old, new);
#endif
	}
	BUILD_BUG_ON_MSG(1, "Unsupported size for __cmpxchg_acquire");
	return old;
}
#define cmpxchg(ptr, o, n)					\
({								\
	__typeof__(*(ptr)) _o_ = (o);				\
	__typeof__(*(ptr)) _n_ = (n);				\
	(__typeof__(*(ptr))) __cmpxchg((ptr), (unsigned long)_o_, \
				(unsigned long)_n_, sizeof(*(ptr))); \
})

#define cmpxchg_local(ptr, o, n)				\
({								\
	__typeof__(*(ptr)) _o_ = (o);				\
	__typeof__(*(ptr)) _n_ = (n);				\
	(__typeof__(*(ptr))) __cmpxchg_local((ptr), (unsigned long)_o_, \
				(unsigned long)_n_, sizeof(*(ptr))); \
})

#define cmpxchg_relaxed(ptr, o, n)				\
({								\
	__typeof__(*(ptr)) _o_ = (o);				\
	__typeof__(*(ptr)) _n_ = (n);				\
	(__typeof__(*(ptr))) __cmpxchg_relaxed((ptr),		\
			(unsigned long)_o_, (unsigned long)_n_,	\
			sizeof(*(ptr)));			\
})

#define cmpxchg_acquire(ptr, o, n)				\
({								\
	__typeof__(*(ptr)) _o_ = (o);				\
	__typeof__(*(ptr)) _n_ = (n);				\
	(__typeof__(*(ptr))) __cmpxchg_acquire((ptr),		\
			(unsigned long)_o_, (unsigned long)_n_,	\
			sizeof(*(ptr)));			\
})
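
/*
 * Usage sketch (illustrative only; 'lock' is a hypothetical variable):
 *
 *	u32 lock = 0;
 *	if (cmpxchg_acquire(&lock, 0, 1) == 0) {
 *		// we observed 0 and installed 1, with acquire ordering
 *	}
 */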
#ifdef CONFIG_PPC64
#define cmpxchg64(ptr, o, n)					\
({								\
	BUILD_BUG_ON(sizeof(*(ptr)) != 8);			\
	cmpxchg((ptr), (o), (n));				\
})
#define cmpxchg64_local(ptr, o, n)				\
({								\
	BUILD_BUG_ON(sizeof(*(ptr)) != 8);			\
	cmpxchg_local((ptr), (o), (n));				\
})
#define cmpxchg64_relaxed(ptr, o, n)				\
({								\
	BUILD_BUG_ON(sizeof(*(ptr)) != 8);			\
	cmpxchg_relaxed((ptr), (o), (n));			\
})
#define cmpxchg64_acquire(ptr, o, n)				\
({								\
	BUILD_BUG_ON(sizeof(*(ptr)) != 8);			\
	cmpxchg_acquire((ptr), (o), (n));			\
})
#else
#include <asm-generic/cmpxchg-local.h>
#define cmpxchg64_local(ptr, o, n) __cmpxchg64_local_generic((ptr), (o), (n))
#endif
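
/*
 * Usage sketch (illustrative only; 'seq' is a hypothetical variable, and
 * this assumes a CONFIG_PPC64 build where cmpxchg64() is defined):
 *
 *	u64 seq = 41;
 *	u64 old = cmpxchg64(&seq, 41, 42);	// old == 41, seq == 42
 */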

#endif /* __KERNEL__ */
#endif /* _ASM_POWERPC_CMPXCHG_H_ */