/* SPDX-License-Identifier: GPL-2.0 */
#ifndef _ALPHA_CMPXCHG_H
#error Do not include xchg.h directly!
#else
/*
 * xchg/xchg_local and cmpxchg/cmpxchg_local share the same code
 * except that the local versions do not have the expensive memory
 * barrier.  So this file is included twice from asm/cmpxchg.h.
 */

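/*
 * Roughly, the including header does something like the sketch below,
 * mapping ____xchg/____cmpxchg first onto the _local variants and then,
 * after the barrier-adding wrappers, onto the fully ordered ones.  This
 * is illustrative only; see asm/cmpxchg.h for the authoritative
 * definitions:
 *
 *	#define ____xchg(type, args...)		__xchg ## type ## _local(args)
 *	#define ____cmpxchg(type, args...)	__cmpxchg ## type ## _local(args)
 *	#include <asm/xchg.h>
 *	...
 *	#undef ____xchg
 *	#undef ____cmpxchg
 *	#define ____xchg(type, args...)		__xchg ## type(args)
 *	#define ____cmpxchg(type, args...)	__cmpxchg ## type(args)
 *	#include <asm/xchg.h>
 */
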
/*
 * Atomic exchange.
 * Since it can be used to implement critical sections,
 * it must clobber "memory" so the compiler cannot cache values
 * across it (this matters for interrupts even on UP).
 */

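/*
 * As an illustration of why the clobber matters, a minimal
 * test-and-set lock built on xchg() might look like this (a sketch of
 * the pattern, not an actual locking primitive from this tree):
 *
 *	while (xchg(&lock, 1) != 0)	// atomically take the lock
 *		cpu_relax();		// spin until it was free
 *	... critical section ...
 *	smp_store_release(&lock, 0);	// release; pairs with xchg()
 */
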
static inline unsigned long
____xchg(_u8, volatile char *m, unsigned long val)
{
	unsigned long ret, tmp, addr64;

	__asm__ __volatile__(
	"	andnot	%4,7,%3\n"	/* addr64 = m aligned down to 8 bytes */
	"	insbl	%1,%4,%1\n"	/* shift the new byte into its lane */
	"1:	ldq_l	%2,0(%3)\n"	/* load-locked the containing quadword */
	"	extbl	%2,%4,%0\n"	/* ret = the old byte */
	"	mskbl	%2,%4,%2\n"	/* clear the byte's lane */
	"	or	%1,%2,%2\n"	/* merge in the new byte */
	"	stq_c	%2,0(%3)\n"	/* store-conditional the quadword */
	"	beq	%2,2f\n"	/* reservation lost: retry */
	".subsection 2\n"
	"2:	br	1b\n"
	".previous"
	: "=&r" (ret), "=&r" (val), "=&r" (tmp), "=&r" (addr64)
	: "r" ((long)m), "1" (val) : "memory");

	return ret;
}

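/*
 * In C pseudocode, the byte-lane dance above is roughly the loop below
 * (illustration only: store_conditional() is a stand-in for stq_c, and
 * only the real ldq_l/stq_c pair makes the loop atomic):
 *
 *	unsigned long *q = (unsigned long *)((unsigned long)m & ~7UL);
 *	int shift = ((unsigned long)m & 7) * 8;
 *	do {
 *		old64 = *q;					// ldq_l
 *		ret = (old64 >> shift) & 0xff;			// extbl
 *		new64 = (old64 & ~(0xffUL << shift)) |		// mskbl
 *			((val & 0xff) << shift);		// insbl/or
 *	} while (!store_conditional(q, new64));			// stq_c
 *
 * The _u16 variant below is identical except for the 16-bit insert,
 * extract and mask instructions (inswl/extwl/mskwl).
 */
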
static inline unsigned long
____xchg(_u16, volatile short *m, unsigned long val)
{
	unsigned long ret, tmp, addr64;

	__asm__ __volatile__(
	"	andnot	%4,7,%3\n"
	"	inswl	%1,%4,%1\n"
	"1:	ldq_l	%2,0(%3)\n"
	"	extwl	%2,%4,%0\n"
	"	mskwl	%2,%4,%2\n"
	"	or	%1,%2,%2\n"
	"	stq_c	%2,0(%3)\n"
	"	beq	%2,2f\n"
	".subsection 2\n"
	"2:	br	1b\n"
	".previous"
	: "=&r" (ret), "=&r" (val), "=&r" (tmp), "=&r" (addr64)
	: "r" ((long)m), "1" (val) : "memory");

	return ret;
}

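/*
 * The 32- and 64-bit variants map directly onto ldl_l/stl_c and
 * ldq_l/stq_c: naturally aligned full-width accesses need none of the
 * insert/extract/mask shuffling used for bytes and halfwords.
 */
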
static inline unsigned long
____xchg(_u32, volatile int *m, unsigned long val)
{
	unsigned long dummy;

	__asm__ __volatile__(
	"1:	ldl_l	%0,%4\n"
	"	bis	$31,%3,%1\n"
	"	stl_c	%1,%2\n"
	"	beq	%1,2f\n"
	".subsection 2\n"
	"2:	br	1b\n"
	".previous"
	: "=&r" (val), "=&r" (dummy), "=m" (*m)
	: "rI" (val), "m" (*m) : "memory");

	return val;
}

static inline unsigned long
____xchg(_u64, volatile long *m, unsigned long val)
{
	unsigned long dummy;

	__asm__ __volatile__(
	"1:	ldq_l	%0,%4\n"
	"	bis	$31,%3,%1\n"
	"	stq_c	%1,%2\n"
	"	beq	%1,2f\n"
	".subsection 2\n"
	"2:	br	1b\n"
	".previous"
	: "=&r" (val), "=&r" (dummy), "=m" (*m)
	: "rI" (val), "m" (*m) : "memory");

	return val;
}

/* This function doesn't exist, so you'll get a linker error
   if something tries to do an invalid xchg(). */
extern void __xchg_called_with_bad_pointer(void);

static __always_inline unsigned long
____xchg(, volatile void *ptr, unsigned long x, int size)
{
	switch (size) {
	case 1:
		return ____xchg(_u8, ptr, x);
	case 2:
		return ____xchg(_u16, ptr, x);
	case 4:
		return ____xchg(_u32, ptr, x);
	case 8:
		return ____xchg(_u64, ptr, x);
	}
	__xchg_called_with_bad_pointer();
	return x;
}

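/*
 * For example, an xchg() on a pointer to a 3-byte struct matches no
 * case above.  Because size is a compile-time constant and the function
 * is __always_inline, a valid size folds the call away entirely; an
 * invalid one leaves the call to __xchg_called_with_bad_pointer() in
 * the object file and the build fails at link time with an undefined
 * reference, instead of silently doing a wrong-sized exchange.
 */
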
/*
 * Atomic compare and exchange.  Compare OLD with MEM, if identical,
 * store NEW in MEM.  Return the initial value in MEM.  Success is
 * indicated by comparing RETURN with OLD.
 */

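/*
 * Typical usage is a retry loop of the following shape (a sketch of
 * the common pattern, not code from this file; compute() stands for
 * whatever update the caller wants to make):
 *
 *	old = READ_ONCE(*p);
 *	for (;;) {
 *		new = compute(old);
 *		prev = cmpxchg(p, old, new);
 *		if (prev == old)
 *			break;		// we installed new
 *		old = prev;		// lost a race: retry
 *	}
 */
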
static inline unsigned long
____cmpxchg(_u8, volatile char *m, unsigned char old, unsigned char new)
{
	unsigned long prev, tmp, cmp, addr64;

	__asm__ __volatile__(
	"	andnot	%5,7,%4\n"	/* addr64 = m aligned down to 8 bytes */
	"	insbl	%1,%5,%1\n"	/* shift the new byte into its lane */
	"1:	ldq_l	%2,0(%4)\n"	/* load-locked the containing quadword */
	"	extbl	%2,%5,%0\n"	/* prev = the current byte */
	"	cmpeq	%0,%6,%3\n"	/* does it match old? */
	"	beq	%3,2f\n"	/* no: done, return prev */
	"	mskbl	%2,%5,%2\n"	/* clear the byte's lane */
	"	or	%1,%2,%2\n"	/* merge in the new byte */
	"	stq_c	%2,0(%4)\n"	/* store-conditional the quadword */
	"	beq	%2,3f\n"	/* reservation lost: retry */
	"2:\n"
	".subsection 2\n"
	"3:	br	1b\n"
	".previous"
	: "=&r" (prev), "=&r" (new), "=&r" (tmp), "=&r" (cmp), "=&r" (addr64)
	: "r" ((long)m), "Ir" (old), "1" (new) : "memory");

	return prev;
}

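/*
 * Note the control flow: label 2 is reached both when the comparison
 * fails (nothing is stored) and when the store-conditional succeeds;
 * only a lost reservation branches out of line to 3 and retries.
 * Putting the retry branch in .subsection 2 keeps the rarely taken
 * path out of the hot code.  The _u16 variant below differs only in
 * the width of the insert/extract/mask instructions.
 */
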
static inline unsigned long
____cmpxchg(_u16, volatile short *m, unsigned short old, unsigned short new)
{
	unsigned long prev, tmp, cmp, addr64;

	__asm__ __volatile__(
	"	andnot	%5,7,%4\n"
	"	inswl	%1,%5,%1\n"
	"1:	ldq_l	%2,0(%4)\n"
	"	extwl	%2,%5,%0\n"
	"	cmpeq	%0,%6,%3\n"
	"	beq	%3,2f\n"
	"	mskwl	%2,%5,%2\n"
	"	or	%1,%2,%2\n"
	"	stq_c	%2,0(%4)\n"
	"	beq	%2,3f\n"
	"2:\n"
	".subsection 2\n"
	"3:	br	1b\n"
	".previous"
	: "=&r" (prev), "=&r" (new), "=&r" (tmp), "=&r" (cmp), "=&r" (addr64)
	: "r" ((long)m), "Ir" (old), "1" (new) : "memory");

	return prev;
}

static inline unsigned long
____cmpxchg(_u32, volatile int *m, int old, int new)
{
	unsigned long prev, cmp;

	__asm__ __volatile__(
	"1:	ldl_l	%0,%5\n"
	"	cmpeq	%0,%3,%1\n"
	"	beq	%1,2f\n"
	"	mov	%4,%1\n"
	"	stl_c	%1,%2\n"
	"	beq	%1,3f\n"
	"2:\n"
	".subsection 2\n"
	"3:	br	1b\n"
	".previous"
	: "=&r"(prev), "=&r"(cmp), "=m"(*m)
	: "r"((long) old), "r"(new), "m"(*m) : "memory");

	return prev;
}

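/*
 * The (long) cast on old in the _u32 variant is deliberate: ldl_l
 * sign-extends the loaded 32-bit value into the 64-bit register, so
 * old must be sign-extended the same way for cmpeq to compare like
 * with like.
 */
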
static inline unsigned long
____cmpxchg(_u64, volatile long *m, unsigned long old, unsigned long new)
{
	unsigned long prev, cmp;

	__asm__ __volatile__(
	"1:	ldq_l	%0,%5\n"
	"	cmpeq	%0,%3,%1\n"
	"	beq	%1,2f\n"
	"	mov	%4,%1\n"
	"	stq_c	%1,%2\n"
	"	beq	%1,3f\n"
	"2:\n"
	".subsection 2\n"
	"3:	br	1b\n"
	".previous"
	: "=&r"(prev), "=&r"(cmp), "=m"(*m)
	: "r"((long) old), "r"(new), "m"(*m) : "memory");

	return prev;
}

/* This function doesn't exist, so you'll get a linker error
   if something tries to do an invalid cmpxchg(). */
extern void __cmpxchg_called_with_bad_pointer(void);

static __always_inline unsigned long
____cmpxchg(, volatile void *ptr, unsigned long old, unsigned long new,
	      int size)
{
	switch (size) {
	case 1:
		return ____cmpxchg(_u8, ptr, old, new);
	case 2:
		return ____cmpxchg(_u16, ptr, old, new);
	case 4:
		return ____cmpxchg(_u32, ptr, old, new);
	case 8:
		return ____cmpxchg(_u64, ptr, old, new);
	}
	__cmpxchg_called_with_bad_pointer();
	return old;
}

#endif