/* SPDX-License-Identifier: GPL-2.0 */
/*
 * forked from parisc asm/atomic.h which was:
 *	Copyright (C) 2000 Philipp Rumpf <prumpf@tux.org>
 *	Copyright (C) 2006 Kyle McMartin <kyle@parisc-linux.org>
 */

#ifndef _ASM_PARISC_CMPXCHG_H_
#define _ASM_PARISC_CMPXCHG_H_

/* This should get optimized out since it's never called.
** Or get a link error if xchg is used "wrong".
*/
extern void __xchg_called_with_bad_pointer(void);

/* __xchg8/32/64 defined in arch/parisc/lib/bitops.c */
extern unsigned long __xchg8(char, volatile char *);
extern unsigned long __xchg32(int, volatile int *);
#ifdef CONFIG_64BIT
extern unsigned long __xchg64(unsigned long, volatile unsigned long *);
#endif

/* optimizer better get rid of switch since size is a constant */
static inline unsigned long
__xchg(unsigned long x, volatile void *ptr, int size)
{
	switch (size) {
#ifdef CONFIG_64BIT
	case 8: return __xchg64(x, (volatile unsigned long *) ptr);
#endif
	case 4: return __xchg32((int) x, (volatile int *) ptr);
	case 1: return __xchg8((char) x, (volatile char *) ptr);
	}
	__xchg_called_with_bad_pointer();
	return x;
}
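
/*
 * Because sizeof(*(ptr)) is a compile-time constant at every call
 * site, the switch above folds to a single direct call: for example,
 * __xchg(v, p, 4) reduces to __xchg32(). Any other size leaves the
 * call to __xchg_called_with_bad_pointer() behind, which fails at
 * link time because that function is declared but never defined.
 */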

/*
** REVISIT - Abandoned use of LDCW in xchg() for now:
** o need to test sizeof(*ptr) to avoid clearing adjacent bytes
** o and while we are at it, could CONFIG_64BIT code use LDCD too?
**
**	if (__builtin_constant_p(x) && (x == NULL))
**		if (((unsigned long)p & 0xf) == 0)
**			return __ldcw(p);
*/
#define xchg(ptr, x)							\
({									\
	__typeof__(*(ptr)) __ret;					\
	__typeof__(*(ptr)) _x_ = (x);					\
	__ret = (__typeof__(*(ptr)))					\
		__xchg((unsigned long)_x_, (ptr), sizeof(*(ptr)));	\
	__ret;								\
})
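
/*
 * Illustrative use of xchg() (a sketch only; 'pending' is a
 * hypothetical variable, not something defined by this header):
 *
 *	static unsigned int pending;
 *	...
 *	unsigned int was_pending = xchg(&pending, 0);
 *
 * The store of 0 and the read of the prior value happen as one
 * atomic operation, here dispatched to __xchg32().
 */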

/* bug catcher for when unsupported size is used - won't link */
extern void __cmpxchg_called_with_bad_pointer(void);

/* __cmpxchg_u8/u32/u64 defined in arch/parisc/lib/bitops.c */
extern unsigned long __cmpxchg_u32(volatile unsigned int *m, unsigned int old,
				   unsigned int new_);
extern u64 __cmpxchg_u64(volatile u64 *ptr, u64 old, u64 new_);
extern u8 __cmpxchg_u8(volatile u8 *ptr, u8 old, u8 new_);

/* don't worry...optimizer will get rid of most of this */
static inline unsigned long
__cmpxchg(volatile void *ptr, unsigned long old, unsigned long new_, int size)
{
	switch (size) {
#ifdef CONFIG_64BIT
	case 8: return __cmpxchg_u64((u64 *)ptr, old, new_);
#endif
	case 4: return __cmpxchg_u32((unsigned int *)ptr,
				     (unsigned int)old, (unsigned int)new_);
	case 1: return __cmpxchg_u8((u8 *)ptr, old & 0xff, new_ & 0xff);
	}
	__cmpxchg_called_with_bad_pointer();
	return old;
}

#define cmpxchg(ptr, o, n)						\
({									\
	__typeof__(*(ptr)) _o_ = (o);					\
	__typeof__(*(ptr)) _n_ = (n);					\
	(__typeof__(*(ptr))) __cmpxchg((ptr), (unsigned long)_o_,	\
		(unsigned long)_n_, sizeof(*(ptr)));			\
})
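
/*
 * Illustrative compare-and-swap retry loop (a sketch; 'counter' is a
 * hypothetical variable, not defined by this header):
 *
 *	static unsigned int counter;
 *	...
 *	unsigned int cur, old;
 *
 *	cur = counter;
 *	do {
 *		old = cur;
 *		cur = cmpxchg(&counter, old, old + 1);
 *	} while (cur != old);
 *
 * cmpxchg() returns the value it observed at *ptr; the swap happened
 * only if that value equals 'old', so the loop retries whenever
 * another CPU raced in between the read and the cmpxchg().
 */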

#include <asm-generic/cmpxchg-local.h>

static inline unsigned long __cmpxchg_local(volatile void *ptr,
					    unsigned long old,
					    unsigned long new_, int size)
{
	switch (size) {
#ifdef CONFIG_64BIT
	case 8: return __cmpxchg_u64((u64 *)ptr, old, new_);
#endif
	case 4: return __cmpxchg_u32(ptr, old, new_);
	default:
		return __cmpxchg_local_generic(ptr, old, new_, size);
	}
}

/*
 * cmpxchg_local and cmpxchg64_local are atomic wrt current CPU. Always make
 * them available.
 */
#define cmpxchg_local(ptr, o, n)					\
	((__typeof__(*(ptr)))__cmpxchg_local((ptr), (unsigned long)(o),	\
			(unsigned long)(n), sizeof(*(ptr))))
#ifdef CONFIG_64BIT
#define cmpxchg64_local(ptr, o, n)					\
({									\
	BUILD_BUG_ON(sizeof(*(ptr)) != 8);				\
	cmpxchg_local((ptr), (o), (n));					\
})
#else
#define cmpxchg64_local(ptr, o, n) __cmpxchg64_local_generic((ptr), (o), (n))
#endif
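
/*
 * Sketch of a _local use (illustrative only; 'stat' names a
 * hypothetical per-CPU variable, not anything defined here). The
 * _local forms are atomic only with respect to the current CPU, so
 * they are safe when the data is never modified from another CPU,
 * e.g. per-CPU state:
 *
 *	unsigned long *p = this_cpu_ptr(&stat);
 *	cmpxchg_local(p, 0UL, 1UL);
 */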

#define cmpxchg64(ptr, o, n) __cmpxchg_u64(ptr, o, n)

#endif /* _ASM_PARISC_CMPXCHG_H_ */