/* SPDX-License-Identifier: GPL-2.0 */
#ifndef __ASM_SH_BITOPS_CAS_H
#define __ASM_SH_BITOPS_CAS_H

/*
 * Atomic bitops for SH parts that provide the cas.l compare-and-swap
 * instruction (e.g. J2).
 *
 * __bo_cas() compares *p with 'old' and, if they are equal, stores
 * 'new' into *p, all as one atomic operation.  It returns the value
 * observed at *p, so the exchange took place iff the return value
 * equals 'old'.  cas.l takes its memory operand in r0, hence the "z"
 * constraint; it also updates the T bit, hence the "t" clobber.
 */
static inline unsigned __bo_cas(volatile unsigned *p, unsigned old, unsigned new)
{
	__asm__ __volatile__("cas.l %1,%0,@r0"
		: "+r"(new)
		: "r"(old), "z"(p)
		: "t", "memory" );
	return new;
}
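
/*
 * All helpers below follow the same lock-free retry pattern, sketched
 * here for reference (illustrative pseudo-caller, not additional API):
 *
 *	do
 *		old = *word;
 *	while (__bo_cas(word, old, <modified old>) != old);
 *
 * If another CPU updates the word between the plain load and the
 * cas.l, the CAS returns a value different from 'old' and the loop
 * retries with the fresh contents.
 */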

/* Atomically set bit 'nr' in the bitmap at 'addr'. */
static inline void set_bit(int nr, volatile void *addr)
{
	unsigned mask, old;
	volatile unsigned *a = addr;

	a += nr >> 5;			/* 32-bit word holding the bit */
	mask = 1U << (nr & 0x1f);	/* bit position within that word */

	do old = *a;
	while (__bo_cas(a, old, old|mask) != old);
}

/* Atomically clear bit 'nr' in the bitmap at 'addr'. */
static inline void clear_bit(int nr, volatile void *addr)
{
	unsigned mask, old;
	volatile unsigned *a = addr;

	a += nr >> 5;
	mask = 1U << (nr & 0x1f);

	do old = *a;
	while (__bo_cas(a, old, old&~mask) != old);
}

/* Atomically toggle bit 'nr' in the bitmap at 'addr'. */
static inline void change_bit(int nr, volatile void *addr)
{
	unsigned mask, old;
	volatile unsigned *a = addr;

	a += nr >> 5;
	mask = 1U << (nr & 0x1f);

	do old = *a;
	while (__bo_cas(a, old, old^mask) != old);
}

/* Atomically set bit 'nr' and return its previous value (0 or 1). */
static inline int test_and_set_bit(int nr, volatile void *addr)
{
	unsigned mask, old;
	volatile unsigned *a = addr;

	a += nr >> 5;
	mask = 1U << (nr & 0x1f);

	do old = *a;
	while (__bo_cas(a, old, old|mask) != old);

	return !!(old & mask);
}

/* Atomically clear bit 'nr' and return its previous value (0 or 1). */
static inline int test_and_clear_bit(int nr, volatile void *addr)
{
	unsigned mask, old;
	volatile unsigned *a = addr;

	a += nr >> 5;
	mask = 1U << (nr & 0x1f);

	do old = *a;
	while (__bo_cas(a, old, old&~mask) != old);

	return !!(old & mask);
}

/* Atomically toggle bit 'nr' and return its previous value (0 or 1). */
static inline int test_and_change_bit(int nr, volatile void *addr)
{
	unsigned mask, old;
	volatile unsigned *a = addr;

	a += nr >> 5;
	mask = 1U << (nr & 0x1f);

	do old = *a;
	while (__bo_cas(a, old, old^mask) != old);

	return !!(old & mask);
}
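
/*
 * Hypothetical usage sketch (illustration only, not part of this
 * header): test_and_set_bit()/clear_bit() can implement a simple
 * "claim" flag.  MYDEV_BUSY and dev_flags are made up for the example.
 *
 *	static unsigned dev_flags;
 *	#define MYDEV_BUSY 0
 *
 *	if (test_and_set_bit(MYDEV_BUSY, &dev_flags))
 *		return -EBUSY;		// already claimed elsewhere
 *	...do the work...
 *	clear_bit(MYDEV_BUSY, &dev_flags);
 */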

/* Non-atomic variants (__set_bit() and friends) come from the generic code. */
#include <asm-generic/bitops/non-atomic.h>

#endif /* __ASM_SH_BITOPS_CAS_H */