^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1) /* SPDX-License-Identifier: GPL-2.0 */
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2) #ifndef _ASM_GENERIC_BITOPS_ATOMIC_H_
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 3) #define _ASM_GENERIC_BITOPS_ATOMIC_H_
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 4)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 5) #include <linux/atomic.h>
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 6) #include <linux/compiler.h>
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 7) #include <asm/barrier.h>
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 8)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 9) /*
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 10) * Implementation of atomic bitops using atomic-fetch ops.
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 11) * See Documentation/atomic_bitops.txt for details.
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 12) */
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 13)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 14) static __always_inline void set_bit(unsigned int nr, volatile unsigned long *p)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 15) {
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 16) p += BIT_WORD(nr);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 17) atomic_long_or(BIT_MASK(nr), (atomic_long_t *)p);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 18) }
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 19)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 20) static __always_inline void clear_bit(unsigned int nr, volatile unsigned long *p)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 21) {
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 22) p += BIT_WORD(nr);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 23) atomic_long_andnot(BIT_MASK(nr), (atomic_long_t *)p);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 24) }
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 25)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 26) static __always_inline void change_bit(unsigned int nr, volatile unsigned long *p)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 27) {
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 28) p += BIT_WORD(nr);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 29) atomic_long_xor(BIT_MASK(nr), (atomic_long_t *)p);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 30) }
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 31)
/*
 * test_and_set_bit - Atomically set a bit and return its old value
 * @nr: bit number to set
 * @p: base address of the bitmap
 *
 * Returns 1 if the bit was already set, 0 otherwise.
 *
 * NOTE(review): the READ_ONCE() pre-check returns early when the bit is
 * already set, skipping the atomic RMW entirely. That makes the
 * "bit already set" path a plain relaxed load with no ordering
 * guarantees — presumably intentional per Documentation/atomic_bitops.txt
 * (referenced in the file header), which permits unordered failure on
 * test_and_set_bit(); confirm before relying on full-barrier semantics.
 */
static inline int test_and_set_bit(unsigned int nr, volatile unsigned long *p)
{
	long old;
	unsigned long mask = BIT_MASK(nr);

	/* Advance to the word that holds bit @nr. */
	p += BIT_WORD(nr);
	/* Fast path: bit already set — avoid the expensive atomic op. */
	if (READ_ONCE(*p) & mask)
		return 1;

	/* Slow path: atomically set the bit and fetch the prior word. */
	old = atomic_long_fetch_or(mask, (atomic_long_t *)p);
	return !!(old & mask);
}
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 44)
/*
 * test_and_clear_bit - Atomically clear a bit and return its old value
 * @nr: bit number to clear
 * @p: base address of the bitmap
 *
 * Returns 1 if the bit was set, 0 otherwise.
 *
 * NOTE(review): mirrors test_and_set_bit() — the READ_ONCE() pre-check
 * returns early when the bit is already clear, so that path is a plain
 * relaxed load with no ordering guarantees; presumably sanctioned by
 * Documentation/atomic_bitops.txt (see file header) — confirm before
 * relying on full-barrier semantics on the no-op path.
 */
static inline int test_and_clear_bit(unsigned int nr, volatile unsigned long *p)
{
	long old;
	unsigned long mask = BIT_MASK(nr);

	/* Advance to the word that holds bit @nr. */
	p += BIT_WORD(nr);
	/* Fast path: bit already clear — avoid the expensive atomic op. */
	if (!(READ_ONCE(*p) & mask))
		return 0;

	/* Slow path: atomically clear the bit and fetch the prior word. */
	old = atomic_long_fetch_andnot(mask, (atomic_long_t *)p);
	return !!(old & mask);
}
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 57)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 58) static inline int test_and_change_bit(unsigned int nr, volatile unsigned long *p)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 59) {
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 60) long old;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 61) unsigned long mask = BIT_MASK(nr);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 62)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 63) p += BIT_WORD(nr);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 64) old = atomic_long_fetch_xor(mask, (atomic_long_t *)p);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 65) return !!(old & mask);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 66) }
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 67)
#endif /* _ASM_GENERIC_BITOPS_ATOMIC_H_ */