/* SPDX-License-Identifier: GPL-2.0-only */
/*
 * Copyright (C) 2012 Regents of the University of California
 */

#ifndef _ASM_RISCV_BITOPS_H
#define _ASM_RISCV_BITOPS_H

#ifndef _LINUX_BITOPS_H
#error "Only <linux/bitops.h> can be included directly"
#endif /* _LINUX_BITOPS_H */

#include <linux/compiler.h>
#include <linux/irqflags.h>
#include <asm/barrier.h>
#include <asm/bitsperlong.h>

#include <asm-generic/bitops/__ffs.h>
#include <asm-generic/bitops/ffz.h>
#include <asm-generic/bitops/fls.h>
#include <asm-generic/bitops/__fls.h>
#include <asm-generic/bitops/fls64.h>
#include <asm-generic/bitops/find.h>
#include <asm-generic/bitops/sched.h>
#include <asm-generic/bitops/ffs.h>

#include <asm-generic/bitops/hweight.h>

#if (BITS_PER_LONG == 64)
#define __AMO(op)	"amo" #op ".d"
#elif (BITS_PER_LONG == 32)
#define __AMO(op)	"amo" #op ".w"
#else
#error "Unexpected BITS_PER_LONG"
#endif
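
/*
 * Editorial note (not part of the original header): __AMO() stringizes its
 * opcode into an AMO mnemonic of native word width, so __AMO(or) becomes
 * "amoor.d" on RV64 and "amoor.w" on RV32.
 */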

#define __test_and_op_bit_ord(op, mod, nr, addr, ord)		\
({								\
	unsigned long __res, __mask;				\
	__mask = BIT_MASK(nr);					\
	__asm__ __volatile__ (					\
		__AMO(op) #ord " %0, %2, %1"			\
		: "=r" (__res), "+A" (addr[BIT_WORD(nr)])	\
		: "r" (mod(__mask))				\
		: "memory");					\
	((__res & __mask) != 0);				\
})

#define __op_bit_ord(op, mod, nr, addr, ord)			\
	__asm__ __volatile__ (					\
		__AMO(op) #ord " zero, %1, %0"			\
		: "+A" (addr[BIT_WORD(nr)])			\
		: "r" (mod(BIT_MASK(nr)))			\
		: "memory");

#define __test_and_op_bit(op, mod, nr, addr)			\
	__test_and_op_bit_ord(op, mod, nr, addr, .aqrl)
#define __op_bit(op, mod, nr, addr)				\
	__op_bit_ord(op, mod, nr, addr, )
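
/*
 * Rough sketch of what the macros above emit (editorial illustration,
 * assuming RV64; register names are hypothetical, the compiler picks them):
 *
 *	test_and_set_bit(nr, addr)  ->  amoor.d.aqrl  t0, mask, (word)
 *	                                (fully ordered, old word in t0)
 *	set_bit(nr, addr)           ->  amoor.d       zero, mask, (word)
 *	                                (relaxed, result discarded)
 *
 * where "mask" is BIT_MASK(nr) and "word" is &addr[BIT_WORD(nr)].
 */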

/* Bitmask modifiers */
#define __NOP(x)	(x)
#define __NOT(x)	(~(x))

/**
 * test_and_set_bit - Set a bit and return its old value
 * @nr: Bit to set
 * @addr: Address to count from
 *
 * This operation may be reordered on architectures other than x86.
 */
static inline int test_and_set_bit(int nr, volatile unsigned long *addr)
{
	return __test_and_op_bit(or, __NOP, nr, addr);
}
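
/*
 * Usage sketch (editorial example; the bitmap and bit name below are
 * hypothetical, not part of this header):
 *
 *	static unsigned long pending[1];
 *	#define WORK_SCHEDULED	0
 *
 *	if (!test_and_set_bit(WORK_SCHEDULED, pending))
 *		queue_the_work();	// only the first caller gets here
 */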

/**
 * test_and_clear_bit - Clear a bit and return its old value
 * @nr: Bit to clear
 * @addr: Address to count from
 *
 * This operation can be reordered on architectures other than x86.
 */
static inline int test_and_clear_bit(int nr, volatile unsigned long *addr)
{
	return __test_and_op_bit(and, __NOT, nr, addr);
}

/**
 * test_and_change_bit - Change a bit and return its old value
 * @nr: Bit to change
 * @addr: Address to count from
 *
 * This operation is atomic and cannot be reordered.
 * It also implies a memory barrier.
 */
static inline int test_and_change_bit(int nr, volatile unsigned long *addr)
{
	return __test_and_op_bit(xor, __NOP, nr, addr);
}

/**
 * set_bit - Atomically set a bit in memory
 * @nr: the bit to set
 * @addr: the address to start counting from
 *
 * Note: there are no guarantees that this function will not be reordered
 * on non-x86 architectures, so if you are writing portable code, do not
 * rely on any ordering guarantees from it.
 *
 * Note that @nr may be almost arbitrarily large; this function is not
 * restricted to acting on a single-word quantity.
 */
static inline void set_bit(int nr, volatile unsigned long *addr)
{
	__op_bit(or, __NOP, nr, addr);
}
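
/*
 * Editorial illustration of the "almost arbitrarily large @nr" note above
 * (DECLARE_BITMAP() comes from <linux/types.h>):
 *
 *	DECLARE_BITMAP(map, 128);	// two unsigned longs on RV64
 *
 *	set_bit(100, map);	// atomically ORs BIT_MASK(100) into map[1]
 */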

/**
 * clear_bit - Clears a bit in memory
 * @nr: Bit to clear
 * @addr: Address to start counting from
 *
 * Note: there are no guarantees that this function will not be reordered
 * on non-x86 architectures, so if you are writing portable code, do not
 * rely on any ordering guarantees from it.
 */
static inline void clear_bit(int nr, volatile unsigned long *addr)
{
	__op_bit(and, __NOT, nr, addr);
}

/**
 * change_bit - Toggle a bit in memory
 * @nr: Bit to change
 * @addr: Address to start counting from
 *
 * change_bit() may be reordered on architectures other than x86.
 * Note that @nr may be almost arbitrarily large; this function is not
 * restricted to acting on a single-word quantity.
 */
static inline void change_bit(int nr, volatile unsigned long *addr)
{
	__op_bit(xor, __NOP, nr, addr);
}

/**
 * test_and_set_bit_lock - Set a bit and return its old value, for lock
 * @nr: Bit to set
 * @addr: Address to count from
 *
 * This operation is atomic and provides acquire barrier semantics.
 * It can be used to implement bit locks.
 */
static inline int test_and_set_bit_lock(
	unsigned long nr, volatile unsigned long *addr)
{
	return __test_and_op_bit_ord(or, __NOP, nr, addr, .aq);
}

/**
 * clear_bit_unlock - Clear a bit in memory, for unlock
 * @nr: the bit to clear
 * @addr: the address to start counting from
 *
 * This operation is atomic and provides release barrier semantics.
 */
static inline void clear_bit_unlock(
	unsigned long nr, volatile unsigned long *addr)
{
	__op_bit_ord(and, __NOT, nr, addr, .rl);
}
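
/*
 * Bit-lock usage sketch (editorial example; the lock word and bit number
 * are hypothetical, not part of this header):
 *
 *	static unsigned long lock_word;
 *	#define MY_LOCK_BIT	0
 *
 *	while (test_and_set_bit_lock(MY_LOCK_BIT, &lock_word))
 *		cpu_relax();		// spin until the bit was clear (acquire)
 *	// ... critical section ...
 *	clear_bit_unlock(MY_LOCK_BIT, &lock_word);	// release
 */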

/**
 * __clear_bit_unlock - Clear a bit in memory, for unlock
 * @nr: the bit to clear
 * @addr: the address to start counting from
 *
 * This operation is like clear_bit_unlock(), however it is not atomic.
 * It does provide release barrier semantics so it can be used to unlock
 * a bit lock, however it would only be used if no other CPU can modify
 * any bits in the memory until the lock is released (a good example is
 * if the bit lock itself protects access to the other bits in the word).
 *
 * On RISC-V systems there seems to be no benefit to taking advantage of the
 * non-atomic property here: it's a lot more instructions and we still have to
 * provide release semantics anyway.
 */
static inline void __clear_bit_unlock(
	unsigned long nr, volatile unsigned long *addr)
{
	clear_bit_unlock(nr, addr);
}

#undef __test_and_op_bit
#undef __op_bit
#undef __NOP
#undef __NOT
#undef __AMO

#include <asm-generic/bitops/non-atomic.h>
#include <asm-generic/bitops/le.h>
#include <asm-generic/bitops/ext2-atomic.h>

#endif /* _ASM_RISCV_BITOPS_H */