^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1) /*
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2) * include/asm-xtensa/bitops.h
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 3) *
 * Atomic operations that C can't guarantee us. Useful for resource counting etc.
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 5) *
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 6) * This file is subject to the terms and conditions of the GNU General Public
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 7) * License. See the file "COPYING" in the main directory of this archive
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 8) * for more details.
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 9) *
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 10) * Copyright (C) 2001 - 2007 Tensilica Inc.
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 11) */
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 12)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 13) #ifndef _XTENSA_BITOPS_H
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 14) #define _XTENSA_BITOPS_H
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 15)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 16) #ifndef _LINUX_BITOPS_H
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 17) #error only <linux/bitops.h> can be included directly
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 18) #endif
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 19)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 20) #include <asm/processor.h>
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 21) #include <asm/byteorder.h>
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 22) #include <asm/barrier.h>
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 23)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 24) #include <asm-generic/bitops/non-atomic.h>
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 25)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 26) #if XCHAL_HAVE_NSA
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 27)
/*
 * __cntlz: count the leading zero bits of @x using the Xtensa NSAU
 * instruction ("normalization shift amount, unsigned").
 * NOTE(review): per the Xtensa ISA, nsau yields 32 for x == 0 -- confirm;
 * callers that compute "31 - __cntlz(...)" rely on a set bit existing.
 */
static inline unsigned long __cntlz (unsigned long x)
{
	int lz;
	asm ("nsau %0, %1" : "=r" (lz) : "r" (x));
	return lz;
}
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 34)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 35) /*
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 36) * ffz: Find first zero in word. Undefined if no zero exists.
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 37) * bit 0 is the LSB of addr; bit 32 is the LSB of (addr+1).
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 38) */
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 39)
static inline int ffz(unsigned long x)
{
	/* The lowest clear bit of x is the lowest set bit of ~x. */
	unsigned long inverted = ~x;
	unsigned long lowest = inverted & -inverted;

	/* For an isolated bit k, __cntlz() returns 31 - k. */
	return 31 - __cntlz(lowest);
}
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 44)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 45) /*
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 46) * __ffs: Find first bit set in word. Return 0 for bit 0
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 47) */
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 48)
static inline unsigned long __ffs(unsigned long x)
{
	/* Keep only the least-significant set bit, then locate it. */
	unsigned long lowest_bit = x & -x;

	return 31 - __cntlz(lowest_bit);
}
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 53)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 54) /*
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 55) * ffs: Find first bit set in word. This is defined the same way as
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 56) * the libc and compiler builtin ffs routines, therefore
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 57) * differs in spirit from the above ffz (man ffs).
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 58) */
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 59)
static inline int ffs(unsigned long x)
{
	/* Isolate the lowest set bit; result is its 1-based position. */
	unsigned long lowest_bit = x & -x;

	return 32 - __cntlz(lowest_bit);
}
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 64)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 65) /*
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 66) * fls: Find last (most-significant) bit set in word.
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 67) * Note fls(0) = 0, fls(1) = 1, fls(0x80000000) = 32.
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 68) */
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 69)
static inline int fls (unsigned int x)
{
	/* One plus the index of the most-significant set bit. */
	unsigned long leading_zeros = __cntlz(x);

	return 32 - leading_zeros;
}
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 74)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 75) /**
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 76) * __fls - find last (most-significant) set bit in a long word
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 77) * @word: the word to search
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 78) *
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 79) * Undefined if no set bit exists, so code should check against 0 first.
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 80) */
static inline unsigned long __fls(unsigned long word)
{
	/* Index of the most-significant set bit; undefined for word == 0. */
	unsigned long leading_zeros = __cntlz(word);

	return 31 - leading_zeros;
}
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 85) #else
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 86)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 87) /* Use the generic implementation if we don't have the nsa/nsau instructions. */
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 88)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 89) # include <asm-generic/bitops/ffs.h>
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 90) # include <asm-generic/bitops/__ffs.h>
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 91) # include <asm-generic/bitops/ffz.h>
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 92) # include <asm-generic/bitops/fls.h>
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 93) # include <asm-generic/bitops/__fls.h>
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 94)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 95) #endif
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 96)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 97) #include <asm-generic/bitops/fls64.h>
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 98)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 99) #if XCHAL_HAVE_EXCLUSIVE
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 100)
/*
 * BIT_OP(op, insn, inv): generate an atomic <op>_bit(bit, p)
 * (set_bit/clear_bit/change_bit) from the Xtensa exclusive-access
 * instructions.  @insn is the ALU op combining the loaded word with the
 * mask; @inv is either empty or '~' so clear_bit can AND with ~mask.
 * l32ex/s32ex form a load-/store-exclusive pair; getex fetches the store
 * result and beqz retries the sequence until the store succeeded.
 */
#define BIT_OP(op, insn, inv) \
static inline void op##_bit(unsigned int bit, volatile unsigned long *p)\
{ \
	unsigned long tmp; \
	unsigned long mask = 1UL << (bit & 31); \
	\
	p += bit >> 5; \
	\
	__asm__ __volatile__( \
	"1: l32ex %[tmp], %[addr]\n" \
	" "insn" %[tmp], %[tmp], %[mask]\n" \
	" s32ex %[tmp], %[addr]\n" \
	" getex %[tmp]\n" \
	" beqz %[tmp], 1b\n" \
	: [tmp] "=&a" (tmp) \
	: [mask] "a" (inv mask), [addr] "a" (p) \
	: "memory"); \
}
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 119)
/*
 * TEST_AND_BIT_OP(op, insn, inv): like BIT_OP, but the generated
 * test_and_<op>_bit() also returns the bit's previous state (nonzero if
 * it was set).  @value keeps the word as loaded by l32ex, so
 * "value & mask" is the old bit once the exclusive store succeeds.
 */
#define TEST_AND_BIT_OP(op, insn, inv) \
static inline int \
test_and_##op##_bit(unsigned int bit, volatile unsigned long *p) \
{ \
	unsigned long tmp, value; \
	unsigned long mask = 1UL << (bit & 31); \
	\
	p += bit >> 5; \
	\
	__asm__ __volatile__( \
	"1: l32ex %[value], %[addr]\n" \
	" "insn" %[tmp], %[value], %[mask]\n" \
	" s32ex %[tmp], %[addr]\n" \
	" getex %[tmp]\n" \
	" beqz %[tmp], 1b\n" \
	: [tmp] "=&a" (tmp), [value] "=&a" (value) \
	: [mask] "a" (inv mask), [addr] "a" (p) \
	: "memory"); \
	\
	return value & mask; \
}
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 141)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 142) #elif XCHAL_HAVE_S32C1I
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 143)
/*
 * BIT_OP(op, insn, inv): generate an atomic <op>_bit(bit, p) from the
 * Xtensa S32C1I conditional store.  Each iteration loads the word, arms
 * SCOMPARE1 with the value just read, computes the new word with @insn
 * and the (optionally @inv-inverted) mask; s32c1i stores it only if
 * memory still equals SCOMPARE1, otherwise bne loops to retry.
 */
#define BIT_OP(op, insn, inv) \
static inline void op##_bit(unsigned int bit, volatile unsigned long *p)\
{ \
	unsigned long tmp, value; \
	unsigned long mask = 1UL << (bit & 31); \
	\
	p += bit >> 5; \
	\
	__asm__ __volatile__( \
	"1: l32i %[value], %[mem]\n" \
	" wsr %[value], scompare1\n" \
	" "insn" %[tmp], %[value], %[mask]\n" \
	" s32c1i %[tmp], %[mem]\n" \
	" bne %[tmp], %[value], 1b\n" \
	: [tmp] "=&a" (tmp), [value] "=&a" (value), \
	  [mem] "+m" (*p) \
	: [mask] "a" (inv mask) \
	: "memory"); \
}
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 163)
/*
 * TEST_AND_BIT_OP(op, insn, inv): S32C1I variant of test_and_<op>_bit().
 * NOTE(review): s32c1i writes the observed memory contents back into its
 * register, so after a successful (tmp == value) exchange %[tmp] holds
 * the word's previous value and "tmp & mask" is the old bit state.
 */
#define TEST_AND_BIT_OP(op, insn, inv) \
static inline int \
test_and_##op##_bit(unsigned int bit, volatile unsigned long *p) \
{ \
	unsigned long tmp, value; \
	unsigned long mask = 1UL << (bit & 31); \
	\
	p += bit >> 5; \
	\
	__asm__ __volatile__( \
	"1: l32i %[value], %[mem]\n" \
	" wsr %[value], scompare1\n" \
	" "insn" %[tmp], %[value], %[mask]\n" \
	" s32c1i %[tmp], %[mem]\n" \
	" bne %[tmp], %[value], 1b\n" \
	: [tmp] "=&a" (tmp), [value] "=&a" (value), \
	  [mem] "+m" (*p) \
	: [mask] "a" (inv mask) \
	: "memory"); \
	\
	return tmp & mask; \
}
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 186)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 187) #else
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 188)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 189) #define BIT_OP(op, insn, inv)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 190) #define TEST_AND_BIT_OP(op, insn, inv)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 191)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 192) #include <asm-generic/bitops/atomic.h>
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 193)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 194) #endif /* XCHAL_HAVE_S32C1I */
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 195)
/*
 * BIT_OPS(op, insn, inv): instantiate both the plain and the
 * test-and-return flavour of one bit operation from whichever BIT_OP /
 * TEST_AND_BIT_OP pair was selected above (exclusive, s32c1i, or the
 * empty stubs backed by asm-generic/bitops/atomic.h).
 */
#define BIT_OPS(op, insn, inv) \
	BIT_OP(op, insn, inv) \
	TEST_AND_BIT_OP(op, insn, inv)

/* set: OR the mask in; clear: AND with ~mask; change: XOR the mask. */
BIT_OPS(set, "or", )
BIT_OPS(clear, "and", ~)
BIT_OPS(change, "xor", )

/* The generator macros are implementation details; don't leak them. */
#undef BIT_OPS
#undef BIT_OP
#undef TEST_AND_BIT_OP
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 207)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 208) #include <asm-generic/bitops/find.h>
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 209) #include <asm-generic/bitops/le.h>
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 210)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 211) #include <asm-generic/bitops/ext2-atomic-setbit.h>
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 212)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 213) #include <asm-generic/bitops/hweight.h>
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 214) #include <asm-generic/bitops/lock.h>
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 215) #include <asm-generic/bitops/sched.h>
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 216)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 217) #endif /* _XTENSA_BITOPS_H */