/* SPDX-License-Identifier: GPL-2.0 */
#ifndef _PARISC_BITOPS_H
#define _PARISC_BITOPS_H

#ifndef _LINUX_BITOPS_H
#error only <linux/bitops.h> can be included directly
#endif

#include <linux/compiler.h>
#include <asm/types.h>
#include <asm/byteorder.h>
#include <asm/barrier.h>
#include <linux/atomic.h>

/* See http://marc.theaimsgroup.com/?t=108826637900003 for discussion
 * on use of volatile and __*_bit() (set/clear/change):
 * the *_bit() variants want use of volatile.
 * The __*_bit() variants are "relaxed" and use neither spinlock nor volatile.
 */
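
/*
 * PA-RISC has no atomic read-modify-write instructions other than ldcw,
 * so the atomic *_bit() helpers below serialize through
 * _atomic_spin_lock_irqsave(), which picks a spinlock hashed from the
 * operand address (and reduces to an IRQ-disable on non-SMP builds).
 *
 * Illustrative usage (a sketch, not part of this header): use the atomic
 * form when the bitmap is shared without other serialization, and the
 * relaxed __*_bit() form only when the caller already holds a lock or
 * owns the word exclusively.  "flags" and "my_lock" are hypothetical:
 *
 *	DECLARE_BITMAP(flags, 64);
 *
 *	set_bit(3, flags);		// atomic: safe vs. concurrent updates
 *
 *	spin_lock(&my_lock);
 *	__set_bit(4, flags);		// relaxed: serialized by my_lock
 *	spin_unlock(&my_lock);
 */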

static __inline__ void set_bit(int nr, volatile unsigned long *addr)
{
	unsigned long mask = BIT_MASK(nr);
	unsigned long flags;

	addr += BIT_WORD(nr);
	_atomic_spin_lock_irqsave(addr, flags);
	*addr |= mask;
	_atomic_spin_unlock_irqrestore(addr, flags);
}

static __inline__ void clear_bit(int nr, volatile unsigned long *addr)
{
	unsigned long mask = BIT_MASK(nr);
	unsigned long flags;

	addr += BIT_WORD(nr);
	_atomic_spin_lock_irqsave(addr, flags);
	*addr &= ~mask;
	_atomic_spin_unlock_irqrestore(addr, flags);
}

static __inline__ void change_bit(int nr, volatile unsigned long *addr)
{
	unsigned long mask = BIT_MASK(nr);
	unsigned long flags;

	addr += BIT_WORD(nr);
	_atomic_spin_lock_irqsave(addr, flags);
	*addr ^= mask;
	_atomic_spin_unlock_irqrestore(addr, flags);
}

static __inline__ int test_and_set_bit(int nr, volatile unsigned long *addr)
{
	unsigned long mask = BIT_MASK(nr);
	unsigned long old;
	unsigned long flags;
	int set;

	addr += BIT_WORD(nr);
	_atomic_spin_lock_irqsave(addr, flags);
	old = *addr;
	set = (old & mask) ? 1 : 0;
	if (!set)
		*addr = old | mask;
	_atomic_spin_unlock_irqrestore(addr, flags);

	return set;
}
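
/*
 * Illustrative usage (a sketch, not part of this header):
 * test_and_set_bit() returns the old bit value, so it can gate one-shot
 * work on a shared flag word.  "state", "INIT_DONE" and
 * "do_one_time_init()" are hypothetical:
 *
 *	static unsigned long state;
 *
 *	if (!test_and_set_bit(INIT_DONE, &state))
 *		do_one_time_init();	// first caller wins; others skip
 */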

static __inline__ int test_and_clear_bit(int nr, volatile unsigned long *addr)
{
	unsigned long mask = BIT_MASK(nr);
	unsigned long old;
	unsigned long flags;
	int set;

	addr += BIT_WORD(nr);
	_atomic_spin_lock_irqsave(addr, flags);
	old = *addr;
	set = (old & mask) ? 1 : 0;
	if (set)
		*addr = old & ~mask;
	_atomic_spin_unlock_irqrestore(addr, flags);

	return set;
}

static __inline__ int test_and_change_bit(int nr, volatile unsigned long *addr)
{
	unsigned long mask = BIT_MASK(nr);
	unsigned long oldbit;
	unsigned long flags;

	addr += BIT_WORD(nr);
	_atomic_spin_lock_irqsave(addr, flags);
	oldbit = *addr;
	*addr = oldbit ^ mask;
	_atomic_spin_unlock_irqrestore(addr, flags);

	return (oldbit & mask) ? 1 : 0;
}

#include <asm-generic/bitops/non-atomic.h>

#ifdef __KERNEL__

/**
 * __ffs - find first set bit in word; returns 0 to BITS_PER_LONG-1.
 * @word: The word to search
 *
 * The return value of __ffs() is undefined if no bit is set.
 *
 * 32-bit fast __ffs by LaMont Jones "lamont At hp com".
 * 64-bit enhancement by Grant Grundler "grundler At parisc-linux org".
 * (with help from willy/jejb to get the semantics right)
 *
 * This algorithm avoids branches by making use of nullification.
 * One side effect of the conditional "extr" instructions is that they
 * set the PSW[N] (nullify next insn) bit, as determined by the
 * "condition" field (e.g. "<>" or "TR" below) in the extr* insn.
 * Only the 1st and one of either the 2nd or 3rd insn will get executed.
 * Each set of 3 insns executes in 2 cycles on PA8x00 vs 16 or so
 * cycles for each mispredicted branch.
 */

static __inline__ unsigned long __ffs(unsigned long x)
{
	unsigned long ret;

	__asm__(
#ifdef CONFIG_64BIT
		"	ldi	63,%1\n"
		"	extrd,u,*<>	%0,63,32,%%r0\n"
		"	extrd,u,*TR	%0,31,32,%0\n"	/* move top 32-bits down */
		"	addi	-32,%1,%1\n"
#else
		"	ldi	31,%1\n"
#endif
		"	extru,<>	%0,31,16,%%r0\n"
		"	extru,TR	%0,15,16,%0\n"	/* xxxx0000 -> 0000xxxx */
		"	addi	-16,%1,%1\n"
		"	extru,<>	%0,31,8,%%r0\n"
		"	extru,TR	%0,23,8,%0\n"	/* 0000xx00 -> 000000xx */
		"	addi	-8,%1,%1\n"
		"	extru,<>	%0,31,4,%%r0\n"
		"	extru,TR	%0,27,4,%0\n"	/* 000000x0 -> 0000000x */
		"	addi	-4,%1,%1\n"
		"	extru,<>	%0,31,2,%%r0\n"
		"	extru,TR	%0,29,2,%0\n"	/* 0000000y, 1100b -> 0011b */
		"	addi	-2,%1,%1\n"
		"	extru,=	%0,31,1,%%r0\n"	/* check last bit */
		"	addi	-1,%1,%1\n"
		: "+r" (x), "=r" (ret) );
	return ret;
}
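
/*
 * Portable C sketch of the same binary search (illustrative only; the
 * asm above is the real implementation and uses nullification instead
 * of these branches).  "__ffs_sketch" is a hypothetical name:
 *
 *	static unsigned long __ffs_sketch(unsigned long x)
 *	{
 *		unsigned long ret = BITS_PER_LONG - 1;
 *	#ifdef CONFIG_64BIT
 *		if (x & 0xffffffffUL) ret -= 32; else x >>= 32;
 *	#endif
 *		if (x & 0xffff) ret -= 16; else x >>= 16;
 *		if (x & 0xff)   ret -= 8;  else x >>= 8;
 *		if (x & 0xf)    ret -= 4;  else x >>= 4;
 *		if (x & 0x3)    ret -= 2;  else x >>= 2;
 *		if (x & 0x1)    ret -= 1;
 *		return ret;
 *	}
 *
 * Worked example: __ffs(0x50) == 4.  The low 16 and low 8 bits are
 * non-zero (-16, -8), the low 4 bits are zero (shift 0x50 down to 0x5),
 * the low 2 bits are non-zero (-2), and the LSB is set (-1):
 * 31 - 16 - 8 - 2 - 1 == 4.
 */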

#include <asm-generic/bitops/ffz.h>

/*
 * ffs: find first bit set. Returns 1 to 32, or 0 if no bit is set.
 * This is defined the same way as the libc and compiler builtin
 * ffs routines, and therefore differs in spirit from the ffz above (man ffs).
 */
static __inline__ int ffs(int x)
{
	return x ? (__ffs((unsigned long)x) + 1) : 0;
}
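
/*
 * Illustrative values (not part of this header):
 * ffs(0) == 0, ffs(1) == 1, ffs(0x10) == 5.
 */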

/*
 * fls: find last (most significant) bit set.
 * fls(0) = 0, fls(1) = 1, fls(0x80000000) = 32.
 */

static __inline__ int fls(unsigned int x)
{
	int ret;
	if (!x)
		return 0;

	__asm__(
		"	ldi	1,%1\n"
		"	extru,<>	%0,15,16,%%r0\n"
		"	zdep,TR	%0,15,16,%0\n"	/* xxxx0000 */
		"	addi	16,%1,%1\n"
		"	extru,<>	%0,7,8,%%r0\n"
		"	zdep,TR	%0,23,24,%0\n"	/* xx000000 */
		"	addi	8,%1,%1\n"
		"	extru,<>	%0,3,4,%%r0\n"
		"	zdep,TR	%0,27,28,%0\n"	/* x0000000 */
		"	addi	4,%1,%1\n"
		"	extru,<>	%0,1,2,%%r0\n"
		"	zdep,TR	%0,29,30,%0\n"	/* y0000000 (y&3 = 0) */
		"	addi	2,%1,%1\n"
		"	extru,=	%0,0,1,%%r0\n"
		"	addi	1,%1,%1\n"	/* if y & 8, add 1 */
		: "+r" (x), "=r" (ret) );

	return ret;
}
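
/*
 * Portable C sketch of fls (illustrative only; the asm above is the
 * real implementation).  "fls_sketch" is a hypothetical name:
 *
 *	static int fls_sketch(unsigned int x)
 *	{
 *		int ret = 1;
 *
 *		if (!x)
 *			return 0;
 *		if (x & 0xffff0000u) ret += 16; else x <<= 16;
 *		if (x & 0xff000000u) ret += 8;  else x <<= 8;
 *		if (x & 0xf0000000u) ret += 4;  else x <<= 4;
 *		if (x & 0xc0000000u) ret += 2;  else x <<= 2;
 *		if (x & 0x80000000u) ret += 1;
 *		return ret;
 *	}
 *
 * e.g. fls(0x80000000) == 32: every test is non-zero, so
 * 1 + 16 + 8 + 4 + 2 + 1 == 32.
 */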

#include <asm-generic/bitops/__fls.h>
#include <asm-generic/bitops/fls64.h>
#include <asm-generic/bitops/hweight.h>
#include <asm-generic/bitops/lock.h>
#include <asm-generic/bitops/sched.h>

#endif /* __KERNEL__ */

#include <asm-generic/bitops/find.h>

#ifdef __KERNEL__

#include <asm-generic/bitops/le.h>
#include <asm-generic/bitops/ext2-atomic-setbit.h>

#endif /* __KERNEL__ */

#endif /* _PARISC_BITOPS_H */