/* SPDX-License-Identifier: GPL-2.0 */
#ifndef __ASM_SH_BITOPS_GRB_H
#define __ASM_SH_BITOPS_GRB_H

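/*
 * Bitops for SH parts using the gUSA rollback (GRB) mechanism.  Each
 * read-modify-write sequence below is bracketed by a LOGIN/LOGOUT pair:
 *
 *	r0  = address of the end of the critical region (the "1:" label)
 *	r1  = saved stack pointer
 *	r15 = negative length, in bytes, of the critical region
 *
 * While r15 is negative, the exception-entry code treats the
 * instructions between (r0 + r15) and r0 as an atomic region: if the
 * sequence is interrupted before its final store, execution is rolled
 * back to the start instead of being resumed.  This makes each
 * load-modify-store atomic with respect to interrupts and preemption
 * without locks or ll/sc-style instructions.
 */
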
static inline void set_bit(int nr, volatile void *addr)
{
	int mask;
	volatile unsigned int *a = addr;
	unsigned long tmp;

	a += nr >> 5;
	mask = 1 << (nr & 0x1f);

	__asm__ __volatile__ (
		"   .align 2              \n\t"
		"   mova    1f,   r0      \n\t" /* r0 = end point */
		"   mov    r15,   r1      \n\t" /* r1 = saved sp */
		"   mov    #-6,   r15     \n\t" /* LOGIN: r15 = -size */
		"   mov.l  @%1,   %0      \n\t" /* load old value */
		"   or      %2,   %0      \n\t" /* set the bit */
		"   mov.l   %0,   @%1     \n\t" /* store new value */
		"1: mov     r1,   r15     \n\t" /* LOGOUT */
		: "=&r" (tmp),
		  "+r" (a)
		: "r" (mask)
		: "memory", "r0", "r1");
}

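/*
 * clear_bit() reuses the 6-byte rollback region of set_bit(), ANDing
 * with the inverted mask instead of ORing.
 */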
static inline void clear_bit(int nr, volatile void *addr)
{
	int mask;
	volatile unsigned int *a = addr;
	unsigned long tmp;

	a += nr >> 5;
	mask = ~(1 << (nr & 0x1f));

	__asm__ __volatile__ (
		"   .align 2              \n\t"
		"   mova    1f,   r0      \n\t" /* r0 = end point */
		"   mov    r15,   r1      \n\t" /* r1 = saved sp */
		"   mov    #-6,   r15     \n\t" /* LOGIN: r15 = -size */
		"   mov.l  @%1,   %0      \n\t" /* load old value */
		"   and     %2,   %0      \n\t" /* clear the bit */
		"   mov.l   %0,   @%1     \n\t" /* store new value */
		"1: mov     r1,   r15     \n\t" /* LOGOUT */
		: "=&r" (tmp),
		  "+r" (a)
		: "r" (mask)
		: "memory", "r0", "r1");
}

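/*
 * change_bit() toggles the bit by XORing with the mask inside the same
 * 6-byte rollback region.
 */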
static inline void change_bit(int nr, volatile void *addr)
{
	int mask;
	volatile unsigned int *a = addr;
	unsigned long tmp;

	a += nr >> 5;
	mask = 1 << (nr & 0x1f);

	__asm__ __volatile__ (
		"   .align 2              \n\t"
		"   mova    1f,   r0      \n\t" /* r0 = end point */
		"   mov    r15,   r1      \n\t" /* r1 = saved sp */
		"   mov    #-6,   r15     \n\t" /* LOGIN: r15 = -size */
		"   mov.l  @%1,   %0      \n\t" /* load old value */
		"   xor     %2,   %0      \n\t" /* toggle the bit */
		"   mov.l   %0,   @%1     \n\t" /* store new value */
		"1: mov     r1,   r15     \n\t" /* LOGOUT */
		: "=&r" (tmp),
		  "+r" (a)
		: "r" (mask)
		: "memory", "r0", "r1");
}

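/*
 * The test_and_* variants return the old bit value via the tst/negc
 * idiom: "tst" sets T = ((old & mask) == 0), then "negc" with %1 = -1
 * computes 0 - (-1) - T = 1 - T, i.e. retval = !T.  The extra
 * instructions grow the rollback region to 14 bytes.
 */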
static inline int test_and_set_bit(int nr, volatile void *addr)
{
	int mask, retval;
	volatile unsigned int *a = addr;
	unsigned long tmp;

	a += nr >> 5;
	mask = 1 << (nr & 0x1f);

	__asm__ __volatile__ (
		"   .align 2              \n\t"
		"   mova    1f,   r0      \n\t" /* r0 = end point */
		"   mov    r15,   r1      \n\t" /* r1 = saved sp */
		"   mov   #-14,   r15     \n\t" /* LOGIN: r15 = -size */
		"   mov.l  @%2,   %0      \n\t" /* load old value */
		"   mov     %0,   %1      \n\t" /* %1 = *a */
		"   tst     %1,   %3      \n\t" /* T = ((*a & mask) == 0) */
		"   mov    #-1,   %1      \n\t" /* retval = -1 */
		"   negc    %1,   %1      \n\t" /* retval = ((*a & mask) != 0) */
		"   or      %3,   %0      \n\t" /* set the bit */
		"   mov.l   %0,   @%2     \n\t" /* store new value */
		"1: mov     r1,   r15     \n\t" /* LOGOUT */
		: "=&r" (tmp),
		  "=&r" (retval),
		  "+r" (a)
		: "r" (mask)
		: "memory", "r0", "r1", "t");

	return retval;
}

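/*
 * The AND needs the inverted mask in a register, so it is computed in C
 * (not_mask) rather than inside the asm; this keeps the rollback region
 * at the same 14 bytes as test_and_set_bit().
 */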
static inline int test_and_clear_bit(int nr, volatile void *addr)
{
	int mask, retval, not_mask;
	volatile unsigned int *a = addr;
	unsigned long tmp;

	a += nr >> 5;
	mask = 1 << (nr & 0x1f);
	not_mask = ~mask;

	__asm__ __volatile__ (
		"   .align 2              \n\t"
		"   mova    1f,   r0      \n\t" /* r0 = end point */
		"   mov    r15,   r1      \n\t" /* r1 = saved sp */
		"   mov   #-14,   r15     \n\t" /* LOGIN: r15 = -size */
		"   mov.l  @%2,   %0      \n\t" /* load old value */
		"   mov     %0,   %1      \n\t" /* %1 = *a */
		"   tst     %1,   %3      \n\t" /* T = ((*a & mask) == 0) */
		"   mov    #-1,   %1      \n\t" /* retval = -1 */
		"   negc    %1,   %1      \n\t" /* retval = ((*a & mask) != 0) */
		"   and     %4,   %0      \n\t" /* clear the bit */
		"   mov.l   %0,   @%2     \n\t" /* store new value */
		"1: mov     r1,   r15     \n\t" /* LOGOUT */
		: "=&r" (tmp),
		  "=&r" (retval),
		  "+r" (a)
		: "r" (mask),
		  "r" (not_mask)
		: "memory", "r0", "r1", "t");

	return retval;
}

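/*
 * test_and_change_bit() XORs with the plain mask, so unlike
 * test_and_clear_bit() it needs no inverted copy.
 */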
static inline int test_and_change_bit(int nr, volatile void *addr)
{
	int mask, retval;
	volatile unsigned int *a = addr;
	unsigned long tmp;

	a += nr >> 5;
	mask = 1 << (nr & 0x1f);

	__asm__ __volatile__ (
		"   .align 2              \n\t"
		"   mova    1f,   r0      \n\t" /* r0 = end point */
		"   mov    r15,   r1      \n\t" /* r1 = saved sp */
		"   mov   #-14,   r15     \n\t" /* LOGIN: r15 = -size */
		"   mov.l  @%2,   %0      \n\t" /* load old value */
		"   mov     %0,   %1      \n\t" /* %1 = *a */
		"   tst     %1,   %3      \n\t" /* T = ((*a & mask) == 0) */
		"   mov    #-1,   %1      \n\t" /* retval = -1 */
		"   negc    %1,   %1      \n\t" /* retval = ((*a & mask) != 0) */
		"   xor     %3,   %0      \n\t" /* toggle the bit */
		"   mov.l   %0,   @%2     \n\t" /* store new value */
		"1: mov     r1,   r15     \n\t" /* LOGOUT */
		: "=&r" (tmp),
		  "=&r" (retval),
		  "+r" (a)
		: "r" (mask)
		: "memory", "r0", "r1", "t");

	return retval;
}

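/*
 * The non-atomic variants (__set_bit() and friends) come from the
 * generic implementation.
 *
 * Hypothetical usage sketch (names invented for illustration), the same
 * as for any other bitops flavour:
 *
 *	static unsigned long flags[BITS_TO_LONGS(32)];
 *
 *	set_bit(3, flags);
 *	if (test_and_clear_bit(3, flags))
 *		handle_pending_work();
 */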
#include <asm-generic/bitops/non-atomic.h>

#endif /* __ASM_SH_BITOPS_GRB_H */