/* SPDX-License-Identifier: GPL-2.0 */
#ifndef __ARCH_M68K_ATOMIC__
#define __ARCH_M68K_ATOMIC__

#include <linux/types.h>
#include <linux/irqflags.h>
#include <asm/cmpxchg.h>
#include <asm/barrier.h>

/*
 * Atomic operations that C can't guarantee us.  Useful for
 * resource counting etc.
 */

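/*
 * Typical use for the resource counting mentioned above (the object and
 * its release function are illustrative only):
 *
 *	atomic_t refs = ATOMIC_INIT(1);
 *
 *	atomic_inc(&refs);
 *	...
 *	if (atomic_dec_and_test(&refs))
 *		free_object(obj);
 *
 * atomic_dec_and_test() returns true once the count reaches zero.
 */
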
/*
 * We do not have SMP m68k systems, so we don't have to deal with that.
 */

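/*
 * A 32-bit load or store is a single instruction, so atomic_read() and
 * atomic_set() need no special sequences; READ_ONCE()/WRITE_ONCE() only
 * keep the compiler from tearing or caching the access.
 */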
#define atomic_read(v)		READ_ONCE((v)->counter)
#define atomic_set(v, i)	WRITE_ONCE(((v)->counter), (i))

/*
 * The ColdFire parts cannot do some immediate to memory operations,
 * so for them we do not specify the "i" asm constraint.
 */
#ifdef CONFIG_COLDFIRE
#define ASM_DI	"d"	/* data register only */
#else
#define ASM_DI	"di"	/* data register or immediate operand */
#endif

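/*
 * atomic_add()/atomic_sub() etc. are a single <op>.l straight to memory;
 * one read-modify-write instruction cannot be interrupted halfway, which
 * is all the atomicity we need without SMP.
 */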
#define ATOMIC_OP(op, c_op, asm_op)					\
static inline void atomic_##op(int i, atomic_t *v)			\
{									\
	__asm__ __volatile__(#asm_op "l %1,%0" : "+m" (*v) : ASM_DI (i));\
}									\

#ifdef CONFIG_RMW_INSNS

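/*
 * CONFIG_RMW_INSNS: build the value-returning variants around the casl
 * (compare-and-swap) instruction.  Load the current value, compute the
 * new one in a register, then casl it back; if the counter changed in
 * between, casl reloads it and we retry.
 */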
#define ATOMIC_OP_RETURN(op, c_op, asm_op)				\
static inline int atomic_##op##_return(int i, atomic_t *v)		\
{									\
	int t, tmp;							\
									\
	__asm__ __volatile__(						\
			"1: movel %2,%1\n"				\
			" " #asm_op "l %3,%1\n"			\
			" casl %2,%1,%0\n"				\
			" jne 1b"					\
			: "+m" (*v), "=&d" (t), "=&d" (tmp)		\
			: "g" (i), "2" (atomic_read(v)));		\
	return t;							\
}

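/*
 * Same casl loop, but hand back the value the counter had before the
 * operation (the comparand that finally matched) instead of the new one.
 */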
#define ATOMIC_FETCH_OP(op, c_op, asm_op)				\
static inline int atomic_fetch_##op(int i, atomic_t *v)		\
{									\
	int t, tmp;							\
									\
	__asm__ __volatile__(						\
			"1: movel %2,%1\n"				\
			" " #asm_op "l %3,%1\n"			\
			" casl %2,%1,%0\n"				\
			" jne 1b"					\
			: "+m" (*v), "=&d" (t), "=&d" (tmp)		\
			: "g" (i), "2" (atomic_read(v)));		\
	return tmp;							\
}

#else

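/*
 * No usable CAS instruction: do the update in plain C with interrupts
 * disabled, which is sufficient on a uniprocessor.
 */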
#define ATOMIC_OP_RETURN(op, c_op, asm_op)				\
static inline int atomic_##op##_return(int i, atomic_t *v)		\
{									\
	unsigned long flags;						\
	int t;								\
									\
	local_irq_save(flags);						\
	t = (v->counter c_op i);					\
	local_irq_restore(flags);					\
									\
	return t;							\
}

#define ATOMIC_FETCH_OP(op, c_op, asm_op)				\
static inline int atomic_fetch_##op(int i, atomic_t *v)		\
{									\
	unsigned long flags;						\
	int t;								\
									\
	local_irq_save(flags);						\
	t = v->counter;							\
	v->counter c_op i;						\
	local_irq_restore(flags);					\
									\
	return t;							\
}

#endif /* CONFIG_RMW_INSNS */

#define ATOMIC_OPS(op, c_op, asm_op)					\
	ATOMIC_OP(op, c_op, asm_op)					\
	ATOMIC_OP_RETURN(op, c_op, asm_op)				\
	ATOMIC_FETCH_OP(op, c_op, asm_op)

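/* add and sub come in all three flavours: op, op_return and fetch_op. */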
ATOMIC_OPS(add, +=, add)
ATOMIC_OPS(sub, -=, sub)

#undef ATOMIC_OPS
#define ATOMIC_OPS(op, c_op, asm_op)					\
	ATOMIC_OP(op, c_op, asm_op)					\
	ATOMIC_FETCH_OP(op, c_op, asm_op)

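/*
 * The bitwise ops only need the void and fetch forms; note that "eor" is
 * the m68k mnemonic for exclusive OR.
 */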
ATOMIC_OPS(and, &=, and)
ATOMIC_OPS(or, |=, or)
ATOMIC_OPS(xor, ^=, eor)

#undef ATOMIC_OPS
#undef ATOMIC_FETCH_OP
#undef ATOMIC_OP_RETURN
#undef ATOMIC_OP

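/*
 * The inc/dec and *_and_test helpers below are again single instructions:
 * addq/subq add or subtract a small immediate directly in memory, and the
 * scc instructions (seq, slt, smi) latch the resulting condition codes
 * into a flag byte.
 */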
static inline void atomic_inc(atomic_t *v)
{
	__asm__ __volatile__("addql #1,%0" : "+m" (*v));
}
#define atomic_inc atomic_inc

static inline void atomic_dec(atomic_t *v)
{
	__asm__ __volatile__("subql #1,%0" : "+m" (*v));
}
#define atomic_dec atomic_dec

static inline int atomic_dec_and_test(atomic_t *v)
{
	char c;
	__asm__ __volatile__("subql #1,%1; seq %0" : "=d" (c), "+m" (*v));
	return c != 0;
}
#define atomic_dec_and_test atomic_dec_and_test

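/*
 * m68k-private variant (not part of the generic atomic API): decrement
 * and report whether the result went negative (signed "less than" via slt).
 */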
static inline int atomic_dec_and_test_lt(atomic_t *v)
{
	char c;
	__asm__ __volatile__(
		"subql #1,%1; slt %0"
		: "=d" (c), "=m" (*v)
		: "m" (*v));
	return c != 0;
}

static inline int atomic_inc_and_test(atomic_t *v)
{
	char c;
	__asm__ __volatile__("addql #1,%1; seq %0" : "=d" (c), "+m" (*v));
	return c != 0;
}
#define atomic_inc_and_test atomic_inc_and_test

#ifdef CONFIG_RMW_INSNS

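/* CAS is available: just wrap cmpxchg()/xchg() from <asm/cmpxchg.h>. */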
#define atomic_cmpxchg(v, o, n) ((int)cmpxchg(&((v)->counter), (o), (n)))
#define atomic_xchg(v, new) (xchg(&((v)->counter), new))
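
/*
 * The usual open-coded update loop looks like this (FLAG and v are only
 * illustrative):
 *
 *	int old, new;
 *
 *	do {
 *		old = atomic_read(&v);
 *		new = old | FLAG;
 *	} while (atomic_cmpxchg(&v, old, new) != old);
 */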

#else /* !CONFIG_RMW_INSNS */

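/* No CAS: emulate compare-and-exchange and exchange with interrupts off. */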
static inline int atomic_cmpxchg(atomic_t *v, int old, int new)
{
	unsigned long flags;
	int prev;

	local_irq_save(flags);
	prev = atomic_read(v);
	if (prev == old)
		atomic_set(v, new);
	local_irq_restore(flags);
	return prev;
}

static inline int atomic_xchg(atomic_t *v, int new)
{
	unsigned long flags;
	int prev;

	local_irq_save(flags);
	prev = atomic_read(v);
	atomic_set(v, new);
	local_irq_restore(flags);
	return prev;
}

#endif /* !CONFIG_RMW_INSNS */

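/*
 * Full-word forms: sub/add the operand straight into memory, then use
 * seq/smi to report "result is zero" / "result is negative".
 */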
static inline int atomic_sub_and_test(int i, atomic_t *v)
{
	char c;
	__asm__ __volatile__("subl %2,%1; seq %0"
			     : "=d" (c), "+m" (*v)
			     : ASM_DI (i));
	return c != 0;
}
#define atomic_sub_and_test atomic_sub_and_test

static inline int atomic_add_negative(int i, atomic_t *v)
{
	char c;
	__asm__ __volatile__("addl %2,%1; smi %0"
			     : "=d" (c), "+m" (*v)
			     : ASM_DI (i));
	return c != 0;
}
#define atomic_add_negative atomic_add_negative

#endif /* __ARCH_M68K_ATOMIC__ */