/* SPDX-License-Identifier: GPL-2.0 */
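/*
 * C-SKY atomic operations.
 *
 * Cores with the ldex.w/stex.w load-exclusive/store-exclusive pair
 * (CONFIG_CPU_HAS_LDSTEX) implement the atomics as LL/SC retry loops.
 * Cores without it fall back to plain load/store sequences run with
 * interrupts disabled, which is only atomic with respect to the local
 * CPU and therefore only suitable for non-SMP configurations.
 */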

#ifndef __ASM_CSKY_ATOMIC_H
#define __ASM_CSKY_ATOMIC_H

#include <linux/version.h>
#include <asm/cmpxchg.h>
#include <asm/barrier.h>

#ifdef CONFIG_CPU_HAS_LDSTEX
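/*
 * Atomically add @a to @v, unless @v is already @u; return the old
 * value of @v.  The generic atomic_add_unless()/atomic_inc_not_zero()
 * wrappers are built on top of this.  Full ordering is only required
 * when the add actually happens, hence the conditional trailing
 * smp_mb().
 */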
#define __atomic_add_unless __atomic_add_unless
static inline int __atomic_add_unless(atomic_t *v, int a, int u)
{
	unsigned long tmp, ret;

	smp_mb();

	asm volatile (
	"1:	ldex.w		%0, (%3) \n"
	"	mov		%1, %0   \n"
	"	cmpne		%0, %4   \n"
	"	bf		2f       \n"
	"	add		%0, %2   \n"
	"	stex.w		%0, (%3) \n"
	"	bez		%0, 1b   \n"
	"2:				 \n"
		: "=&r" (tmp), "=&r" (ret)
		: "r" (a), "r"(&v->counter), "r"(u)
		: "memory");

	if (ret != u)
		smp_mb();

	return ret;
}
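/*
 * atomic_<op>() has no return value, so no ordering guarantees are
 * required; the bare LL/SC retry loop is enough to make the update
 * atomic.
 */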
#define ATOMIC_OP(op, c_op)						\
static inline void atomic_##op(int i, atomic_t *v)			\
{									\
	unsigned long tmp;						\
									\
	asm volatile (							\
	"1:	ldex.w		%0, (%2) \n"				\
	"	" #op "		%0, %1   \n"				\
	"	stex.w		%0, (%2) \n"				\
	"	bez		%0, 1b   \n"				\
		: "=&r" (tmp)						\
		: "r" (i), "r"(&v->counter)				\
		: "memory");						\
}
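/*
 * atomic_<op>_return() returns the new value and must be fully
 * ordered, so the LL/SC loop is bracketed by a pair of smp_mb()s.
 */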
#define ATOMIC_OP_RETURN(op, c_op)					\
static inline int atomic_##op##_return(int i, atomic_t *v)		\
{									\
	unsigned long tmp, ret;						\
									\
	smp_mb();							\
	asm volatile (							\
	"1:	ldex.w		%0, (%3) \n"				\
	"	" #op "		%0, %2   \n"				\
	"	mov		%1, %0   \n"				\
	"	stex.w		%0, (%3) \n"				\
	"	bez		%0, 1b   \n"				\
		: "=&r" (tmp), "=&r" (ret)				\
		: "r" (i), "r"(&v->counter)				\
		: "memory");						\
	smp_mb();							\
									\
	return ret;							\
}
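/*
 * atomic_fetch_<op>() is like atomic_<op>_return() but returns the
 * old value, which is snapshotted into %1 before the modification.
 */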
#define ATOMIC_FETCH_OP(op, c_op)					\
static inline int atomic_fetch_##op(int i, atomic_t *v)		\
{									\
	unsigned long tmp, ret;						\
									\
	smp_mb();							\
	asm volatile (							\
	"1:	ldex.w		%0, (%3) \n"				\
	"	mov		%1, %0   \n"				\
	"	" #op "		%0, %2   \n"				\
	"	stex.w		%0, (%3) \n"				\
	"	bez		%0, 1b   \n"				\
		: "=&r" (tmp), "=&r" (ret)				\
		: "r" (i), "r"(&v->counter)				\
		: "memory");						\
	smp_mb();							\
									\
	return ret;							\
}

#else /* CONFIG_CPU_HAS_LDSTEX */
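/*
 * No ldex/stex: make the read-modify-write pseudo-atomic by disabling
 * interrupts around a plain ldw/stw sequence.
 */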
#include <linux/irqflags.h>
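/* Same semantics as the LL/SC version above, under irq protection. */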
#define __atomic_add_unless __atomic_add_unless
static inline int __atomic_add_unless(atomic_t *v, int a, int u)
{
	unsigned long tmp, ret, flags;

	raw_local_irq_save(flags);

	asm volatile (
	"	ldw		%0, (%3) \n"
	"	mov		%1, %0   \n"
	"	cmpne		%0, %4   \n"
	"	bf		2f       \n"
	"	add		%0, %2   \n"
	"	stw		%0, (%3) \n"
	"2:				 \n"
		: "=&r" (tmp), "=&r" (ret)
		: "r" (a), "r"(&v->counter), "r"(u)
		: "memory");

	raw_local_irq_restore(flags);

	return ret;
}
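/* Plain atomic_<op>(): load, modify, store with irqs disabled. */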
#define ATOMIC_OP(op, c_op)						\
static inline void atomic_##op(int i, atomic_t *v)			\
{									\
	unsigned long tmp, flags;					\
									\
	raw_local_irq_save(flags);					\
									\
	asm volatile (							\
	"	ldw		%0, (%2) \n"				\
	"	" #op "		%0, %1   \n"				\
	"	stw		%0, (%2) \n"				\
		: "=&r" (tmp)						\
		: "r" (i), "r"(&v->counter)				\
		: "memory");						\
									\
	raw_local_irq_restore(flags);					\
}
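/* atomic_<op>_return(): as above, but hand back the new value. */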
#define ATOMIC_OP_RETURN(op, c_op)					\
static inline int atomic_##op##_return(int i, atomic_t *v)		\
{									\
	unsigned long tmp, ret, flags;					\
									\
	raw_local_irq_save(flags);					\
									\
	asm volatile (							\
	"	ldw		%0, (%3) \n"				\
	"	" #op "		%0, %2   \n"				\
	"	stw		%0, (%3) \n"				\
	"	mov		%1, %0   \n"				\
		: "=&r" (tmp), "=&r" (ret)				\
		: "r" (i), "r"(&v->counter)				\
		: "memory");						\
									\
	raw_local_irq_restore(flags);					\
									\
	return ret;							\
}
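/* atomic_fetch_<op>(): as above, but hand back the old value. */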
#define ATOMIC_FETCH_OP(op, c_op)					\
static inline int atomic_fetch_##op(int i, atomic_t *v)		\
{									\
	unsigned long tmp, ret, flags;					\
									\
	raw_local_irq_save(flags);					\
									\
	asm volatile (							\
	"	ldw		%0, (%3) \n"				\
	"	mov		%1, %0   \n"				\
	"	" #op "		%0, %2   \n"				\
	"	stw		%0, (%3) \n"				\
		: "=&r" (tmp), "=&r" (ret)				\
		: "r" (i), "r"(&v->counter)				\
		: "memory");						\
									\
	raw_local_irq_restore(flags);					\
									\
	return ret;							\
}

#endif /* CONFIG_CPU_HAS_LDSTEX */
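/*
 * Instantiate the implementation selected above.  Note that c_op is
 * unused by both variants (the asm templates stringize op directly);
 * it is kept only for symmetry with other architectures' atomic
 * macros.
 */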
#define atomic_add_return atomic_add_return
ATOMIC_OP_RETURN(add, +)
#define atomic_sub_return atomic_sub_return
ATOMIC_OP_RETURN(sub, -)

#define atomic_fetch_add atomic_fetch_add
ATOMIC_FETCH_OP(add, +)
#define atomic_fetch_sub atomic_fetch_sub
ATOMIC_FETCH_OP(sub, -)
#define atomic_fetch_and atomic_fetch_and
ATOMIC_FETCH_OP(and, &)
#define atomic_fetch_or atomic_fetch_or
ATOMIC_FETCH_OP(or, |)
#define atomic_fetch_xor atomic_fetch_xor
ATOMIC_FETCH_OP(xor, ^)

#define atomic_and atomic_and
ATOMIC_OP(and, &)
#define atomic_or atomic_or
ATOMIC_OP(or, |)
#define atomic_xor atomic_xor
ATOMIC_OP(xor, ^)
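/*
 * The remaining operations (atomic_read(), atomic_set(), atomic_add(),
 * ...) are provided by the generic header included below.
 */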
#undef ATOMIC_FETCH_OP
#undef ATOMIC_OP_RETURN
#undef ATOMIC_OP

#include <asm-generic/atomic.h>

#endif /* __ASM_CSKY_ATOMIC_H */