/* SPDX-License-Identifier: GPL-2.0 */
#ifndef __ASM_SH_ATOMIC_LLSC_H
#define __ASM_SH_ATOMIC_LLSC_H

/*
 * SH-4A note:
 *
 * We basically get atomic_xxx_return() for free compared with
 * atomic_xxx(). movli.l/movco.l require r0 due to the instruction
 * encoding, so the retval is automatically set without having to
 * do any special work.
 */
/*
 * To get proper branch prediction for the main line, we must branch
 * forward to code at the end of this object's .text section, then
 * branch back to restart the operation.
 */
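
/*
 * For example, ATOMIC_OP(add) below generates roughly:
 *
 *	1:	movli.l	@r5, r0		! load v->counter, set reservation
 *		add	r4, r0		! r0 += i
 *		movco.l	r0, @r5		! store iff reservation still held;
 *					! T = 1 on success, 0 on failure
 *		bf	1b		! reservation lost, retry
 *
 * The reservation is lost if another CPU writes v->counter (or an
 * exception occurs) between movli.l and movco.l, so the loop retries
 * until the read-modify-write completes as a single atomic step.
 */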

#define ATOMIC_OP(op)						\
static inline void atomic_##op(int i, atomic_t *v)		\
{								\
	unsigned long tmp;					\
								\
	__asm__ __volatile__ (					\
"1:	movli.l @%2, %0		! atomic_" #op "\n"		\
"	" #op "	%1, %0				\n"		\
"	movco.l	%0, @%2				\n"		\
"	bf	1b				\n"		\
	: "=&z" (tmp)						\
	: "r" (i), "r" (&v->counter)			\
	: "t");							\
}

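/*
 * Unlike the plain ops above, the value-returning and fetch variants
 * below are fully ordered, as value-returning atomics are expected to
 * be: a trailing synco (the SH-4A memory barrier) follows the
 * successful movco.l.
 */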
#define ATOMIC_OP_RETURN(op)					\
static inline int atomic_##op##_return(int i, atomic_t *v)	\
{								\
	unsigned long temp;					\
								\
	__asm__ __volatile__ (					\
"1:	movli.l @%2, %0		! atomic_" #op "_return	\n"	\
"	" #op "	%1, %0				\n"	\
"	movco.l	%0, @%2				\n"	\
"	bf	1b				\n"	\
"	synco					\n"	\
	: "=&z" (temp)						\
	: "r" (i), "r" (&v->counter)			\
	: "t");							\
								\
	return temp;						\
}

#define ATOMIC_FETCH_OP(op)					\
static inline int atomic_fetch_##op(int i, atomic_t *v)	\
{								\
	unsigned long res, temp;				\
								\
	__asm__ __volatile__ (					\
"1:	movli.l @%3, %0		! atomic_fetch_" #op "	\n"	\
"	mov	%0, %1				\n"	\
"	" #op "	%2, %0				\n"	\
"	movco.l	%0, @%3				\n"	\
"	bf	1b				\n"	\
"	synco					\n"	\
	: "=&z" (temp), "=&r" (res)				\
	: "r" (i), "r" (&v->counter)			\
	: "t");							\
								\
	return res;						\
}

#define ATOMIC_OPS(op) ATOMIC_OP(op) ATOMIC_OP_RETURN(op) ATOMIC_FETCH_OP(op)

ATOMIC_OPS(add)
ATOMIC_OPS(sub)

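/*
 * Illustration of the return conventions, assuming v->counter starts
 * at 1: the fetch form returns the old value, the _return form the
 * new one.
 *
 *	atomic_add(2, &v);			v->counter == 3
 *	old = atomic_fetch_add(2, &v);		old == 3, v->counter == 5
 *	ret = atomic_add_return(2, &v);		ret == 7, v->counter == 7
 */
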
#undef ATOMIC_OPS
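/*
 * The atomic API has no atomic_{and,or,xor}_return(), so the bitwise
 * ops generate only the plain and fetch forms.
 */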
#define ATOMIC_OPS(op) ATOMIC_OP(op) ATOMIC_FETCH_OP(op)

ATOMIC_OPS(and)
ATOMIC_OPS(or)
ATOMIC_OPS(xor)

#undef ATOMIC_OPS
#undef ATOMIC_FETCH_OP
#undef ATOMIC_OP_RETURN
#undef ATOMIC_OP

#endif /* __ASM_SH_ATOMIC_LLSC_H */