/* SPDX-License-Identifier: GPL-2.0 */
#ifndef __ASM_SH_ATOMIC_IRQ_H
#define __ASM_SH_ATOMIC_IRQ_H

#include <linux/irqflags.h>

/*
 * To get proper branch prediction for the main line, we must branch
 * forward to code at the end of this object's .text section, then
 * branch back to restart the operation.
 *
 * NOTE(review): the paragraph above describes an ll/sc-style retry
 * loop; the implementations below simply mask interrupts around the
 * read-modify-write sequence and never branch to retry — the comment
 * appears to be a leftover from the ll/sc variant; confirm and drop.
 */

/*
 * ATOMIC_OP - generate atomic_##op(): apply "v->counter c_op i" with
 * hardware interrupts masked, making the read-modify-write sequence
 * atomic with respect to anything else running on this CPU.
 */
#define ATOMIC_OP(op, c_op)						\
static inline void atomic_##op(int i, atomic_t *v)			\
{									\
	unsigned long flags;						\
									\
	raw_local_irq_save(flags);					\
	v->counter c_op i;						\
	raw_local_irq_restore(flags);					\
}
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 22)
/*
 * ATOMIC_OP_RETURN - generate atomic_##op##_return(): perform the
 * read-modify-write with interrupts masked and return the NEW value
 * of the counter.
 */
#define ATOMIC_OP_RETURN(op, c_op)					\
static inline int atomic_##op##_return(int i, atomic_t *v)		\
{									\
	unsigned long temp, flags;					\
									\
	raw_local_irq_save(flags);					\
	temp = v->counter;						\
	temp c_op i;							\
	v->counter = temp;						\
	raw_local_irq_restore(flags);					\
									\
	return temp;							\
}
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 36)
/*
 * ATOMIC_FETCH_OP - generate atomic_fetch_##op(): perform the
 * read-modify-write with interrupts masked and return the OLD value
 * of the counter (the value observed before the operation).
 */
#define ATOMIC_FETCH_OP(op, c_op)					\
static inline int atomic_fetch_##op(int i, atomic_t *v)			\
{									\
	unsigned long temp, flags;					\
									\
	raw_local_irq_save(flags);					\
	temp = v->counter;						\
	v->counter c_op i;						\
	raw_local_irq_restore(flags);					\
									\
	return temp;							\
}
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 49)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 50) #define ATOMIC_OPS(op, c_op) \
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 51) ATOMIC_OP(op, c_op) \
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 52) ATOMIC_OP_RETURN(op, c_op) \
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 53) ATOMIC_FETCH_OP(op, c_op)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 54)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 55) ATOMIC_OPS(add, +=)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 56) ATOMIC_OPS(sub, -=)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 57)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 58) #undef ATOMIC_OPS
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 59) #define ATOMIC_OPS(op, c_op) \
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 60) ATOMIC_OP(op, c_op) \
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 61) ATOMIC_FETCH_OP(op, c_op)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 62)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 63) ATOMIC_OPS(and, &=)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 64) ATOMIC_OPS(or, |=)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 65) ATOMIC_OPS(xor, ^=)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 66)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 67) #undef ATOMIC_OPS
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 68) #undef ATOMIC_FETCH_OP
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 69) #undef ATOMIC_OP_RETURN
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 70) #undef ATOMIC_OP
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 71)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 72) #endif /* __ASM_SH_ATOMIC_IRQ_H */