/*
 * Copyright (C) 2014 Stefan Kristiansson <stefan.kristiansson@saunalahti.fi>
 *
 * This file is licensed under the terms of the GNU General Public License
 * version 2. This program is licensed "as is" without any warranty of any
 * kind, whether express or implied.
 */

#ifndef __ASM_OPENRISC_ATOMIC_H
#define __ASM_OPENRISC_ATOMIC_H

#include <linux/types.h>

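/*
 * All of these helpers are built on the OpenRISC l.lwa/l.swa
 * (load-linked/store-conditional) pair: l.lwa loads a word and links
 * its address, l.swa stores only if the link is still intact and sets
 * the flag on success, and l.bnf (branch if no flag) retries the
 * sequence until the store goes through.
 */
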
/* Atomically perform op with v->counter and i */
#define ATOMIC_OP(op) \
static inline void atomic_##op(int i, atomic_t *v) \
{ \
	int tmp; \
 \
	__asm__ __volatile__( \
		"1:	l.lwa	%0,0(%1)	\n" \
		"	l." #op " %0,%0,%2	\n" \
		"	l.swa	0(%1),%0	\n" \
		"	l.bnf	1b		\n" \
		"	l.nop			\n" \
		: "=&r"(tmp) \
		: "r"(&v->counter), "r"(i) \
		: "cc", "memory"); \
}
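
/*
 * As a sketch, ATOMIC_OP(or) defines atomic_or(i, v), whose loop body
 * uses "l.or %0,%0,%2", i.e. in C-like pseudocode:
 *
 *	do {
 *		tmp = v->counter;	(l.lwa, links the address)
 *		tmp |= i;		(l.or)
 *	} while (linked store of tmp fails);	(l.swa, l.bnf)
 */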

/* Atomically perform op with v->counter and i, return the result */
#define ATOMIC_OP_RETURN(op) \
static inline int atomic_##op##_return(int i, atomic_t *v) \
{ \
	int tmp; \
 \
	__asm__ __volatile__( \
		"1:	l.lwa	%0,0(%1)	\n" \
		"	l." #op " %0,%0,%2	\n" \
		"	l.swa	0(%1),%0	\n" \
		"	l.bnf	1b		\n" \
		"	l.nop			\n" \
		: "=&r"(tmp) \
		: "r"(&v->counter), "r"(i) \
		: "cc", "memory"); \
 \
	return tmp; \
}
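
/*
 * Usage sketch (hypothetical caller, not part of this header):
 *
 *	atomic_t cnt = ATOMIC_INIT(0);
 *
 *	atomic_add_return(5, &cnt);	returns 5, cnt is now 5
 *	atomic_sub_return(2, &cnt);	returns 3, cnt is now 3
 */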

/* Atomically perform op with v->counter and i, return the original v->counter */
#define ATOMIC_FETCH_OP(op) \
static inline int atomic_fetch_##op(int i, atomic_t *v) \
{ \
	int tmp, old; \
 \
	__asm__ __volatile__( \
		"1:	l.lwa	%0,0(%2)	\n" \
		"	l." #op " %1,%0,%3	\n" \
		"	l.swa	0(%2),%1	\n" \
		"	l.bnf	1b		\n" \
		"	l.nop			\n" \
		: "=&r"(old), "=&r"(tmp) \
		: "r"(&v->counter), "r"(i) \
		: "cc", "memory"); \
 \
	return old; \
}
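
/*
 * Unlike the _return variants above, the fetch variants return the
 * value observed before the operation: e.g. with v->counter == 3,
 * atomic_fetch_or(4, v) returns 3 and leaves v->counter == 7.
 */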

ATOMIC_OP_RETURN(add)
ATOMIC_OP_RETURN(sub)

ATOMIC_FETCH_OP(add)
ATOMIC_FETCH_OP(sub)
ATOMIC_FETCH_OP(and)
ATOMIC_FETCH_OP(or)
ATOMIC_FETCH_OP(xor)

ATOMIC_OP(and)
ATOMIC_OP(or)
ATOMIC_OP(xor)

#undef ATOMIC_FETCH_OP
#undef ATOMIC_OP_RETURN
#undef ATOMIC_OP

#define atomic_add_return	atomic_add_return
#define atomic_sub_return	atomic_sub_return
#define atomic_fetch_add	atomic_fetch_add
#define atomic_fetch_sub	atomic_fetch_sub
#define atomic_fetch_and	atomic_fetch_and
#define atomic_fetch_or		atomic_fetch_or
#define atomic_fetch_xor	atomic_fetch_xor
#define atomic_and		atomic_and
#define atomic_or		atomic_or
#define atomic_xor		atomic_xor
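
/*
 * The "#define op op" lines above mark which operations this file
 * implements; the generic atomic headers test for these names and only
 * provide fallbacks for the operations left undefined here.
 */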

/*
 * Atomically add a to v->counter as long as v->counter does not
 * already equal u. Returns the original value of v->counter.
 *
 * This is often used through atomic_inc_not_zero().
 */
static inline int atomic_fetch_add_unless(atomic_t *v, int a, int u)
{
	int old, tmp;

	__asm__ __volatile__(
		"1:	l.lwa %0, 0(%2)		\n"	/* old = v->counter (linked) */
		"	l.sfeq %0, %4		\n"	/* flag = (old == u) */
		"	l.bf 2f			\n"	/* bail out if old == u ... */
		"	 l.add %1, %0, %3	\n"	/* ... delay slot: tmp = old + a */
		"	l.swa 0(%2), %1		\n"	/* try the linked store of tmp */
		"	l.bnf 1b		\n"	/* retry if the store failed */
		"	l.nop			\n"
		"2:				\n"
		: "=&r"(old), "=&r"(tmp)
		: "r"(&v->counter), "r"(a), "r"(u)
		: "cc", "memory");

	return old;
}
#define atomic_fetch_add_unless	atomic_fetch_add_unless
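
/*
 * In the generic code, atomic_inc_not_zero(v) boils down to
 * atomic_fetch_add_unless(v, 1, 0) != 0, i.e. the increment is skipped
 * once the counter has already dropped to zero.
 */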

#include <asm-generic/atomic.h>

#endif /* __ASM_OPENRISC_ATOMIC_H */