/* SPDX-License-Identifier: GPL-2.0 */
#ifndef _ASM_IA64_ATOMIC_H
#define _ASM_IA64_ATOMIC_H

/*
 * Atomic operations that C can't guarantee us.  Useful for
 * resource counting etc.
 *
 * NOTE: don't mess with the types below!  The "unsigned long" and
 * "int" types were carefully placed so as to ensure proper operation
 * of the macros.
 *
 * Copyright (C) 1998, 1999, 2002-2003 Hewlett-Packard Co
 *	David Mosberger-Tang <davidm@hpl.hp.com>
 */
#include <linux/types.h>

#include <asm/intrinsics.h>
#include <asm/barrier.h>


#define ATOMIC64_INIT(i)	{ (i) }

#define atomic_read(v)		READ_ONCE((v)->counter)
#define atomic64_read(v)	READ_ONCE((v)->counter)

#define atomic_set(v,i)		WRITE_ONCE(((v)->counter), (i))
#define atomic64_set(v,i)	WRITE_ONCE(((v)->counter), (i))
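/*
 * A naturally aligned word load or store is already atomic on ia64;
 * READ_ONCE()/WRITE_ONCE() are there to stop the compiler from
 * tearing, fusing or reordering the access.
 */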

#define ATOMIC_OP(op, c_op)						\
static __inline__ int							\
ia64_atomic_##op (int i, atomic_t *v)					\
{									\
	__s32 old, new;							\
	CMPXCHG_BUGCHECK_DECL						\
									\
	do {								\
		CMPXCHG_BUGCHECK(v);					\
		old = atomic_read(v);					\
		new = old c_op i;					\
	} while (ia64_cmpxchg(acq, v, old, new, sizeof(atomic_t)) != old); \
	return new;							\
}
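/*
 * The template above generates a classic compare-and-exchange retry
 * loop: read the counter, compute the new value, and let the acquire
 * cmpxchg publish it.  If another CPU modified the counter in the
 * meantime, the value returned by ia64_cmpxchg() differs from "old"
 * and the loop retries.
 */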

#define ATOMIC_FETCH_OP(op, c_op)					\
static __inline__ int							\
ia64_atomic_fetch_##op (int i, atomic_t *v)				\
{									\
	__s32 old, new;							\
	CMPXCHG_BUGCHECK_DECL						\
									\
	do {								\
		CMPXCHG_BUGCHECK(v);					\
		old = atomic_read(v);					\
		new = old c_op i;					\
	} while (ia64_cmpxchg(acq, v, old, new, sizeof(atomic_t)) != old); \
	return old;							\
}
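/*
 * The two templates differ only in their result: ia64_atomic_##op()
 * returns the new value (backing the *_return interfaces below), while
 * ia64_atomic_fetch_##op() returns the old one (backing fetch_*).
 */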

#define ATOMIC_OPS(op, c_op)						\
	ATOMIC_OP(op, c_op)						\
	ATOMIC_FETCH_OP(op, c_op)

ATOMIC_OPS(add, +)
ATOMIC_OPS(sub, -)

#ifdef __OPTIMIZE__
#define __ia64_atomic_const(i)						\
	static const int __ia64_atomic_p = __builtin_constant_p(i) ?	\
		((i) == 1 || (i) == 4 || (i) == 8 || (i) == 16 ||	\
		 (i) == -1 || (i) == -4 || (i) == -8 || (i) == -16) : 0;\
	__ia64_atomic_p
#else
#define __ia64_atomic_const(i)	0
#endif
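/*
 * ia64's fetchadd instruction only accepts the immediate increments
 * -16, -8, -4, -1, 1, 4, 8 and 16.  __ia64_atomic_const(i) evaluates
 * to 1 exactly when i is a compile-time constant from that set, which
 * lets the macros below pick the single-instruction fetchadd path
 * instead of the cmpxchg loop.  When not optimizing, the check is
 * simply pinned to 0 and the loop is always used.
 */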

#define atomic_add_return(i,v)						\
({									\
	int __ia64_aar_i = (i);						\
	__ia64_atomic_const(i)						\
		? ia64_fetch_and_add(__ia64_aar_i, &(v)->counter)	\
		: ia64_atomic_add(__ia64_aar_i, v);			\
})

#define atomic_sub_return(i,v)						\
({									\
	int __ia64_asr_i = (i);						\
	__ia64_atomic_const(i)						\
		? ia64_fetch_and_add(-__ia64_asr_i, &(v)->counter)	\
		: ia64_atomic_sub(__ia64_asr_i, v);			\
})

#define atomic_fetch_add(i,v)						\
({									\
	int __ia64_aar_i = (i);						\
	__ia64_atomic_const(i)						\
		? ia64_fetchadd(__ia64_aar_i, &(v)->counter, acq)	\
		: ia64_atomic_fetch_add(__ia64_aar_i, v);		\
})

#define atomic_fetch_sub(i,v)						\
({									\
	int __ia64_asr_i = (i);						\
	__ia64_atomic_const(i)						\
		? ia64_fetchadd(-__ia64_asr_i, &(v)->counter, acq)	\
		: ia64_atomic_fetch_sub(__ia64_asr_i, v);		\
})
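/*
 * Usage sketch (illustrative only): the *_return forms yield the value
 * after the operation, the fetch_* forms the value before it:
 *
 *	atomic_t v = { 5 };
 *
 *	atomic_add_return(3, &v);	// returns 8, v.counter is now 8
 *	atomic_fetch_add(3, &v);	// returns 8, v.counter is now 11
 */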

ATOMIC_FETCH_OP(and, &)
ATOMIC_FETCH_OP(or, |)
ATOMIC_FETCH_OP(xor, ^)

#define atomic_and(i,v)	(void)ia64_atomic_fetch_and(i,v)
#define atomic_or(i,v)	(void)ia64_atomic_fetch_or(i,v)
#define atomic_xor(i,v)	(void)ia64_atomic_fetch_xor(i,v)

#define atomic_fetch_and(i,v)	ia64_atomic_fetch_and(i,v)
#define atomic_fetch_or(i,v)	ia64_atomic_fetch_or(i,v)
#define atomic_fetch_xor(i,v)	ia64_atomic_fetch_xor(i,v)
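/*
 * fetchadd can only add, so the bitwise operations always take the
 * cmpxchg loop; the void-returning atomic_{and,or,xor}() forms simply
 * discard the fetched value.
 */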

#undef ATOMIC_OPS
#undef ATOMIC_FETCH_OP
#undef ATOMIC_OP

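/*
 * The 64-bit variants below mirror the 32-bit templates one for one,
 * operating on s64 counters; the constant fast path then maps to the
 * 8-byte form of fetchadd.
 */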
#define ATOMIC64_OP(op, c_op)						\
static __inline__ s64							\
ia64_atomic64_##op (s64 i, atomic64_t *v)				\
{									\
	s64 old, new;							\
	CMPXCHG_BUGCHECK_DECL						\
									\
	do {								\
		CMPXCHG_BUGCHECK(v);					\
		old = atomic64_read(v);					\
		new = old c_op i;					\
	} while (ia64_cmpxchg(acq, v, old, new, sizeof(atomic64_t)) != old); \
	return new;							\
}

#define ATOMIC64_FETCH_OP(op, c_op)					\
static __inline__ s64							\
ia64_atomic64_fetch_##op (s64 i, atomic64_t *v)				\
{									\
	s64 old, new;							\
	CMPXCHG_BUGCHECK_DECL						\
									\
	do {								\
		CMPXCHG_BUGCHECK(v);					\
		old = atomic64_read(v);					\
		new = old c_op i;					\
	} while (ia64_cmpxchg(acq, v, old, new, sizeof(atomic64_t)) != old); \
	return old;							\
}

#define ATOMIC64_OPS(op, c_op)						\
	ATOMIC64_OP(op, c_op)						\
	ATOMIC64_FETCH_OP(op, c_op)

ATOMIC64_OPS(add, +)
ATOMIC64_OPS(sub, -)

#define atomic64_add_return(i,v)					\
({									\
	s64 __ia64_aar_i = (i);						\
	__ia64_atomic_const(i)						\
		? ia64_fetch_and_add(__ia64_aar_i, &(v)->counter)	\
		: ia64_atomic64_add(__ia64_aar_i, v);			\
})

#define atomic64_sub_return(i,v)					\
({									\
	s64 __ia64_asr_i = (i);						\
	__ia64_atomic_const(i)						\
		? ia64_fetch_and_add(-__ia64_asr_i, &(v)->counter)	\
		: ia64_atomic64_sub(__ia64_asr_i, v);			\
})

#define atomic64_fetch_add(i,v)						\
({									\
	s64 __ia64_aar_i = (i);						\
	__ia64_atomic_const(i)						\
		? ia64_fetchadd(__ia64_aar_i, &(v)->counter, acq)	\
		: ia64_atomic64_fetch_add(__ia64_aar_i, v);		\
})

#define atomic64_fetch_sub(i,v)						\
({									\
	s64 __ia64_asr_i = (i);						\
	__ia64_atomic_const(i)						\
		? ia64_fetchadd(-__ia64_asr_i, &(v)->counter, acq)	\
		: ia64_atomic64_fetch_sub(__ia64_asr_i, v);		\
})

ATOMIC64_FETCH_OP(and, &)
ATOMIC64_FETCH_OP(or, |)
ATOMIC64_FETCH_OP(xor, ^)

#define atomic64_and(i,v)	(void)ia64_atomic64_fetch_and(i,v)
#define atomic64_or(i,v)	(void)ia64_atomic64_fetch_or(i,v)
#define atomic64_xor(i,v)	(void)ia64_atomic64_fetch_xor(i,v)

#define atomic64_fetch_and(i,v)	ia64_atomic64_fetch_and(i,v)
#define atomic64_fetch_or(i,v)	ia64_atomic64_fetch_or(i,v)
#define atomic64_fetch_xor(i,v)	ia64_atomic64_fetch_xor(i,v)

#undef ATOMIC64_OPS
#undef ATOMIC64_FETCH_OP
#undef ATOMIC64_OP

#define atomic_cmpxchg(v, old, new) (cmpxchg(&((v)->counter), old, new))
#define atomic_xchg(v, new) (xchg(&((v)->counter), new))

#define atomic64_cmpxchg(v, old, new) \
	(cmpxchg(&((v)->counter), old, new))
#define atomic64_xchg(v, new) (xchg(&((v)->counter), new))
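/*
 * atomic_cmpxchg() is the building block for ad-hoc read-modify-write
 * sequences.  A sketch (illustrative only, not part of this header;
 * atomic_add_clamped() is a made-up example):
 *
 *	static int atomic_add_clamped(atomic_t *v, int i, int max)
 *	{
 *		int old, new;
 *
 *		do {
 *			old = atomic_read(v);
 *			new = old + i;
 *			if (new > max)
 *				new = max;
 *		} while (atomic_cmpxchg(v, old, new) != old);
 *		return new;
 *	}
 */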

#define atomic_add(i,v)		(void)atomic_add_return((i), (v))
#define atomic_sub(i,v)		(void)atomic_sub_return((i), (v))

#define atomic64_add(i,v)	(void)atomic64_add_return((i), (v))
#define atomic64_sub(i,v)	(void)atomic64_sub_return((i), (v))

#endif /* _ASM_IA64_ATOMIC_H */