/* SPDX-License-Identifier: GPL-2.0-or-later */
/*
 * Generic C implementation of atomic counter operations. Usable on
 * UP systems only. Do not include in machine-independent code.
 *
 * Copyright (C) 2007 Red Hat, Inc. All Rights Reserved.
 * Written by David Howells (dhowells@redhat.com)
 */
#ifndef __ASM_GENERIC_ATOMIC_H
#define __ASM_GENERIC_ATOMIC_H

#include <asm/cmpxchg.h>
#include <asm/barrier.h>

/*
 * atomic_$op() - $op integer to atomic variable
 * @i: integer value to $op
 * @v: pointer to the atomic variable
 *
 * Atomically $ops @i to @v. Does not imply a memory barrier; use
 * smp_mb__{before,after}_atomic() where ordering is required.
 */
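
/*
 * Illustrative sketch (not part of this header): when ordering against
 * surrounding code matters, bracket the non-returning op with the barrier
 * helpers. obj->dead and obj->refs are hypothetical fields.
 *
 *	obj->dead = true;
 *	smp_mb__before_atomic();	// order the store before the RMW
 *	atomic_sub(1, &obj->refs);	// no implied barrier on its own
 *	smp_mb__after_atomic();		// order the RMW before later accesses
 */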

/*
 * atomic_$op_return() - $op integer to atomic variable and return the result
 * @i: integer value to $op
 * @v: pointer to the atomic variable
 *
 * Atomically $ops @i to @v and returns the result. Implies a full memory
 * barrier.
 */
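
/*
 * Illustrative sketch (not part of this header): the returned value lets
 * callers act on the result race-free, e.g. detecting the final reference
 * drop. obj->refs and my_free() are hypothetical.
 *
 *	if (atomic_sub_return(1, &obj->refs) == 0)
 *		my_free(obj);		// we released the last reference
 */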

#ifdef CONFIG_SMP

/* we can build all atomic primitives from cmpxchg */

#define ATOMIC_OP(op, c_op)						\
static inline void atomic_##op(int i, atomic_t *v)			\
{									\
	int c, old;							\
									\
	c = v->counter;							\
	while ((old = cmpxchg(&v->counter, c, c c_op i)) != c)		\
		c = old;						\
}
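
/*
 * For reference, ATOMIC_OP(or, |) above expands to roughly:
 *
 *	static inline void atomic_or(int i, atomic_t *v)
 *	{
 *		int c, old;
 *
 *		c = v->counter;
 *		while ((old = cmpxchg(&v->counter, c, c | i)) != c)
 *			c = old;
 *	}
 *
 * cmpxchg() stores the new value only if the counter still equals @c;
 * when another CPU races in first, it returns the fresh value, which
 * becomes the next @c to retry with.
 */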

#define ATOMIC_OP_RETURN(op, c_op)					\
static inline int atomic_##op##_return(int i, atomic_t *v)		\
{									\
	int c, old;							\
									\
	c = v->counter;							\
	while ((old = cmpxchg(&v->counter, c, c c_op i)) != c)		\
		c = old;						\
									\
	return c c_op i;						\
}

#define ATOMIC_FETCH_OP(op, c_op)					\
static inline int atomic_fetch_##op(int i, atomic_t *v)		\
{									\
	int c, old;							\
									\
	c = v->counter;							\
	while ((old = cmpxchg(&v->counter, c, c c_op i)) != c)		\
		c = old;						\
									\
	return c;							\
}

#else

#include <linux/irqflags.h>

#define ATOMIC_OP(op, c_op)						\
static inline void atomic_##op(int i, atomic_t *v)			\
{									\
	unsigned long flags;						\
									\
	raw_local_irq_save(flags);					\
	v->counter = v->counter c_op i;					\
	raw_local_irq_restore(flags);					\
}

#define ATOMIC_OP_RETURN(op, c_op)					\
static inline int atomic_##op##_return(int i, atomic_t *v)		\
{									\
	unsigned long flags;						\
	int ret;							\
									\
	raw_local_irq_save(flags);					\
	ret = (v->counter = v->counter c_op i);				\
	raw_local_irq_restore(flags);					\
									\
	return ret;							\
}

#define ATOMIC_FETCH_OP(op, c_op)					\
static inline int atomic_fetch_##op(int i, atomic_t *v)		\
{									\
	unsigned long flags;						\
	int ret;							\
									\
	raw_local_irq_save(flags);					\
	ret = v->counter;						\
	v->counter = v->counter c_op i;					\
	raw_local_irq_restore(flags);					\
									\
	return ret;							\
}
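
/*
 * On UP the only concurrency is from interrupts on the local CPU, so
 * disabling interrupts around the plain read-modify-write is enough to
 * make it atomic; no cmpxchg loop is needed. For reference,
 * ATOMIC_FETCH_OP(add, +) above expands to roughly:
 *
 *	static inline int atomic_fetch_add(int i, atomic_t *v)
 *	{
 *		unsigned long flags;
 *		int ret;
 *
 *		raw_local_irq_save(flags);	// fence off local interrupts
 *		ret = v->counter;		// fetch the old value
 *		v->counter = v->counter + i;	// then apply the op
 *		raw_local_irq_restore(flags);
 *
 *		return ret;
 *	}
 */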

#endif /* CONFIG_SMP */

#ifndef atomic_add_return
ATOMIC_OP_RETURN(add, +)
#endif

#ifndef atomic_sub_return
ATOMIC_OP_RETURN(sub, -)
#endif

#ifndef atomic_fetch_add
ATOMIC_FETCH_OP(add, +)
#endif

#ifndef atomic_fetch_sub
ATOMIC_FETCH_OP(sub, -)
#endif

#ifndef atomic_fetch_and
ATOMIC_FETCH_OP(and, &)
#endif

#ifndef atomic_fetch_or
ATOMIC_FETCH_OP(or, |)
#endif

#ifndef atomic_fetch_xor
ATOMIC_FETCH_OP(xor, ^)
#endif

#ifndef atomic_and
ATOMIC_OP(and, &)
#endif

#ifndef atomic_or
ATOMIC_OP(or, |)
#endif

#ifndef atomic_xor
ATOMIC_OP(xor, ^)
#endif
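
/*
 * Illustrative sketch (not part of this header): atomic_fetch_or() returns
 * the value *before* the OR, so exactly one caller can observe that it was
 * the first to set a flag. MY_CLOSING and s->flags are hypothetical.
 *
 *	if (!(atomic_fetch_or(MY_CLOSING, &s->flags) & MY_CLOSING))
 *		my_do_close(s);		// first to set the flag does the work
 */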

#undef ATOMIC_FETCH_OP
#undef ATOMIC_OP_RETURN
#undef ATOMIC_OP

/*
 * Atomic operations that C can't guarantee for us. Useful for
 * resource counting etc.
 */

/**
 * atomic_read - read atomic variable
 * @v: pointer of type atomic_t
 *
 * Atomically reads the value of @v.
 */
#ifndef atomic_read
#define atomic_read(v)	READ_ONCE((v)->counter)
#endif
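
/*
 * READ_ONCE() keeps the compiler from caching or refetching the load, so a
 * polling loop like the (hypothetical) sketch below will observe updates
 * made from other contexts:
 *
 *	while (atomic_read(&dev->pending) != 0)
 *		cpu_relax();
 */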

/**
 * atomic_set - set atomic variable
 * @v: pointer of type atomic_t
 * @i: required value
 *
 * Atomically sets the value of @v to @i.
 */
#define atomic_set(v, i) WRITE_ONCE(((v)->counter), (i))
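
/*
 * Illustrative sketch: atomic_set() is the runtime counterpart of static
 * initialization with ATOMIC_INIT() from <linux/types.h>.
 *
 *	static atomic_t requests = ATOMIC_INIT(0);	// compile time
 *	...
 *	atomic_set(&requests, 0);			// runtime reset
 */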

#include <linux/irqflags.h>

static inline void atomic_add(int i, atomic_t *v)
{
	atomic_add_return(i, v);
}

static inline void atomic_sub(int i, atomic_t *v)
{
	atomic_sub_return(i, v);
}

#define atomic_xchg(ptr, v)		(xchg(&(ptr)->counter, (v)))
#define atomic_cmpxchg(v, old, new)	(cmpxchg(&((v)->counter), (old), (new)))
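
/*
 * Illustrative sketch (not part of this header): atomic_cmpxchg() is the
 * building block for conditional updates, along the lines of the kernel's
 * atomic_add_unless(). my_inc_not_zero() is a hypothetical helper.
 *
 *	static inline bool my_inc_not_zero(atomic_t *v)
 *	{
 *		int c = atomic_read(v);
 *
 *		while (c != 0) {
 *			int old = atomic_cmpxchg(v, c, c + 1);
 *			if (old == c)
 *				return true;	// we installed c + 1
 *			c = old;		// lost a race, retry
 *		}
 *		return false;			// counter was zero, no ref taken
 *	}
 */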

#endif /* __ASM_GENERIC_ATOMIC_H */