^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1) /* SPDX-License-Identifier: GPL-2.0 */
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2) #ifndef __ASM_BARRIER_H
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 3) #define __ASM_BARRIER_H
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 4)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 5) #include <asm/alternative.h>
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 6)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 7) #ifndef __ASSEMBLY__
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 8)
/*
 * Full memory barrier via the "synchronize caches" (sync) instruction,
 * which executes as a nop on systems in which all memory references are
 * performed in order.  On kernels booted without SMP the ALTERNATIVE()
 * patching (ALT_COND_NO_SMP) replaces the sync with a real NOP at boot,
 * so uniprocessor systems pay no runtime cost.  The "memory" clobber
 * additionally acts as a compiler barrier.
 */
#define synchronize_caches() asm volatile("sync" \
	ALTERNATIVE(ALT_COND_NO_SMP, INSN_NOP) \
	: : : "memory")
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 14)
#if defined(CONFIG_SMP)
/*
 * SMP: other CPUs can observe our accesses out of order, so every
 * mandatory barrier maps onto the (runtime-patchable) cache sync above.
 */
#define mb() do { synchronize_caches(); } while (0)
#define rmb() mb()
#define wmb() mb()
#define dma_rmb() mb()
#define dma_wmb() mb()
#else
/*
 * UP: only the compiler can reorder; a compiler barrier is sufficient
 * for all of the mandatory and DMA barriers.
 */
#define mb() barrier()
#define rmb() barrier()
#define wmb() barrier()
#define dma_rmb() barrier()
#define dma_wmb() barrier()
#endif
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 28)
/*
 * SMP-conditional barriers all map to the full barrier; asm-generic
 * (included below) turns these into compiler barriers on !SMP builds.
 */
#define __smp_mb() mb()
#define __smp_rmb() mb()
#define __smp_wmb() mb()
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 32)
/*
 * Store-release: all memory accesses program-ordered before this store
 * must be visible before the store itself.
 *
 * Implemented with a zero-displacement ",ma" (modify-after) store: with
 * a displacement of 0 the address register is unmodified.
 * NOTE(review): presumably the ,ma completer with zero displacement is
 * what gives the access its "ordered" semantics on PA-RISC 2.0 — confirm
 * against the PA-RISC 2.0 architecture manual.
 *
 * The value is routed through a union with a char[] so a value of any
 * type of the right size can be re-read as __u8/__u16/__u32/__u64 for
 * the asm operand without strict-aliasing complaints.
 *
 * case 8 emits no store unless CONFIG_64BIT; on 32-bit builds
 * compiletime_assert_atomic_type() should already have rejected 8-byte
 * objects (sizes are limited to the native word) — TODO confirm.
 */
#define __smp_store_release(p, v) \
do { \
	typeof(p) __p = (p); \
	union { typeof(*p) __val; char __c[1]; } __u = \
		{ .__val = (__force typeof(*p)) (v) }; \
	compiletime_assert_atomic_type(*p); \
	switch (sizeof(*p)) { \
	case 1: \
		asm volatile("stb,ma %0,0(%1)" \
				: : "r"(*(__u8 *)__u.__c), "r"(__p) \
				: "memory"); \
		break; \
	case 2: \
		asm volatile("sth,ma %0,0(%1)" \
				: : "r"(*(__u16 *)__u.__c), "r"(__p) \
				: "memory"); \
		break; \
	case 4: \
		asm volatile("stw,ma %0,0(%1)" \
				: : "r"(*(__u32 *)__u.__c), "r"(__p) \
				: "memory"); \
		break; \
	case 8: \
		if (IS_ENABLED(CONFIG_64BIT)) \
			asm volatile("std,ma %0,0(%1)" \
				: : "r"(*(__u64 *)__u.__c), "r"(__p) \
				: "memory"); \
		break; \
	} \
} while (0)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 63)
/*
 * Load-acquire: the load completes before any memory access
 * program-ordered after it.  Mirror image of __smp_store_release():
 * a zero-displacement ",ma" load leaves the address register unmodified,
 * and NOTE(review): presumably the completer marks the access "ordered"
 * on PA-RISC 2.0 — confirm against the architecture manual.
 *
 * The union lets the asm write the loaded bytes through a sized integer
 * lvalue while the statement expression yields the original type
 * (__u.__val).  As with the store side, case 8 only emits a load on
 * CONFIG_64BIT builds.
 */
#define __smp_load_acquire(p) \
({ \
	union { typeof(*p) __val; char __c[1]; } __u; \
	typeof(p) __p = (p); \
	compiletime_assert_atomic_type(*p); \
	switch (sizeof(*p)) { \
	case 1: \
		asm volatile("ldb,ma 0(%1),%0" \
				: "=r"(*(__u8 *)__u.__c) : "r"(__p) \
				: "memory"); \
		break; \
	case 2: \
		asm volatile("ldh,ma 0(%1),%0" \
				: "=r"(*(__u16 *)__u.__c) : "r"(__p) \
				: "memory"); \
		break; \
	case 4: \
		asm volatile("ldw,ma 0(%1),%0" \
				: "=r"(*(__u32 *)__u.__c) : "r"(__p) \
				: "memory"); \
		break; \
	case 8: \
		if (IS_ENABLED(CONFIG_64BIT)) \
			asm volatile("ldd,ma 0(%1),%0" \
				: "=r"(*(__u64 *)__u.__c) : "r"(__p) \
				: "memory"); \
		break; \
	} \
	__u.__val; \
})
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 94) #include <asm-generic/barrier.h>
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 95)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 96) #endif /* !__ASSEMBLY__ */
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 97) #endif /* __ASM_BARRIER_H */