/* SPDX-License-Identifier: GPL-2.0 */
/*
 * Copyright IBM Corp. 1999, 2009
 *
 * Author(s): Martin Schwidefsky <schwidefsky@de.ibm.com>
 */

#ifndef __ASM_BARRIER_H
#define __ASM_BARRIER_H

/*
 * Force strict CPU ordering.
 * And yes, this is required on UP too when we're talking
 * to devices.
 */

#ifdef CONFIG_HAVE_MARCH_Z196_FEATURES
/* Fast-BCR without checkpoint synchronization */
#define __ASM_BARRIER "bcr 14,0\n"
#else
#define __ASM_BARRIER "bcr 15,0\n"
#endif
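
/*
 * BCR with a nonzero mask and general register 0 does not branch but
 * acts as a serializing instruction: all storage accesses before it
 * complete before any access after it begins. On z196 and newer
 * machines "bcr 14,0" provides this guarantee without the costly
 * checkpoint synchronization that the classic "bcr 15,0" performs.
 */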

#define mb() do { asm volatile(__ASM_BARRIER : : : "memory"); } while (0)

#define rmb()				barrier()
#define wmb()				barrier()
#define dma_rmb()			mb()
#define dma_wmb()			mb()
#define __smp_mb()			mb()
#define __smp_rmb()			rmb()
#define __smp_wmb()			wmb()
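
/*
 * Usage sketch (hypothetical driver code, not part of this header):
 * dma_wmb() orders the stores that fill a DMA descriptor before the
 * store that hands the descriptor to the device, the pattern described
 * in Documentation/memory-barriers.txt. "desc", "dma_addr" and
 * DESC_OWNED are made-up names.
 *
 *	desc->addr  = dma_addr;		// payload first
 *	desc->len   = len;
 *	dma_wmb();			// payload before ownership
 *	desc->flags = DESC_OWNED;	// device may consume it now
 */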

#define __smp_store_release(p, v)					\
do {									\
	compiletime_assert_atomic_type(*p);				\
	barrier();							\
	WRITE_ONCE(*p, v);						\
} while (0)

#define __smp_load_acquire(p)						\
({									\
	typeof(*p) ___p1 = READ_ONCE(*p);				\
	compiletime_assert_atomic_type(*p);				\
	barrier();							\
	___p1;								\
})
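
/*
 * Usage sketch (hypothetical producer/consumer, not part of this
 * header). Callers normally use the generic smp_store_release()/
 * smp_load_acquire() wrappers that <asm-generic/barrier.h> builds from
 * the __smp_* variants above; on s390 a compiler barrier is enough
 * because the hardware keeps ordinary storage accesses strongly
 * ordered. "data", "ready", compute() and consume() are made-up names.
 *
 *	// producer
 *	data = compute();
 *	smp_store_release(&ready, 1);	// publish data
 *
 *	// consumer
 *	if (smp_load_acquire(&ready))	// pairs with the release above
 *		consume(data);		// guaranteed to see the new data
 */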

#define __smp_mb__before_atomic()	barrier()
#define __smp_mb__after_atomic()	barrier()
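
/*
 * Usage sketch (hypothetical, not part of this header):
 * smp_mb__before_atomic()/smp_mb__after_atomic() order code around a
 * non-value-returning atomic op. On s390 they reduce to compiler
 * barriers because the interlocked-update instructions behind the
 * atomics already provide the required ordering. "obj" is a made-up
 * name.
 *
 *	obj->done = 1;
 *	smp_mb__before_atomic();	// order the store above ...
 *	atomic_inc(&obj->refcount);	// ... before the atomic update
 */
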
/**
 * array_index_mask_nospec - generate a mask for array_index_nospec() that is
 * ~0UL when the bounds check succeeds and 0 otherwise
 * @index: array element index
 * @size: number of elements in array
 */
#define array_index_mask_nospec array_index_mask_nospec
static inline unsigned long array_index_mask_nospec(unsigned long index,
						    unsigned long size)
{
	unsigned long mask;

	if (__builtin_constant_p(size) && size > 0) {
		asm("	clgr	%2,%1\n"	/* borrow if index <= size - 1 */
		    "	slbgr	%0,%0\n"	/* mask = borrow ? ~0UL : 0 */
		    : "=d" (mask) : "d" (size - 1), "d" (index) : "cc");
		return mask;
	}
	asm("	clgr	%1,%2\n"	/* borrow if size <= index */
	    "	slbgr	%0,%0\n"	/* mask = borrow ? ~0UL : 0 */
	    : "=d" (mask) : "d" (size), "d" (index) : "cc");
	return ~mask;	/* invert: in bounds -> ~0UL, out of bounds -> 0 */
}
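
/*
 * Usage sketch (hypothetical, not part of this header): the generic
 * array_index_nospec() helper in <linux/nospec.h> applies the mask
 * computed above, so a mispredicted bounds check cannot speculatively
 * index past the end of the array. "table", "idx" and "val" are
 * made-up names.
 *
 *	if (idx < ARRAY_SIZE(table)) {
 *		idx = array_index_nospec(idx, ARRAY_SIZE(table));
 *		val = table[idx];	// idx is forced to 0 when
 *	}				// the check was mispredicted
 */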

#include <asm-generic/barrier.h>

#endif /* __ASM_BARRIER_H */