/* SPDX-License-Identifier: GPL-2.0 */
#include <stdlib.h>
#if defined(__i386__) || defined(__x86_64__)
#define barrier() asm volatile("" ::: "memory")
#define virt_mb() __sync_synchronize()
#define virt_rmb() barrier()
#define virt_wmb() barrier()
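/*
 * On x86 the hardware memory model (TSO) already keeps loads ordered with
 * loads and stores ordered with stores, so virt_rmb()/virt_wmb() only have
 * to stop the compiler from reordering; only virt_mb() needs a real fence.
 */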
/* Atomic store should be enough, but gcc generates worse code in that case. */
#define virt_store_mb(var, value) do { \
	typeof(var) virt_store_mb_value = (value); \
	__atomic_exchange(&(var), &virt_store_mb_value, &virt_store_mb_value, \
			  __ATOMIC_SEQ_CST); \
	barrier(); \
} while (0)
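/*
 * Note: for lock-free-sized variables the __ATOMIC_SEQ_CST exchange above
 * compiles to an implicitly locked xchg on x86, which supplies the full
 * barrier that virt_store_mb() promises; the explicit barrier() makes the
 * compiler-ordering requirement at this point explicit as well.
 */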
/* Weak barriers should be used. If not, it's a bug. */
# define mb() abort()
# define dma_rmb() abort()
# define dma_wmb() abort()
#else
#error Please fill in barrier macros
#endif
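/*
 * Illustrative pairing of the weak barriers (a sketch, not part of this
 * header; 'ring', 'shared' and 'avail_idx' are placeholder names):
 *
 *   producer:                          consumer:
 *     ring[head] = req;                  idx = shared->avail_idx;
 *     virt_wmb();                        virt_rmb();
 *     shared->avail_idx = head + 1;      req = ring[idx - 1];
 *
 * virt_wmb() orders writing the payload before publishing the index;
 * virt_rmb() orders reading the index before reading the payload.
 */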