#ifndef LIBURING_BARRIER_H
#define LIBURING_BARRIER_H

#if defined(__x86_64__) || defined(__i386__)
/*
 * x86 is strongly ordered (TSO): loads are not reordered with other
 * loads, and stores are not reordered with other stores, so a compiler
 * barrier is all that read/write ordering needs here.
 */
#define read_barrier() __asm__ __volatile__("":::"memory")
#define write_barrier() __asm__ __volatile__("":::"memory")
#else
/*
 * Add arch-appropriate definitions above. To be safe, fall back to
 * full memory barriers for architectures we don't have specific
 * support for.
 */
#define read_barrier() __sync_synchronize()
#define write_barrier() __sync_synchronize()
#endif /* defined(__x86_64__) || defined(__i386__) */
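
/*
 * Illustrative sketch only, not part of this header's API: how these
 * barriers pair up in a single-producer/single-consumer ring such as
 * the io_uring SQ/CQ rings. The helper names (ring_publish,
 * ring_consume) and the flat ring of unsigned values are hypothetical,
 * introduced here purely for the example; fullness checking on the
 * producer side is omitted for brevity.
 */
static inline void ring_publish(unsigned *entries, unsigned *ktail,
				unsigned mask, unsigned value)
{
	unsigned tail = *ktail;		/* only the producer writes the tail */

	entries[tail & mask] = value;	/* fill the slot first... */
	write_barrier();		/* ...and order it before the tail store */
	*ktail = tail + 1;		/* publish: consumer may now see the slot */
}

static inline unsigned ring_consume(const unsigned *entries, unsigned *khead,
				    const unsigned *ktail, unsigned mask)
{
	unsigned head = *khead;		/* only the consumer writes the head */
	unsigned tail, value;

	do {
		tail = *ktail;
		read_barrier();		/* order the tail load before the entry load */
	} while (head == tail);		/* spin until the producer advances */

	value = entries[head & mask];
	*khead = head + 1;
	return value;
}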

#endif /* LIBURING_BARRIER_H */