/* SPDX-License-Identifier: GPL-2.0 */
// Copyright (C) 2018 Hangzhou C-SKY Microsystems co.,ltd.

#ifndef __ASM_CSKY_BARRIER_H
#define __ASM_CSKY_BARRIER_H

#ifndef __ASSEMBLY__

#define nop() asm volatile ("nop\n":::"memory")

/*
 * sync: completion barrier. All sync.xx instructions guarantee that
 *       the last response has been received for every bus transaction
 *       made by ld/st instructions issued before the sync.
 * sync.s: same as sync, but also shareable to other cores
 * sync.i: same as sync, but also flushes the cpu pipeline
 * sync.is: combines the effects of sync.i and sync.s
 *
 * bar.brwarw: ordering barrier for all load/store instructions before it
 * bar.brwarws: ordering barrier for all load/store instructions before it
 *		and shareable to other cores
 * bar.brar: ordering barrier for all load instructions before it
 * bar.brars: ordering barrier for all load instructions before it
 *		and shareable to other cores
 * bar.bwaw: ordering barrier for all store instructions before it
 * bar.bwaws: ordering barrier for all store instructions before it
 *		and shareable to other cores
 */
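
/*
 * Illustrative sketch only (hypothetical device and fields, not taken
 * from kernel code): mb() below expands to the completion barrier
 * sync.s (or plain sync without CACHEV2), so a driver publishing a DMA
 * descriptor before ringing a doorbell register could do:
 *
 *	desc->addr = dma_handle;
 *	desc->len  = len;
 *	mb();			// wait for the prior stores' bus responses
 *	writel(1, doorbell_reg);
 *
 * Kernel code should use the mb()/smp_*() macros defined below rather
 * than open-coding the raw instructions.
 */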

#ifdef CONFIG_CPU_HAS_CACHEV2
#define mb() asm volatile ("sync.s\n":::"memory")

#ifdef CONFIG_SMP
#define __smp_mb() asm volatile ("bar.brwarws\n":::"memory")
#define __smp_rmb() asm volatile ("bar.brars\n":::"memory")
#define __smp_wmb() asm volatile ("bar.bwaws\n":::"memory")
#endif /* CONFIG_SMP */

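/*
 * sync_is() is a completion barrier that additionally flushes the cpu
 * pipeline and is shareable to other cores (sync.i + sync.s above); it
 * is a helper for the C-SKY cache/TLB maintenance code outside this
 * header rather than a generic kernel barrier.
 */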
#define sync_is() asm volatile ("sync.is\n":::"memory")

#else /* !CONFIG_CPU_HAS_CACHEV2 */
#define mb() asm volatile ("sync\n":::"memory")
#endif /* CONFIG_CPU_HAS_CACHEV2 */

#include <asm-generic/barrier.h>
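
/*
 * asm-generic/barrier.h fills in every barrier this file does not
 * define, including the smp_*() wrappers around the __smp_*() macros
 * above on SMP builds.
 *
 * Illustrative sketch only (hypothetical variables, not kernel code):
 * the message-passing pattern those SMP barriers implement:
 *
 *	// writer side
 *	WRITE_ONCE(data, 42);
 *	smp_wmb();		// bar.bwaws on a CACHEV2 SMP kernel
 *	WRITE_ONCE(ready, 1);
 *
 *	// reader side
 *	if (READ_ONCE(ready)) {
 *		smp_rmb();	// bar.brars on a CACHEV2 SMP kernel
 *		val = READ_ONCE(data);
 *	}
 */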

#endif /* __ASSEMBLY__ */
#endif /* __ASM_CSKY_BARRIER_H */