Diffstat (limited to 'tools/arch/arm64/include/asm/barrier.h')
-rw-r--r-- tools/arch/arm64/include/asm/barrier.h | 10 ++++++++++
1 file changed, 10 insertions(+), 0 deletions(-)
diff --git a/tools/arch/arm64/include/asm/barrier.h b/tools/arch/arm64/include/asm/barrier.h
index 378c051fa177..3b9b41331c4f 100644
--- a/tools/arch/arm64/include/asm/barrier.h
+++ b/tools/arch/arm64/include/asm/barrier.h
@@ -14,6 +14,16 @@
#define wmb() asm volatile("dmb ishst" ::: "memory")
#define rmb() asm volatile("dmb ishld" ::: "memory")
+/*
+ * The kernel uses dmb variants on arm64 for the smp_*() barriers. That is
+ * essentially the same implementation as the mb()/wmb()/rmb() above, though
+ * for those the kernel itself uses dsb. In any case, should the
+ * mb()/wmb()/rmb() above change, make sure the smp_*() below don't.
+ */
+#define smp_mb() asm volatile("dmb ish" ::: "memory")
+#define smp_wmb() asm volatile("dmb ishst" ::: "memory")
+#define smp_rmb() asm volatile("dmb ishld" ::: "memory")
+
#define smp_store_release(p, v) \
do { \
union { typeof(*p) __val; char __c[1]; } __u = \
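[...]

Usage note (not part of the patch): a minimal, hypothetical sketch of the
single-producer/single-consumer handoff that the smp_wmb()/smp_rmb() pair
added above is meant for. Only the two barrier macros are taken verbatim from
the diff; produce(), consume() and the driver in main() are illustrative, and
the inline asm builds on arm64 with gcc/clang only.

#include <stdio.h>

/* Barrier definitions copied verbatim from the patch; arm64 only. */
#define smp_wmb() asm volatile("dmb ishst" ::: "memory")
#define smp_rmb() asm volatile("dmb ishld" ::: "memory")

static int payload;		/* data published by the producer */
static volatile int ready;	/* handoff flag; real tools code would use
				 * READ_ONCE()/WRITE_ONCE() instead of volatile */

/* Producer: write the data first, then publish the flag. */
static void produce(int v)
{
	payload = v;
	smp_wmb();		/* order the payload store before the flag store */
	ready = 1;
}

/* Consumer: observe the flag, then read the data. */
static int consume(void)
{
	while (!ready)
		;		/* spin until the producer publishes */
	smp_rmb();		/* order the flag load before the payload load */
	return payload;
}

int main(void)
{
	/* Single-threaded driver, just to make the sketch executable. */
	produce(42);
	printf("%d\n", consume());
	return 0;
}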