@@ -100,8 +100,8 @@
 #ifdef __aarch64__
 #define mb() asm volatile("dmb ish" ::: "memory")
-#define wmb() asm volatile("dmb ishld" ::: "memory")
-#define rmb() asm volatile("dmb ishst" ::: "memory")
+#define wmb() asm volatile("dmb ishst" ::: "memory")
+#define rmb() asm volatile("dmb ishld" ::: "memory")
 #define cpu_relax() asm volatile("yield" ::: "memory")
 #endif