@@ -1,15 +1,19 @@
 #if defined(__i386__) || defined(__x86_64__)
 #define barrier() asm volatile("" ::: "memory")
-#define mb() __sync_synchronize()
-
-#define smp_mb() mb()
-# define dma_rmb() barrier()
-# define dma_wmb() barrier()
-# define smp_rmb() barrier()
-# define smp_wmb() barrier()
+#define virt_mb() __sync_synchronize()
+#define virt_rmb() barrier()
+#define virt_wmb() barrier()
+/* Atomic store should be enough, but gcc generates worse code in that case. */
+#define virt_store_mb(var, value) do { \
+	typeof(var) virt_store_mb_value = (value); \
+	__atomic_exchange(&(var), &virt_store_mb_value, &virt_store_mb_value, \
+			  __ATOMIC_SEQ_CST); \
+	barrier(); \
+} while (0);
 /* Weak barriers should be used. If not - it's a bug */
-# define rmb() abort()
-# define wmb() abort()
+# define mb() abort()
+# define rmb() abort()
+# define wmb() abort()
 #else
 #error Please fill in barrier macros
 #endif
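
For context, a minimal usage sketch (not part of the patch) of how a userspace ring producer might use the virt_wmb()/virt_store_mb() macros introduced above. The struct demo_ring, its fields, and demo_publish() are hypothetical names chosen for illustration, and the snippet assumes the patched barrier.h is reachable on the include path.

/* Hypothetical producer-side sketch using the virt_* barriers defined above. */
#include <stdint.h>
#include <asm/barrier.h>	/* the patched header above; tools include path assumed */

struct demo_ring {
	uint16_t entries[16];	/* payload slots written by the producer */
	uint16_t avail_idx;	/* index published to the consumer */
	int notify;		/* flag the consumer checks before a kick */
};

static void demo_publish(struct demo_ring *r, uint16_t slot, uint16_t data)
{
	r->entries[slot % 16] = data;
	virt_wmb();			/* order the payload before the index update */
	r->avail_idx = slot + 1;
	virt_store_mb(r->notify, 1);	/* store the flag with a full barrier */
}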