@@ -12,11 +12,11 @@
  */
 
 #ifdef CONFIG_X86_32
-#define mb() asm volatile(ALTERNATIVE("lock; addl $0,0(%%esp)", "mfence", \
+#define mb() asm volatile(ALTERNATIVE("lock; addl $0,-4(%%esp)", "mfence", \
				      X86_FEATURE_XMM2) ::: "memory", "cc")
-#define rmb() asm volatile(ALTERNATIVE("lock; addl $0,0(%%esp)", "lfence", \
+#define rmb() asm volatile(ALTERNATIVE("lock; addl $0,-4(%%esp)", "lfence", \
				      X86_FEATURE_XMM2) ::: "memory", "cc")
-#define wmb() asm volatile(ALTERNATIVE("lock; addl $0,0(%%esp)", "sfence", \
+#define wmb() asm volatile(ALTERNATIVE("lock; addl $0,-4(%%esp)", "sfence", \
				      X86_FEATURE_XMM2) ::: "memory", "cc")
 #else
 #define mb() asm volatile("mfence":::"memory")
@@ -31,7 +31,11 @@
 #endif
 #define dma_wmb() barrier()
 
-#define __smp_mb() mb()
+#ifdef CONFIG_X86_32
+#define __smp_mb() asm volatile("lock; addl $0,-4(%%esp)" ::: "memory", "cc")
+#else
+#define __smp_mb() asm volatile("lock; addl $0,-4(%%rsp)" ::: "memory", "cc")
+#endif
 #define __smp_rmb() dma_rmb()
 #define __smp_wmb() barrier()
 #define __smp_store_mb(var, value) do { (void)xchg(&var, value); } while (0)
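A standalone illustration of the idiom the patch switches to, separate from the patch itself: on x86 a LOCK-prefixed read-modify-write is architecturally a full memory barrier, which is why "lock; addl $0,-4(%esp/%rsp)" can replace MFENCE. The sketch below is a hedged user-space analogue for x86-64 with GCC-style inline asm; full_barrier() and publish() are hypothetical names invented for the example and are not part of the kernel change.

/*
 * Sketch only, not part of the patch.  x86-64 user space, GCC/Clang
 * extended inline asm assumed; will not build on other architectures.
 */
#include <stdio.h>

static inline void full_barrier(void)	/* hypothetical helper */
{
	/*
	 * Adds 0, so the memory below the stack pointer is left unchanged;
	 * the LOCK prefix still orders all earlier loads and stores before
	 * all later ones.  On x86-64, -4(%rsp) lies inside the 128-byte
	 * red zone that the SysV ABI reserves for user code.
	 */
	asm volatile("lock; addl $0,-4(%%rsp)" ::: "memory", "cc");
}

static int data, flag;

static void publish(int value)
{
	data = value;
	full_barrier();		/* make the store to 'data' visible before 'flag' */
	flag = 1;
}

int main(void)
{
	publish(42);
	printf("flag=%d data=%d\n", flag, data);
	return 0;
}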