@@ -31,17 +31,10 @@
 #endif
 #define dma_wmb() barrier()
 
-#ifdef CONFIG_SMP
-#define smp_mb() mb()
-#define smp_rmb() dma_rmb()
-#define smp_wmb() barrier()
-#define smp_store_mb(var, value) do { (void)xchg(&var, value); } while (0)
-#else /* !SMP */
-#define smp_mb() barrier()
-#define smp_rmb() barrier()
-#define smp_wmb() barrier()
-#define smp_store_mb(var, value) do { WRITE_ONCE(var, value); barrier(); } while (0)
-#endif /* SMP */
+#define __smp_mb() mb()
+#define __smp_rmb() dma_rmb()
+#define __smp_wmb() barrier()
+#define __smp_store_mb(var, value) do { (void)xchg(&var, value); } while (0)
 
 #if defined(CONFIG_X86_PPRO_FENCE)
 
@@ -50,31 +43,31 @@
  * model and we should fall back to full barriers.
  */
 
-#define smp_store_release(p, v) \
+#define __smp_store_release(p, v) \
 do { \
 	compiletime_assert_atomic_type(*p); \
-	smp_mb(); \
+	__smp_mb(); \
 	WRITE_ONCE(*p, v); \
 } while (0)
 
-#define smp_load_acquire(p) \
+#define __smp_load_acquire(p) \
 ({ \
 	typeof(*p) ___p1 = READ_ONCE(*p); \
 	compiletime_assert_atomic_type(*p); \
-	smp_mb(); \
+	__smp_mb(); \
 	___p1; \
 })
 
 #else /* regular x86 TSO memory ordering */
 
-#define smp_store_release(p, v) \
+#define __smp_store_release(p, v) \
 do { \
 	compiletime_assert_atomic_type(*p); \
 	barrier(); \
 	WRITE_ONCE(*p, v); \
 } while (0)
 
-#define smp_load_acquire(p) \
+#define __smp_load_acquire(p) \
 ({ \
 	typeof(*p) ___p1 = READ_ONCE(*p); \
 	compiletime_assert_atomic_type(*p); \
@@ -85,8 +78,8 @@ do { \
 #endif
 
 /* Atomic operations are already serializing on x86 */
-#define smp_mb__before_atomic() barrier()
-#define smp_mb__after_atomic() barrier()
+#define __smp_mb__before_atomic() barrier()
+#define __smp_mb__after_atomic() barrier()
 
 #include <asm-generic/barrier.h>
 
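For context: after this patch, arch/x86 only supplies the __smp_* building
blocks, and the generic smp_*() wrappers in include/asm-generic/barrier.h
map onto them on CONFIG_SMP builds (and onto compiler-only barriers on
!SMP builds), so callers keep using the smp_* names unchanged. The sketch
below is illustrative only and not part of the patch; shared_data,
data_ready, produce() and consume() are made-up names.

/*
 * Illustrative only -- not part of this patch. shared_data, data_ready,
 * produce() and consume() are hypothetical names for the sketch.
 */
#include <asm/barrier.h>

static int shared_data;
static int data_ready;

/* Publisher: make the payload visible before raising the flag. */
static void produce(void)
{
	shared_data = 42;
	/*
	 * smp_store_release() now expands to __smp_store_release() on
	 * SMP builds; on TSO x86 that is barrier() + WRITE_ONCE(),
	 * since the hardware already keeps stores ordered.
	 */
	smp_store_release(&data_ready, 1);
}

/* Consumer: the acquire load pairs with the release store above. */
static void consume(void)
{
	if (smp_load_acquire(&data_ready)) {
		/* If the flag is seen, the payload store is visible too. */
		int val = shared_data;

		(void)val;
	}
}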