|
@@ -16,7 +16,10 @@
|
|
|
#ifndef __ASM_PERCPU_H
|
|
|
#define __ASM_PERCPU_H
|
|
|
|
|
|
+#include <linux/preempt.h>
|
|
|
+
|
|
|
#include <asm/alternative.h>
|
|
|
+#include <asm/cmpxchg.h>
|
|
|
#include <asm/stack_pointer.h>
|
|
|
|
|
|
static inline void set_my_cpu_offset(unsigned long off)
|
|
@@ -197,6 +200,32 @@ static inline unsigned long __percpu_xchg(void *ptr, unsigned long val,
|
|
|
return ret;
|
|
|
}
|
|
|
|
|
|
+/*
+ * this_cpu_cmpxchg: cmpxchg on this CPU's copy of a per-CPU variable.
+ * cmpxchg_local() on its own is not safe against preemption, so the
+ * pointer lookup and the exchange are bracketed by
+ * preempt_disable()/preempt_enable().
+ */
|
|
|
+#define _protect_cmpxchg_local(pcp, o, n) \
|
|
|
+({ \
|
|
|
+ typeof(*raw_cpu_ptr(&(pcp))) __ret; \
|
|
|
+ preempt_disable(); \
|
|
|
+ __ret = cmpxchg_local(raw_cpu_ptr(&(pcp)), o, n); \
|
|
|
+ preempt_enable(); \
|
|
|
+ __ret; \
|
|
|
+})
|
|
|
+
|
|
|
+/* Size-specific hooks picked up by the generic this_cpu_cmpxchg() machinery. */
+#define this_cpu_cmpxchg_1(ptr, o, n) _protect_cmpxchg_local(ptr, o, n)
|
|
|
+#define this_cpu_cmpxchg_2(ptr, o, n) _protect_cmpxchg_local(ptr, o, n)
|
|
|
+#define this_cpu_cmpxchg_4(ptr, o, n) _protect_cmpxchg_local(ptr, o, n)
|
|
|
+#define this_cpu_cmpxchg_8(ptr, o, n) _protect_cmpxchg_local(ptr, o, n)
|
|
|
+
|
|
|
+/*
+ * this_cpu_cmpxchg_double: compare-and-exchange a pair of 8-byte
+ * per-CPU variables via cmpxchg_double_local(). Preemption is disabled
+ * so both raw_cpu_ptr() lookups and the exchange all refer to the same
+ * CPU's data. __ret is the int success flag returned by
+ * cmpxchg_double_local() (layout constraints on ptr1/ptr2 are those of
+ * cmpxchg_double_local() — see its definition).
+ */
|
|
|
+#define this_cpu_cmpxchg_double_8(ptr1, ptr2, o1, o2, n1, n2) \
|
|
|
+({ \
|
|
|
+ int __ret; \
|
|
|
+ preempt_disable(); \
|
|
|
+ __ret = cmpxchg_double_local(raw_cpu_ptr(&(ptr1)), \
|
|
|
+ raw_cpu_ptr(&(ptr2)), \
|
|
|
+ o1, o2, n1, n2); \
|
|
|
+ preempt_enable(); \
|
|
|
+ __ret; \
|
|
|
+})
|
|
|
+
|
|
|
#define _percpu_read(pcp) \
|
|
|
({ \
|
|
|
typeof(pcp) __retval; \
|