|
@@ -173,6 +173,12 @@ extern phys_addr_t per_cpu_ptr_to_phys(void *addr);
 
 extern void __bad_size_call_parameter(void);
 
+#ifdef CONFIG_DEBUG_PREEMPT
+extern void __this_cpu_preempt_check(const char *op);
+#else
+static inline void __this_cpu_preempt_check(const char *op) { }
+#endif
+
 #define __pcpu_size_call_return(stem, variable)			\
 ({	typeof(variable) pscr_ret__;					\
 	__verify_pcpu_ptr(&(variable));					\
|
|
@@ -725,18 +731,24 @@ do {									\
 
 /*
  * Generic percpu operations for context that are safe from preemption/interrupts.
- * Checks will be added here soon.
  */
 #ifndef __this_cpu_read
-# define __this_cpu_read(pcp)	__pcpu_size_call_return(raw_cpu_read_, (pcp))
+# define __this_cpu_read(pcp) \
+	(__this_cpu_preempt_check("read"),__pcpu_size_call_return(raw_cpu_read_, (pcp)))
 #endif
 
 #ifndef __this_cpu_write
-# define __this_cpu_write(pcp, val)	__pcpu_size_call(raw_cpu_write_, (pcp), (val))
+# define __this_cpu_write(pcp, val) \
+do { __this_cpu_preempt_check("write"); \
+     __pcpu_size_call(raw_cpu_write_, (pcp), (val)); \
+} while (0)
 #endif
 
 #ifndef __this_cpu_add
-# define __this_cpu_add(pcp, val)	__pcpu_size_call(raw_cpu_add_, (pcp), (val))
+# define __this_cpu_add(pcp, val) \
+do { __this_cpu_preempt_check("add"); \
+     __pcpu_size_call(raw_cpu_add_, (pcp), (val)); \
+} while (0)
 #endif
 
 #ifndef __this_cpu_sub
|
@@ -752,16 +764,23 @@ do {									\
 #endif
 
 #ifndef __this_cpu_and
-# define __this_cpu_and(pcp, val)	__pcpu_size_call(raw_cpu_and_, (pcp), (val))
+# define __this_cpu_and(pcp, val) \
+do { __this_cpu_preempt_check("and"); \
+     __pcpu_size_call(raw_cpu_and_, (pcp), (val)); \
+} while (0)
+
 #endif
 
 #ifndef __this_cpu_or
-# define __this_cpu_or(pcp, val)	__pcpu_size_call(raw_cpu_or_, (pcp), (val))
+# define __this_cpu_or(pcp, val) \
+do { __this_cpu_preempt_check("or"); \
+     __pcpu_size_call(raw_cpu_or_, (pcp), (val)); \
+} while (0)
 #endif
 
 #ifndef __this_cpu_add_return
 # define __this_cpu_add_return(pcp, val)	\
-	__pcpu_size_call_return2(raw_cpu_add_return_, pcp, val)
+	(__this_cpu_preempt_check("add_return"),__pcpu_size_call_return2(raw_cpu_add_return_, pcp, val))
 #endif
 
 #define __this_cpu_sub_return(pcp, val)	__this_cpu_add_return(pcp, -(typeof(pcp))(val))
|
@@ -770,17 +789,17 @@ do {									\
 
 #ifndef __this_cpu_xchg
 # define __this_cpu_xchg(pcp, nval)	\
-	__pcpu_size_call_return2(raw_cpu_xchg_, (pcp), nval)
+	(__this_cpu_preempt_check("xchg"),__pcpu_size_call_return2(raw_cpu_xchg_, (pcp), nval))
 #endif
 
 #ifndef __this_cpu_cmpxchg
 # define __this_cpu_cmpxchg(pcp, oval, nval)	\
-	__pcpu_size_call_return2(raw_cpu_cmpxchg_, pcp, oval, nval)
+	(__this_cpu_preempt_check("cmpxchg"),__pcpu_size_call_return2(raw_cpu_cmpxchg_, pcp, oval, nval))
 #endif
 
 #ifndef __this_cpu_cmpxchg_double
 # define __this_cpu_cmpxchg_double(pcp1, pcp2, oval1, oval2, nval1, nval2)	\
-	__pcpu_double_call_return_bool(raw_cpu_cmpxchg_double_, (pcp1), (pcp2), (oval1), (oval2), (nval1), (nval2))
+	(__this_cpu_preempt_check("cmpxchg_double"),__pcpu_double_call_return_bool(raw_cpu_cmpxchg_double_, (pcp1), (pcp2), (oval1), (oval2), (nval1), (nval2)))
 #endif
 
 #endif /* __LINUX_PERCPU_H */
|