@@ -4,6 +4,15 @@
 #include <asm/atomic.h>
 #include <asm/barrier.h>
 
+#ifndef atomic_read_ctrl
+static inline int atomic_read_ctrl(const atomic_t *v)
+{
+	int val = atomic_read(v);
+	smp_read_barrier_depends(); /* Enforce control dependency. */
+	return val;
+}
+#endif
+
 /*
  * Relaxed variants of xchg, cmpxchg and some atomic operations.
  *
@@ -81,6 +90,30 @@
 #endif
 #endif /* atomic_add_return_relaxed */
 
+/* atomic_inc_return_relaxed */
+#ifndef atomic_inc_return_relaxed
+#define atomic_inc_return_relaxed	atomic_inc_return
+#define atomic_inc_return_acquire	atomic_inc_return
+#define atomic_inc_return_release	atomic_inc_return
+
+#else /* atomic_inc_return_relaxed */
+
+#ifndef atomic_inc_return_acquire
+#define atomic_inc_return_acquire(...)					\
+	__atomic_op_acquire(atomic_inc_return, __VA_ARGS__)
+#endif
+
+#ifndef atomic_inc_return_release
+#define atomic_inc_return_release(...)					\
+	__atomic_op_release(atomic_inc_return, __VA_ARGS__)
+#endif
+
+#ifndef atomic_inc_return
+#define atomic_inc_return(...)						\
+	__atomic_op_fence(atomic_inc_return, __VA_ARGS__)
+#endif
+#endif /* atomic_inc_return_relaxed */
+
 /* atomic_sub_return_relaxed */
 #ifndef atomic_sub_return_relaxed
 #define atomic_sub_return_relaxed	atomic_sub_return
@@ -105,6 +138,30 @@
 #endif
 #endif /* atomic_sub_return_relaxed */
 
+/* atomic_dec_return_relaxed */
+#ifndef atomic_dec_return_relaxed
+#define atomic_dec_return_relaxed	atomic_dec_return
+#define atomic_dec_return_acquire	atomic_dec_return
+#define atomic_dec_return_release	atomic_dec_return
+
+#else /* atomic_dec_return_relaxed */
+
+#ifndef atomic_dec_return_acquire
+#define atomic_dec_return_acquire(...)					\
+	__atomic_op_acquire(atomic_dec_return, __VA_ARGS__)
+#endif
+
+#ifndef atomic_dec_return_release
+#define atomic_dec_return_release(...)					\
+	__atomic_op_release(atomic_dec_return, __VA_ARGS__)
+#endif
+
+#ifndef atomic_dec_return
+#define atomic_dec_return(...)						\
+	__atomic_op_fence(atomic_dec_return, __VA_ARGS__)
+#endif
+#endif /* atomic_dec_return_relaxed */
+
 /* atomic_xchg_relaxed */
 #ifndef atomic_xchg_relaxed
 #define atomic_xchg_relaxed	atomic_xchg
@@ -185,6 +242,31 @@
 #endif
 #endif /* atomic64_add_return_relaxed */
 
+/* atomic64_inc_return_relaxed */
+#ifndef atomic64_inc_return_relaxed
+#define atomic64_inc_return_relaxed	atomic64_inc_return
+#define atomic64_inc_return_acquire	atomic64_inc_return
+#define atomic64_inc_return_release	atomic64_inc_return
+
+#else /* atomic64_inc_return_relaxed */
+
+#ifndef atomic64_inc_return_acquire
+#define atomic64_inc_return_acquire(...)				\
+	__atomic_op_acquire(atomic64_inc_return, __VA_ARGS__)
+#endif
+
+#ifndef atomic64_inc_return_release
+#define atomic64_inc_return_release(...)				\
+	__atomic_op_release(atomic64_inc_return, __VA_ARGS__)
+#endif
+
+#ifndef atomic64_inc_return
+#define atomic64_inc_return(...)					\
+	__atomic_op_fence(atomic64_inc_return, __VA_ARGS__)
+#endif
+#endif /* atomic64_inc_return_relaxed */
+
+
 /* atomic64_sub_return_relaxed */
 #ifndef atomic64_sub_return_relaxed
 #define atomic64_sub_return_relaxed	atomic64_sub_return
@@ -209,6 +291,30 @@
 #endif
 #endif /* atomic64_sub_return_relaxed */
 
+/* atomic64_dec_return_relaxed */
+#ifndef atomic64_dec_return_relaxed
+#define atomic64_dec_return_relaxed	atomic64_dec_return
+#define atomic64_dec_return_acquire	atomic64_dec_return
+#define atomic64_dec_return_release	atomic64_dec_return
+
+#else /* atomic64_dec_return_relaxed */
+
+#ifndef atomic64_dec_return_acquire
+#define atomic64_dec_return_acquire(...)				\
+	__atomic_op_acquire(atomic64_dec_return, __VA_ARGS__)
+#endif
+
+#ifndef atomic64_dec_return_release
+#define atomic64_dec_return_release(...)				\
+	__atomic_op_release(atomic64_dec_return, __VA_ARGS__)
+#endif
+
+#ifndef atomic64_dec_return
+#define atomic64_dec_return(...)					\
+	__atomic_op_fence(atomic64_dec_return, __VA_ARGS__)
+#endif
+#endif /* atomic64_dec_return_relaxed */
+
 /* atomic64_xchg_relaxed */
 #ifndef atomic64_xchg_relaxed
 #define atomic64_xchg_relaxed	atomic64_xchg
@@ -451,11 +557,19 @@ static inline int atomic_dec_if_positive(atomic_t *v)
 }
 #endif
 
-#include <asm-generic/atomic-long.h>
 #ifdef CONFIG_GENERIC_ATOMIC64
 #include <asm-generic/atomic64.h>
 #endif
 
+#ifndef atomic64_read_ctrl
+static inline long long atomic64_read_ctrl(const atomic64_t *v)
+{
+	long long val = atomic64_read(v);
+	smp_read_barrier_depends(); /* Enforce control dependency. */
+	return val;
+}
+#endif
+
 #ifndef atomic64_andnot
 static inline void atomic64_andnot(long long i, atomic64_t *v)
 {
@@ -463,4 +577,6 @@ static inline void atomic64_andnot(long long i, atomic64_t *v)
 }
 #endif
 
+#include <asm-generic/atomic-long.h>
+
 #endif /* _LINUX_ATOMIC_H */
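
Usage sketch for the new _ctrl read (not part of the patch): atomic_read_ctrl() is
meant for a value that is subsequently tested in a branch, so that the branch gives
a control dependency ordering stores inside it after the load. The names below
(flag, data, consumer) are hypothetical, purely for illustration:

	#include <linux/atomic.h>

	static atomic_t flag = ATOMIC_INIT(0);
	static int data;

	static void consumer(void)
	{
		/*
		 * The store to 'data' sits in a branch taken on the value
		 * returned by the _ctrl read, so the control dependency
		 * orders it after the load of 'flag'.
		 */
		if (atomic_read_ctrl(&flag))
			data = 1;
	}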
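
The _acquire/_release/fully-ordered forms generated above rely on the
__atomic_op_*() wrapper macros defined earlier in <linux/atomic.h> (outside this
diff). Roughly, each one builds a stronger-ordered variant out of the _relaxed
primitive:

	#define __atomic_op_acquire(op, args...)			\
	({								\
		typeof(op##_relaxed(args)) __ret = op##_relaxed(args);	\
		smp_mb__after_atomic();					\
		__ret;							\
	})

	#define __atomic_op_release(op, args...)			\
	({								\
		smp_mb__before_atomic();				\
		op##_relaxed(args);					\
	})

	#define __atomic_op_fence(op, args...)				\
	({								\
		typeof(op##_relaxed(args)) __ret;			\
		smp_mb__before_atomic();				\
		__ret = op##_relaxed(args);				\
		smp_mb__after_atomic();					\
		__ret;							\
	})

So an architecture need only supply atomic_inc_return_relaxed() and the acquire,
release and fully ordered forms fall out of the wrappers; if it supplies only a
fully ordered atomic_inc_return(), the #ifndef side of each block aliases all the
variants to that. Note also that the patch moves #include <asm-generic/atomic-long.h>
to the bottom of the file, presumably so the atomic_long_t wrappers can be built
on top of the atomic_t/atomic64_t operations completed above.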