@@ -210,8 +210,8 @@ ATOMIC_OP(xor, ^=, eor)
 
 #define atomic_inc_and_test(v) (atomic_add_return(1, v) == 0)
 #define atomic_dec_and_test(v) (atomic_sub_return(1, v) == 0)
-#define atomic_inc_return(v) (atomic_add_return(1, v))
-#define atomic_dec_return(v) (atomic_sub_return(1, v))
+#define atomic_inc_return_relaxed(v) (atomic_add_return_relaxed(1, v))
+#define atomic_dec_return_relaxed(v) (atomic_sub_return_relaxed(1, v))
 #define atomic_sub_and_test(i, v) (atomic_sub_return(i, v) == 0)
 
 #define atomic_add_negative(i,v) (atomic_add_return(i, v) < 0)
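
(For orientation, not part of the patch itself: with the architecture now supplying only the _relaxed forms above, the fully ordered atomic_inc_return()/atomic_dec_return() are expected to come from the generic <linux/atomic.h> layer, which brackets the relaxed RMW with full barriers. The sketch below paraphrases that wrapper pattern from the generic header of this era; it is illustrative, not the verbatim header text.)

/* Sketch (paraphrased): rebuild the fully ordered op from the _relaxed one. */
#define __atomic_op_fence(op, args...)					\
({									\
	typeof(op##_relaxed(args)) __ret;				\
	smp_mb__before_atomic();					\
	__ret = op##_relaxed(args);					\
	smp_mb__after_atomic();						\
	__ret;								\
})

#ifndef atomic_inc_return
#define atomic_inc_return(...)						\
	__atomic_op_fence(atomic_inc_return, __VA_ARGS__)
#endif

#ifndef atomic_dec_return
#define atomic_dec_return(...)						\
	__atomic_op_fence(atomic_dec_return, __VA_ARGS__)
#endif
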
@@ -442,11 +442,11 @@ static inline int atomic64_add_unless(atomic64_t *v, long long a, long long u)
 
 #define atomic64_add_negative(a, v) (atomic64_add_return((a), (v)) < 0)
 #define atomic64_inc(v) atomic64_add(1LL, (v))
-#define atomic64_inc_return(v) atomic64_add_return(1LL, (v))
+#define atomic64_inc_return_relaxed(v) atomic64_add_return_relaxed(1LL, (v))
 #define atomic64_inc_and_test(v) (atomic64_inc_return(v) == 0)
 #define atomic64_sub_and_test(a, v) (atomic64_sub_return((a), (v)) == 0)
 #define atomic64_dec(v) atomic64_sub(1LL, (v))
-#define atomic64_dec_return(v) atomic64_sub_return(1LL, (v))
+#define atomic64_dec_return_relaxed(v) atomic64_sub_return_relaxed(1LL, (v))
 #define atomic64_dec_and_test(v) (atomic64_dec_return((v)) == 0)
 #define atomic64_inc_not_zero(v) atomic64_add_unless((v), 1LL, 0LL)
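
(Also illustrative, not part of the patch: a minimal user-space C11 analogue of the split these hunks introduce, assuming nothing beyond <stdatomic.h>. The demo_* names are invented for the example. A relaxed fetch-add is atomic but imposes no ordering on surrounding accesses; bracketing it with full fences recovers the fully ordered inc-return behaviour that existing callers of atomic64_inc_return() continue to rely on.)

#include <stdatomic.h>
#include <stdint.h>
#include <stdio.h>

/* Relaxed increment-and-return: atomic, but no ordering guarantees for
 * surrounding loads/stores (analogue of atomic64_inc_return_relaxed()). */
static inline int64_t demo_inc_return_relaxed(_Atomic int64_t *v)
{
	return atomic_fetch_add_explicit(v, 1, memory_order_relaxed) + 1;
}

/* Fully ordered increment-and-return: the relaxed RMW bracketed by full
 * fences, mirroring how the generic layer rebuilds the ordered operation
 * from the _relaxed one. */
static inline int64_t demo_inc_return(_Atomic int64_t *v)
{
	int64_t ret;

	atomic_thread_fence(memory_order_seq_cst);
	ret = demo_inc_return_relaxed(v);
	atomic_thread_fence(memory_order_seq_cst);
	return ret;
}

int main(void)
{
	_Atomic int64_t counter = 0;

	printf("relaxed: %lld\n", (long long)demo_inc_return_relaxed(&counter));
	printf("ordered: %lld\n", (long long)demo_inc_return(&counter));
	return 0;
}
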