@@ -525,7 +525,7 @@ static __inline__ long atomic64_dec_if_positive(atomic64_t *v)
 #define atomic64_xchg_relaxed(v, new) xchg_relaxed(&((v)->counter), (new))
 
 /**
- * atomic64_add_unless - add unless the number is a given value
+ * atomic64_fetch_add_unless - add unless the number is a given value
  * @v: pointer of type atomic64_t
  * @a: the amount to add to v...
  * @u: ...unless v is equal to u.
@@ -533,13 +533,13 @@ static __inline__ long atomic64_dec_if_positive(atomic64_t *v)
  * Atomically adds @a to @v, so long as it was not @u.
  * Returns the old value of @v.
  */
-static __inline__ int atomic64_add_unless(atomic64_t *v, long a, long u)
+static __inline__ long atomic64_fetch_add_unless(atomic64_t *v, long a, long u)
 {
 	long t;
 
 	__asm__ __volatile__ (
 	PPC_ATOMIC_ENTRY_BARRIER
-"1:	ldarx	%0,0,%1		# atomic_fetch_add_unless\n\
+"1:	ldarx	%0,0,%1		# atomic64_fetch_add_unless\n\
 	cmpd	0,%0,%3		\n\
 	beq	2f		\n\
 	add	%0,%2,%0	\n"
@@ -552,8 +552,9 @@ static __inline__ int atomic64_add_unless(atomic64_t *v, long a, long u)
 	: "r" (&v->counter), "r" (a), "r" (u)
 	: "cc", "memory");
 
-	return t != u;
+	return t;
 }
+#define atomic64_fetch_add_unless atomic64_fetch_add_unless
 
 /**
  * atomic_inc64_not_zero - increment unless the number is zero