@@ -531,41 +531,40 @@ static inline long long atomic64_dec_if_positive(atomic64_t *v)
 }

 /**
- * atomic64_add_unless - add unless the number is a given value
+ * atomic64_fetch_add_unless - add unless the number is a given value
  * @v: pointer of type atomic64_t
  * @a: the amount to add to v...
  * @u: ...unless v is equal to u.
  *
- * if (v != u) { v += a; ret = 1} else {ret = 0}
- * Returns 1 iff @v was not @u (i.e. if add actually happened)
+ * Atomically adds @a to @v, if it was not @u.
+ * Returns the old value of @v
  */
-static inline int atomic64_add_unless(atomic64_t *v, long long a, long long u)
+static inline long long atomic64_fetch_add_unless(atomic64_t *v, long long a,
+                                                  long long u)
 {
-       long long val;
-       int op_done;
+       long long old, temp;

        smp_mb();

        __asm__ __volatile__(
        "1:     llockd  %0, [%2]        \n"
-       "       mov     %1, 1           \n"
        "       brne    %L0, %L4, 2f    # continue to add since v != u \n"
        "       breq.d  %H0, %H4, 3f    # return since v == u \n"
-       "       mov     %1, 0           \n"
        "2:                             \n"
-       "       add.f   %L0, %L0, %L3   \n"
-       "       adc     %H0, %H0, %H3   \n"
-       "       scondd  %0, [%2]        \n"
+       "       add.f   %L1, %L0, %L3   \n"
+       "       adc     %H1, %H0, %H3   \n"
+       "       scondd  %1, [%2]        \n"
        "       bnz     1b              \n"
        "3:                             \n"
-       : "=&r"(val), "=&r" (op_done)
+       : "=&r"(old), "=&r" (temp)
        : "r"(&v->counter), "r"(a), "r"(u)
        : "cc");        /* memory clobber comes from smp_mb() */

        smp_mb();

-       return op_done;
+       return old;
 }
+#define atomic64_fetch_add_unless atomic64_fetch_add_unless

 #define atomic64_add_negative(a, v)    (atomic64_add_return((a), (v)) < 0)
 #define atomic64_inc(v)                atomic64_add(1LL, (v))
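
For reference, not part of the patch: a plain-C model of the semantics the LLOCKD/SCONDD loop provides atomically, and of why returning the old value is enough for callers that previously relied on the int return. The atomic64_t typedef below is a hypothetical stand-in so the sketch compiles on its own; in the kernel, the real type and the atomicity come from the arch headers and the LL/SC retry loop.

#include <stdbool.h>

/* Hypothetical stand-in type so this sketch is self-contained. */
typedef struct { long long counter; } atomic64_t;

/*
 * Reference model -- NOT atomic. The asm above does this atomically:
 * load the old value (llockd), skip the add when old == u (brne/breq.d),
 * otherwise store old + a (add.f/adc into temp, scondd, retried via
 * bnz on failure), and in every case hand back the old value.
 */
static long long model_fetch_add_unless(atomic64_t *v, long long a,
                                        long long u)
{
        long long old = v->counter;

        if (old != u)
                v->counter = old + a;
        return old;
}

/*
 * The old "did the add happen?" answer (the removed mov %1, 0/1 pair)
 * falls out of comparing the returned value against @u, which is how
 * add_unless-style helpers can be derived from the fetch variant:
 */
static bool model_add_unless(atomic64_t *v, long long a, long long u)
{
        return model_fetch_add_unless(v, a, u) != u;
}

An inc_not_zero-style helper is the same pattern with a = 1 and u = 0. Keeping temp for the new value, instead of adding in place into %0, is what lets the function return the loaded old value.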