|
@@ -117,35 +117,37 @@ static inline int atomic_sub_return(int i, atomic_t *v)
|
|
|
/* Subtract @i from @v, discarding the value atomic_sub_return() computes. */
#define atomic_sub(i, v) atomic_sub_return(i, (v))
|
|
|
|
|
|
/**
 * __atomic_add_unless - add unless the number is a given value
 * @v: pointer to value
 * @a: amount to add
 * @u: unless value is equal to u
 *
 * Returns old value.
 *
 */
static inline int __atomic_add_unless(atomic_t *v, int a, int u)
{
	int __oldval;		/* value observed at *v; also the return value */
	register int tmp;	/* scratch holding the updated value to store */

	/*
	 * Hexagon load-locked/store-conditional loop:
	 *
	 * 1: load-locked *v into %0 (__oldval).
	 *    Then, in a single packet: compare %0 against @u and, if equal,
	 *    branch to 2f so the store-conditional is skipped (the add in
	 *    the same packet targets only the scratch %1, so __oldval is
	 *    left holding the observed value); otherwise fall through and
	 *    attempt the conditional store of %1 back to *v.
	 *    p3 reports whether the store-conditional succeeded; on failure
	 *    (!p3, reservation lost) retry from 1b.
	 * 2: done -- __oldval holds the value that was seen at *v.
	 *
	 * NOTE(review): the store-conditional success semantics of
	 * "memw_locked(Rx, Pd) = Rt" follow the Hexagon architecture
	 * manual -- confirm against the target ISA revision.
	 */
	asm volatile(
		"1: %0 = memw_locked(%2);"
		" {"
		" p3 = cmp.eq(%0, %4);"
		" if (p3.new) jump:nt 2f;"
		" %1 = add(%0, %3);"
		" }"
		" memw_locked(%2, p3) = %1;"
		" {"
		" if !p3 jump 1b;"
		" }"
		"2:"
		: "=&r" (__oldval), "=&r" (tmp)	/* earlyclobber: written before all inputs consumed */
		: "r" (v), "r" (a), "r" (u)
		: "memory", "p3"	/* p3 is clobbered by the cmp/store */
	);
	return __oldval;
}
|
|
|
|
|
|
/*
 * Increment @v unless it is zero; nonzero result means the increment
 * happened.  NOTE(review): this expands to atomic_add_unless(), while the
 * primitive above is now named __atomic_add_unless() -- presumably a
 * generic wrapper supplies atomic_add_unless(); verify it is in scope.
 */
#define atomic_inc_not_zero(v) atomic_add_unless((v), 1, 0)
|