@@ -218,7 +218,7 @@ static __inline__ int atomic_dec_return_relaxed(atomic_t *v)
 #define atomic_xchg_relaxed(v, new) xchg_relaxed(&((v)->counter), (new))
 
 /**
- * __atomic_add_unless - add unless the number is a given value
+ * atomic_fetch_add_unless - add unless the number is a given value
  * @v: pointer of type atomic_t
  * @a: the amount to add to v...
  * @u: ...unless v is equal to u.
@@ -226,13 +226,13 @@ static __inline__ int atomic_dec_return_relaxed(atomic_t *v)
  * Atomically adds @a to @v, so long as it was not @u.
  * Returns the old value of @v.
  */
-static __inline__ int __atomic_add_unless(atomic_t *v, int a, int u)
+static __inline__ int atomic_fetch_add_unless(atomic_t *v, int a, int u)
 {
 	int t;
 
 	__asm__ __volatile__ (
 	PPC_ATOMIC_ENTRY_BARRIER
-"1:	lwarx	%0,0,%1		# __atomic_add_unless\n\
+"1:	lwarx	%0,0,%1		# atomic_fetch_add_unless\n\
 	cmpw	0,%0,%3 \n\
 	beq	2f \n\
 	add	%0,%2,%0 \n"
@@ -538,7 +538,7 @@ static __inline__ int atomic64_add_unless(atomic64_t *v, long a, long u)
 
 	__asm__ __volatile__ (
 	PPC_ATOMIC_ENTRY_BARRIER
-"1:	ldarx	%0,0,%1		# __atomic_add_unless\n\
+"1:	ldarx	%0,0,%1		# atomic_fetch_add_unless\n\
 	cmpd	0,%0,%3 \n\
 	beq	2f \n\
 	add	%0,%2,%0 \n"