@@ -101,14 +101,13 @@ __LL_SC_PREFIX(atomic_cmpxchg(atomic_t *ptr, int old, int new))
 
 	asm volatile("// atomic_cmpxchg\n"
 "1:	ldxr	%w1, %2\n"
-"	cmp	%w1, %w3\n"
-"	b.ne	2f\n"
+"	eor	%w0, %w1, %w3\n"
+"	cbnz	%w0, 2f\n"
 "	stxr	%w0, %w4, %2\n"
 "	cbnz	%w0, 1b\n"
 "2:"
 	: "=&r" (tmp), "=&r" (oldval), "+Q" (ptr->counter)
-	: "Ir" (old), "r" (new)
-	: "cc");
+	: "Lr" (old), "r" (new));
 
 	smp_mb();
 	return oldval;
@@ -179,14 +178,13 @@ __LL_SC_PREFIX(atomic64_cmpxchg(atomic64_t *ptr, long old, long new))
 
 	asm volatile("// atomic64_cmpxchg\n"
 "1:	ldxr	%1, %2\n"
-"	cmp	%1, %3\n"
-"	b.ne	2f\n"
+"	eor	%0, %1, %3\n"
+"	cbnz	%0, 2f\n"
 "	stxr	%w0, %4, %2\n"
 "	cbnz	%w0, 1b\n"
 "2:"
 	: "=&r" (res), "=&r" (oldval), "+Q" (ptr->counter)
-	: "Ir" (old), "r" (new)
-	: "cc");
+	: "Lr" (old), "r" (new));
 
 	smp_mb();
 	return oldval;
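
For reference, the same sequence can be tried in isolation with the user-space
sketch below (illustrative only, not part of the patch). The point of the
change is that neither eor nor cbnz touches the NZCV condition flags, unlike
cmp/b.ne, so the "cc" clobber can be dropped; the immediate constraint also
changes from "I" (add/sub immediate, suitable for cmp) to "L" (logical
immediate, suitable for eor). The names ll_sc_cmpxchg and val are invented
for the example; it builds only for an AArch64 target with GCC or Clang, and
it omits the smp_mb() barriers that the kernel version pairs with this loop.

/* Sketch of the patched LL/SC compare-and-swap loop; AArch64 only.
 * Names (ll_sc_cmpxchg, val) are invented for this example. */
#include <stdio.h>

static inline int ll_sc_cmpxchg(int *ptr, int old, int new)
{
	unsigned long tmp;
	int oldval;

	asm volatile(
	"1:	ldxr	%w1, %2\n"		/* load-exclusive current value */
	"	eor	%w0, %w1, %w3\n"	/* nonzero iff value != old; flags untouched */
	"	cbnz	%w0, 2f\n"		/* mismatch: give up without storing */
	"	stxr	%w0, %w4, %2\n"		/* try to store new; %w0 == 0 on success */
	"	cbnz	%w0, 1b\n"		/* lost the exclusive monitor: retry */
	"2:"
	: "=&r" (tmp), "=&r" (oldval), "+Q" (*ptr)
	: "Lr" (old), "r" (new));		/* "L": logical immediate, as eor requires */

	return oldval;
}

int main(void)
{
	int val = 42;

	/* Succeeds: val matches the expected 42, so 7 is stored; prints 42. */
	printf("was %d\n", ll_sc_cmpxchg(&val, 42, 7));
	/* Fails: val is now 7, not 42, so nothing is stored; prints 7. */
	printf("was %d\n", ll_sc_cmpxchg(&val, 42, 9));
	return 0;
}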