@@ -140,10 +140,11 @@ static inline int arch_write_trylock(arch_rwlock_t *rw)
 	unsigned int tmp;
 
 	asm volatile(
-	"	ldaxr	%w0, %1\n"
-	"	cbnz	%w0, 1f\n"
+	"1:	ldaxr	%w0, %1\n"
+	"	cbnz	%w0, 2f\n"
 	"	stxr	%w0, %w2, %1\n"
-	"1:\n"
+	"	cbnz	%w0, 1b\n"
+	"2:\n"
 	: "=&r" (tmp), "+Q" (rw->lock)
 	: "r" (0x80000000)
 	: "memory");
@@ -209,11 +210,12 @@ static inline int arch_read_trylock(arch_rwlock_t *rw)
 	unsigned int tmp, tmp2 = 1;
 
 	asm volatile(
-	"	ldaxr	%w0, %2\n"
+	"1:	ldaxr	%w0, %2\n"
 	"	add	%w0, %w0, #1\n"
-	"	tbnz	%w0, #31, 1f\n"
+	"	tbnz	%w0, #31, 2f\n"
 	"	stxr	%w1, %w0, %2\n"
-	"1:\n"
+	"	cbnz	%w1, 1b\n"
+	"2:\n"
 	: "=&r" (tmp), "+r" (tmp2), "+Q" (rw->lock)
 	:
 	: "memory");