@@ -22,6 +22,7 @@
 #include <linux/mmdebug.h>
 
 #include <asm/barrier.h>
+#include <asm/lse.h>
 
 static inline unsigned long __xchg(unsigned long x, volatile void *ptr, int size)
 {
@@ -29,37 +30,65 @@ static inline unsigned long __xchg(unsigned long x, volatile void *ptr, int size
 
 	switch (size) {
 	case 1:
-		asm volatile("//	__xchg1\n"
+		asm volatile(ARM64_LSE_ATOMIC_INSN(
+		/* LL/SC */
 		"1:	ldxrb	%w0, %2\n"
 		"	stlxrb	%w1, %w3, %2\n"
 		"	cbnz	%w1, 1b\n"
+		"	dmb	ish",
+		/* LSE atomics */
+		"	nop\n"
+		"	swpalb	%w3, %w0, %2\n"
+		"	nop\n"
+		"	nop")
 			: "=&r" (ret), "=&r" (tmp), "+Q" (*(u8 *)ptr)
 			: "r" (x)
 			: "memory");
 		break;
 	case 2:
-		asm volatile("//	__xchg2\n"
+		asm volatile(ARM64_LSE_ATOMIC_INSN(
+		/* LL/SC */
 		"1:	ldxrh	%w0, %2\n"
 		"	stlxrh	%w1, %w3, %2\n"
 		"	cbnz	%w1, 1b\n"
+		"	dmb	ish",
+		/* LSE atomics */
+		"	nop\n"
+		"	swpalh	%w3, %w0, %2\n"
+		"	nop\n"
+		"	nop")
 			: "=&r" (ret), "=&r" (tmp), "+Q" (*(u16 *)ptr)
 			: "r" (x)
 			: "memory");
 		break;
 	case 4:
-		asm volatile("//	__xchg4\n"
+		asm volatile(ARM64_LSE_ATOMIC_INSN(
+		/* LL/SC */
 		"1:	ldxr	%w0, %2\n"
 		"	stlxr	%w1, %w3, %2\n"
 		"	cbnz	%w1, 1b\n"
+		"	dmb	ish",
+		/* LSE atomics */
+		"	nop\n"
+		"	swpal	%w3, %w0, %2\n"
+		"	nop\n"
+		"	nop")
 			: "=&r" (ret), "=&r" (tmp), "+Q" (*(u32 *)ptr)
 			: "r" (x)
 			: "memory");
 		break;
 	case 8:
-		asm volatile("//	__xchg8\n"
+		asm volatile(ARM64_LSE_ATOMIC_INSN(
+		/* LL/SC */
 		"1:	ldxr	%0, %2\n"
 		"	stlxr	%w1, %3, %2\n"
 		"	cbnz	%w1, 1b\n"
+		"	dmb	ish",
+		/* LSE atomics */
+		"	nop\n"
+		"	swpal	%3, %0, %2\n"
+		"	nop\n"
+		"	nop")
 			: "=&r" (ret), "=&r" (tmp), "+Q" (*(u64 *)ptr)
 			: "r" (x)
 			: "memory");
@@ -68,7 +97,6 @@ static inline unsigned long __xchg(unsigned long x, volatile void *ptr, int size
 		BUILD_BUG();
 	}
 
-	smp_mb();
 	return ret;
 }
 
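A note on the mechanism, since it is easy to miss from the diff alone: ARM64_LSE_ATOMIC_INSN comes from the newly included <asm/lse.h> and wraps the arm64 runtime-alternatives framework. A minimal sketch of the idea, assuming the mainline definitions of ALTERNATIVE and the ARM64_HAS_LSE_ATOMICS CPU capability (check asm/lse.h in your tree for the exact form):

	/* sketch of <asm/lse.h>, not the verbatim header */
	#include <asm/alternative.h>

	#if defined(CONFIG_AS_LSE) && defined(CONFIG_ARM64_LSE_ATOMICS)
	/*
	 * Emit the LL/SC sequence by default; if boot-time CPU feature
	 * detection sets ARM64_HAS_LSE_ATOMICS, the alternatives framework
	 * patches the LSE sequence over it in place. Both sequences must
	 * occupy the same number of instructions, which is why each single
	 * swpal{b,h} above is padded out to four instructions with nops.
	 */
	#define ARM64_LSE_ATOMIC_INSN(llsc, lse)	\
		ALTERNATIVE(llsc, lse, ARM64_HAS_LSE_ATOMICS)
	#else
	#define ARM64_LSE_ATOMIC_INSN(llsc, lse)	llsc
	#endif

The dropped trailing smp_mb() is not lost: the LL/SC half now ends with an explicit "dmb ish" inside the asm, and the LSE swpal instructions carry acquire/release semantics themselves, so full ordering is preserved on both paths.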