|
@@ -24,36 +24,43 @@
|
|
|
# define __scbeqz "beqz"
|
|
|
#endif
|
|
|
|
|
|
/*
 * __xchg_asm() - atomically exchange the word at @m with @val.
 * @ld:  load-linked mnemonic to use ("ll" for 32-bit, "lld" for 64-bit)
 * @st:  store-conditional mnemonic to use ("sc" or "scd")
 * @m:   pointer to the (volatile) memory word to exchange
 * @val: new value to store into *m
 *
 * Evaluates to the previous value of *m (typed __typeof(*(m)) via the
 * statement-expression).  When the kernel uses ll/sc the exchange is a
 * retry loop: load-linked the old value into %0, copy @val into $1
 * (hence .set noat — $at is clobbered by hand), store-conditionally $1
 * back, and branch to 1b on failure via __scbeqz.  Otherwise the
 * exchange is done non-atomically under raw_local_irq_save/restore,
 * which suffices on UP systems without ll/sc.
 *
 * NOTE(review): no memory barriers here — callers (__xchg_u32/__xchg_u64)
 * are responsible for smp_mb__before_llsc()/smp_llsc_mb().
 */
#define __xchg_asm(ld, st, m, val)					\
({									\
	__typeof(*(m)) __ret;						\
									\
	if (kernel_uses_llsc) {						\
		__asm__ __volatile__(					\
		"	.set	push				\n"	\
		"	.set	noat				\n"	\
		"	.set	" MIPS_ISA_ARCH_LEVEL "		\n"	\
		"1:	" ld "	%0, %2		# __xchg_asm	\n"	\
		"	.set	mips0				\n"	\
		"	move	$1, %z3				\n"	\
		"	.set	" MIPS_ISA_ARCH_LEVEL "		\n"	\
		"	" st "	$1, %1				\n"	\
		"\t" __scbeqz "	$1, 1b				\n"	\
		"	.set	pop				\n"	\
		: "=&r" (__ret), "=" GCC_OFF_SMALL_ASM() (*m)		\
		: GCC_OFF_SMALL_ASM() (*m), "Jr" (val)			\
		: "memory");						\
	} else {							\
		unsigned long __flags;					\
									\
		raw_local_irq_save(__flags);				\
		__ret = *m;						\
		*m = val;						\
		raw_local_irq_restore(__flags);				\
	}								\
									\
	__ret;								\
})
|
|
|
/*
 * __xchg_u32() - atomically exchange the 32-bit word at @m with @val.
 *
 * Returns the previous value of *m.  Provides full xchg() barrier
 * semantics: a barrier before the ll/sc sequence (smp_mb__before_llsc)
 * and one after it (smp_llsc_mb); the actual exchange is delegated to
 * __xchg_asm() using the 32-bit ll/sc instructions.
 */
static inline unsigned long __xchg_u32(volatile int * m, unsigned int val)
{
	__u32 retval;

	smp_mb__before_llsc();
	retval = __xchg_asm("ll", "sc", m, val);
	smp_llsc_mb();

	return retval;
}
|
/*
 * __xchg_u64() - atomically exchange the 64-bit word at @m with @val.
 *
 * Returns the previous value of *m.  Provides full xchg() barrier
 * semantics: a barrier before the ll/sc sequence (smp_mb__before_llsc)
 * and one after it (smp_llsc_mb); the actual exchange is delegated to
 * __xchg_asm() using the 64-bit lld/scd instructions.
 *
 * NOTE(review): signature reconstructed from the hunk-context line of
 * the patch — confirm against the surrounding file.
 */
static inline __u64 __xchg_u64(volatile __u64 * m, __u64 val)
{
	__u64 retval;

	smp_mb__before_llsc();
	retval = __xchg_asm("lld", "scd", m, val);
	smp_llsc_mb();

	return retval;
}