@@ -117,7 +117,7 @@ static inline void atomic_and(int i, atomic_t *v)
 /* LSE atomics */
 " mvn %w[i], %w[i]\n"
 " stclr %w[i], %[v]")
- : [i] "+r" (w0), [v] "+Q" (v->counter)
+ : [i] "+&r" (w0), [v] "+Q" (v->counter)
 : "r" (x1)
 : __LL_SC_CLOBBERS);
 }
@@ -135,7 +135,7 @@ static inline int atomic_fetch_and##name(int i, atomic_t *v) \
 /* LSE atomics */ \
 " mvn %w[i], %w[i]\n" \
 " ldclr" #mb " %w[i], %w[i], %[v]") \
- : [i] "+r" (w0), [v] "+Q" (v->counter) \
+ : [i] "+&r" (w0), [v] "+Q" (v->counter) \
 : "r" (x1) \
 : __LL_SC_CLOBBERS, ##cl); \
 \
@@ -161,7 +161,7 @@ static inline void atomic_sub(int i, atomic_t *v)
 /* LSE atomics */
 " neg %w[i], %w[i]\n"
 " stadd %w[i], %[v]")
- : [i] "+r" (w0), [v] "+Q" (v->counter)
+ : [i] "+&r" (w0), [v] "+Q" (v->counter)
 : "r" (x1)
 : __LL_SC_CLOBBERS);
 }
@@ -180,7 +180,7 @@ static inline int atomic_sub_return##name(int i, atomic_t *v) \
 " neg %w[i], %w[i]\n" \
 " ldadd" #mb " %w[i], w30, %[v]\n" \
 " add %w[i], %w[i], w30") \
- : [i] "+r" (w0), [v] "+Q" (v->counter) \
+ : [i] "+&r" (w0), [v] "+Q" (v->counter) \
 : "r" (x1) \
 : __LL_SC_CLOBBERS , ##cl); \
 \
@@ -207,7 +207,7 @@ static inline int atomic_fetch_sub##name(int i, atomic_t *v) \
 /* LSE atomics */ \
 " neg %w[i], %w[i]\n" \
 " ldadd" #mb " %w[i], %w[i], %[v]") \
- : [i] "+r" (w0), [v] "+Q" (v->counter) \
+ : [i] "+&r" (w0), [v] "+Q" (v->counter) \
 : "r" (x1) \
 : __LL_SC_CLOBBERS, ##cl); \
 \
@@ -314,7 +314,7 @@ static inline void atomic64_and(long i, atomic64_t *v)
 /* LSE atomics */
 " mvn %[i], %[i]\n"
 " stclr %[i], %[v]")
- : [i] "+r" (x0), [v] "+Q" (v->counter)
+ : [i] "+&r" (x0), [v] "+Q" (v->counter)
 : "r" (x1)
 : __LL_SC_CLOBBERS);
 }
@@ -332,7 +332,7 @@ static inline long atomic64_fetch_and##name(long i, atomic64_t *v) \
 /* LSE atomics */ \
 " mvn %[i], %[i]\n" \
 " ldclr" #mb " %[i], %[i], %[v]") \
- : [i] "+r" (x0), [v] "+Q" (v->counter) \
+ : [i] "+&r" (x0), [v] "+Q" (v->counter) \
 : "r" (x1) \
 : __LL_SC_CLOBBERS, ##cl); \
 \
@@ -358,7 +358,7 @@ static inline void atomic64_sub(long i, atomic64_t *v)
 /* LSE atomics */
 " neg %[i], %[i]\n"
 " stadd %[i], %[v]")
- : [i] "+r" (x0), [v] "+Q" (v->counter)
+ : [i] "+&r" (x0), [v] "+Q" (v->counter)
 : "r" (x1)
 : __LL_SC_CLOBBERS);
 }
@@ -377,7 +377,7 @@ static inline long atomic64_sub_return##name(long i, atomic64_t *v) \
 " neg %[i], %[i]\n" \
 " ldadd" #mb " %[i], x30, %[v]\n" \
 " add %[i], %[i], x30") \
- : [i] "+r" (x0), [v] "+Q" (v->counter) \
+ : [i] "+&r" (x0), [v] "+Q" (v->counter) \
 : "r" (x1) \
 : __LL_SC_CLOBBERS, ##cl); \
 \
@@ -404,7 +404,7 @@ static inline long atomic64_fetch_sub##name(long i, atomic64_t *v) \
 /* LSE atomics */ \
 " neg %[i], %[i]\n" \
 " ldadd" #mb " %[i], %[i], %[v]") \
- : [i] "+r" (x0), [v] "+Q" (v->counter) \
+ : [i] "+&r" (x0), [v] "+Q" (v->counter) \
 : "r" (x1) \
 : __LL_SC_CLOBBERS, ##cl); \
 \
@@ -435,7 +435,7 @@ static inline long atomic64_dec_if_positive(atomic64_t *v)
 " sub x30, x30, %[ret]\n"
 " cbnz x30, 1b\n"
 "2:")
- : [ret] "+r" (x0), [v] "+Q" (v->counter)
+ : [ret] "+&r" (x0), [v] "+Q" (v->counter)
 :
 : __LL_SC_CLOBBERS, "cc", "memory");
 
@@ -516,7 +516,7 @@ static inline long __cmpxchg_double##name(unsigned long old1, \
 " eor %[old1], %[old1], %[oldval1]\n" \
 " eor %[old2], %[old2], %[oldval2]\n" \
 " orr %[old1], %[old1], %[old2]") \
- : [old1] "+r" (x0), [old2] "+r" (x1), \
+ : [old1] "+&r" (x0), [old2] "+&r" (x1), \
 [v] "+Q" (*(unsigned long *)ptr) \
 : [new1] "r" (x2), [new2] "r" (x3), [ptr] "r" (x4), \
 [oldval1] "r" (oldval1), [oldval2] "r" (oldval2) \
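
Aside, not part of the patch: every hunk above makes the same one-character change, adding GCC's early-clobber modifier ("&") to asm operands that are written before the last read of an input. The stand-alone sketch below illustrates why that matters; the add_twice() helper and its operand names are hypothetical and it assumes an arm64 (aarch64) GCC or Clang toolchain, not the kernel build.

#include <stdio.h>

/*
 * Sketch only: [tmp] is written by the first instruction, but [i] is
 * still read by the later adds.  Without the "&" in "=&r", the compiler
 * is allowed to give [tmp] and [i] (or the base register of [v]) the
 * same register, which would corrupt the input and give a wrong result.
 */
static long add_twice(long *p, long addend)
{
	long tmp;

	asm("	ldr	%[tmp], %[v]\n"		/* tmp = *p */
	    "	add	%[tmp], %[tmp], %[i]\n"	/* tmp += addend */
	    "	add	%[tmp], %[tmp], %[i]\n"	/* [i] still live here */
	    "	str	%[tmp], %[v]\n"		/* *p = tmp */
	    : [tmp] "=&r" (tmp), [v] "+Q" (*p)	/* "&": early clobber */
	    : [i] "r" (addend));

	return tmp;
}

int main(void)
{
	long v = 1;
	long r = add_twice(&v, 2);

	printf("r=%ld v=%ld\n", r, v);		/* expect 5 and 5 */
	return 0;
}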