@@ -36,7 +36,7 @@ static inline void atomic_andnot(int i, atomic_t *v)
 	"	stclr	%w[i], %[v]\n")
 	: [i] "+r" (w0), [v] "+Q" (v->counter)
 	: "r" (x1)
-	: "x30");
+	: __LL_SC_CLOBBERS);
 }
 
 static inline void atomic_or(int i, atomic_t *v)
@@ -48,7 +48,7 @@ static inline void atomic_or(int i, atomic_t *v)
 	"	stset	%w[i], %[v]\n")
 	: [i] "+r" (w0), [v] "+Q" (v->counter)
 	: "r" (x1)
-	: "x30");
+	: __LL_SC_CLOBBERS);
 }
 
 static inline void atomic_xor(int i, atomic_t *v)
@@ -60,7 +60,7 @@ static inline void atomic_xor(int i, atomic_t *v)
 	"	steor	%w[i], %[v]\n")
 	: [i] "+r" (w0), [v] "+Q" (v->counter)
 	: "r" (x1)
-	: "x30");
+	: __LL_SC_CLOBBERS);
 }
 
 static inline void atomic_add(int i, atomic_t *v)
@@ -72,7 +72,7 @@ static inline void atomic_add(int i, atomic_t *v)
 	"	stadd	%w[i], %[v]\n")
 	: [i] "+r" (w0), [v] "+Q" (v->counter)
 	: "r" (x1)
-	: "x30");
+	: __LL_SC_CLOBBERS);
 }
 
 #define ATOMIC_OP_ADD_RETURN(name, mb, cl...)				\
@@ -90,7 +90,7 @@ static inline int atomic_add_return##name(int i, atomic_t *v)	\
 	"	add	%w[i], %w[i], w30")				\
 	: [i] "+r" (w0), [v] "+Q" (v->counter)				\
 	: "r" (x1)							\
-	: "x30" , ##cl);						\
+	: __LL_SC_CLOBBERS, ##cl);					\
 									\
 	return w0;							\
 }
@@ -116,7 +116,7 @@ static inline void atomic_and(int i, atomic_t *v)
 	"	stclr	%w[i], %[v]")
 	: [i] "+r" (w0), [v] "+Q" (v->counter)
 	: "r" (x1)
-	: "x30");
+	: __LL_SC_CLOBBERS);
 }
 
 static inline void atomic_sub(int i, atomic_t *v)
@@ -133,7 +133,7 @@ static inline void atomic_sub(int i, atomic_t *v)
 	"	stadd	%w[i], %[v]")
 	: [i] "+r" (w0), [v] "+Q" (v->counter)
 	: "r" (x1)
-	: "x30");
+	: __LL_SC_CLOBBERS);
 }
 
 #define ATOMIC_OP_SUB_RETURN(name, mb, cl...)				\
@@ -153,7 +153,7 @@ static inline int atomic_sub_return##name(int i, atomic_t *v)	\
 	"	add	%w[i], %w[i], w30")				\
 	: [i] "+r" (w0), [v] "+Q" (v->counter)				\
 	: "r" (x1)							\
-	: "x30" , ##cl);						\
+	: __LL_SC_CLOBBERS , ##cl);					\
 									\
 	return w0;							\
 }
@@ -177,7 +177,7 @@ static inline void atomic64_andnot(long i, atomic64_t *v)
 	"	stclr	%[i], %[v]\n")
 	: [i] "+r" (x0), [v] "+Q" (v->counter)
 	: "r" (x1)
-	: "x30");
+	: __LL_SC_CLOBBERS);
 }
 
 static inline void atomic64_or(long i, atomic64_t *v)
@@ -189,7 +189,7 @@ static inline void atomic64_or(long i, atomic64_t *v)
 	"	stset	%[i], %[v]\n")
 	: [i] "+r" (x0), [v] "+Q" (v->counter)
 	: "r" (x1)
-	: "x30");
+	: __LL_SC_CLOBBERS);
 }
 
 static inline void atomic64_xor(long i, atomic64_t *v)
@@ -201,7 +201,7 @@ static inline void atomic64_xor(long i, atomic64_t *v)
 	"	steor	%[i], %[v]\n")
 	: [i] "+r" (x0), [v] "+Q" (v->counter)
 	: "r" (x1)
-	: "x30");
+	: __LL_SC_CLOBBERS);
 }
 
 static inline void atomic64_add(long i, atomic64_t *v)
@@ -213,7 +213,7 @@ static inline void atomic64_add(long i, atomic64_t *v)
 	"	stadd	%[i], %[v]\n")
 	: [i] "+r" (x0), [v] "+Q" (v->counter)
 	: "r" (x1)
-	: "x30");
+	: __LL_SC_CLOBBERS);
 }
 
 #define ATOMIC64_OP_ADD_RETURN(name, mb, cl...)				\
@@ -231,7 +231,7 @@ static inline long atomic64_add_return##name(long i, atomic64_t *v)	\
 	"	add	%[i], %[i], x30")				\
 	: [i] "+r" (x0), [v] "+Q" (v->counter)				\
 	: "r" (x1)							\
-	: "x30" , ##cl);						\
+	: __LL_SC_CLOBBERS, ##cl);					\
 									\
 	return x0;							\
 }
@@ -257,7 +257,7 @@ static inline void atomic64_and(long i, atomic64_t *v)
 	"	stclr	%[i], %[v]")
 	: [i] "+r" (x0), [v] "+Q" (v->counter)
 	: "r" (x1)
-	: "x30");
+	: __LL_SC_CLOBBERS);
 }
 
 static inline void atomic64_sub(long i, atomic64_t *v)
@@ -274,7 +274,7 @@ static inline void atomic64_sub(long i, atomic64_t *v)
 	"	stadd	%[i], %[v]")
 	: [i] "+r" (x0), [v] "+Q" (v->counter)
 	: "r" (x1)
-	: "x30");
+	: __LL_SC_CLOBBERS);
 }
 
 #define ATOMIC64_OP_SUB_RETURN(name, mb, cl...)				\
@@ -294,7 +294,7 @@ static inline long atomic64_sub_return##name(long i, atomic64_t *v)	\
 	"	add	%[i], %[i], x30")				\
 	: [i] "+r" (x0), [v] "+Q" (v->counter)				\
 	: "r" (x1)							\
-	: "x30" , ##cl);						\
+	: __LL_SC_CLOBBERS, ##cl);					\
 									\
 	return x0;							\
 }
@@ -330,7 +330,7 @@ static inline long atomic64_dec_if_positive(atomic64_t *v)
 	"2:")
 	: [ret] "+&r" (x0), [v] "+Q" (v->counter)
 	:
-	: "x30", "cc", "memory");
+	: __LL_SC_CLOBBERS, "cc", "memory");
 
 	return x0;
 }
@@ -359,7 +359,7 @@ static inline unsigned long __cmpxchg_case_##name(volatile void *ptr,	\
 	"	mov	%" #w "[ret], " #w "30")			\
 	: [ret] "+r" (x0), [v] "+Q" (*(unsigned long *)ptr)		\
 	: [old] "r" (x1), [new] "r" (x2)				\
-	: "x30" , ##cl);						\
+	: __LL_SC_CLOBBERS, ##cl);					\
 									\
 	return x0;							\
 }
@@ -416,7 +416,7 @@ static inline long __cmpxchg_double##name(unsigned long old1,		\
 	  [v] "+Q" (*(unsigned long *)ptr)				\
 	: [new1] "r" (x2), [new2] "r" (x3), [ptr] "r" (x4),		\
 	  [oldval1] "r" (oldval1), [oldval2] "r" (oldval2)		\
-	: "x30" , ##cl);						\
+	: __LL_SC_CLOBBERS, ##cl);					\
 									\
 	return x0;							\
 }
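
Note: __LL_SC_CLOBBERS itself is not defined in these hunks; presumably it is introduced in arch/arm64/include/asm/lse.h by the same patch. A minimal sketch of the assumed definition:

	/*
	 * Sketch only, assumed definition (asm/lse.h). The out-of-line
	 * LL/SC fallbacks are reached via bl, so x30 (lr) is always
	 * clobbered; when the branch goes through a module PLT veneer,
	 * x16/x17 (IP0/IP1) may be corrupted as well, so they are listed
	 * as clobbers too, which is what motivates replacing the bare
	 * "x30" in every asm clobber list above.
	 */
	#define __LL_SC_CLOBBERS	"x16", "x17", "x30"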