@@ -26,76 +26,53 @@ static __inline__ void atomic_set(atomic_t *v, int i)
 	__asm__ __volatile__("stw%U0%X0 %1,%0" : "=m"(v->counter) : "r"(i));
 }
 
-static __inline__ void atomic_add(int a, atomic_t *v)
-{
-	int t;
-
-	__asm__ __volatile__(
-"1:	lwarx	%0,0,%3		# atomic_add\n\
-	add	%0,%2,%0\n"
-	PPC405_ERR77(0,%3)
-"	stwcx.	%0,0,%3 \n\
-	bne-	1b"
-	: "=&r" (t), "+m" (v->counter)
-	: "r" (a), "r" (&v->counter)
-	: "cc");
+#define ATOMIC_OP(op, asm_op)						\
+static __inline__ void atomic_##op(int a, atomic_t *v)			\
+{									\
+	int t;								\
+									\
+	__asm__ __volatile__(						\
+"1:	lwarx	%0,0,%3		# atomic_" #op "\n"			\
+	#asm_op " %0,%2,%0\n"						\
+	PPC405_ERR77(0,%3)						\
+"	stwcx.	%0,0,%3 \n"						\
+"	bne-	1b\n"							\
+	: "=&r" (t), "+m" (v->counter)					\
+	: "r" (a), "r" (&v->counter)					\
+	: "cc");							\
+}									\
+
+#define ATOMIC_OP_RETURN(op, asm_op)					\
+static __inline__ int atomic_##op##_return(int a, atomic_t *v)		\
+{									\
+	int t;								\
+									\
+	__asm__ __volatile__(						\
+	PPC_ATOMIC_ENTRY_BARRIER					\
+"1:	lwarx	%0,0,%2		# atomic_" #op "_return\n"		\
+	#asm_op " %0,%1,%0\n"						\
+	PPC405_ERR77(0,%2)						\
+"	stwcx.	%0,0,%2 \n"						\
+"	bne-	1b\n"							\
+	PPC_ATOMIC_EXIT_BARRIER						\
+	: "=&r" (t)							\
+	: "r" (a), "r" (&v->counter)					\
+	: "cc", "memory");						\
+									\
+	return t;							\
 }
 
-static __inline__ int atomic_add_return(int a, atomic_t *v)
-{
-	int t;
+#define ATOMIC_OPS(op, asm_op) ATOMIC_OP(op, asm_op) ATOMIC_OP_RETURN(op, asm_op)
 
-	__asm__ __volatile__(
-	PPC_ATOMIC_ENTRY_BARRIER
-"1:	lwarx	%0,0,%2		# atomic_add_return\n\
-	add	%0,%1,%0\n"
-	PPC405_ERR77(0,%2)
-"	stwcx.	%0,0,%2 \n\
-	bne-	1b"
-	PPC_ATOMIC_EXIT_BARRIER
-	: "=&r" (t)
-	: "r" (a), "r" (&v->counter)
-	: "cc", "memory");
+ATOMIC_OPS(add, add)
+ATOMIC_OPS(sub, subf)
 
-	return t;
-}
+#undef ATOMIC_OPS
+#undef ATOMIC_OP_RETURN
+#undef ATOMIC_OP
 
 #define atomic_add_negative(a, v)	(atomic_add_return((a), (v)) < 0)
 
-static __inline__ void atomic_sub(int a, atomic_t *v)
-{
-	int t;
-
-	__asm__ __volatile__(
-"1:	lwarx	%0,0,%3		# atomic_sub\n\
-	subf	%0,%2,%0\n"
-	PPC405_ERR77(0,%3)
-"	stwcx.	%0,0,%3 \n\
-	bne-	1b"
-	: "=&r" (t), "+m" (v->counter)
-	: "r" (a), "r" (&v->counter)
-	: "cc");
-}
-
-static __inline__ int atomic_sub_return(int a, atomic_t *v)
-{
-	int t;
-
-	__asm__ __volatile__(
-	PPC_ATOMIC_ENTRY_BARRIER
-"1:	lwarx	%0,0,%2		# atomic_sub_return\n\
-	subf	%0,%1,%0\n"
-	PPC405_ERR77(0,%2)
-"	stwcx.	%0,0,%2 \n\
-	bne-	1b"
-	PPC_ATOMIC_EXIT_BARRIER
-	: "=&r" (t)
-	: "r" (a), "r" (&v->counter)
-	: "cc", "memory");
-
-	return t;
-}
-
 static __inline__ void atomic_inc(atomic_t *v)
 {
 	int t;
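For reference, the macro machinery above regenerates exactly the functions the removed open-coded versions provided. Here is a sketch of what ATOMIC_OP(sub, subf) expands to; the adjacent string literals, including the stringified #op and #asm_op, concatenate into a single asm template:

/* Preprocessor expansion of ATOMIC_OP(sub, subf) */
static __inline__ void atomic_sub(int a, atomic_t *v)
{
	int t;

	__asm__ __volatile__(
"1:	lwarx	%0,0,%3		# atomic_sub\n"	/* load word and reserve v->counter */
"subf %0,%2,%0\n"				/* t = t - a */
	PPC405_ERR77(0,%3)			/* PPC405 erratum #77 workaround */
"	stwcx.	%0,0,%3 \n"			/* store conditionally; fails if reservation lost */
"	bne-	1b\n"				/* on failure, retry from the lwarx */
	: "=&r" (t), "+m" (v->counter)
	: "r" (a), "r" (&v->counter)
	: "cc");
}

Note the split the macros preserve: only the atomic_##op##_return variants are bracketed by PPC_ATOMIC_ENTRY_BARRIER/PPC_ATOMIC_EXIT_BARRIER and clobber "memory", since only value-returning atomics must order surrounding memory accesses; the void atomic_##op forms stay barrier-free and cheaper.
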
@@ -289,71 +266,50 @@ static __inline__ void atomic64_set(atomic64_t *v, long i)
 	__asm__ __volatile__("std%U0%X0 %1,%0" : "=m"(v->counter) : "r"(i));
 }
 
-static __inline__ void atomic64_add(long a, atomic64_t *v)
-{
-	long t;
-
-	__asm__ __volatile__(
-"1:	ldarx	%0,0,%3		# atomic64_add\n\
-	add	%0,%2,%0\n\
-	stdcx.	%0,0,%3 \n\
-	bne-	1b"
-	: "=&r" (t), "+m" (v->counter)
-	: "r" (a), "r" (&v->counter)
-	: "cc");
+#define ATOMIC64_OP(op, asm_op)						\
+static __inline__ void atomic64_##op(long a, atomic64_t *v)		\
+{									\
+	long t;								\
+									\
+	__asm__ __volatile__(						\
+"1:	ldarx	%0,0,%3		# atomic64_" #op "\n"			\
+	#asm_op " %0,%2,%0\n"						\
+"	stdcx.	%0,0,%3 \n"						\
+"	bne-	1b\n"							\
+	: "=&r" (t), "+m" (v->counter)					\
+	: "r" (a), "r" (&v->counter)					\
+	: "cc");							\
 }
 
-static __inline__ long atomic64_add_return(long a, atomic64_t *v)
-{
-	long t;
-
-	__asm__ __volatile__(
-	PPC_ATOMIC_ENTRY_BARRIER
-"1:	ldarx	%0,0,%2		# atomic64_add_return\n\
-	add	%0,%1,%0\n\
-	stdcx.	%0,0,%2 \n\
-	bne-	1b"
-	PPC_ATOMIC_EXIT_BARRIER
-	: "=&r" (t)
-	: "r" (a), "r" (&v->counter)
-	: "cc", "memory");
-
-	return t;
+#define ATOMIC64_OP_RETURN(op, asm_op)					\
+static __inline__ long atomic64_##op##_return(long a, atomic64_t *v)	\
+{									\
+	long t;								\
+									\
+	__asm__ __volatile__(						\
+	PPC_ATOMIC_ENTRY_BARRIER					\
+"1:	ldarx	%0,0,%2		# atomic64_" #op "_return\n"		\
+	#asm_op " %0,%1,%0\n"						\
+"	stdcx.	%0,0,%2 \n"						\
+"	bne-	1b\n"							\
+	PPC_ATOMIC_EXIT_BARRIER						\
+	: "=&r" (t)							\
+	: "r" (a), "r" (&v->counter)					\
+	: "cc", "memory");						\
+									\
+	return t;							\
 }
 
-#define atomic64_add_negative(a, v)	(atomic64_add_return((a), (v)) < 0)
-
-static __inline__ void atomic64_sub(long a, atomic64_t *v)
-{
-	long t;
-
-	__asm__ __volatile__(
-"1:	ldarx	%0,0,%3		# atomic64_sub\n\
-	subf	%0,%2,%0\n\
-	stdcx.	%0,0,%3 \n\
-	bne-	1b"
-	: "=&r" (t), "+m" (v->counter)
-	: "r" (a), "r" (&v->counter)
-	: "cc");
-}
+#define ATOMIC64_OPS(op, asm_op) ATOMIC64_OP(op, asm_op) ATOMIC64_OP_RETURN(op, asm_op)
 
-static __inline__ long atomic64_sub_return(long a, atomic64_t *v)
-{
-	long t;
+ATOMIC64_OPS(add, add)
+ATOMIC64_OPS(sub, subf)
 
-	__asm__ __volatile__(
-	PPC_ATOMIC_ENTRY_BARRIER
-"1:	ldarx	%0,0,%2		# atomic64_sub_return\n\
-	subf	%0,%1,%0\n\
-	stdcx.	%0,0,%2 \n\
-	bne-	1b"
-	PPC_ATOMIC_EXIT_BARRIER
-	: "=&r" (t)
-	: "r" (a), "r" (&v->counter)
-	: "cc", "memory");
+#undef ATOMIC64_OPS
+#undef ATOMIC64_OP_RETURN
+#undef ATOMIC64_OP
 
-	return t;
-}
+#define atomic64_add_negative(a, v)	(atomic64_add_return((a), (v)) < 0)
 
 static __inline__ void atomic64_inc(atomic64_t *v)
 {