@@ -16,83 +16,54 @@
 
 #include <linux/kernel.h>
 
-static inline int atomic_add_return(int i, atomic_t *v)
-{
-	h8300flags flags;
-	int ret;
-
-	flags = arch_local_irq_save();
-	ret = v->counter += i;
-	arch_local_irq_restore(flags);
-	return ret;
+#define ATOMIC_OP_RETURN(op, c_op) \
+static inline int atomic_##op##_return(int i, atomic_t *v) \
+{ \
+	h8300flags flags; \
+	int ret; \
+	\
+	flags = arch_local_irq_save(); \
+	ret = v->counter c_op i; \
+	arch_local_irq_restore(flags); \
+	return ret; \
 }
 
-#define atomic_add(i, v) atomic_add_return(i, v)
-#define atomic_add_negative(a, v) (atomic_add_return((a), (v)) < 0)
-
-static inline int atomic_sub_return(int i, atomic_t *v)
-{
-	h8300flags flags;
-	int ret;
-
-	flags = arch_local_irq_save();
-	ret = v->counter -= i;
-	arch_local_irq_restore(flags);
-	return ret;
+#define ATOMIC_OP(op, c_op) \
+static inline void atomic_##op(int i, atomic_t *v) \
+{ \
+	h8300flags flags; \
+	\
+	flags = arch_local_irq_save(); \
+	v->counter c_op i; \
+	arch_local_irq_restore(flags); \
 }
 
-#define atomic_sub(i, v) atomic_sub_return(i, v)
-#define atomic_sub_and_test(i, v) (atomic_sub_return(i, v) == 0)
+ATOMIC_OP_RETURN(add, +=)
+ATOMIC_OP_RETURN(sub, -=)
 
-static inline int atomic_inc_return(atomic_t *v)
-{
-	h8300flags flags;
-	int ret;
-
-	flags = arch_local_irq_save();
-	v->counter++;
-	ret = v->counter;
-	arch_local_irq_restore(flags);
-	return ret;
-}
+#define CONFIG_ARCH_HAS_ATOMIC_OR
 
-#define atomic_inc(v) atomic_inc_return(v)
+ATOMIC_OP(and, &=)
+ATOMIC_OP(or, |=)
+ATOMIC_OP(xor, ^=)
 
-/*
- * atomic_inc_and_test - increment and test
- * @v: pointer of type atomic_t
- *
- * Atomically increments @v by 1
- * and returns true if the result is zero, or false for all
- * other cases.
- */
-#define atomic_inc_and_test(v) (atomic_inc_return(v) == 0)
+#undef ATOMIC_OP_RETURN
+#undef ATOMIC_OP
 
-static inline int atomic_dec_return(atomic_t *v)
-{
-	h8300flags flags;
-	int ret;
+#define atomic_add(i, v) (void)atomic_add_return(i, v)
+#define atomic_add_negative(a, v) (atomic_add_return((a), (v)) < 0)
 
-	flags = arch_local_irq_save();
-	--v->counter;
-	ret = v->counter;
-	arch_local_irq_restore(flags);
-	return ret;
-}
+#define atomic_sub(i, v) (void)atomic_sub_return(i, v)
+#define atomic_sub_and_test(i, v) (atomic_sub_return(i, v) == 0)
 
-#define atomic_dec(v) atomic_dec_return(v)
+#define atomic_inc_return(v) atomic_add_return(1, v)
+#define atomic_dec_return(v) atomic_sub_return(1, v)
 
-static inline int atomic_dec_and_test(atomic_t *v)
-{
-	h8300flags flags;
-	int ret;
+#define atomic_inc(v) (void)atomic_inc_return(v)
+#define atomic_inc_and_test(v) (atomic_inc_return(v) == 0)
 
-	flags = arch_local_irq_save();
-	--v->counter;
-	ret = v->counter;
-	arch_local_irq_restore(flags);
-	return ret == 0;
-}
+#define atomic_dec(v) (void)atomic_dec_return(v)
+#define atomic_dec_and_test(v) (atomic_dec_return(v) == 0)
 
 static inline int atomic_cmpxchg(atomic_t *v, int old, int new)
 {
@@ -120,40 +91,14 @@ static inline int __atomic_add_unless(atomic_t *v, int a, int u)
 	return ret;
 }
 
-static inline void atomic_clear_mask(unsigned long mask, unsigned long *v)
+static inline __deprecated void atomic_clear_mask(unsigned int mask, atomic_t *v)
 {
-	unsigned char ccr;
-	unsigned long tmp;
-
-	__asm__ __volatile__("stc ccr,%w3\n\t"
-			     "orc #0x80,ccr\n\t"
-			     "mov.l %0,%1\n\t"
-			     "and.l %2,%1\n\t"
-			     "mov.l %1,%0\n\t"
-			     "ldc %w3,ccr"
-			     : "=m"(*v), "=r"(tmp)
-			     : "g"(~(mask)), "r"(ccr));
+	atomic_and(~mask, v);
 }
 
-static inline void atomic_set_mask(unsigned long mask, unsigned long *v)
+static inline __deprecated void atomic_set_mask(unsigned int mask, atomic_t *v)
 {
-	unsigned char ccr;
-	unsigned long tmp;
-
-	__asm__ __volatile__("stc ccr,%w3\n\t"
-			     "orc #0x80,ccr\n\t"
-			     "mov.l %0,%1\n\t"
-			     "or.l %2,%1\n\t"
-			     "mov.l %1,%0\n\t"
-			     "ldc %w3,ccr"
-			     : "=m"(*v), "=r"(tmp)
-			     : "g"(~(mask)), "r"(ccr));
+	atomic_or(mask, v);
 }
 
-/* Atomic operations are already serializing */
-#define smp_mb__before_atomic_dec() barrier()
-#define smp_mb__after_atomic_dec() barrier()
-#define smp_mb__before_atomic_inc() barrier()
-#define smp_mb__after_atomic_inc() barrier()
-
 #endif /* __ARCH_H8300_ATOMIC __ */
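
For reference, and not part of the patch itself: each ATOMIC_OP_RETURN()/ATOMIC_OP() instantiation above generates a function equivalent to the hand-coded ones being deleted. A sketch of what the preprocessor roughly produces for ATOMIC_OP_RETURN(add, +=):

	static inline int atomic_add_return(int i, atomic_t *v)
	{
		h8300flags flags;
		int ret;

		flags = arch_local_irq_save();
		ret = v->counter += i;	/* c_op substituted with += */
		arch_local_irq_restore(flags);
		return ret;
	}

This matches the open-coded atomic_add_return() removed at the top of the hunk, so the generated and/or/xor operations inherit the same irq-save/restore serialization used by the existing add/sub paths.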