@@ -27,22 +27,38 @@ static DEFINE_SPINLOCK(dummy);
 
 #endif /* SMP */
 
-#define ATOMIC_OP(op, cop)					\
+#define ATOMIC_OP_RETURN(op, c_op)				\
 int atomic_##op##_return(int i, atomic_t *v)			\
 {								\
 	int ret;						\
 	unsigned long flags;					\
 	spin_lock_irqsave(ATOMIC_HASH(v), flags);		\
 								\
-	ret = (v->counter cop i);				\
+	ret = (v->counter c_op i);				\
 								\
 	spin_unlock_irqrestore(ATOMIC_HASH(v), flags);		\
 	return ret;						\
 }								\
 EXPORT_SYMBOL(atomic_##op##_return);
 
-ATOMIC_OP(add, +=)
+#define ATOMIC_OP(op, c_op)					\
+void atomic_##op(int i, atomic_t *v)				\
+{								\
+	unsigned long flags;					\
+	spin_lock_irqsave(ATOMIC_HASH(v), flags);		\
+								\
+	v->counter c_op i;					\
+								\
+	spin_unlock_irqrestore(ATOMIC_HASH(v), flags);		\
+}								\
+EXPORT_SYMBOL(atomic_##op);
+
+ATOMIC_OP_RETURN(add, +=)
+ATOMIC_OP(and, &=)
+ATOMIC_OP(or, |=)
+ATOMIC_OP(xor, ^=)
 
+#undef ATOMIC_OP_RETURN
 #undef ATOMIC_OP
 
 int atomic_xchg(atomic_t *v, int new)
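
For reference, a hand expansion of ATOMIC_OP(and, &=) from the hunk above, as an illustrative sketch rather than part of the patch. It assumes the ATOMIC_HASH() helper defined earlier in this file, which maps an atomic_t address to one of the hashed spinlocks on SMP (or to the single dummy lock named in the hunk header on UP):

/*
 * Illustrative expansion of ATOMIC_OP(and, &=); not part of the patch.
 * ATOMIC_HASH() is assumed from earlier in atomic32.c.
 */
void atomic_and(int i, atomic_t *v)
{
	unsigned long flags;

	/* Serialize against every other atomic op hashing to this lock. */
	spin_lock_irqsave(ATOMIC_HASH(v), flags);

	v->counter &= i;	/* the c_op argument applied in place */

	spin_unlock_irqrestore(ATOMIC_HASH(v), flags);
}
EXPORT_SYMBOL(atomic_and);

Splitting ATOMIC_OP_RETURN off from ATOMIC_OP lets the value-returning variant keep its local ret while the new void variants (and, or, xor) avoid the dead read entirely.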