Browse code

sparc: Provide atomic_{or,xor,and}

Implement atomic logic ops -- atomic_{or,xor,and}.

These will replace the atomic_{set,clear}_mask functions that are
available on some archs.

Acked-by: David S. Miller <davem@davemloft.net>
Signed-off-by: Peter Zijlstra (Intel) <peterz@infradead.org>
Signed-off-by: Thomas Gleixner <tglx@linutronix.de>
Peter Zijlstra, 11 years ago
Parent
Current commit
304a0d699a

+ 4 - 0
arch/sparc/include/asm/atomic_32.h

@@ -17,10 +17,14 @@
 #include <asm/barrier.h>
 #include <asm-generic/atomic64.h>
 
+#define CONFIG_ARCH_HAS_ATOMIC_OR
 
 #define ATOMIC_INIT(i)  { (i) }
 
 int atomic_add_return(int, atomic_t *);
+void atomic_and(int, atomic_t *);
+void atomic_or(int, atomic_t *);
+void atomic_xor(int, atomic_t *);
 int atomic_cmpxchg(atomic_t *, int, int);
 int atomic_xchg(atomic_t *, int);
 int __atomic_add_unless(atomic_t *, int, int);

+ 6 - 0
arch/sparc/include/asm/atomic_64.h

@@ -33,6 +33,12 @@ long atomic64_##op##_return(long, atomic64_t *);
 ATOMIC_OPS(add)
 ATOMIC_OPS(sub)
 
+#define CONFIG_ARCH_HAS_ATOMIC_OR
+
+ATOMIC_OP(and)
+ATOMIC_OP(or)
+ATOMIC_OP(xor)
+
 #undef ATOMIC_OPS
 #undef ATOMIC_OP_RETURN
 #undef ATOMIC_OP

+ 19 - 3
arch/sparc/lib/atomic32.c

@@ -27,22 +27,38 @@ static DEFINE_SPINLOCK(dummy);
 
 #endif /* SMP */
 
-#define ATOMIC_OP(op, cop)						\
+#define ATOMIC_OP_RETURN(op, c_op)					\
 int atomic_##op##_return(int i, atomic_t *v)				\
 {									\
 	int ret;							\
 	unsigned long flags;						\
 	spin_lock_irqsave(ATOMIC_HASH(v), flags);			\
 									\
-	ret = (v->counter cop i);					\
+	ret = (v->counter c_op i);					\
 									\
 	spin_unlock_irqrestore(ATOMIC_HASH(v), flags);			\
 	return ret;							\
 }									\
 EXPORT_SYMBOL(atomic_##op##_return);
 
-ATOMIC_OP(add, +=)
+#define ATOMIC_OP(op, c_op)						\
+void atomic_##op(int i, atomic_t *v)					\
+{									\
+	unsigned long flags;						\
+	spin_lock_irqsave(ATOMIC_HASH(v), flags);			\
+									\
+	v->counter c_op i;						\
+									\
+	spin_unlock_irqrestore(ATOMIC_HASH(v), flags);			\
+}									\
+EXPORT_SYMBOL(atomic_##op);
+
+ATOMIC_OP_RETURN(add, +=)
+ATOMIC_OP(and, &=)
+ATOMIC_OP(or, |=)
+ATOMIC_OP(xor, ^=)
 
+#undef ATOMIC_OP_RETURN
 #undef ATOMIC_OP
 
 int atomic_xchg(atomic_t *v, int new)

+ 6 - 0
arch/sparc/lib/atomic_64.S

@@ -47,6 +47,9 @@ ENDPROC(atomic_##op##_return);
 
 ATOMIC_OPS(add)
 ATOMIC_OPS(sub)
+ATOMIC_OP(and)
+ATOMIC_OP(or)
+ATOMIC_OP(xor)
 
 #undef ATOMIC_OPS
 #undef ATOMIC_OP_RETURN
@@ -84,6 +87,9 @@ ENDPROC(atomic64_##op##_return);
 
 ATOMIC64_OPS(add)
 ATOMIC64_OPS(sub)
+ATOMIC64_OP(and)
+ATOMIC64_OP(or)
+ATOMIC64_OP(xor)
 
 #undef ATOMIC64_OPS
 #undef ATOMIC64_OP_RETURN

+ 3 - 0
arch/sparc/lib/ksyms.c

@@ -111,6 +111,9 @@ EXPORT_SYMBOL(atomic64_##op##_return);
 
 ATOMIC_OPS(add)
 ATOMIC_OPS(sub)
+ATOMIC_OP(and)
+ATOMIC_OP(or)
+ATOMIC_OP(xor)
 
 #undef ATOMIC_OPS
 #undef ATOMIC_OP_RETURN