@@ -89,6 +89,12 @@ static inline int atomic_##op##_return(int i, atomic_t *v)	\
 ATOMIC_OPS(add)
 ATOMIC_OPS(sub)
 
+#define CONFIG_ARCH_HAS_ATOMIC_OR
+
+ATOMIC_OP(and)
+ATOMIC_OP(or)
+ATOMIC_OP(xor)
+
 #undef ATOMIC_OPS
 #undef ATOMIC_OP_RETURN
 #undef ATOMIC_OP
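
Each ATOMIC_OP(op) invocation above stamps out a void atomic_##op(int, atomic_t *) helper from the macro defined earlier in this header. As a semantics-only sketch (the function name below is invented for illustration; the real expansion is an mn10300 asm loop of the kind deleted in the next two hunks, and a plain C body like this would not actually be atomic):

	/*
	 * Semantics-only sketch of what ATOMIC_OP(or) provides.  The
	 * generated function performs this read-modify-write as a single
	 * atomic step; this plain C body is illustrative, not atomic.
	 */
	static inline void atomic_or_sketch(int i, atomic_t *v)
	{
		v->counter |= i;
	}
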
@@ -134,31 +140,9 @@ static inline void atomic_dec(atomic_t *v)
  *
  * Atomically clears the bits set in mask from the memory word specified.
  */
-static inline void atomic_clear_mask(unsigned long mask, unsigned long *addr)
+static inline __deprecated void atomic_clear_mask(unsigned int mask, atomic_t *v)
 {
-#ifdef CONFIG_SMP
-	int status;
-
-	asm volatile(
-		"1:	mov	%3,(_AAR,%2)	\n"
-		"	mov	(_ADR,%2),%0	\n"
-		"	and	%4,%0		\n"
-		"	mov	%0,(_ADR,%2)	\n"
-		"	mov	(_ADR,%2),%0	\n"	/* flush */
-		"	mov	(_ASR,%2),%0	\n"
-		"	or	%0,%0		\n"
-		"	bne	1b		\n"
-		: "=&r"(status), "=m"(*addr)
-		: "a"(ATOMIC_OPS_BASE_ADDR), "r"(addr), "r"(~mask)
-		: "memory", "cc");
-#else
-	unsigned long flags;
-
-	mask = ~mask;
-	flags = arch_local_cli_save();
-	*addr &= mask;
-	arch_local_irq_restore(flags);
-#endif
+	atomic_and(~mask, v);
 }
 
 /**
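
With atomic_and() generated above, the hand-rolled clear-mask loop collapses to one call: clearing the bits in mask is AND-ing with its complement. A hypothetical caller, with the function name invented for illustration:

	/* Hypothetical example: both calls clear bit 0 of v->counter. */
	static void example_clear_low_bit(atomic_t *v)
	{
		atomic_clear_mask(0x1, v);	/* deprecated wrapper */
		atomic_and(~0x1, v);		/* direct equivalent */
	}
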
@@ -168,30 +152,9 @@ static inline void atomic_clear_mask(unsigned long mask, unsigned long *addr)
  *
  * Atomically sets the bits set in mask from the memory word specified.
  */
-static inline void atomic_set_mask(unsigned long mask, unsigned long *addr)
+static inline __deprecated void atomic_set_mask(unsigned int mask, atomic_t *v)
 {
-#ifdef CONFIG_SMP
-	int status;
-
-	asm volatile(
-		"1:	mov	%3,(_AAR,%2)	\n"
-		"	mov	(_ADR,%2),%0	\n"
-		"	or	%4,%0		\n"
-		"	mov	%0,(_ADR,%2)	\n"
-		"	mov	(_ADR,%2),%0	\n"	/* flush */
-		"	mov	(_ASR,%2),%0	\n"
-		"	or	%0,%0		\n"
-		"	bne	1b		\n"
-		: "=&r"(status), "=m"(*addr)
-		: "a"(ATOMIC_OPS_BASE_ADDR), "r"(addr), "r"(mask)
-		: "memory", "cc");
-#else
-	unsigned long flags;
-
-	flags = arch_local_cli_save();
-	*addr |= mask;
-	arch_local_irq_restore(flags);
-#endif
+	atomic_or(mask, v);
 }
 
 #endif /* __KERNEL__ */
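
Symmetrically, setting the bits in mask is a plain atomic_or(). Another hypothetical caller, name again invented for illustration:

	/* Hypothetical example: both calls set bit 0 of v->counter. */
	static void example_set_low_bit(atomic_t *v)
	{
		atomic_set_mask(0x1, v);	/* deprecated wrapper */
		atomic_or(0x1, v);		/* direct equivalent */
	}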