@@ -1,9 +1,6 @@
 #ifndef __ASM_SH_CMPXCHG_LLSC_H
 #define __ASM_SH_CMPXCHG_LLSC_H
 
-#include <linux/bitops.h>
-#include <asm/byteorder.h>
-
 static inline unsigned long xchg_u32(volatile u32 *m, unsigned long val)
 {
 	unsigned long retval;
@@ -50,36 +47,6 @@ __cmpxchg_u32(volatile u32 *m, unsigned long old, unsigned long new)
 	return retval;
 }
 
-static inline u32 __xchg_cmpxchg(volatile void *ptr, u32 x, int size)
-{
-	int off = (unsigned long)ptr % sizeof(u32);
-	volatile u32 *p = ptr - off;
-#ifdef __BIG_ENDIAN
-	int bitoff = (sizeof(u32) - 1 - off) * BITS_PER_BYTE;
-#else
-	int bitoff = off * BITS_PER_BYTE;
-#endif
-	u32 bitmask = ((0x1 << size * BITS_PER_BYTE) - 1) << bitoff;
-	u32 oldv, newv;
-	u32 ret;
-
-	do {
-		oldv = READ_ONCE(*p);
-		ret = (oldv & bitmask) >> bitoff;
-		newv = (oldv & ~bitmask) | (x << bitoff);
-	} while (__cmpxchg_u32(p, oldv, newv) != oldv);
-
-	return ret;
-}
-
-static inline unsigned long xchg_u16(volatile u16 *m, unsigned long val)
-{
-	return __xchg_cmpxchg(m, val, sizeof *m);
-}
-
-static inline unsigned long xchg_u8(volatile u8 *m, unsigned long val)
-{
-	return __xchg_cmpxchg(m, val, sizeof *m);
-}
+#include <asm/cmpxchg-xchg.h>
 
 #endif /* __ASM_SH_CMPXCHG_LLSC_H */
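
For reference, the helper removed above emulates a 1- or 2-byte exchange with a 32-bit compare-and-swap loop on the aligned word containing the target; presumably <asm/cmpxchg-xchg.h> now supplies that same logic. Below is a minimal standalone sketch of the technique using GCC/Clang __atomic builtins in place of the kernel's READ_ONCE() and __cmpxchg_u32(). The function name xchg_small and the union-based demo are illustrative only, not kernel code.

#include <stdint.h>
#include <stdio.h>

#define BITS_PER_BYTE	8

/* size must be 1 or 2; larger widths would overflow the 32-bit mask. */
static uint32_t xchg_small(void *ptr, uint32_t x, int size)
{
	uintptr_t off = (uintptr_t)ptr % sizeof(uint32_t);
	/* Aligned 32-bit word that contains the 1- or 2-byte target. */
	uint32_t *p = (uint32_t *)((uintptr_t)ptr - off);
#if defined(__BYTE_ORDER__) && __BYTE_ORDER__ == __ORDER_BIG_ENDIAN__
	/* On big-endian the lowest address holds the most significant byte. */
	int bitoff = (sizeof(uint32_t) - size - off) * BITS_PER_BYTE;
#else
	int bitoff = off * BITS_PER_BYTE;
#endif
	uint32_t bitmask = ((1u << size * BITS_PER_BYTE) - 1) << bitoff;
	uint32_t oldv, newv;

	oldv = __atomic_load_n(p, __ATOMIC_RELAXED);
	do {
		newv = (oldv & ~bitmask) | ((x << bitoff) & bitmask);
		/* On failure the builtin reloads oldv from *p and we retry. */
	} while (!__atomic_compare_exchange_n(p, &oldv, newv, 1,
					      __ATOMIC_SEQ_CST, __ATOMIC_RELAXED));

	return (oldv & bitmask) >> bitoff;
}

int main(void)
{
	/* Keep the 16-bit target inside an addressable, aligned 32-bit word. */
	union {
		uint32_t word;
		uint16_t half[2];
	} u = { .word = 0 };
	uint32_t old;

	u.half[0] = 0x1234;
	old = xchg_small(&u.half[0], 0xbeef, sizeof(u.half[0]));
	printf("old=%#x new=%#x\n", (unsigned)old, (unsigned)u.half[0]);
	/* Prints: old=0x1234 new=0xbeef */
	return 0;
}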