|
@@ -87,6 +87,33 @@ __cmpxchg_u64(volatile long *m, unsigned long old, unsigned long new)
|
|
|
return new;
|
|
|
}
|
|
|
|
|
|
+/*
|
|
|
+ * Emulate a 1-byte cmpxchg with the 4-byte cas instruction: splice the
|
|
|
+ * byte into its naturally aligned containing word and cas that word.
|
|
|
+ * XOR-ing the byte offset with 3 yields the big-endian bit shift.
|
|
|
+ */
|
|
|
+static inline unsigned long
|
|
|
+__cmpxchg_u8(volatile unsigned char *m, unsigned char old, unsigned char new)
|
|
|
+{
|
|
|
+	unsigned long maddr = (unsigned long)m;
|
|
|
+	int bit_shift = (((unsigned long)m & 3) ^ 3) << 3;
|
|
|
+	unsigned int mask = 0xffU << bit_shift;	/* 0xffU: shifting 0xff<<24 as int is UB */
|
|
|
+	unsigned int *ptr = (unsigned int *) (maddr & ~3);
|
|
|
+	unsigned int old32, new32, load;
|
|
|
+	unsigned int load32 = *ptr;
|
|
|
+
|
|
|
+	do {
|
|
|
+		new32 = (load32 & ~mask) | ((unsigned int)new << bit_shift);
|
|
|
+		old32 = (load32 & ~mask) | ((unsigned int)old << bit_shift);
|
|
|
+		load32 = __cmpxchg_u32(ptr, old32, new32);
|
|
|
+		if (load32 == old32)
|
|
|
+			return old;	/* success: word matched, byte swapped */
|
|
|
+		load = (load32 & mask) >> bit_shift;
|
|
|
+	} while (load == old);	/* retry only if our byte still equals old */
|
|
|
+
|
|
|
+	return load;	/* failure: report the byte's current value */
|
|
|
+}
|
|
|
+
|
|
|
/* This function doesn't exist, so you'll get a linker error
|
|
|
if something tries to do an invalid cmpxchg(). */
|
|
|
void __cmpxchg_called_with_bad_pointer(void);
|
|
@@ -95,6 +122,8 @@ static inline unsigned long
|
|
|
__cmpxchg(volatile void *ptr, unsigned long old, unsigned long new, int size)
|
|
|
{
|
|
|
switch (size) {
|
|
|
+ case 1:
|
|
|
+ return __cmpxchg_u8(ptr, old, new);
|
|
|
case 4:
|
|
|
return __cmpxchg_u32(ptr, old, new);
|
|
|
case 8:
|