@@ -28,8 +28,6 @@
 #include <asm/synch.h>
 #include <asm/ppc-opcode.h>
 
-#define arch_spin_is_locked(x)		((x)->slock != 0)
-
 #ifdef CONFIG_PPC64
 /* use 0x800000yy when locked, where yy == CPU number */
 #ifdef __BIG_ENDIAN__
@@ -59,6 +57,11 @@ static __always_inline int arch_spin_value_unlocked(arch_spinlock_t lock)
 	return lock.slock == 0;
 }
 
+static inline int arch_spin_is_locked(arch_spinlock_t *lock)
+{
+	return !arch_spin_value_unlocked(*lock);
+}
+
 /*
  * This returns the old value in the lock, so we succeeded
  * in getting the lock if the return value is 0.
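
For illustration only, a minimal user-space sketch of the pattern the second hunk
introduces: arch_spin_is_locked() is written as the negation of the value-based
arch_spin_value_unlocked() check rather than open-coding the (x)->slock != 0
comparison the removed macro used. The struct below is a simplified stand-in, not
the kernel's real arch_spinlock_t, and 0x80000001 is just an example of the
"0x800000yy == locked by CPU yy" encoding mentioned in the comment above.

	/* Simplified stand-in for the kernel type, for demonstration only. */
	#include <stdio.h>

	typedef struct {
		unsigned int slock;	/* 0 when unlocked, non-zero when held */
	} arch_spinlock_t;

	/* Value-based check, mirroring arch_spin_value_unlocked() in the hunk. */
	static inline int arch_spin_value_unlocked(arch_spinlock_t lock)
	{
		return lock.slock == 0;
	}

	/* The new helper: defined in terms of the value-based check above. */
	static inline int arch_spin_is_locked(arch_spinlock_t *lock)
	{
		return !arch_spin_value_unlocked(*lock);
	}

	int main(void)
	{
		arch_spinlock_t lock = { .slock = 0 };

		printf("unlocked: is_locked=%d\n", arch_spin_is_locked(&lock));
		lock.slock = 0x80000001u;	/* e.g. locked by CPU 1 on PPC64 */
		printf("locked:   is_locked=%d\n", arch_spin_is_locked(&lock));
		return 0;
	}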