@@ -560,7 +560,7 @@ static __inline__ int atomic64_add_unless(atomic64_t *v, long a, long u)
  * Atomically increments @v by 1, so long as @v is non-zero.
  * Returns non-zero if @v was non-zero, and zero otherwise.
  */
-static __inline__ long atomic64_inc_not_zero(atomic64_t *v)
+static __inline__ int atomic64_inc_not_zero(atomic64_t *v)
 {
 	long t1, t2;

@@ -579,7 +579,7 @@ static __inline__ long atomic64_inc_not_zero(atomic64_t *v)
 	: "r" (&v->counter)
 	: "cc", "xer", "memory");

-	return t1;
+	return t1 != 0;
 }

 #endif /* __powerpc64__ */
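
For reference, here is a minimal userspace sketch (not part of the patch) of the same "increment only if non-zero" pattern, written with C11 atomics. The helper name inc_not_zero and the refcount variable are illustrative only; the point is that callers only test whether the increment happened, which is why returning 0/1 as an int, as in the change above, is enough.

/*
 * Illustrative sketch only: mimics the inc-not-zero semantics in
 * portable C11, returning 1 if the counter was non-zero and was
 * incremented, 0 if it was already zero.
 */
#include <stdatomic.h>
#include <stdio.h>

static int inc_not_zero(atomic_long *v)
{
	long old = atomic_load(v);

	/* Retry the CAS until it succeeds or the counter is seen as zero. */
	while (old != 0) {
		if (atomic_compare_exchange_weak(v, &old, old + 1))
			return 1;	/* was non-zero, now incremented */
	}
	return 0;			/* was zero, left untouched */
}

int main(void)
{
	atomic_long refcount = 3;

	if (inc_not_zero(&refcount))
		printf("got a reference, count is now %ld\n",
		       atomic_load(&refcount));
	return 0;
}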