
atomics/treewide: Make atomic64_inc_not_zero() optional

We define a trivial fallback for atomic_inc_not_zero(), but don't do
the same for atomic64_inc_not_zero(), leading most architectures to
define the same boilerplate.
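
For illustration, the per-architecture boilerplate removed below is essentially one of these two equivalent forms (a trivial macro, or a trivial inline wrapper around atomic64_add_unless(), as in the riscv hunk):

	/* Typical duplicated fallback, removed from most architectures below. */
	#define atomic64_inc_not_zero(v)	atomic64_add_unless((v), 1, 0)

	/* Or the same thing spelled as an inline function. */
	static __always_inline long atomic64_inc_not_zero(atomic64_t *v)
	{
		return atomic64_add_unless(v, 1, 0);
	}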

Let's add a fallback in <linux/atomic.h>, and remove the redundant
implementations. Note that atomic64_add_unless() is always defined in
<linux/atomic.h>, and promotes its arguments to the requisite types, so
we need not do this explicitly.
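
A minimal sketch of the pattern this patch introduces, using only the names visible in the diff below: the generic header supplies a guarded fallback, and an architecture with its own implementation (e.g. powerpc, x86-32) defines the macro to itself so the guard does not fire.

	/* In <linux/atomic.h>: generic fallback unless the arch provided one. */
	#ifndef atomic64_inc_not_zero
	#define atomic64_inc_not_zero(v)	atomic64_add_unless((v), 1, 0)
	#endif

	/* In an arch header with a bespoke implementation (see powerpc below):
	 * define the macro to itself to signal "already provided". */
	#define atomic64_inc_not_zero(v)	atomic64_inc_not_zero((v))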

There should be no functional change as a result of this patch.

Signed-off-by: Mark Rutland <mark.rutland@arm.com>
Reviewed-by: Will Deacon <will.deacon@arm.com>
Acked-by: Peter Zijlstra (Intel) <peterz@infradead.org>
Acked-by: Palmer Dabbelt <palmer@sifive.com>
Cc: Boqun Feng <boqun.feng@gmail.com>
Cc: Linus Torvalds <torvalds@linux-foundation.org>
Cc: Thomas Gleixner <tglx@linutronix.de>
Link: https://lore.kernel.org/lkml/20180621121321.4761-6-mark.rutland@arm.com
Signed-off-by: Ingo Molnar <mingo@kernel.org>
Mark Rutland, 7 years ago
commit bef828204a

+ 0 - 2
arch/alpha/include/asm/atomic.h

@@ -296,8 +296,6 @@ static inline long atomic64_dec_if_positive(atomic64_t *v)
 	return old - 1;
 }
 
-#define atomic64_inc_not_zero(v) atomic64_add_unless((v), 1, 0)
-
 #define atomic_add_negative(a, v) (atomic_add_return((a), (v)) < 0)
 #define atomic64_add_negative(a, v) (atomic64_add_return((a), (v)) < 0)
 

+ 0 - 1
arch/arc/include/asm/atomic.h

@@ -603,7 +603,6 @@ static inline int atomic64_add_unless(atomic64_t *v, long long a, long long u)
 #define atomic64_dec(v)			atomic64_sub(1LL, (v))
 #define atomic64_dec_return(v)		atomic64_sub_return(1LL, (v))
 #define atomic64_dec_and_test(v)	(atomic64_dec_return((v)) == 0)
-#define atomic64_inc_not_zero(v)	atomic64_add_unless((v), 1LL, 0LL)
 
 #endif	/* !CONFIG_GENERIC_ATOMIC64 */
 

+ 0 - 1
arch/arm/include/asm/atomic.h

@@ -534,7 +534,6 @@ static inline int atomic64_add_unless(atomic64_t *v, long long a, long long u)
 #define atomic64_dec(v)			atomic64_sub(1LL, (v))
 #define atomic64_dec_return_relaxed(v)	atomic64_sub_return_relaxed(1LL, (v))
 #define atomic64_dec_and_test(v)	(atomic64_dec_return((v)) == 0)
-#define atomic64_inc_not_zero(v)	atomic64_add_unless((v), 1LL, 0LL)
 
 #endif /* !CONFIG_GENERIC_ATOMIC64 */
 #endif

+ 0 - 2
arch/arm64/include/asm/atomic.h

@@ -204,7 +204,5 @@
 #define atomic64_add_unless(v, a, u)	(___atomic_add_unless(v, a, u, 64) != u)
 #define atomic64_andnot			atomic64_andnot
 
-#define atomic64_inc_not_zero(v)	atomic64_add_unless((v), 1, 0)
-
 #endif
 #endif

+ 0 - 2
arch/ia64/include/asm/atomic.h

@@ -246,8 +246,6 @@ static __inline__ long atomic64_add_unless(atomic64_t *v, long a, long u)
 	return c != (u);
 }
 
-#define atomic64_inc_not_zero(v) atomic64_add_unless((v), 1, 0)
-
 static __inline__ long atomic64_dec_if_positive(atomic64_t *v)
 {
 	long c, old, dec;

+ 0 - 2
arch/mips/include/asm/atomic.h

@@ -644,8 +644,6 @@ static __inline__ int atomic64_add_unless(atomic64_t *v, long a, long u)
 	return c != (u);
 }
 
-#define atomic64_inc_not_zero(v) atomic64_add_unless((v), 1, 0)
-
 #define atomic64_dec_return(v) atomic64_sub_return(1, (v))
 #define atomic64_inc_return(v) atomic64_add_return(1, (v))
 

+ 0 - 2
arch/parisc/include/asm/atomic.h

@@ -305,8 +305,6 @@ static __inline__ int atomic64_add_unless(atomic64_t *v, long a, long u)
 	return c != (u);
 }
 
-#define atomic64_inc_not_zero(v) atomic64_add_unless((v), 1, 0)
-
 /*
  * atomic64_dec_if_positive - decrement by 1 if old value positive
  * @v: pointer of type atomic_t

+ 1 - 0
arch/powerpc/include/asm/atomic.h

@@ -582,6 +582,7 @@ static __inline__ int atomic64_inc_not_zero(atomic64_t *v)
 
 	return t1 != 0;
 }
+#define atomic64_inc_not_zero(v) atomic64_inc_not_zero((v))
 
 #endif /* __powerpc64__ */
 

+ 0 - 7
arch/riscv/include/asm/atomic.h

@@ -375,13 +375,6 @@ static __always_inline int atomic64_add_unless(atomic64_t *v, long a, long u)
 }
 #endif
 
-#ifndef CONFIG_GENERIC_ATOMIC64
-static __always_inline long atomic64_inc_not_zero(atomic64_t *v)
-{
-        return atomic64_add_unless(v, 1, 0);
-}
-#endif
-
 /*
  * atomic_{cmp,}xchg is required to have exactly the same ordering semantics as
  * {cmp,}xchg and the operations that return, so they need a full barrier.

+ 0 - 1
arch/s390/include/asm/atomic.h

@@ -212,6 +212,5 @@ static inline long atomic64_dec_if_positive(atomic64_t *v)
 #define atomic64_dec(_v)		atomic64_sub(1, _v)
 #define atomic64_dec_return(_v)		atomic64_sub_return(1, _v)
 #define atomic64_dec_and_test(_v)	(atomic64_sub_return(1, _v) == 0)
-#define atomic64_inc_not_zero(v)	atomic64_add_unless((v), 1, 0)
 
 #endif /* __ARCH_S390_ATOMIC__  */

+ 0 - 2
arch/sparc/include/asm/atomic_64.h

@@ -123,8 +123,6 @@ static inline long atomic64_add_unless(atomic64_t *v, long a, long u)
 	return c != (u);
 }
 
-#define atomic64_inc_not_zero(v) atomic64_add_unless((v), 1, 0)
-
 long atomic64_dec_if_positive(atomic64_t *v);
 
 #endif /* !(__ARCH_SPARC64_ATOMIC__) */

+ 1 - 1
arch/x86/include/asm/atomic64_32.h

@@ -295,7 +295,7 @@ static inline int arch_atomic64_add_unless(atomic64_t *v, long long a,
 	return (int)a;
 }
 
-
+#define arch_atomic64_inc_not_zero arch_atomic64_inc_not_zero
 static inline int arch_atomic64_inc_not_zero(atomic64_t *v)
 {
 	int r;

+ 0 - 2
arch/x86/include/asm/atomic64_64.h

@@ -207,8 +207,6 @@ static inline bool arch_atomic64_add_unless(atomic64_t *v, long a, long u)
 	return true;
 }
 
-#define arch_atomic64_inc_not_zero(v) arch_atomic64_add_unless((v), 1, 0)
-
 /*
  * arch_atomic64_dec_if_positive - decrement by 1 if old value positive
  * @v: pointer of type atomic_t

+ 3 - 0
include/asm-generic/atomic-instrumented.h

@@ -205,11 +205,14 @@ static __always_inline s64 atomic64_dec_return(atomic64_t *v)
 	return arch_atomic64_dec_return(v);
 }
 
+#ifdef arch_atomic64_inc_not_zero
+#define atomic64_inc_not_zero atomic64_inc_not_zero
 static __always_inline bool atomic64_inc_not_zero(atomic64_t *v)
 {
 	kasan_check_write(v, sizeof(*v));
 	return arch_atomic64_inc_not_zero(v);
 }
+#endif
 
 static __always_inline s64 atomic64_dec_if_positive(atomic64_t *v)
 {

+ 0 - 1
include/asm-generic/atomic64.h

@@ -63,6 +63,5 @@ extern bool	 atomic64_add_unless(atomic64_t *v, long long a, long long u);
 #define atomic64_dec(v)			atomic64_sub(1LL, (v))
 #define atomic64_dec_return(v)		atomic64_sub_return(1LL, (v))
 #define atomic64_dec_and_test(v)	(atomic64_dec_return((v)) == 0)
-#define atomic64_inc_not_zero(v) 	atomic64_add_unless((v), 1LL, 0LL)
 
 #endif  /*  _ASM_GENERIC_ATOMIC64_H  */

+ 11 - 0
include/linux/atomic.h

@@ -1019,6 +1019,17 @@ static inline int atomic_dec_if_positive(atomic_t *v)
 #define atomic64_try_cmpxchg_release	atomic64_try_cmpxchg
 #endif /* atomic64_try_cmpxchg */
 
+/**
+ * atomic64_inc_not_zero - increment unless the number is zero
+ * @v: pointer of type atomic64_t
+ *
+ * Atomically increments @v by 1, if @v is non-zero.
+ * Returns true if the increment was done.
+ */
+#ifndef atomic64_inc_not_zero
+#define atomic64_inc_not_zero(v)	atomic64_add_unless((v), 1, 0)
+#endif
+
 #ifndef atomic64_andnot
 static inline void atomic64_andnot(long long i, atomic64_t *v)
 {
 {