/*
 * Copyright IBM Corp. 1999, 2016
 * Author(s): Martin Schwidefsky <schwidefsky@de.ibm.com>,
 *	      Denis Joseph Barrow,
 *	      Arnd Bergmann,
 */

#ifndef __ARCH_S390_ATOMIC__
#define __ARCH_S390_ATOMIC__

#include <linux/compiler.h>
#include <linux/types.h>
#include <asm/atomic_ops.h>
#include <asm/barrier.h>
#include <asm/cmpxchg.h>

#define ATOMIC_INIT(i)	{ (i) }
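
/*
 * atomic_read() and atomic_set() are single aligned load/store
 * instructions, emitted via inline asm so the compiler cannot merge,
 * tear or cache the access.
 */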
static inline int atomic_read(const atomic_t *v)
{
	int c;

	asm volatile(
		"	l	%0,%1\n"
		: "=d" (c) : "Q" (v->counter));
	return c;
}

static inline void atomic_set(atomic_t *v, int i)
{
	asm volatile(
		"	st	%1,%0\n"
		: "=Q" (v->counter) : "d" (i));
}
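
/*
 * __atomic_add_barrier() adds i to the counter and returns the previous
 * value, so atomic_add_return() adds i once more to yield the new value.
 */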
static inline int atomic_add_return(int i, atomic_t *v)
{
	return __atomic_add_barrier(i, &v->counter) + i;
}

static inline int atomic_fetch_add(int i, atomic_t *v)
{
	return __atomic_add_barrier(i, &v->counter);
}
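
/*
 * When the old value is not needed, a compile-time constant that fits
 * into a signed byte can be added directly in storage with
 * __atomic_add_const() on z196 or newer machines.
 */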
static inline void atomic_add(int i, atomic_t *v)
{
#ifdef CONFIG_HAVE_MARCH_Z196_FEATURES
	if (__builtin_constant_p(i) && (i > -129) && (i < 128)) {
		__atomic_add_const(i, &v->counter);
		return;
	}
#endif
	__atomic_add(i, &v->counter);
}
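
/* Increment, decrement and subtract are all derived from the add variants. */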
#define atomic_add_negative(_i, _v)	(atomic_add_return(_i, _v) < 0)
#define atomic_inc(_v)			atomic_add(1, _v)
#define atomic_inc_return(_v)		atomic_add_return(1, _v)
#define atomic_inc_and_test(_v)		(atomic_add_return(1, _v) == 0)
#define atomic_sub(_i, _v)		atomic_add(-(int)(_i), _v)
#define atomic_sub_return(_i, _v)	atomic_add_return(-(int)(_i), _v)
#define atomic_fetch_sub(_i, _v)	atomic_fetch_add(-(int)(_i), _v)
#define atomic_sub_and_test(_i, _v)	(atomic_sub_return(_i, _v) == 0)
#define atomic_dec(_v)			atomic_sub(1, _v)
#define atomic_dec_return(_v)		atomic_sub_return(1, _v)
#define atomic_dec_and_test(_v)		(atomic_sub_return(1, _v) == 0)
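
/*
 * ATOMIC_OPS() generates atomic_{and,or,xor}() plus the corresponding
 * atomic_fetch_*() variants, which return the value before the operation.
 */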
#define ATOMIC_OPS(op)							\
static inline void atomic_##op(int i, atomic_t *v)			\
{									\
	__atomic_##op(i, &v->counter);					\
}									\
static inline int atomic_fetch_##op(int i, atomic_t *v)		\
{									\
	return __atomic_##op##_barrier(i, &v->counter);			\
}

ATOMIC_OPS(and)
ATOMIC_OPS(or)
ATOMIC_OPS(xor)

#undef ATOMIC_OPS

#define atomic_xchg(v, new) (xchg(&((v)->counter), new))

static inline int atomic_cmpxchg(atomic_t *v, int old, int new)
{
	return __atomic_cmpxchg(&v->counter, old, new);
}
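
/*
 * Add a to v, unless v was already equal to u.  Implemented as a
 * compare-and-swap loop; returns the value of v before the operation.
 */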
static inline int __atomic_add_unless(atomic_t *v, int a, int u)
{
	int c, old;

	c = atomic_read(v);
	for (;;) {
		if (unlikely(c == u))
			break;
		old = atomic_cmpxchg(v, c, c + a);
		if (likely(old == c))
			break;
		c = old;
	}
	return c;
}
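
/*
 * 64-bit counterparts: same structure as above, using 64-bit loads,
 * stores and the __atomic64_*() primitives from <asm/atomic_ops.h>.
 */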
#define ATOMIC64_INIT(i)  { (i) }

static inline long atomic64_read(const atomic64_t *v)
{
	long c;

	asm volatile(
		"	lg	%0,%1\n"
		: "=d" (c) : "Q" (v->counter));
	return c;
}

static inline void atomic64_set(atomic64_t *v, long i)
{
	asm volatile(
		"	stg	%1,%0\n"
		: "=Q" (v->counter) : "d" (i));
}

static inline long atomic64_add_return(long i, atomic64_t *v)
{
	return __atomic64_add_barrier(i, &v->counter) + i;
}

static inline long atomic64_fetch_add(long i, atomic64_t *v)
{
	return __atomic64_add_barrier(i, &v->counter);
}
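
/* Same constant fast path as the 32-bit atomic_add() above. */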
static inline void atomic64_add(long i, atomic64_t *v)
{
#ifdef CONFIG_HAVE_MARCH_Z196_FEATURES
	if (__builtin_constant_p(i) && (i > -129) && (i < 128)) {
		__atomic64_add_const(i, &v->counter);
		return;
	}
#endif
	__atomic64_add(i, &v->counter);
}

#define atomic64_xchg(v, new) (xchg(&((v)->counter), new))

static inline long atomic64_cmpxchg(atomic64_t *v, long old, long new)
{
	return __atomic64_cmpxchg(&v->counter, old, new);
}

#define ATOMIC64_OPS(op)						\
static inline void atomic64_##op(long i, atomic64_t *v)		\
{									\
	__atomic64_##op(i, &v->counter);				\
}									\
static inline long atomic64_fetch_##op(long i, atomic64_t *v)		\
{									\
	return __atomic64_##op##_barrier(i, &v->counter);		\
}

ATOMIC64_OPS(and)
ATOMIC64_OPS(or)
ATOMIC64_OPS(xor)

#undef ATOMIC64_OPS
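
/*
 * Add i to v, unless v was already equal to u.  Returns non-zero if the
 * addition was done, i.e. the old value was not u.
 */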
static inline int atomic64_add_unless(atomic64_t *v, long i, long u)
{
	long c, old;

	c = atomic64_read(v);
	for (;;) {
		if (unlikely(c == u))
			break;
		old = atomic64_cmpxchg(v, c, c + i);
		if (likely(old == c))
			break;
		c = old;
	}
	return c != u;
}
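
/*
 * Decrement v, but only if the result stays non-negative.  Returns the
 * new value, or a negative value if no decrement was performed.
 */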
static inline long atomic64_dec_if_positive(atomic64_t *v)
{
	long c, old, dec;

	c = atomic64_read(v);
	for (;;) {
		dec = c - 1;
		if (unlikely(dec < 0))
			break;
		old = atomic64_cmpxchg((v), c, dec);
		if (likely(old == c))
			break;
		c = old;
	}
	return dec;
}
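
/* Derived 64-bit helpers, mirroring the 32-bit macros above. */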
#define atomic64_add_negative(_i, _v)	(atomic64_add_return(_i, _v) < 0)
#define atomic64_inc(_v)		atomic64_add(1, _v)
#define atomic64_inc_return(_v)		atomic64_add_return(1, _v)
#define atomic64_inc_and_test(_v)	(atomic64_add_return(1, _v) == 0)
#define atomic64_sub_return(_i, _v)	atomic64_add_return(-(long)(_i), _v)
#define atomic64_fetch_sub(_i, _v)	atomic64_fetch_add(-(long)(_i), _v)
#define atomic64_sub(_i, _v)		atomic64_add(-(long)(_i), _v)
#define atomic64_sub_and_test(_i, _v)	(atomic64_sub_return(_i, _v) == 0)
#define atomic64_dec(_v)		atomic64_sub(1, _v)
#define atomic64_dec_return(_v)		atomic64_sub_return(1, _v)
#define atomic64_dec_and_test(_v)	(atomic64_sub_return(1, _v) == 0)
#define atomic64_inc_not_zero(v)	atomic64_add_unless((v), 1, 0)

#endif /* __ARCH_S390_ATOMIC__ */