atomic.h

/* SPDX-License-Identifier: GPL-2.0 */
#ifndef _ASM_X86_ATOMIC_H
#define _ASM_X86_ATOMIC_H

#include <linux/compiler.h>
#include <linux/types.h>
#include <asm/alternative.h>
#include <asm/cmpxchg.h>
#include <asm/rmwcc.h>
#include <asm/barrier.h>

/*
 * Atomic operations that C can't guarantee us.  Useful for
 * resource counting etc..
 */

#define ATOMIC_INIT(i) { (i) }

/**
 * arch_atomic_read - read atomic variable
 * @v: pointer of type atomic_t
 *
 * Atomically reads the value of @v.
 */
static __always_inline int arch_atomic_read(const atomic_t *v)
{
        /*
         * Note for KASAN: we deliberately don't use READ_ONCE_NOCHECK() here,
         * it's a non-inlined function that increases binary size and stack usage.
         */
        return READ_ONCE((v)->counter);
}

/**
 * arch_atomic_set - set atomic variable
 * @v: pointer of type atomic_t
 * @i: required value
 *
 * Atomically sets the value of @v to @i.
 */
static __always_inline void arch_atomic_set(atomic_t *v, int i)
{
        WRITE_ONCE(v->counter, i);
}
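
/*
 * Example (not part of the original header): a minimal sketch of how a
 * simple counter might use the read/set primitives above.  The variable
 * busy_count and the two helpers are hypothetical; real kernel code would
 * normally go through the atomic_read()/atomic_set() wrappers rather than
 * calling the arch_ variants directly.
 *
 *      static atomic_t busy_count = ATOMIC_INIT(0);
 *
 *      void reset_stats(void)
 *      {
 *              arch_atomic_set(&busy_count, 0);        // plain atomic store
 *      }
 *
 *      int stats_snapshot(void)
 *      {
 *              return arch_atomic_read(&busy_count);   // plain atomic load
 *      }
 */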
/**
 * arch_atomic_add - add integer to atomic variable
 * @i: integer value to add
 * @v: pointer of type atomic_t
 *
 * Atomically adds @i to @v.
 */
static __always_inline void arch_atomic_add(int i, atomic_t *v)
{
        asm volatile(LOCK_PREFIX "addl %1,%0"
                     : "+m" (v->counter)
                     : "ir" (i));
}

/**
 * arch_atomic_sub - subtract integer from atomic variable
 * @i: integer value to subtract
 * @v: pointer of type atomic_t
 *
 * Atomically subtracts @i from @v.
 */
static __always_inline void arch_atomic_sub(int i, atomic_t *v)
{
        asm volatile(LOCK_PREFIX "subl %1,%0"
                     : "+m" (v->counter)
                     : "ir" (i));
}
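
/*
 * Example (illustrative only): the "resource counting" use mentioned at the
 * top of this file.  pages_in_flight and the two helpers are hypothetical;
 * note that these non-returning forms do not hand back the updated count.
 *
 *      static atomic_t pages_in_flight = ATOMIC_INIT(0);
 *
 *      void submit_pages(int nr)
 *      {
 *              arch_atomic_add(nr, &pages_in_flight);
 *      }
 *
 *      void complete_pages(int nr)
 *      {
 *              arch_atomic_sub(nr, &pages_in_flight);
 *      }
 */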
/**
 * arch_atomic_sub_and_test - subtract value from variable and test result
 * @i: integer value to subtract
 * @v: pointer of type atomic_t
 *
 * Atomically subtracts @i from @v and returns
 * true if the result is zero, or false for all
 * other cases.
 */
static __always_inline bool arch_atomic_sub_and_test(int i, atomic_t *v)
{
        GEN_BINARY_RMWcc(LOCK_PREFIX "subl", v->counter, "er", i, "%0", e);
}
#define arch_atomic_sub_and_test arch_atomic_sub_and_test

/**
 * arch_atomic_inc - increment atomic variable
 * @v: pointer of type atomic_t
 *
 * Atomically increments @v by 1.
 */
static __always_inline void arch_atomic_inc(atomic_t *v)
{
        asm volatile(LOCK_PREFIX "incl %0"
                     : "+m" (v->counter));
}
#define arch_atomic_inc arch_atomic_inc

/**
 * arch_atomic_dec - decrement atomic variable
 * @v: pointer of type atomic_t
 *
 * Atomically decrements @v by 1.
 */
static __always_inline void arch_atomic_dec(atomic_t *v)
{
        asm volatile(LOCK_PREFIX "decl %0"
                     : "+m" (v->counter));
}
#define arch_atomic_dec arch_atomic_dec

/**
 * arch_atomic_dec_and_test - decrement and test
 * @v: pointer of type atomic_t
 *
 * Atomically decrements @v by 1 and
 * returns true if the result is 0, or false for all other
 * cases.
 */
static __always_inline bool arch_atomic_dec_and_test(atomic_t *v)
{
        GEN_UNARY_RMWcc(LOCK_PREFIX "decl", v->counter, "%0", e);
}
#define arch_atomic_dec_and_test arch_atomic_dec_and_test
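
/*
 * Example (illustrative sketch): the classic reference-count "put" pattern
 * that arch_atomic_dec_and_test() enables.  struct foo, foo_get() and
 * foo_put() are hypothetical; real code would normally use refcount_t or
 * kref, which are built on operations like these.
 *
 *      struct foo {
 *              atomic_t refs;
 *      };
 *
 *      void foo_get(struct foo *p)
 *      {
 *              arch_atomic_inc(&p->refs);
 *      }
 *
 *      void foo_put(struct foo *p)
 *      {
 *              if (arch_atomic_dec_and_test(&p->refs))  // last reference gone?
 *                      kfree(p);
 *      }
 */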
/**
 * arch_atomic_inc_and_test - increment and test
 * @v: pointer of type atomic_t
 *
 * Atomically increments @v by 1
 * and returns true if the result is zero, or false for all
 * other cases.
 */
static __always_inline bool arch_atomic_inc_and_test(atomic_t *v)
{
        GEN_UNARY_RMWcc(LOCK_PREFIX "incl", v->counter, "%0", e);
}
#define arch_atomic_inc_and_test arch_atomic_inc_and_test

/**
 * arch_atomic_add_negative - add and test if negative
 * @i: integer value to add
 * @v: pointer of type atomic_t
 *
 * Atomically adds @i to @v and returns true
 * if the result is negative, or false when
 * result is greater than or equal to zero.
 */
static __always_inline bool arch_atomic_add_negative(int i, atomic_t *v)
{
        GEN_BINARY_RMWcc(LOCK_PREFIX "addl", v->counter, "er", i, "%0", s);
}
#define arch_atomic_add_negative arch_atomic_add_negative

/**
 * arch_atomic_add_return - add integer and return
 * @i: integer value to add
 * @v: pointer of type atomic_t
 *
 * Atomically adds @i to @v and returns @i + @v
 */
static __always_inline int arch_atomic_add_return(int i, atomic_t *v)
{
        return i + xadd(&v->counter, i);
}

/**
 * arch_atomic_sub_return - subtract integer and return
 * @v: pointer of type atomic_t
 * @i: integer value to subtract
 *
 * Atomically subtracts @i from @v and returns @v - @i
 */
static __always_inline int arch_atomic_sub_return(int i, atomic_t *v)
{
        return arch_atomic_add_return(-i, v);
}
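
/*
 * Example (illustrative sketch): unlike arch_atomic_add()/arch_atomic_sub(),
 * the *_return forms hand back the value after the update, which is what
 * makes "hand out the next slot" style code possible.  next_id and
 * alloc_id() are hypothetical.
 *
 *      static atomic_t next_id = ATOMIC_INIT(0);
 *
 *      int alloc_id(void)
 *      {
 *              return arch_atomic_add_return(1, &next_id);  // new value, so IDs start at 1
 *      }
 */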
static __always_inline int arch_atomic_fetch_add(int i, atomic_t *v)
{
        return xadd(&v->counter, i);
}

static __always_inline int arch_atomic_fetch_sub(int i, atomic_t *v)
{
        return xadd(&v->counter, -i);
}

static __always_inline int arch_atomic_cmpxchg(atomic_t *v, int old, int new)
{
        return arch_cmpxchg(&v->counter, old, new);
}

#define arch_atomic_try_cmpxchg arch_atomic_try_cmpxchg
static __always_inline bool arch_atomic_try_cmpxchg(atomic_t *v, int *old, int new)
{
        return try_cmpxchg(&v->counter, old, new);
}
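
/*
 * Example (illustrative sketch): the usual arch_atomic_try_cmpxchg() loop,
 * here used to record a running maximum.  On failure try_cmpxchg() refreshes
 * *old with the current value, so the loop never has to re-read @v by hand;
 * the arch_atomic_fetch_and/or/xor() helpers below use exactly this shape.
 * atomic_max() is a hypothetical helper, not part of this header.
 *
 *      static void atomic_max(atomic_t *v, int new)
 *      {
 *              int old = arch_atomic_read(v);
 *
 *              do {
 *                      if (old >= new)
 *                              return;         // already large enough
 *              } while (!arch_atomic_try_cmpxchg(v, &old, new));
 *      }
 */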
static inline int arch_atomic_xchg(atomic_t *v, int new)
{
        return arch_xchg(&v->counter, new);
}

static inline void arch_atomic_and(int i, atomic_t *v)
{
        asm volatile(LOCK_PREFIX "andl %1,%0"
                     : "+m" (v->counter)
                     : "ir" (i)
                     : "memory");
}

static inline int arch_atomic_fetch_and(int i, atomic_t *v)
{
        int val = arch_atomic_read(v);

        do { } while (!arch_atomic_try_cmpxchg(v, &val, val & i));

        return val;
}

static inline void arch_atomic_or(int i, atomic_t *v)
{
        asm volatile(LOCK_PREFIX "orl %1,%0"
                     : "+m" (v->counter)
                     : "ir" (i)
                     : "memory");
}

static inline int arch_atomic_fetch_or(int i, atomic_t *v)
{
        int val = arch_atomic_read(v);

        do { } while (!arch_atomic_try_cmpxchg(v, &val, val | i));

        return val;
}

static inline void arch_atomic_xor(int i, atomic_t *v)
{
        asm volatile(LOCK_PREFIX "xorl %1,%0"
                     : "+m" (v->counter)
                     : "ir" (i)
                     : "memory");
}

static inline int arch_atomic_fetch_xor(int i, atomic_t *v)
{
        int val = arch_atomic_read(v);

        do { } while (!arch_atomic_try_cmpxchg(v, &val, val ^ i));

        return val;
}
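
/*
 * Example (illustrative sketch): the bitwise helpers are convenient for a
 * small flag word.  FLAG_BUSY, flags and the two helpers are hypothetical;
 * code that wants per-bit test-and-set semantics would normally use the
 * bitops API instead.
 *
 *      #define FLAG_BUSY 0x1
 *
 *      static atomic_t flags = ATOMIC_INIT(0);
 *
 *      // Returns true if this caller was the one to set FLAG_BUSY.
 *      static bool claim_busy(void)
 *      {
 *              return !(arch_atomic_fetch_or(FLAG_BUSY, &flags) & FLAG_BUSY);
 *      }
 *
 *      static void release_busy(void)
 *      {
 *              arch_atomic_and(~FLAG_BUSY, &flags);
 *      }
 */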
#ifdef CONFIG_X86_32
# include <asm/atomic64_32.h>
#else
# include <asm/atomic64_64.h>
#endif

#include <asm-generic/atomic-instrumented.h>

#endif /* _ASM_X86_ATOMIC_H */