atomic64_32.h
/* SPDX-License-Identifier: GPL-2.0 */
#ifndef _ASM_X86_ATOMIC64_32_H
#define _ASM_X86_ATOMIC64_32_H

#include <linux/compiler.h>
#include <linux/types.h>
//#include <asm/cmpxchg.h>
/* A 64-bit atomic type */

typedef struct {
	u64 __aligned(8) counter;
} atomic64_t;

#define ATOMIC64_INIT(val)	{ (val) }
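/*
 * Usage sketch (illustrative only; the variable name is hypothetical):
 * ATOMIC64_INIT() exists for static initialization, where the helper
 * functions below cannot be called:
 *
 *	static atomic64_t total_bytes = ATOMIC64_INIT(0);
 */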
#define __ATOMIC64_DECL(sym) void atomic64_##sym(atomic64_t *, ...)
#ifndef ATOMIC64_EXPORT
#define ATOMIC64_DECL_ONE __ATOMIC64_DECL
#else
#define ATOMIC64_DECL_ONE(sym) __ATOMIC64_DECL(sym); \
	ATOMIC64_EXPORT(atomic64_##sym)
#endif

#ifdef CONFIG_X86_CMPXCHG64
#define __alternative_atomic64(f, g, out, in...) \
	asm volatile("call %P[func]" \
		     : out : [func] "i" (atomic64_##g##_cx8), ## in)

#define ATOMIC64_DECL(sym) ATOMIC64_DECL_ONE(sym##_cx8)
#else
#define __alternative_atomic64(f, g, out, in...) \
	alternative_call(atomic64_##f##_386, atomic64_##g##_cx8, \
			 X86_FEATURE_CX8, ASM_OUTPUT2(out), ## in)

#define ATOMIC64_DECL(sym) ATOMIC64_DECL_ONE(sym##_cx8); \
	ATOMIC64_DECL_ONE(sym##_386)

ATOMIC64_DECL_ONE(add_386);
ATOMIC64_DECL_ONE(sub_386);
ATOMIC64_DECL_ONE(inc_386);
ATOMIC64_DECL_ONE(dec_386);
#endif

#define alternative_atomic64(f, out, in...) \
	__alternative_atomic64(f, f, ASM_OUTPUT2(out), ## in)
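/*
 * Illustrative expansion of the wrapper above (a sketch, not literal
 * compiler output), taking arch_atomic64_read() below as the example.
 * On a CONFIG_X86_CMPXCHG64 build,
 *
 *	alternative_atomic64(read, "=&A" (r), "c" (v) : "memory");
 *
 * becomes roughly
 *
 *	asm volatile("call %P[func]"
 *		     : "=&A" (r)
 *		     : [func] "i" (atomic64_read_cx8), "c" (v)
 *		     : "memory");
 *
 * i.e. a direct call into the out-of-line cmpxchg8b-based helper.  Without
 * CONFIG_X86_CMPXCHG64, alternative_call() instead patches the call site
 * at boot to either the _386 fallback or the _cx8 helper, keyed on
 * X86_FEATURE_CX8.
 */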
ATOMIC64_DECL(read);
ATOMIC64_DECL(set);
ATOMIC64_DECL(xchg);
ATOMIC64_DECL(add_return);
ATOMIC64_DECL(sub_return);
ATOMIC64_DECL(inc_return);
ATOMIC64_DECL(dec_return);
ATOMIC64_DECL(dec_if_positive);
ATOMIC64_DECL(inc_not_zero);
ATOMIC64_DECL(add_unless);

#undef ATOMIC64_DECL
#undef ATOMIC64_DECL_ONE
#undef __ATOMIC64_DECL
#undef ATOMIC64_EXPORT
/**
 * arch_atomic64_cmpxchg - cmpxchg atomic64 variable
 * @v: pointer to type atomic64_t
 * @o: expected value
 * @n: new value
 *
 * Atomically sets @v to @n if it was equal to @o and returns
 * the old value.
 */
static inline long long arch_atomic64_cmpxchg(atomic64_t *v, long long o,
					      long long n)
{
	return arch_cmpxchg64(&v->counter, o, n);
}
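/*
 * Usage sketch (illustrative, not part of this header): cmpxchg is the
 * building block for read-modify-write retry loops, the same pattern the
 * arch_atomic64_{and,or,xor,fetch_add}() helpers at the bottom of this
 * file use:
 *
 *	long long old, c = arch_atomic64_read(v);
 *
 *	while ((old = arch_atomic64_cmpxchg(v, c, c + 1)) != c)
 *		c = old;
 *	// the increment has now been applied exactly once; old holds the
 *	// value seen by the winning cmpxchg
 */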
/**
 * arch_atomic64_xchg - xchg atomic64 variable
 * @v: pointer to type atomic64_t
 * @n: value to assign
 *
 * Atomically exchanges the value of @v with @n and returns
 * the old value.
 */
static inline long long arch_atomic64_xchg(atomic64_t *v, long long n)
{
	long long o;
	unsigned high = (unsigned)(n >> 32);
	unsigned low = (unsigned)n;

	alternative_atomic64(xchg, "=&A" (o),
			     "S" (v), "b" (low), "c" (high)
			     : "memory");
	return o;
}
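/*
 * Register notes for the sequence above: the "A" constraint binds the
 * 64-bit result to the EDX:EAX pair, "S" passes the atomic64_t pointer in
 * ESI, and the new value is split into EBX (low half) / ECX (high half),
 * matching the operands the cmpxchg8b instruction in the out-of-line
 * helper expects.
 */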
/**
 * arch_atomic64_set - set atomic64 variable
 * @v: pointer to type atomic64_t
 * @i: value to assign
 *
 * Atomically sets the value of @v to @i.
 */
static inline void arch_atomic64_set(atomic64_t *v, long long i)
{
	unsigned high = (unsigned)(i >> 32);
	unsigned low = (unsigned)i;

	alternative_atomic64(set, /* no output */,
			     "S" (v), "b" (low), "c" (high)
			     : "eax", "edx", "memory");
}
/**
 * arch_atomic64_read - read atomic64 variable
 * @v: pointer to type atomic64_t
 *
 * Atomically reads the value of @v and returns it.
 */
static inline long long arch_atomic64_read(const atomic64_t *v)
{
	long long r;

	alternative_atomic64(read, "=&A" (r), "c" (v) : "memory");
	return r;
}
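/*
 * Note that even a plain read goes through an out-of-line helper: a 32-bit
 * CPU has no general single-instruction 64-bit load, so the _cx8 helper
 * uses cmpxchg8b to make the 8-byte access single-copy atomic (the _386
 * fallback gets away without it only because CX8-less CPUs are
 * uniprocessor-only).
 */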
/**
 * arch_atomic64_add_return - add and return
 * @i: integer value to add
 * @v: pointer to type atomic64_t
 *
 * Atomically adds @i to @v and returns @i + *@v
 */
static inline long long arch_atomic64_add_return(long long i, atomic64_t *v)
{
	alternative_atomic64(add_return,
			     ASM_OUTPUT2("+A" (i), "+c" (v)),
			     ASM_NO_INPUT_CLOBBER("memory"));
	return i;
}
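/*
 * Calling convention note: "+A" makes EDX:EAX both input and output, so
 * the helper receives the addend and hands back the new total in the same
 * register pair, which is why the function can simply return i; "+c"
 * passes v in ECX (and allows the helper to clobber it).
 */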
/*
 * Other variants with different arithmetic operators:
 */
static inline long long arch_atomic64_sub_return(long long i, atomic64_t *v)
{
	alternative_atomic64(sub_return,
			     ASM_OUTPUT2("+A" (i), "+c" (v)),
			     ASM_NO_INPUT_CLOBBER("memory"));
	return i;
}

static inline long long arch_atomic64_inc_return(atomic64_t *v)
{
	long long a;

	alternative_atomic64(inc_return, "=&A" (a),
			     "S" (v) : "memory", "ecx");
	return a;
}
#define arch_atomic64_inc_return arch_atomic64_inc_return

static inline long long arch_atomic64_dec_return(atomic64_t *v)
{
	long long a;

	alternative_atomic64(dec_return, "=&A" (a),
			     "S" (v) : "memory", "ecx");
	return a;
}
#define arch_atomic64_dec_return arch_atomic64_dec_return
/**
 * arch_atomic64_add - add integer to atomic64 variable
 * @i: integer value to add
 * @v: pointer to type atomic64_t
 *
 * Atomically adds @i to @v.
 */
static inline long long arch_atomic64_add(long long i, atomic64_t *v)
{
	__alternative_atomic64(add, add_return,
			       ASM_OUTPUT2("+A" (i), "+c" (v)),
			       ASM_NO_INPUT_CLOBBER("memory"));
	return i;
}
/**
 * arch_atomic64_sub - subtract integer from atomic64 variable
 * @i: integer value to subtract
 * @v: pointer to type atomic64_t
 *
 * Atomically subtracts @i from @v.
 */
static inline long long arch_atomic64_sub(long long i, atomic64_t *v)
{
	__alternative_atomic64(sub, sub_return,
			       ASM_OUTPUT2("+A" (i), "+c" (v)),
			       ASM_NO_INPUT_CLOBBER("memory"));
	return i;
}
/**
 * arch_atomic64_inc - increment atomic64 variable
 * @v: pointer to type atomic64_t
 *
 * Atomically increments @v by 1.
 */
static inline void arch_atomic64_inc(atomic64_t *v)
{
	__alternative_atomic64(inc, inc_return, /* no output */,
			       "S" (v) : "memory", "eax", "ecx", "edx");
}
#define arch_atomic64_inc arch_atomic64_inc

/**
 * arch_atomic64_dec - decrement atomic64 variable
 * @v: pointer to type atomic64_t
 *
 * Atomically decrements @v by 1.
 */
static inline void arch_atomic64_dec(atomic64_t *v)
{
	__alternative_atomic64(dec, dec_return, /* no output */,
			       "S" (v) : "memory", "eax", "ecx", "edx");
}
#define arch_atomic64_dec arch_atomic64_dec
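/*
 * Note the f/g split in __alternative_atomic64() for inc and dec above:
 * the declaration list earlier in this file defines no plain
 * atomic64_inc_cx8/atomic64_dec_cx8, so cmpxchg8b-capable CPUs call the
 * *_return_cx8 helper and discard its result (hence the eax/edx clobbers);
 * only the i386 fallback path has dedicated atomic64_inc_386 and
 * atomic64_dec_386 entry points.
 */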
/**
 * arch_atomic64_add_unless - add unless the number is a given value
 * @v: pointer of type atomic64_t
 * @a: the amount to add to v...
 * @u: ...unless v is equal to u.
 *
 * Atomically adds @a to @v, so long as it was not @u.
 * Returns non-zero if the add was done, zero otherwise.
 */
static inline int arch_atomic64_add_unless(atomic64_t *v, long long a,
					   long long u)
{
	unsigned low = (unsigned)u;
	unsigned high = (unsigned)(u >> 32);

	alternative_atomic64(add_unless,
			     ASM_OUTPUT2("+A" (a), "+c" (low), "+D" (high)),
			     "S" (v) : "memory");
	return (int)a;
}
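/*
 * Usage sketch (illustrative; "obj" and "refs" are hypothetical names):
 * add_unless is the building block for "take a reference unless the count
 * has already hit zero":
 *
 *	if (!arch_atomic64_add_unless(&obj->refs, 1, 0))
 *		return NULL;	// object is already being torn down
 */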
static inline int arch_atomic64_inc_not_zero(atomic64_t *v)
{
	int r;

	alternative_atomic64(inc_not_zero, "=&a" (r),
			     "S" (v) : "ecx", "edx", "memory");
	return r;
}
#define arch_atomic64_inc_not_zero arch_atomic64_inc_not_zero

static inline long long arch_atomic64_dec_if_positive(atomic64_t *v)
{
	long long r;

	alternative_atomic64(dec_if_positive, "=&A" (r),
			     "S" (v) : "ecx", "memory");
	return r;
}
#define arch_atomic64_dec_if_positive arch_atomic64_dec_if_positive
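/*
 * Usage note (following the generic atomic_dec_if_positive() convention):
 * the returned value is the decremented result, and the store is only
 * performed while that result is still >= 0, so a caller can do e.g.
 *
 *	if (arch_atomic64_dec_if_positive(&sem_count) >= 0)
 *		... slot acquired ...
 *
 * where sem_count is a hypothetical atomic64_t used only for this sketch.
 */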
#undef alternative_atomic64
#undef __alternative_atomic64
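/*
 * The bitwise and fetch_add operations below have no out-of-line helper;
 * they are built from a cmpxchg loop instead.  Each loop starts from the
 * guess c == 0; every failed cmpxchg returns the value actually observed,
 * which becomes the next guess, so absent contention the loop settles
 * after at most one extra pass.
 */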
static inline void arch_atomic64_and(long long i, atomic64_t *v)
{
	long long old, c = 0;

	while ((old = arch_atomic64_cmpxchg(v, c, c & i)) != c)
		c = old;
}

static inline long long arch_atomic64_fetch_and(long long i, atomic64_t *v)
{
	long long old, c = 0;

	while ((old = arch_atomic64_cmpxchg(v, c, c & i)) != c)
		c = old;

	return old;
}

static inline void arch_atomic64_or(long long i, atomic64_t *v)
{
	long long old, c = 0;

	while ((old = arch_atomic64_cmpxchg(v, c, c | i)) != c)
		c = old;
}

static inline long long arch_atomic64_fetch_or(long long i, atomic64_t *v)
{
	long long old, c = 0;

	while ((old = arch_atomic64_cmpxchg(v, c, c | i)) != c)
		c = old;

	return old;
}

static inline void arch_atomic64_xor(long long i, atomic64_t *v)
{
	long long old, c = 0;

	while ((old = arch_atomic64_cmpxchg(v, c, c ^ i)) != c)
		c = old;
}

static inline long long arch_atomic64_fetch_xor(long long i, atomic64_t *v)
{
	long long old, c = 0;

	while ((old = arch_atomic64_cmpxchg(v, c, c ^ i)) != c)
		c = old;

	return old;
}

static inline long long arch_atomic64_fetch_add(long long i, atomic64_t *v)
{
	long long old, c = 0;

	while ((old = arch_atomic64_cmpxchg(v, c, c + i)) != c)
		c = old;

	return old;
}

#define arch_atomic64_fetch_sub(i, v)	arch_atomic64_fetch_add(-(i), (v))

#endif /* _ASM_X86_ATOMIC64_32_H */