/* SPDX-License-Identifier: GPL-2.0 */
/* Atomic operations usable in machine independent code */
#ifndef _LINUX_ATOMIC_H
#define _LINUX_ATOMIC_H
#include <asm/atomic.h>
#include <asm/barrier.h>

/*
 * Relaxed variants of xchg, cmpxchg and some atomic operations.
 *
 * We support four variants:
 *
 * - Fully ordered: The default implementation, no suffix required.
 * - Acquire: Provides ACQUIRE semantics, _acquire suffix.
 * - Release: Provides RELEASE semantics, _release suffix.
 * - Relaxed: No ordering guarantees, _relaxed suffix.
 *
 * For compound atomics performing both a load and a store, ACQUIRE
 * semantics apply only to the load and RELEASE semantics only to the
 * store portion of the operation. Note that a failed cmpxchg_acquire
 * does -not- imply any memory ordering constraints.
 *
 * See Documentation/memory-barriers.txt for ACQUIRE/RELEASE definitions.
 */

#ifndef atomic_read_acquire
#define atomic_read_acquire(v)		smp_load_acquire(&(v)->counter)
#endif

#ifndef atomic_set_release
#define atomic_set_release(v, i)	smp_store_release(&(v)->counter, (i))
#endif
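/*
 * Example (illustrative sketch, not part of the original header): the
 * _release/_acquire pair gives the classic message-passing guarantee.
 * 'payload', 'ready', compute() and do_work() are hypothetical.
 *
 *	static int payload;
 *	static atomic_t ready = ATOMIC_INIT(0);
 *
 *	void producer(void)
 *	{
 *		payload = compute();		// plain store
 *		atomic_set_release(&ready, 1);	// orders payload before ready
 *	}
 *
 *	void consumer(void)
 *	{
 *		if (atomic_read_acquire(&ready))	// orders ready before payload
 *			do_work(payload);	// guaranteed to see compute()'s result
 *	}
 */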
/*
 * The idea here is to build acquire/release variants by adding explicit
 * barriers on top of the relaxed variant. In the case where the relaxed
 * variant is already fully ordered, no additional barriers are needed.
 *
 * In addition, if an arch has a special barrier for acquire/release, it
 * could implement its own __atomic_op_* and use the same framework for
 * building variants.
 *
 * If an architecture overrides __atomic_op_acquire() it will probably
 * want to define smp_mb__after_spinlock().
 */
#ifndef __atomic_op_acquire
#define __atomic_op_acquire(op, args...) \
({ \
	typeof(op##_relaxed(args)) __ret = op##_relaxed(args); \
	smp_mb__after_atomic(); \
	__ret; \
})
#endif

#ifndef __atomic_op_release
#define __atomic_op_release(op, args...) \
({ \
	smp_mb__before_atomic(); \
	op##_relaxed(args); \
})
#endif

#ifndef __atomic_op_fence
#define __atomic_op_fence(op, args...) \
({ \
	typeof(op##_relaxed(args)) __ret; \
	smp_mb__before_atomic(); \
	__ret = op##_relaxed(args); \
	smp_mb__after_atomic(); \
	__ret; \
})
#endif
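/*
 * For illustration: on an architecture that supplies only
 * atomic_add_return_relaxed(), the boilerplate below makes
 * atomic_add_return(i, v) expand, roughly, to:
 *
 *	({
 *		int __ret;
 *		smp_mb__before_atomic();
 *		__ret = atomic_add_return_relaxed(i, v);
 *		smp_mb__after_atomic();
 *		__ret;
 *	})
 *
 * i.e. the fully ordered variant is the relaxed variant bracketed by
 * full barriers.
 */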
/* atomic_add_return_relaxed */
#ifndef atomic_add_return_relaxed
#define atomic_add_return_relaxed	atomic_add_return
#define atomic_add_return_acquire	atomic_add_return
#define atomic_add_return_release	atomic_add_return
#else /* atomic_add_return_relaxed */
#ifndef atomic_add_return_acquire
#define atomic_add_return_acquire(...) \
	__atomic_op_acquire(atomic_add_return, __VA_ARGS__)
#endif
#ifndef atomic_add_return_release
#define atomic_add_return_release(...) \
	__atomic_op_release(atomic_add_return, __VA_ARGS__)
#endif
#ifndef atomic_add_return
#define atomic_add_return(...) \
	__atomic_op_fence(atomic_add_return, __VA_ARGS__)
#endif
#endif /* atomic_add_return_relaxed */

/* atomic_inc_return_relaxed */
#ifndef atomic_inc_return_relaxed
#define atomic_inc_return_relaxed	atomic_inc_return
#define atomic_inc_return_acquire	atomic_inc_return
#define atomic_inc_return_release	atomic_inc_return
#else /* atomic_inc_return_relaxed */
#ifndef atomic_inc_return_acquire
#define atomic_inc_return_acquire(...) \
	__atomic_op_acquire(atomic_inc_return, __VA_ARGS__)
#endif
#ifndef atomic_inc_return_release
#define atomic_inc_return_release(...) \
	__atomic_op_release(atomic_inc_return, __VA_ARGS__)
#endif
#ifndef atomic_inc_return
#define atomic_inc_return(...) \
	__atomic_op_fence(atomic_inc_return, __VA_ARGS__)
#endif
#endif /* atomic_inc_return_relaxed */

/* atomic_sub_return_relaxed */
#ifndef atomic_sub_return_relaxed
#define atomic_sub_return_relaxed	atomic_sub_return
#define atomic_sub_return_acquire	atomic_sub_return
#define atomic_sub_return_release	atomic_sub_return
#else /* atomic_sub_return_relaxed */
#ifndef atomic_sub_return_acquire
#define atomic_sub_return_acquire(...) \
	__atomic_op_acquire(atomic_sub_return, __VA_ARGS__)
#endif
#ifndef atomic_sub_return_release
#define atomic_sub_return_release(...) \
	__atomic_op_release(atomic_sub_return, __VA_ARGS__)
#endif
#ifndef atomic_sub_return
#define atomic_sub_return(...) \
	__atomic_op_fence(atomic_sub_return, __VA_ARGS__)
#endif
#endif /* atomic_sub_return_relaxed */

/* atomic_dec_return_relaxed */
#ifndef atomic_dec_return_relaxed
#define atomic_dec_return_relaxed	atomic_dec_return
#define atomic_dec_return_acquire	atomic_dec_return
#define atomic_dec_return_release	atomic_dec_return
#else /* atomic_dec_return_relaxed */
#ifndef atomic_dec_return_acquire
#define atomic_dec_return_acquire(...) \
	__atomic_op_acquire(atomic_dec_return, __VA_ARGS__)
#endif
#ifndef atomic_dec_return_release
#define atomic_dec_return_release(...) \
	__atomic_op_release(atomic_dec_return, __VA_ARGS__)
#endif
#ifndef atomic_dec_return
#define atomic_dec_return(...) \
	__atomic_op_fence(atomic_dec_return, __VA_ARGS__)
#endif
#endif /* atomic_dec_return_relaxed */

/* atomic_fetch_add_relaxed */
#ifndef atomic_fetch_add_relaxed
#define atomic_fetch_add_relaxed	atomic_fetch_add
#define atomic_fetch_add_acquire	atomic_fetch_add
#define atomic_fetch_add_release	atomic_fetch_add
#else /* atomic_fetch_add_relaxed */
#ifndef atomic_fetch_add_acquire
#define atomic_fetch_add_acquire(...) \
	__atomic_op_acquire(atomic_fetch_add, __VA_ARGS__)
#endif
#ifndef atomic_fetch_add_release
#define atomic_fetch_add_release(...) \
	__atomic_op_release(atomic_fetch_add, __VA_ARGS__)
#endif
#ifndef atomic_fetch_add
#define atomic_fetch_add(...) \
	__atomic_op_fence(atomic_fetch_add, __VA_ARGS__)
#endif
#endif /* atomic_fetch_add_relaxed */

/* atomic_fetch_inc_relaxed */
#ifndef atomic_fetch_inc_relaxed
#ifndef atomic_fetch_inc
#define atomic_fetch_inc(v)		atomic_fetch_add(1, (v))
#define atomic_fetch_inc_relaxed(v)	atomic_fetch_add_relaxed(1, (v))
#define atomic_fetch_inc_acquire(v)	atomic_fetch_add_acquire(1, (v))
#define atomic_fetch_inc_release(v)	atomic_fetch_add_release(1, (v))
#else /* atomic_fetch_inc */
#define atomic_fetch_inc_relaxed	atomic_fetch_inc
#define atomic_fetch_inc_acquire	atomic_fetch_inc
#define atomic_fetch_inc_release	atomic_fetch_inc
#endif /* atomic_fetch_inc */
#else /* atomic_fetch_inc_relaxed */
#ifndef atomic_fetch_inc_acquire
#define atomic_fetch_inc_acquire(...) \
	__atomic_op_acquire(atomic_fetch_inc, __VA_ARGS__)
#endif
#ifndef atomic_fetch_inc_release
#define atomic_fetch_inc_release(...) \
	__atomic_op_release(atomic_fetch_inc, __VA_ARGS__)
#endif
#ifndef atomic_fetch_inc
#define atomic_fetch_inc(...) \
	__atomic_op_fence(atomic_fetch_inc, __VA_ARGS__)
#endif
#endif /* atomic_fetch_inc_relaxed */

/* atomic_fetch_sub_relaxed */
#ifndef atomic_fetch_sub_relaxed
#define atomic_fetch_sub_relaxed	atomic_fetch_sub
#define atomic_fetch_sub_acquire	atomic_fetch_sub
#define atomic_fetch_sub_release	atomic_fetch_sub
#else /* atomic_fetch_sub_relaxed */
#ifndef atomic_fetch_sub_acquire
#define atomic_fetch_sub_acquire(...) \
	__atomic_op_acquire(atomic_fetch_sub, __VA_ARGS__)
#endif
#ifndef atomic_fetch_sub_release
#define atomic_fetch_sub_release(...) \
	__atomic_op_release(atomic_fetch_sub, __VA_ARGS__)
#endif
#ifndef atomic_fetch_sub
#define atomic_fetch_sub(...) \
	__atomic_op_fence(atomic_fetch_sub, __VA_ARGS__)
#endif
#endif /* atomic_fetch_sub_relaxed */

/* atomic_fetch_dec_relaxed */
#ifndef atomic_fetch_dec_relaxed
#ifndef atomic_fetch_dec
#define atomic_fetch_dec(v)		atomic_fetch_sub(1, (v))
#define atomic_fetch_dec_relaxed(v)	atomic_fetch_sub_relaxed(1, (v))
#define atomic_fetch_dec_acquire(v)	atomic_fetch_sub_acquire(1, (v))
#define atomic_fetch_dec_release(v)	atomic_fetch_sub_release(1, (v))
#else /* atomic_fetch_dec */
#define atomic_fetch_dec_relaxed	atomic_fetch_dec
#define atomic_fetch_dec_acquire	atomic_fetch_dec
#define atomic_fetch_dec_release	atomic_fetch_dec
#endif /* atomic_fetch_dec */
#else /* atomic_fetch_dec_relaxed */
#ifndef atomic_fetch_dec_acquire
#define atomic_fetch_dec_acquire(...) \
	__atomic_op_acquire(atomic_fetch_dec, __VA_ARGS__)
#endif
#ifndef atomic_fetch_dec_release
#define atomic_fetch_dec_release(...) \
	__atomic_op_release(atomic_fetch_dec, __VA_ARGS__)
#endif
#ifndef atomic_fetch_dec
#define atomic_fetch_dec(...) \
	__atomic_op_fence(atomic_fetch_dec, __VA_ARGS__)
#endif
#endif /* atomic_fetch_dec_relaxed */

/* atomic_fetch_or_relaxed */
#ifndef atomic_fetch_or_relaxed
#define atomic_fetch_or_relaxed		atomic_fetch_or
#define atomic_fetch_or_acquire		atomic_fetch_or
#define atomic_fetch_or_release		atomic_fetch_or
#else /* atomic_fetch_or_relaxed */
#ifndef atomic_fetch_or_acquire
#define atomic_fetch_or_acquire(...) \
	__atomic_op_acquire(atomic_fetch_or, __VA_ARGS__)
#endif
#ifndef atomic_fetch_or_release
#define atomic_fetch_or_release(...) \
	__atomic_op_release(atomic_fetch_or, __VA_ARGS__)
#endif
#ifndef atomic_fetch_or
#define atomic_fetch_or(...) \
	__atomic_op_fence(atomic_fetch_or, __VA_ARGS__)
#endif
#endif /* atomic_fetch_or_relaxed */

/* atomic_fetch_and_relaxed */
#ifndef atomic_fetch_and_relaxed
#define atomic_fetch_and_relaxed	atomic_fetch_and
#define atomic_fetch_and_acquire	atomic_fetch_and
#define atomic_fetch_and_release	atomic_fetch_and
#else /* atomic_fetch_and_relaxed */
#ifndef atomic_fetch_and_acquire
#define atomic_fetch_and_acquire(...) \
	__atomic_op_acquire(atomic_fetch_and, __VA_ARGS__)
#endif
#ifndef atomic_fetch_and_release
#define atomic_fetch_and_release(...) \
	__atomic_op_release(atomic_fetch_and, __VA_ARGS__)
#endif
#ifndef atomic_fetch_and
#define atomic_fetch_and(...) \
	__atomic_op_fence(atomic_fetch_and, __VA_ARGS__)
#endif
#endif /* atomic_fetch_and_relaxed */

#ifdef atomic_andnot
/* atomic_fetch_andnot_relaxed */
#ifndef atomic_fetch_andnot_relaxed
#define atomic_fetch_andnot_relaxed	atomic_fetch_andnot
#define atomic_fetch_andnot_acquire	atomic_fetch_andnot
#define atomic_fetch_andnot_release	atomic_fetch_andnot
#else /* atomic_fetch_andnot_relaxed */
#ifndef atomic_fetch_andnot_acquire
#define atomic_fetch_andnot_acquire(...) \
	__atomic_op_acquire(atomic_fetch_andnot, __VA_ARGS__)
#endif
#ifndef atomic_fetch_andnot_release
#define atomic_fetch_andnot_release(...) \
	__atomic_op_release(atomic_fetch_andnot, __VA_ARGS__)
#endif
#ifndef atomic_fetch_andnot
#define atomic_fetch_andnot(...) \
	__atomic_op_fence(atomic_fetch_andnot, __VA_ARGS__)
#endif
#endif /* atomic_fetch_andnot_relaxed */
#endif /* atomic_andnot */

/* atomic_fetch_xor_relaxed */
#ifndef atomic_fetch_xor_relaxed
#define atomic_fetch_xor_relaxed	atomic_fetch_xor
#define atomic_fetch_xor_acquire	atomic_fetch_xor
#define atomic_fetch_xor_release	atomic_fetch_xor
#else /* atomic_fetch_xor_relaxed */
#ifndef atomic_fetch_xor_acquire
#define atomic_fetch_xor_acquire(...) \
	__atomic_op_acquire(atomic_fetch_xor, __VA_ARGS__)
#endif
#ifndef atomic_fetch_xor_release
#define atomic_fetch_xor_release(...) \
	__atomic_op_release(atomic_fetch_xor, __VA_ARGS__)
#endif
#ifndef atomic_fetch_xor
#define atomic_fetch_xor(...) \
	__atomic_op_fence(atomic_fetch_xor, __VA_ARGS__)
#endif
#endif /* atomic_fetch_xor_relaxed */

/* atomic_xchg_relaxed */
#ifndef atomic_xchg_relaxed
#define atomic_xchg_relaxed		atomic_xchg
#define atomic_xchg_acquire		atomic_xchg
#define atomic_xchg_release		atomic_xchg
#else /* atomic_xchg_relaxed */
#ifndef atomic_xchg_acquire
#define atomic_xchg_acquire(...) \
	__atomic_op_acquire(atomic_xchg, __VA_ARGS__)
#endif
#ifndef atomic_xchg_release
#define atomic_xchg_release(...) \
	__atomic_op_release(atomic_xchg, __VA_ARGS__)
#endif
#ifndef atomic_xchg
#define atomic_xchg(...) \
	__atomic_op_fence(atomic_xchg, __VA_ARGS__)
#endif
#endif /* atomic_xchg_relaxed */

/* atomic_cmpxchg_relaxed */
#ifndef atomic_cmpxchg_relaxed
#define atomic_cmpxchg_relaxed		atomic_cmpxchg
#define atomic_cmpxchg_acquire		atomic_cmpxchg
#define atomic_cmpxchg_release		atomic_cmpxchg
#else /* atomic_cmpxchg_relaxed */
#ifndef atomic_cmpxchg_acquire
#define atomic_cmpxchg_acquire(...) \
	__atomic_op_acquire(atomic_cmpxchg, __VA_ARGS__)
#endif
#ifndef atomic_cmpxchg_release
#define atomic_cmpxchg_release(...) \
	__atomic_op_release(atomic_cmpxchg, __VA_ARGS__)
#endif
#ifndef atomic_cmpxchg
#define atomic_cmpxchg(...) \
	__atomic_op_fence(atomic_cmpxchg, __VA_ARGS__)
#endif
#endif /* atomic_cmpxchg_relaxed */

#ifndef atomic_try_cmpxchg
#define __atomic_try_cmpxchg(type, _p, _po, _n) \
({ \
	typeof(_po) __po = (_po); \
	typeof(*(_po)) __r, __o = *__po; \
	__r = atomic_cmpxchg##type((_p), __o, (_n)); \
	if (unlikely(__r != __o)) \
		*__po = __r; \
	likely(__r == __o); \
})
#define atomic_try_cmpxchg(_p, _po, _n)		__atomic_try_cmpxchg(, _p, _po, _n)
#define atomic_try_cmpxchg_relaxed(_p, _po, _n)	__atomic_try_cmpxchg(_relaxed, _p, _po, _n)
#define atomic_try_cmpxchg_acquire(_p, _po, _n)	__atomic_try_cmpxchg(_acquire, _p, _po, _n)
#define atomic_try_cmpxchg_release(_p, _po, _n)	__atomic_try_cmpxchg(_release, _p, _po, _n)
#else /* atomic_try_cmpxchg */
#define atomic_try_cmpxchg_relaxed	atomic_try_cmpxchg
#define atomic_try_cmpxchg_acquire	atomic_try_cmpxchg
#define atomic_try_cmpxchg_release	atomic_try_cmpxchg
#endif /* atomic_try_cmpxchg */
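/*
 * Example (illustrative): atomic_try_cmpxchg() is meant for cmpxchg
 * loops; on failure it writes the observed value back through the 'old'
 * pointer, so the loop does not need to re-read the atomic. A
 * hypothetical saturating increment:
 *
 *	void atomic_inc_saturated(atomic_t *v)
 *	{
 *		int old = atomic_read(v);
 *
 *		do {
 *			if (old == INT_MAX)
 *				return;
 *		} while (!atomic_try_cmpxchg(v, &old, old + 1));
 *	}
 */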
/* cmpxchg_relaxed */
#ifndef cmpxchg_relaxed
#define cmpxchg_relaxed			cmpxchg
#define cmpxchg_acquire			cmpxchg
#define cmpxchg_release			cmpxchg
#else /* cmpxchg_relaxed */
#ifndef cmpxchg_acquire
#define cmpxchg_acquire(...) \
	__atomic_op_acquire(cmpxchg, __VA_ARGS__)
#endif
#ifndef cmpxchg_release
#define cmpxchg_release(...) \
	__atomic_op_release(cmpxchg, __VA_ARGS__)
#endif
#ifndef cmpxchg
#define cmpxchg(...) \
	__atomic_op_fence(cmpxchg, __VA_ARGS__)
#endif
#endif /* cmpxchg_relaxed */

/* cmpxchg64_relaxed */
#ifndef cmpxchg64_relaxed
#define cmpxchg64_relaxed		cmpxchg64
#define cmpxchg64_acquire		cmpxchg64
#define cmpxchg64_release		cmpxchg64
#else /* cmpxchg64_relaxed */
#ifndef cmpxchg64_acquire
#define cmpxchg64_acquire(...) \
	__atomic_op_acquire(cmpxchg64, __VA_ARGS__)
#endif
#ifndef cmpxchg64_release
#define cmpxchg64_release(...) \
	__atomic_op_release(cmpxchg64, __VA_ARGS__)
#endif
#ifndef cmpxchg64
#define cmpxchg64(...) \
	__atomic_op_fence(cmpxchg64, __VA_ARGS__)
#endif
#endif /* cmpxchg64_relaxed */

/* xchg_relaxed */
#ifndef xchg_relaxed
#define xchg_relaxed			xchg
#define xchg_acquire			xchg
#define xchg_release			xchg
#else /* xchg_relaxed */
#ifndef xchg_acquire
#define xchg_acquire(...)		__atomic_op_acquire(xchg, __VA_ARGS__)
#endif
#ifndef xchg_release
#define xchg_release(...)		__atomic_op_release(xchg, __VA_ARGS__)
#endif
#ifndef xchg
#define xchg(...)			__atomic_op_fence(xchg, __VA_ARGS__)
#endif
#endif /* xchg_relaxed */

/**
 * atomic_add_unless - add unless the number is already a given value
 * @v: pointer of type atomic_t
 * @a: the amount to add to v...
 * @u: ...unless v is equal to u.
 *
 * Atomically adds @a to @v, so long as @v was not already @u.
 * Returns non-zero if @v was not @u, and zero otherwise.
 */
static inline int atomic_add_unless(atomic_t *v, int a, int u)
{
	return __atomic_add_unless(v, a, u) != u;
}

/**
 * atomic_inc_not_zero - increment unless the number is zero
 * @v: pointer of type atomic_t
 *
 * Atomically increments @v by 1, so long as @v is non-zero.
 * Returns non-zero if @v was non-zero, and zero otherwise.
 */
#ifndef atomic_inc_not_zero
#define atomic_inc_not_zero(v)		atomic_add_unless((v), 1, 0)
#endif
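/*
 * Example (illustrative): atomic_inc_not_zero() is the usual way to take
 * a reference only while an object is still live. 'struct obj' and
 * obj_get() are hypothetical.
 *
 *	struct obj {
 *		atomic_t refs;
 *	};
 *
 *	static bool obj_get(struct obj *o)
 *	{
 *		return atomic_inc_not_zero(&o->refs);	// fails once refs has hit zero
 *	}
 */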
#ifndef atomic_andnot
static inline void atomic_andnot(int i, atomic_t *v)
{
	atomic_and(~i, v);
}

static inline int atomic_fetch_andnot(int i, atomic_t *v)
{
	return atomic_fetch_and(~i, v);
}

static inline int atomic_fetch_andnot_relaxed(int i, atomic_t *v)
{
	return atomic_fetch_and_relaxed(~i, v);
}

static inline int atomic_fetch_andnot_acquire(int i, atomic_t *v)
{
	return atomic_fetch_and_acquire(~i, v);
}

static inline int atomic_fetch_andnot_release(int i, atomic_t *v)
{
	return atomic_fetch_and_release(~i, v);
}
#endif

/**
 * atomic_inc_not_zero_hint - increment unless the value is zero
 * @v: pointer of type atomic_t
 * @hint: probable value of the atomic before the increment
 *
 * This version of atomic_inc_not_zero() gives a hint of the probable
 * value of the atomic. This helps the processor avoid reading memory
 * before the atomic read/modify/write cycle, lowering the number of
 * bus transactions on some arches.
 *
 * Returns: 0 if the increment was not done, 1 otherwise.
 */
#ifndef atomic_inc_not_zero_hint
static inline int atomic_inc_not_zero_hint(atomic_t *v, int hint)
{
	int val, c = hint;

	/* sanity test, should be removed by compiler if hint is a constant */
	if (!hint)
		return atomic_inc_not_zero(v);

	do {
		val = atomic_cmpxchg(v, c, c + 1);
		if (val == c)
			return 1;
		c = val;
	} while (c);

	return 0;
}
#endif

#ifndef atomic_inc_unless_negative
static inline int atomic_inc_unless_negative(atomic_t *p)
{
	int v, v1;

	for (v = 0; v >= 0; v = v1) {
		v1 = atomic_cmpxchg(p, v, v + 1);
		if (likely(v1 == v))
			return 1;
	}
	return 0;
}
#endif

#ifndef atomic_dec_unless_positive
static inline int atomic_dec_unless_positive(atomic_t *p)
{
	int v, v1;

	for (v = 0; v <= 0; v = v1) {
		v1 = atomic_cmpxchg(p, v, v - 1);
		if (likely(v1 == v))
			return 1;
	}
	return 0;
}
#endif

/*
 * atomic_dec_if_positive - decrement by 1 if old value positive
 * @v: pointer of type atomic_t
 *
 * Returns the old value of @v minus 1, even if the atomic variable
 * was not decremented.
 */
#ifndef atomic_dec_if_positive
static inline int atomic_dec_if_positive(atomic_t *v)
{
	int c, old, dec;

	c = atomic_read(v);
	for (;;) {
		dec = c - 1;
		if (unlikely(dec < 0))
			break;
		old = atomic_cmpxchg((v), c, dec);
		if (likely(old == c))
			break;
		c = old;
	}
	return dec;
}
#endif
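/*
 * Example (illustrative): because atomic_dec_if_positive() returns the
 * old value minus one whether or not it decremented, a negative result
 * means the count was already zero or below. A hypothetical
 * trylock-style helper for a counting semaphore:
 *
 *	static bool sem_trydown(atomic_t *count)
 *	{
 *		return atomic_dec_if_positive(count) >= 0;
 *	}
 */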
#define atomic_cond_read_relaxed(v, c)	smp_cond_load_relaxed(&(v)->counter, (c))
#define atomic_cond_read_acquire(v, c)	smp_cond_load_acquire(&(v)->counter, (c))
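/*
 * Example (illustrative): spin until another CPU makes the counter
 * non-zero, acquiring ordering against whatever that CPU published
 * first; VAL names the freshly loaded value inside the condition
 * expression, per the smp_cond_load_*() convention.
 *
 *	atomic_cond_read_acquire(v, VAL != 0);
 */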
#ifdef CONFIG_GENERIC_ATOMIC64
#include <asm-generic/atomic64.h>
#endif

#ifndef atomic64_read_acquire
#define atomic64_read_acquire(v)	smp_load_acquire(&(v)->counter)
#endif

#ifndef atomic64_set_release
#define atomic64_set_release(v, i)	smp_store_release(&(v)->counter, (i))
#endif

/* atomic64_add_return_relaxed */
#ifndef atomic64_add_return_relaxed
#define atomic64_add_return_relaxed	atomic64_add_return
#define atomic64_add_return_acquire	atomic64_add_return
#define atomic64_add_return_release	atomic64_add_return
#else /* atomic64_add_return_relaxed */
#ifndef atomic64_add_return_acquire
#define atomic64_add_return_acquire(...) \
	__atomic_op_acquire(atomic64_add_return, __VA_ARGS__)
#endif
#ifndef atomic64_add_return_release
#define atomic64_add_return_release(...) \
	__atomic_op_release(atomic64_add_return, __VA_ARGS__)
#endif
#ifndef atomic64_add_return
#define atomic64_add_return(...) \
	__atomic_op_fence(atomic64_add_return, __VA_ARGS__)
#endif
#endif /* atomic64_add_return_relaxed */

/* atomic64_inc_return_relaxed */
#ifndef atomic64_inc_return_relaxed
#define atomic64_inc_return_relaxed	atomic64_inc_return
#define atomic64_inc_return_acquire	atomic64_inc_return
#define atomic64_inc_return_release	atomic64_inc_return
#else /* atomic64_inc_return_relaxed */
#ifndef atomic64_inc_return_acquire
#define atomic64_inc_return_acquire(...) \
	__atomic_op_acquire(atomic64_inc_return, __VA_ARGS__)
#endif
#ifndef atomic64_inc_return_release
#define atomic64_inc_return_release(...) \
	__atomic_op_release(atomic64_inc_return, __VA_ARGS__)
#endif
#ifndef atomic64_inc_return
#define atomic64_inc_return(...) \
	__atomic_op_fence(atomic64_inc_return, __VA_ARGS__)
#endif
#endif /* atomic64_inc_return_relaxed */

/* atomic64_sub_return_relaxed */
#ifndef atomic64_sub_return_relaxed
#define atomic64_sub_return_relaxed	atomic64_sub_return
#define atomic64_sub_return_acquire	atomic64_sub_return
#define atomic64_sub_return_release	atomic64_sub_return
#else /* atomic64_sub_return_relaxed */
#ifndef atomic64_sub_return_acquire
#define atomic64_sub_return_acquire(...) \
	__atomic_op_acquire(atomic64_sub_return, __VA_ARGS__)
#endif
#ifndef atomic64_sub_return_release
#define atomic64_sub_return_release(...) \
	__atomic_op_release(atomic64_sub_return, __VA_ARGS__)
#endif
#ifndef atomic64_sub_return
#define atomic64_sub_return(...) \
	__atomic_op_fence(atomic64_sub_return, __VA_ARGS__)
#endif
#endif /* atomic64_sub_return_relaxed */

/* atomic64_dec_return_relaxed */
#ifndef atomic64_dec_return_relaxed
#define atomic64_dec_return_relaxed	atomic64_dec_return
#define atomic64_dec_return_acquire	atomic64_dec_return
#define atomic64_dec_return_release	atomic64_dec_return
#else /* atomic64_dec_return_relaxed */
#ifndef atomic64_dec_return_acquire
#define atomic64_dec_return_acquire(...) \
	__atomic_op_acquire(atomic64_dec_return, __VA_ARGS__)
#endif
#ifndef atomic64_dec_return_release
#define atomic64_dec_return_release(...) \
	__atomic_op_release(atomic64_dec_return, __VA_ARGS__)
#endif
#ifndef atomic64_dec_return
#define atomic64_dec_return(...) \
	__atomic_op_fence(atomic64_dec_return, __VA_ARGS__)
#endif
#endif /* atomic64_dec_return_relaxed */

/* atomic64_fetch_add_relaxed */
#ifndef atomic64_fetch_add_relaxed
#define atomic64_fetch_add_relaxed	atomic64_fetch_add
#define atomic64_fetch_add_acquire	atomic64_fetch_add
#define atomic64_fetch_add_release	atomic64_fetch_add
#else /* atomic64_fetch_add_relaxed */
#ifndef atomic64_fetch_add_acquire
#define atomic64_fetch_add_acquire(...) \
	__atomic_op_acquire(atomic64_fetch_add, __VA_ARGS__)
#endif
#ifndef atomic64_fetch_add_release
#define atomic64_fetch_add_release(...) \
	__atomic_op_release(atomic64_fetch_add, __VA_ARGS__)
#endif
#ifndef atomic64_fetch_add
#define atomic64_fetch_add(...) \
	__atomic_op_fence(atomic64_fetch_add, __VA_ARGS__)
#endif
#endif /* atomic64_fetch_add_relaxed */

/* atomic64_fetch_inc_relaxed */
#ifndef atomic64_fetch_inc_relaxed
#ifndef atomic64_fetch_inc
#define atomic64_fetch_inc(v)		atomic64_fetch_add(1, (v))
#define atomic64_fetch_inc_relaxed(v)	atomic64_fetch_add_relaxed(1, (v))
#define atomic64_fetch_inc_acquire(v)	atomic64_fetch_add_acquire(1, (v))
#define atomic64_fetch_inc_release(v)	atomic64_fetch_add_release(1, (v))
#else /* atomic64_fetch_inc */
#define atomic64_fetch_inc_relaxed	atomic64_fetch_inc
#define atomic64_fetch_inc_acquire	atomic64_fetch_inc
#define atomic64_fetch_inc_release	atomic64_fetch_inc
#endif /* atomic64_fetch_inc */
#else /* atomic64_fetch_inc_relaxed */
#ifndef atomic64_fetch_inc_acquire
#define atomic64_fetch_inc_acquire(...) \
	__atomic_op_acquire(atomic64_fetch_inc, __VA_ARGS__)
#endif
#ifndef atomic64_fetch_inc_release
#define atomic64_fetch_inc_release(...) \
	__atomic_op_release(atomic64_fetch_inc, __VA_ARGS__)
#endif
#ifndef atomic64_fetch_inc
#define atomic64_fetch_inc(...) \
	__atomic_op_fence(atomic64_fetch_inc, __VA_ARGS__)
#endif
#endif /* atomic64_fetch_inc_relaxed */

/* atomic64_fetch_sub_relaxed */
#ifndef atomic64_fetch_sub_relaxed
#define atomic64_fetch_sub_relaxed	atomic64_fetch_sub
#define atomic64_fetch_sub_acquire	atomic64_fetch_sub
#define atomic64_fetch_sub_release	atomic64_fetch_sub
#else /* atomic64_fetch_sub_relaxed */
#ifndef atomic64_fetch_sub_acquire
#define atomic64_fetch_sub_acquire(...) \
	__atomic_op_acquire(atomic64_fetch_sub, __VA_ARGS__)
#endif
#ifndef atomic64_fetch_sub_release
#define atomic64_fetch_sub_release(...) \
	__atomic_op_release(atomic64_fetch_sub, __VA_ARGS__)
#endif
#ifndef atomic64_fetch_sub
#define atomic64_fetch_sub(...) \
	__atomic_op_fence(atomic64_fetch_sub, __VA_ARGS__)
#endif
#endif /* atomic64_fetch_sub_relaxed */

/* atomic64_fetch_dec_relaxed */
#ifndef atomic64_fetch_dec_relaxed
#ifndef atomic64_fetch_dec
#define atomic64_fetch_dec(v)		atomic64_fetch_sub(1, (v))
#define atomic64_fetch_dec_relaxed(v)	atomic64_fetch_sub_relaxed(1, (v))
#define atomic64_fetch_dec_acquire(v)	atomic64_fetch_sub_acquire(1, (v))
#define atomic64_fetch_dec_release(v)	atomic64_fetch_sub_release(1, (v))
#else /* atomic64_fetch_dec */
#define atomic64_fetch_dec_relaxed	atomic64_fetch_dec
#define atomic64_fetch_dec_acquire	atomic64_fetch_dec
#define atomic64_fetch_dec_release	atomic64_fetch_dec
#endif /* atomic64_fetch_dec */
#else /* atomic64_fetch_dec_relaxed */
#ifndef atomic64_fetch_dec_acquire
#define atomic64_fetch_dec_acquire(...) \
	__atomic_op_acquire(atomic64_fetch_dec, __VA_ARGS__)
#endif
#ifndef atomic64_fetch_dec_release
#define atomic64_fetch_dec_release(...) \
	__atomic_op_release(atomic64_fetch_dec, __VA_ARGS__)
#endif
#ifndef atomic64_fetch_dec
#define atomic64_fetch_dec(...) \
	__atomic_op_fence(atomic64_fetch_dec, __VA_ARGS__)
#endif
#endif /* atomic64_fetch_dec_relaxed */

/* atomic64_fetch_or_relaxed */
#ifndef atomic64_fetch_or_relaxed
#define atomic64_fetch_or_relaxed	atomic64_fetch_or
#define atomic64_fetch_or_acquire	atomic64_fetch_or
#define atomic64_fetch_or_release	atomic64_fetch_or
#else /* atomic64_fetch_or_relaxed */
#ifndef atomic64_fetch_or_acquire
#define atomic64_fetch_or_acquire(...) \
	__atomic_op_acquire(atomic64_fetch_or, __VA_ARGS__)
#endif
#ifndef atomic64_fetch_or_release
#define atomic64_fetch_or_release(...) \
	__atomic_op_release(atomic64_fetch_or, __VA_ARGS__)
#endif
#ifndef atomic64_fetch_or
#define atomic64_fetch_or(...) \
	__atomic_op_fence(atomic64_fetch_or, __VA_ARGS__)
#endif
#endif /* atomic64_fetch_or_relaxed */

/* atomic64_fetch_and_relaxed */
#ifndef atomic64_fetch_and_relaxed
#define atomic64_fetch_and_relaxed	atomic64_fetch_and
#define atomic64_fetch_and_acquire	atomic64_fetch_and
#define atomic64_fetch_and_release	atomic64_fetch_and
#else /* atomic64_fetch_and_relaxed */
#ifndef atomic64_fetch_and_acquire
#define atomic64_fetch_and_acquire(...) \
	__atomic_op_acquire(atomic64_fetch_and, __VA_ARGS__)
#endif
#ifndef atomic64_fetch_and_release
#define atomic64_fetch_and_release(...) \
	__atomic_op_release(atomic64_fetch_and, __VA_ARGS__)
#endif
#ifndef atomic64_fetch_and
#define atomic64_fetch_and(...) \
	__atomic_op_fence(atomic64_fetch_and, __VA_ARGS__)
#endif
#endif /* atomic64_fetch_and_relaxed */

#ifdef atomic64_andnot
/* atomic64_fetch_andnot_relaxed */
#ifndef atomic64_fetch_andnot_relaxed
#define atomic64_fetch_andnot_relaxed	atomic64_fetch_andnot
#define atomic64_fetch_andnot_acquire	atomic64_fetch_andnot
#define atomic64_fetch_andnot_release	atomic64_fetch_andnot
#else /* atomic64_fetch_andnot_relaxed */
#ifndef atomic64_fetch_andnot_acquire
#define atomic64_fetch_andnot_acquire(...) \
	__atomic_op_acquire(atomic64_fetch_andnot, __VA_ARGS__)
#endif
#ifndef atomic64_fetch_andnot_release
#define atomic64_fetch_andnot_release(...) \
	__atomic_op_release(atomic64_fetch_andnot, __VA_ARGS__)
#endif
#ifndef atomic64_fetch_andnot
#define atomic64_fetch_andnot(...) \
	__atomic_op_fence(atomic64_fetch_andnot, __VA_ARGS__)
#endif
#endif /* atomic64_fetch_andnot_relaxed */
#endif /* atomic64_andnot */

/* atomic64_fetch_xor_relaxed */
#ifndef atomic64_fetch_xor_relaxed
#define atomic64_fetch_xor_relaxed	atomic64_fetch_xor
#define atomic64_fetch_xor_acquire	atomic64_fetch_xor
#define atomic64_fetch_xor_release	atomic64_fetch_xor
#else /* atomic64_fetch_xor_relaxed */
#ifndef atomic64_fetch_xor_acquire
#define atomic64_fetch_xor_acquire(...) \
	__atomic_op_acquire(atomic64_fetch_xor, __VA_ARGS__)
#endif
#ifndef atomic64_fetch_xor_release
#define atomic64_fetch_xor_release(...) \
	__atomic_op_release(atomic64_fetch_xor, __VA_ARGS__)
#endif
#ifndef atomic64_fetch_xor
#define atomic64_fetch_xor(...) \
	__atomic_op_fence(atomic64_fetch_xor, __VA_ARGS__)
#endif
#endif /* atomic64_fetch_xor_relaxed */

/* atomic64_xchg_relaxed */
#ifndef atomic64_xchg_relaxed
#define atomic64_xchg_relaxed		atomic64_xchg
#define atomic64_xchg_acquire		atomic64_xchg
#define atomic64_xchg_release		atomic64_xchg
#else /* atomic64_xchg_relaxed */
#ifndef atomic64_xchg_acquire
#define atomic64_xchg_acquire(...) \
	__atomic_op_acquire(atomic64_xchg, __VA_ARGS__)
#endif
#ifndef atomic64_xchg_release
#define atomic64_xchg_release(...) \
	__atomic_op_release(atomic64_xchg, __VA_ARGS__)
#endif
#ifndef atomic64_xchg
#define atomic64_xchg(...) \
	__atomic_op_fence(atomic64_xchg, __VA_ARGS__)
#endif
#endif /* atomic64_xchg_relaxed */

/* atomic64_cmpxchg_relaxed */
#ifndef atomic64_cmpxchg_relaxed
#define atomic64_cmpxchg_relaxed	atomic64_cmpxchg
#define atomic64_cmpxchg_acquire	atomic64_cmpxchg
#define atomic64_cmpxchg_release	atomic64_cmpxchg
#else /* atomic64_cmpxchg_relaxed */
#ifndef atomic64_cmpxchg_acquire
#define atomic64_cmpxchg_acquire(...) \
	__atomic_op_acquire(atomic64_cmpxchg, __VA_ARGS__)
#endif
#ifndef atomic64_cmpxchg_release
#define atomic64_cmpxchg_release(...) \
	__atomic_op_release(atomic64_cmpxchg, __VA_ARGS__)
#endif
#ifndef atomic64_cmpxchg
#define atomic64_cmpxchg(...) \
	__atomic_op_fence(atomic64_cmpxchg, __VA_ARGS__)
#endif
#endif /* atomic64_cmpxchg_relaxed */

#ifndef atomic64_try_cmpxchg
#define __atomic64_try_cmpxchg(type, _p, _po, _n) \
({ \
	typeof(_po) __po = (_po); \
	typeof(*(_po)) __r, __o = *__po; \
	__r = atomic64_cmpxchg##type((_p), __o, (_n)); \
	if (unlikely(__r != __o)) \
		*__po = __r; \
	likely(__r == __o); \
})
#define atomic64_try_cmpxchg(_p, _po, _n)		__atomic64_try_cmpxchg(, _p, _po, _n)
#define atomic64_try_cmpxchg_relaxed(_p, _po, _n)	__atomic64_try_cmpxchg(_relaxed, _p, _po, _n)
#define atomic64_try_cmpxchg_acquire(_p, _po, _n)	__atomic64_try_cmpxchg(_acquire, _p, _po, _n)
#define atomic64_try_cmpxchg_release(_p, _po, _n)	__atomic64_try_cmpxchg(_release, _p, _po, _n)
#else /* atomic64_try_cmpxchg */
#define atomic64_try_cmpxchg_relaxed	atomic64_try_cmpxchg
#define atomic64_try_cmpxchg_acquire	atomic64_try_cmpxchg
#define atomic64_try_cmpxchg_release	atomic64_try_cmpxchg
#endif /* atomic64_try_cmpxchg */

#ifndef atomic64_andnot
static inline void atomic64_andnot(long long i, atomic64_t *v)
{
	atomic64_and(~i, v);
}

static inline long long atomic64_fetch_andnot(long long i, atomic64_t *v)
{
	return atomic64_fetch_and(~i, v);
}

static inline long long atomic64_fetch_andnot_relaxed(long long i, atomic64_t *v)
{
	return atomic64_fetch_and_relaxed(~i, v);
}

static inline long long atomic64_fetch_andnot_acquire(long long i, atomic64_t *v)
{
	return atomic64_fetch_and_acquire(~i, v);
}

static inline long long atomic64_fetch_andnot_release(long long i, atomic64_t *v)
{
	return atomic64_fetch_and_release(~i, v);
}
#endif

#define atomic64_cond_read_relaxed(v, c)	smp_cond_load_relaxed(&(v)->counter, (c))
#define atomic64_cond_read_acquire(v, c)	smp_cond_load_acquire(&(v)->counter, (c))

#include <asm-generic/atomic-long.h>

#endif /* _LINUX_ATOMIC_H */