atomic.h
/* Atomic operations usable in machine independent code */
#ifndef _LINUX_ATOMIC_H
#define _LINUX_ATOMIC_H
#include <asm/atomic.h>
#include <asm/barrier.h>

/*
 * Relaxed variants of xchg, cmpxchg and some atomic operations.
 *
 * We support four variants:
 *
 * - Fully ordered: The default implementation, no suffix required.
 * - Acquire: Provides ACQUIRE semantics, _acquire suffix.
 * - Release: Provides RELEASE semantics, _release suffix.
 * - Relaxed: No ordering guarantees, _relaxed suffix.
 *
 * For compound atomics performing both a load and a store, ACQUIRE
 * semantics apply only to the load and RELEASE semantics only to the
 * store portion of the operation. Note that a failed cmpxchg_acquire
 * does -not- imply any memory ordering constraints.
 *
 * See Documentation/memory-barriers.txt for ACQUIRE/RELEASE definitions.
 */

#ifndef atomic_read_acquire
#define atomic_read_acquire(v) smp_load_acquire(&(v)->counter)
#endif

#ifndef atomic_set_release
#define atomic_set_release(v, i) smp_store_release(&(v)->counter, (i))
#endif
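/*
 * Illustrative sketch only (not part of this interface): how the
 * _acquire/_release accessors above pair up to publish data. The
 * "ready" flag, "data" value and helper functions are hypothetical.
 *
 *	// producer
 *	WRITE_ONCE(data, compute());
 *	atomic_set_release(&ready, 1);		// data store ordered before the flag store
 *
 *	// consumer
 *	if (atomic_read_acquire(&ready))	// flag load ordered before the data load
 *		consume(READ_ONCE(data));
 */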
/*
 * The idea here is to build acquire/release variants by adding explicit
 * barriers on top of the relaxed variant. In the case where the relaxed
 * variant is already fully ordered, no additional barriers are needed.
 *
 * Besides, if an arch has a special barrier for acquire/release, it can
 * implement its own __atomic_op_* helpers and use the same framework for
 * building the variants.
 */

#ifndef __atomic_op_acquire
#define __atomic_op_acquire(op, args...) \
({ \
	typeof(op##_relaxed(args)) __ret = op##_relaxed(args); \
	smp_mb__after_atomic(); \
	__ret; \
})
#endif

#ifndef __atomic_op_release
#define __atomic_op_release(op, args...) \
({ \
	smp_mb__before_atomic(); \
	op##_relaxed(args); \
})
#endif

#ifndef __atomic_op_fence
#define __atomic_op_fence(op, args...) \
({ \
	typeof(op##_relaxed(args)) __ret; \
	smp_mb__before_atomic(); \
	__ret = op##_relaxed(args); \
	smp_mb__after_atomic(); \
	__ret; \
})
#endif
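/*
 * For example: an architecture that supplies only
 * atomic_add_return_relaxed() gets atomic_add_return_acquire(),
 * atomic_add_return_release() and the fully ordered atomic_add_return()
 * generated from it by the helpers above, as in the block below.
 */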
/* atomic_add_return_relaxed */
#ifndef atomic_add_return_relaxed
#define atomic_add_return_relaxed atomic_add_return
#define atomic_add_return_acquire atomic_add_return
#define atomic_add_return_release atomic_add_return
#else /* atomic_add_return_relaxed */
#ifndef atomic_add_return_acquire
#define atomic_add_return_acquire(...) \
	__atomic_op_acquire(atomic_add_return, __VA_ARGS__)
#endif
#ifndef atomic_add_return_release
#define atomic_add_return_release(...) \
	__atomic_op_release(atomic_add_return, __VA_ARGS__)
#endif
#ifndef atomic_add_return
#define atomic_add_return(...) \
	__atomic_op_fence(atomic_add_return, __VA_ARGS__)
#endif
#endif /* atomic_add_return_relaxed */

/* atomic_inc_return_relaxed */
#ifndef atomic_inc_return_relaxed
#define atomic_inc_return_relaxed atomic_inc_return
#define atomic_inc_return_acquire atomic_inc_return
#define atomic_inc_return_release atomic_inc_return
#else /* atomic_inc_return_relaxed */
#ifndef atomic_inc_return_acquire
#define atomic_inc_return_acquire(...) \
	__atomic_op_acquire(atomic_inc_return, __VA_ARGS__)
#endif
#ifndef atomic_inc_return_release
#define atomic_inc_return_release(...) \
	__atomic_op_release(atomic_inc_return, __VA_ARGS__)
#endif
#ifndef atomic_inc_return
#define atomic_inc_return(...) \
	__atomic_op_fence(atomic_inc_return, __VA_ARGS__)
#endif
#endif /* atomic_inc_return_relaxed */

/* atomic_sub_return_relaxed */
#ifndef atomic_sub_return_relaxed
#define atomic_sub_return_relaxed atomic_sub_return
#define atomic_sub_return_acquire atomic_sub_return
#define atomic_sub_return_release atomic_sub_return
#else /* atomic_sub_return_relaxed */
#ifndef atomic_sub_return_acquire
#define atomic_sub_return_acquire(...) \
	__atomic_op_acquire(atomic_sub_return, __VA_ARGS__)
#endif
#ifndef atomic_sub_return_release
#define atomic_sub_return_release(...) \
	__atomic_op_release(atomic_sub_return, __VA_ARGS__)
#endif
#ifndef atomic_sub_return
#define atomic_sub_return(...) \
	__atomic_op_fence(atomic_sub_return, __VA_ARGS__)
#endif
#endif /* atomic_sub_return_relaxed */

/* atomic_dec_return_relaxed */
#ifndef atomic_dec_return_relaxed
#define atomic_dec_return_relaxed atomic_dec_return
#define atomic_dec_return_acquire atomic_dec_return
#define atomic_dec_return_release atomic_dec_return
#else /* atomic_dec_return_relaxed */
#ifndef atomic_dec_return_acquire
#define atomic_dec_return_acquire(...) \
	__atomic_op_acquire(atomic_dec_return, __VA_ARGS__)
#endif
#ifndef atomic_dec_return_release
#define atomic_dec_return_release(...) \
	__atomic_op_release(atomic_dec_return, __VA_ARGS__)
#endif
#ifndef atomic_dec_return
#define atomic_dec_return(...) \
	__atomic_op_fence(atomic_dec_return, __VA_ARGS__)
#endif
#endif /* atomic_dec_return_relaxed */

/* atomic_fetch_add_relaxed */
#ifndef atomic_fetch_add_relaxed
#define atomic_fetch_add_relaxed atomic_fetch_add
#define atomic_fetch_add_acquire atomic_fetch_add
#define atomic_fetch_add_release atomic_fetch_add
#else /* atomic_fetch_add_relaxed */
#ifndef atomic_fetch_add_acquire
#define atomic_fetch_add_acquire(...) \
	__atomic_op_acquire(atomic_fetch_add, __VA_ARGS__)
#endif
#ifndef atomic_fetch_add_release
#define atomic_fetch_add_release(...) \
	__atomic_op_release(atomic_fetch_add, __VA_ARGS__)
#endif
#ifndef atomic_fetch_add
#define atomic_fetch_add(...) \
	__atomic_op_fence(atomic_fetch_add, __VA_ARGS__)
#endif
#endif /* atomic_fetch_add_relaxed */

/* atomic_fetch_inc_relaxed */
#ifndef atomic_fetch_inc_relaxed
#ifndef atomic_fetch_inc
#define atomic_fetch_inc(v) atomic_fetch_add(1, (v))
#define atomic_fetch_inc_relaxed(v) atomic_fetch_add_relaxed(1, (v))
#define atomic_fetch_inc_acquire(v) atomic_fetch_add_acquire(1, (v))
#define atomic_fetch_inc_release(v) atomic_fetch_add_release(1, (v))
#else /* atomic_fetch_inc */
#define atomic_fetch_inc_relaxed atomic_fetch_inc
#define atomic_fetch_inc_acquire atomic_fetch_inc
#define atomic_fetch_inc_release atomic_fetch_inc
#endif /* atomic_fetch_inc */
#else /* atomic_fetch_inc_relaxed */
#ifndef atomic_fetch_inc_acquire
#define atomic_fetch_inc_acquire(...) \
	__atomic_op_acquire(atomic_fetch_inc, __VA_ARGS__)
#endif
#ifndef atomic_fetch_inc_release
#define atomic_fetch_inc_release(...) \
	__atomic_op_release(atomic_fetch_inc, __VA_ARGS__)
#endif
#ifndef atomic_fetch_inc
#define atomic_fetch_inc(...) \
	__atomic_op_fence(atomic_fetch_inc, __VA_ARGS__)
#endif
#endif /* atomic_fetch_inc_relaxed */

/* atomic_fetch_sub_relaxed */
#ifndef atomic_fetch_sub_relaxed
#define atomic_fetch_sub_relaxed atomic_fetch_sub
#define atomic_fetch_sub_acquire atomic_fetch_sub
#define atomic_fetch_sub_release atomic_fetch_sub
#else /* atomic_fetch_sub_relaxed */
#ifndef atomic_fetch_sub_acquire
#define atomic_fetch_sub_acquire(...) \
	__atomic_op_acquire(atomic_fetch_sub, __VA_ARGS__)
#endif
#ifndef atomic_fetch_sub_release
#define atomic_fetch_sub_release(...) \
	__atomic_op_release(atomic_fetch_sub, __VA_ARGS__)
#endif
#ifndef atomic_fetch_sub
#define atomic_fetch_sub(...) \
	__atomic_op_fence(atomic_fetch_sub, __VA_ARGS__)
#endif
#endif /* atomic_fetch_sub_relaxed */

/* atomic_fetch_dec_relaxed */
#ifndef atomic_fetch_dec_relaxed
#ifndef atomic_fetch_dec
#define atomic_fetch_dec(v) atomic_fetch_sub(1, (v))
#define atomic_fetch_dec_relaxed(v) atomic_fetch_sub_relaxed(1, (v))
#define atomic_fetch_dec_acquire(v) atomic_fetch_sub_acquire(1, (v))
#define atomic_fetch_dec_release(v) atomic_fetch_sub_release(1, (v))
#else /* atomic_fetch_dec */
#define atomic_fetch_dec_relaxed atomic_fetch_dec
#define atomic_fetch_dec_acquire atomic_fetch_dec
#define atomic_fetch_dec_release atomic_fetch_dec
#endif /* atomic_fetch_dec */
#else /* atomic_fetch_dec_relaxed */
#ifndef atomic_fetch_dec_acquire
#define atomic_fetch_dec_acquire(...) \
	__atomic_op_acquire(atomic_fetch_dec, __VA_ARGS__)
#endif
#ifndef atomic_fetch_dec_release
#define atomic_fetch_dec_release(...) \
	__atomic_op_release(atomic_fetch_dec, __VA_ARGS__)
#endif
#ifndef atomic_fetch_dec
#define atomic_fetch_dec(...) \
	__atomic_op_fence(atomic_fetch_dec, __VA_ARGS__)
#endif
#endif /* atomic_fetch_dec_relaxed */

/* atomic_fetch_or_relaxed */
#ifndef atomic_fetch_or_relaxed
#define atomic_fetch_or_relaxed atomic_fetch_or
#define atomic_fetch_or_acquire atomic_fetch_or
#define atomic_fetch_or_release atomic_fetch_or
#else /* atomic_fetch_or_relaxed */
#ifndef atomic_fetch_or_acquire
#define atomic_fetch_or_acquire(...) \
	__atomic_op_acquire(atomic_fetch_or, __VA_ARGS__)
#endif
#ifndef atomic_fetch_or_release
#define atomic_fetch_or_release(...) \
	__atomic_op_release(atomic_fetch_or, __VA_ARGS__)
#endif
#ifndef atomic_fetch_or
#define atomic_fetch_or(...) \
	__atomic_op_fence(atomic_fetch_or, __VA_ARGS__)
#endif
#endif /* atomic_fetch_or_relaxed */

/* atomic_fetch_and_relaxed */
#ifndef atomic_fetch_and_relaxed
#define atomic_fetch_and_relaxed atomic_fetch_and
#define atomic_fetch_and_acquire atomic_fetch_and
#define atomic_fetch_and_release atomic_fetch_and
#else /* atomic_fetch_and_relaxed */
#ifndef atomic_fetch_and_acquire
#define atomic_fetch_and_acquire(...) \
	__atomic_op_acquire(atomic_fetch_and, __VA_ARGS__)
#endif
#ifndef atomic_fetch_and_release
#define atomic_fetch_and_release(...) \
	__atomic_op_release(atomic_fetch_and, __VA_ARGS__)
#endif
#ifndef atomic_fetch_and
#define atomic_fetch_and(...) \
	__atomic_op_fence(atomic_fetch_and, __VA_ARGS__)
#endif
#endif /* atomic_fetch_and_relaxed */

#ifdef atomic_andnot
/* atomic_fetch_andnot_relaxed */
#ifndef atomic_fetch_andnot_relaxed
#define atomic_fetch_andnot_relaxed atomic_fetch_andnot
#define atomic_fetch_andnot_acquire atomic_fetch_andnot
#define atomic_fetch_andnot_release atomic_fetch_andnot
#else /* atomic_fetch_andnot_relaxed */
#ifndef atomic_fetch_andnot_acquire
#define atomic_fetch_andnot_acquire(...) \
	__atomic_op_acquire(atomic_fetch_andnot, __VA_ARGS__)
#endif
#ifndef atomic_fetch_andnot_release
#define atomic_fetch_andnot_release(...) \
	__atomic_op_release(atomic_fetch_andnot, __VA_ARGS__)
#endif
#ifndef atomic_fetch_andnot
#define atomic_fetch_andnot(...) \
	__atomic_op_fence(atomic_fetch_andnot, __VA_ARGS__)
#endif
#endif /* atomic_fetch_andnot_relaxed */
#endif /* atomic_andnot */

/* atomic_fetch_xor_relaxed */
#ifndef atomic_fetch_xor_relaxed
#define atomic_fetch_xor_relaxed atomic_fetch_xor
#define atomic_fetch_xor_acquire atomic_fetch_xor
#define atomic_fetch_xor_release atomic_fetch_xor
#else /* atomic_fetch_xor_relaxed */
#ifndef atomic_fetch_xor_acquire
#define atomic_fetch_xor_acquire(...) \
	__atomic_op_acquire(atomic_fetch_xor, __VA_ARGS__)
#endif
#ifndef atomic_fetch_xor_release
#define atomic_fetch_xor_release(...) \
	__atomic_op_release(atomic_fetch_xor, __VA_ARGS__)
#endif
#ifndef atomic_fetch_xor
#define atomic_fetch_xor(...) \
	__atomic_op_fence(atomic_fetch_xor, __VA_ARGS__)
#endif
#endif /* atomic_fetch_xor_relaxed */

/* atomic_xchg_relaxed */
#ifndef atomic_xchg_relaxed
#define atomic_xchg_relaxed atomic_xchg
#define atomic_xchg_acquire atomic_xchg
#define atomic_xchg_release atomic_xchg
#else /* atomic_xchg_relaxed */
#ifndef atomic_xchg_acquire
#define atomic_xchg_acquire(...) \
	__atomic_op_acquire(atomic_xchg, __VA_ARGS__)
#endif
#ifndef atomic_xchg_release
#define atomic_xchg_release(...) \
	__atomic_op_release(atomic_xchg, __VA_ARGS__)
#endif
#ifndef atomic_xchg
#define atomic_xchg(...) \
	__atomic_op_fence(atomic_xchg, __VA_ARGS__)
#endif
#endif /* atomic_xchg_relaxed */

/* atomic_cmpxchg_relaxed */
#ifndef atomic_cmpxchg_relaxed
#define atomic_cmpxchg_relaxed atomic_cmpxchg
#define atomic_cmpxchg_acquire atomic_cmpxchg
#define atomic_cmpxchg_release atomic_cmpxchg
#else /* atomic_cmpxchg_relaxed */
#ifndef atomic_cmpxchg_acquire
#define atomic_cmpxchg_acquire(...) \
	__atomic_op_acquire(atomic_cmpxchg, __VA_ARGS__)
#endif
#ifndef atomic_cmpxchg_release
#define atomic_cmpxchg_release(...) \
	__atomic_op_release(atomic_cmpxchg, __VA_ARGS__)
#endif
#ifndef atomic_cmpxchg
#define atomic_cmpxchg(...) \
	__atomic_op_fence(atomic_cmpxchg, __VA_ARGS__)
#endif
#endif /* atomic_cmpxchg_relaxed */

#ifndef atomic_try_cmpxchg
#define __atomic_try_cmpxchg(type, _p, _po, _n) \
({ \
	typeof(_po) __po = (_po); \
	typeof(*(_po)) __r, __o = *__po; \
	__r = atomic_cmpxchg##type((_p), __o, (_n)); \
	if (unlikely(__r != __o)) \
		*__po = __r; \
	likely(__r == __o); \
})
#define atomic_try_cmpxchg(_p, _po, _n) __atomic_try_cmpxchg(, _p, _po, _n)
#define atomic_try_cmpxchg_relaxed(_p, _po, _n) __atomic_try_cmpxchg(_relaxed, _p, _po, _n)
#define atomic_try_cmpxchg_acquire(_p, _po, _n) __atomic_try_cmpxchg(_acquire, _p, _po, _n)
#define atomic_try_cmpxchg_release(_p, _po, _n) __atomic_try_cmpxchg(_release, _p, _po, _n)
#else /* atomic_try_cmpxchg */
#define atomic_try_cmpxchg_relaxed atomic_try_cmpxchg
#define atomic_try_cmpxchg_acquire atomic_try_cmpxchg
#define atomic_try_cmpxchg_release atomic_try_cmpxchg
#endif /* atomic_try_cmpxchg */
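/*
 * Illustrative sketch only: a typical update loop built on
 * atomic_try_cmpxchg(). On failure the current value is written back
 * through the second argument, so atomic_read() is needed only once.
 *
 *	int old = atomic_read(v);
 *
 *	do {
 *		if (old < 0)
 *			return false;
 *	} while (!atomic_try_cmpxchg(v, &old, old + 1));
 *
 *	return true;
 */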
/* cmpxchg_relaxed */
#ifndef cmpxchg_relaxed
#define cmpxchg_relaxed cmpxchg
#define cmpxchg_acquire cmpxchg
#define cmpxchg_release cmpxchg
#else /* cmpxchg_relaxed */
#ifndef cmpxchg_acquire
#define cmpxchg_acquire(...) \
	__atomic_op_acquire(cmpxchg, __VA_ARGS__)
#endif
#ifndef cmpxchg_release
#define cmpxchg_release(...) \
	__atomic_op_release(cmpxchg, __VA_ARGS__)
#endif
#ifndef cmpxchg
#define cmpxchg(...) \
	__atomic_op_fence(cmpxchg, __VA_ARGS__)
#endif
#endif /* cmpxchg_relaxed */

/* cmpxchg64_relaxed */
#ifndef cmpxchg64_relaxed
#define cmpxchg64_relaxed cmpxchg64
#define cmpxchg64_acquire cmpxchg64
#define cmpxchg64_release cmpxchg64
#else /* cmpxchg64_relaxed */
#ifndef cmpxchg64_acquire
#define cmpxchg64_acquire(...) \
	__atomic_op_acquire(cmpxchg64, __VA_ARGS__)
#endif
#ifndef cmpxchg64_release
#define cmpxchg64_release(...) \
	__atomic_op_release(cmpxchg64, __VA_ARGS__)
#endif
#ifndef cmpxchg64
#define cmpxchg64(...) \
	__atomic_op_fence(cmpxchg64, __VA_ARGS__)
#endif
#endif /* cmpxchg64_relaxed */

/* xchg_relaxed */
#ifndef xchg_relaxed
#define xchg_relaxed xchg
#define xchg_acquire xchg
#define xchg_release xchg
#else /* xchg_relaxed */
#ifndef xchg_acquire
#define xchg_acquire(...) __atomic_op_acquire(xchg, __VA_ARGS__)
#endif
#ifndef xchg_release
#define xchg_release(...) __atomic_op_release(xchg, __VA_ARGS__)
#endif
#ifndef xchg
#define xchg(...) __atomic_op_fence(xchg, __VA_ARGS__)
#endif
#endif /* xchg_relaxed */

/**
 * atomic_add_unless - add unless the number is already a given value
 * @v: pointer of type atomic_t
 * @a: the amount to add to v...
 * @u: ...unless v is equal to u.
 *
 * Atomically adds @a to @v, so long as @v was not already @u.
 * Returns non-zero if @v was not @u, and zero otherwise.
 */
static inline int atomic_add_unless(atomic_t *v, int a, int u)
{
	return __atomic_add_unless(v, a, u) != u;
}

/**
 * atomic_inc_not_zero - increment unless the number is zero
 * @v: pointer of type atomic_t
 *
 * Atomically increments @v by 1, so long as @v is non-zero.
 * Returns non-zero if @v was non-zero, and zero otherwise.
 */
#ifndef atomic_inc_not_zero
#define atomic_inc_not_zero(v) atomic_add_unless((v), 1, 0)
#endif
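/*
 * Illustrative sketch only: atomic_inc_not_zero() is the usual way to
 * take a reference on an object whose count may already have dropped
 * to zero; "obj" and its "refs" field are hypothetical.
 *
 *	if (!atomic_inc_not_zero(&obj->refs))
 *		return NULL;		// object is already on its way out
 */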
#ifndef atomic_andnot
static inline void atomic_andnot(int i, atomic_t *v)
{
	atomic_and(~i, v);
}

static inline int atomic_fetch_andnot(int i, atomic_t *v)
{
	return atomic_fetch_and(~i, v);
}

static inline int atomic_fetch_andnot_relaxed(int i, atomic_t *v)
{
	return atomic_fetch_and_relaxed(~i, v);
}

static inline int atomic_fetch_andnot_acquire(int i, atomic_t *v)
{
	return atomic_fetch_and_acquire(~i, v);
}

static inline int atomic_fetch_andnot_release(int i, atomic_t *v)
{
	return atomic_fetch_and_release(~i, v);
}
#endif

/**
 * atomic_inc_not_zero_hint - increment if not zero
 * @v: pointer of type atomic_t
 * @hint: probable value of the atomic before the increment
 *
 * This version of atomic_inc_not_zero() gives a hint of the probable
 * value of the atomic. This helps the processor avoid reading the memory
 * before doing the atomic read/modify/write cycle, lowering the number
 * of bus transactions on some arches.
 *
 * Returns: 0 if the increment was not done, 1 otherwise.
 */
#ifndef atomic_inc_not_zero_hint
static inline int atomic_inc_not_zero_hint(atomic_t *v, int hint)
{
	int val, c = hint;

	/* sanity test, should be removed by compiler if hint is a constant */
	if (!hint)
		return atomic_inc_not_zero(v);

	do {
		val = atomic_cmpxchg(v, c, c + 1);
		if (val == c)
			return 1;
		c = val;
	} while (c);

	return 0;
}
#endif

#ifndef atomic_inc_unless_negative
static inline int atomic_inc_unless_negative(atomic_t *p)
{
	int v, v1;

	for (v = 0; v >= 0; v = v1) {
		v1 = atomic_cmpxchg(p, v, v + 1);
		if (likely(v1 == v))
			return 1;
	}
	return 0;
}
#endif

#ifndef atomic_dec_unless_positive
static inline int atomic_dec_unless_positive(atomic_t *p)
{
	int v, v1;

	for (v = 0; v <= 0; v = v1) {
		v1 = atomic_cmpxchg(p, v, v - 1);
		if (likely(v1 == v))
			return 1;
	}
	return 0;
}
#endif

/*
 * atomic_dec_if_positive - decrement by 1 if old value positive
 * @v: pointer of type atomic_t
 *
 * The function returns the old value of *v minus 1, even if
 * the atomic variable, v, was not decremented.
 */
#ifndef atomic_dec_if_positive
static inline int atomic_dec_if_positive(atomic_t *v)
{
	int c, old, dec;

	c = atomic_read(v);
	for (;;) {
		dec = c - 1;
		if (unlikely(dec < 0))
			break;
		old = atomic_cmpxchg((v), c, dec);
		if (likely(old == c))
			break;
		c = old;
	}
	return dec;
}
#endif
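/*
 * Illustrative sketch only: atomic_dec_if_positive() returns the old
 * value minus one even when no decrement happened, so a caller that
 * wants to claim a "slot" simply checks for a negative result;
 * "avail" is a hypothetical counter.
 *
 *	if (atomic_dec_if_positive(&avail) < 0)
 *		return -EBUSY;		// nothing left to claim
 */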
#ifdef CONFIG_GENERIC_ATOMIC64
#include <asm-generic/atomic64.h>
#endif

#ifndef atomic64_read_acquire
#define atomic64_read_acquire(v) smp_load_acquire(&(v)->counter)
#endif

#ifndef atomic64_set_release
#define atomic64_set_release(v, i) smp_store_release(&(v)->counter, (i))
#endif

/* atomic64_add_return_relaxed */
#ifndef atomic64_add_return_relaxed
#define atomic64_add_return_relaxed atomic64_add_return
#define atomic64_add_return_acquire atomic64_add_return
#define atomic64_add_return_release atomic64_add_return
#else /* atomic64_add_return_relaxed */
#ifndef atomic64_add_return_acquire
#define atomic64_add_return_acquire(...) \
	__atomic_op_acquire(atomic64_add_return, __VA_ARGS__)
#endif
#ifndef atomic64_add_return_release
#define atomic64_add_return_release(...) \
	__atomic_op_release(atomic64_add_return, __VA_ARGS__)
#endif
#ifndef atomic64_add_return
#define atomic64_add_return(...) \
	__atomic_op_fence(atomic64_add_return, __VA_ARGS__)
#endif
#endif /* atomic64_add_return_relaxed */

/* atomic64_inc_return_relaxed */
#ifndef atomic64_inc_return_relaxed
#define atomic64_inc_return_relaxed atomic64_inc_return
#define atomic64_inc_return_acquire atomic64_inc_return
#define atomic64_inc_return_release atomic64_inc_return
#else /* atomic64_inc_return_relaxed */
#ifndef atomic64_inc_return_acquire
#define atomic64_inc_return_acquire(...) \
	__atomic_op_acquire(atomic64_inc_return, __VA_ARGS__)
#endif
#ifndef atomic64_inc_return_release
#define atomic64_inc_return_release(...) \
	__atomic_op_release(atomic64_inc_return, __VA_ARGS__)
#endif
#ifndef atomic64_inc_return
#define atomic64_inc_return(...) \
	__atomic_op_fence(atomic64_inc_return, __VA_ARGS__)
#endif
#endif /* atomic64_inc_return_relaxed */

/* atomic64_sub_return_relaxed */
#ifndef atomic64_sub_return_relaxed
#define atomic64_sub_return_relaxed atomic64_sub_return
#define atomic64_sub_return_acquire atomic64_sub_return
#define atomic64_sub_return_release atomic64_sub_return
#else /* atomic64_sub_return_relaxed */
#ifndef atomic64_sub_return_acquire
#define atomic64_sub_return_acquire(...) \
	__atomic_op_acquire(atomic64_sub_return, __VA_ARGS__)
#endif
#ifndef atomic64_sub_return_release
#define atomic64_sub_return_release(...) \
	__atomic_op_release(atomic64_sub_return, __VA_ARGS__)
#endif
#ifndef atomic64_sub_return
#define atomic64_sub_return(...) \
	__atomic_op_fence(atomic64_sub_return, __VA_ARGS__)
#endif
#endif /* atomic64_sub_return_relaxed */

/* atomic64_dec_return_relaxed */
#ifndef atomic64_dec_return_relaxed
#define atomic64_dec_return_relaxed atomic64_dec_return
#define atomic64_dec_return_acquire atomic64_dec_return
#define atomic64_dec_return_release atomic64_dec_return
#else /* atomic64_dec_return_relaxed */
#ifndef atomic64_dec_return_acquire
#define atomic64_dec_return_acquire(...) \
	__atomic_op_acquire(atomic64_dec_return, __VA_ARGS__)
#endif
#ifndef atomic64_dec_return_release
#define atomic64_dec_return_release(...) \
	__atomic_op_release(atomic64_dec_return, __VA_ARGS__)
#endif
#ifndef atomic64_dec_return
#define atomic64_dec_return(...) \
	__atomic_op_fence(atomic64_dec_return, __VA_ARGS__)
#endif
#endif /* atomic64_dec_return_relaxed */

/* atomic64_fetch_add_relaxed */
#ifndef atomic64_fetch_add_relaxed
#define atomic64_fetch_add_relaxed atomic64_fetch_add
#define atomic64_fetch_add_acquire atomic64_fetch_add
#define atomic64_fetch_add_release atomic64_fetch_add
#else /* atomic64_fetch_add_relaxed */
#ifndef atomic64_fetch_add_acquire
#define atomic64_fetch_add_acquire(...) \
	__atomic_op_acquire(atomic64_fetch_add, __VA_ARGS__)
#endif
#ifndef atomic64_fetch_add_release
#define atomic64_fetch_add_release(...) \
	__atomic_op_release(atomic64_fetch_add, __VA_ARGS__)
#endif
#ifndef atomic64_fetch_add
#define atomic64_fetch_add(...) \
	__atomic_op_fence(atomic64_fetch_add, __VA_ARGS__)
#endif
#endif /* atomic64_fetch_add_relaxed */

/* atomic64_fetch_inc_relaxed */
#ifndef atomic64_fetch_inc_relaxed
#ifndef atomic64_fetch_inc
#define atomic64_fetch_inc(v) atomic64_fetch_add(1, (v))
#define atomic64_fetch_inc_relaxed(v) atomic64_fetch_add_relaxed(1, (v))
#define atomic64_fetch_inc_acquire(v) atomic64_fetch_add_acquire(1, (v))
#define atomic64_fetch_inc_release(v) atomic64_fetch_add_release(1, (v))
#else /* atomic64_fetch_inc */
#define atomic64_fetch_inc_relaxed atomic64_fetch_inc
#define atomic64_fetch_inc_acquire atomic64_fetch_inc
#define atomic64_fetch_inc_release atomic64_fetch_inc
#endif /* atomic64_fetch_inc */
#else /* atomic64_fetch_inc_relaxed */
#ifndef atomic64_fetch_inc_acquire
#define atomic64_fetch_inc_acquire(...) \
	__atomic_op_acquire(atomic64_fetch_inc, __VA_ARGS__)
#endif
#ifndef atomic64_fetch_inc_release
#define atomic64_fetch_inc_release(...) \
	__atomic_op_release(atomic64_fetch_inc, __VA_ARGS__)
#endif
#ifndef atomic64_fetch_inc
#define atomic64_fetch_inc(...) \
	__atomic_op_fence(atomic64_fetch_inc, __VA_ARGS__)
#endif
#endif /* atomic64_fetch_inc_relaxed */

/* atomic64_fetch_sub_relaxed */
#ifndef atomic64_fetch_sub_relaxed
#define atomic64_fetch_sub_relaxed atomic64_fetch_sub
#define atomic64_fetch_sub_acquire atomic64_fetch_sub
#define atomic64_fetch_sub_release atomic64_fetch_sub
#else /* atomic64_fetch_sub_relaxed */
#ifndef atomic64_fetch_sub_acquire
#define atomic64_fetch_sub_acquire(...) \
	__atomic_op_acquire(atomic64_fetch_sub, __VA_ARGS__)
#endif
#ifndef atomic64_fetch_sub_release
#define atomic64_fetch_sub_release(...) \
	__atomic_op_release(atomic64_fetch_sub, __VA_ARGS__)
#endif
#ifndef atomic64_fetch_sub
#define atomic64_fetch_sub(...) \
	__atomic_op_fence(atomic64_fetch_sub, __VA_ARGS__)
#endif
#endif /* atomic64_fetch_sub_relaxed */

/* atomic64_fetch_dec_relaxed */
#ifndef atomic64_fetch_dec_relaxed
#ifndef atomic64_fetch_dec
#define atomic64_fetch_dec(v) atomic64_fetch_sub(1, (v))
#define atomic64_fetch_dec_relaxed(v) atomic64_fetch_sub_relaxed(1, (v))
#define atomic64_fetch_dec_acquire(v) atomic64_fetch_sub_acquire(1, (v))
#define atomic64_fetch_dec_release(v) atomic64_fetch_sub_release(1, (v))
#else /* atomic64_fetch_dec */
#define atomic64_fetch_dec_relaxed atomic64_fetch_dec
#define atomic64_fetch_dec_acquire atomic64_fetch_dec
#define atomic64_fetch_dec_release atomic64_fetch_dec
#endif /* atomic64_fetch_dec */
#else /* atomic64_fetch_dec_relaxed */
#ifndef atomic64_fetch_dec_acquire
#define atomic64_fetch_dec_acquire(...) \
	__atomic_op_acquire(atomic64_fetch_dec, __VA_ARGS__)
#endif
#ifndef atomic64_fetch_dec_release
#define atomic64_fetch_dec_release(...) \
	__atomic_op_release(atomic64_fetch_dec, __VA_ARGS__)
#endif
#ifndef atomic64_fetch_dec
#define atomic64_fetch_dec(...) \
	__atomic_op_fence(atomic64_fetch_dec, __VA_ARGS__)
#endif
#endif /* atomic64_fetch_dec_relaxed */

/* atomic64_fetch_or_relaxed */
#ifndef atomic64_fetch_or_relaxed
#define atomic64_fetch_or_relaxed atomic64_fetch_or
#define atomic64_fetch_or_acquire atomic64_fetch_or
#define atomic64_fetch_or_release atomic64_fetch_or
#else /* atomic64_fetch_or_relaxed */
#ifndef atomic64_fetch_or_acquire
#define atomic64_fetch_or_acquire(...) \
	__atomic_op_acquire(atomic64_fetch_or, __VA_ARGS__)
#endif
#ifndef atomic64_fetch_or_release
#define atomic64_fetch_or_release(...) \
	__atomic_op_release(atomic64_fetch_or, __VA_ARGS__)
#endif
#ifndef atomic64_fetch_or
#define atomic64_fetch_or(...) \
	__atomic_op_fence(atomic64_fetch_or, __VA_ARGS__)
#endif
#endif /* atomic64_fetch_or_relaxed */

/* atomic64_fetch_and_relaxed */
#ifndef atomic64_fetch_and_relaxed
#define atomic64_fetch_and_relaxed atomic64_fetch_and
#define atomic64_fetch_and_acquire atomic64_fetch_and
#define atomic64_fetch_and_release atomic64_fetch_and
#else /* atomic64_fetch_and_relaxed */
#ifndef atomic64_fetch_and_acquire
#define atomic64_fetch_and_acquire(...) \
	__atomic_op_acquire(atomic64_fetch_and, __VA_ARGS__)
#endif
#ifndef atomic64_fetch_and_release
#define atomic64_fetch_and_release(...) \
	__atomic_op_release(atomic64_fetch_and, __VA_ARGS__)
#endif
#ifndef atomic64_fetch_and
#define atomic64_fetch_and(...) \
	__atomic_op_fence(atomic64_fetch_and, __VA_ARGS__)
#endif
#endif /* atomic64_fetch_and_relaxed */

#ifdef atomic64_andnot
/* atomic64_fetch_andnot_relaxed */
#ifndef atomic64_fetch_andnot_relaxed
#define atomic64_fetch_andnot_relaxed atomic64_fetch_andnot
#define atomic64_fetch_andnot_acquire atomic64_fetch_andnot
#define atomic64_fetch_andnot_release atomic64_fetch_andnot
#else /* atomic64_fetch_andnot_relaxed */
#ifndef atomic64_fetch_andnot_acquire
#define atomic64_fetch_andnot_acquire(...) \
	__atomic_op_acquire(atomic64_fetch_andnot, __VA_ARGS__)
#endif
#ifndef atomic64_fetch_andnot_release
#define atomic64_fetch_andnot_release(...) \
	__atomic_op_release(atomic64_fetch_andnot, __VA_ARGS__)
#endif
#ifndef atomic64_fetch_andnot
#define atomic64_fetch_andnot(...) \
	__atomic_op_fence(atomic64_fetch_andnot, __VA_ARGS__)
#endif
#endif /* atomic64_fetch_andnot_relaxed */
#endif /* atomic64_andnot */

/* atomic64_fetch_xor_relaxed */
#ifndef atomic64_fetch_xor_relaxed
#define atomic64_fetch_xor_relaxed atomic64_fetch_xor
#define atomic64_fetch_xor_acquire atomic64_fetch_xor
#define atomic64_fetch_xor_release atomic64_fetch_xor
#else /* atomic64_fetch_xor_relaxed */
#ifndef atomic64_fetch_xor_acquire
#define atomic64_fetch_xor_acquire(...) \
	__atomic_op_acquire(atomic64_fetch_xor, __VA_ARGS__)
#endif
#ifndef atomic64_fetch_xor_release
#define atomic64_fetch_xor_release(...) \
	__atomic_op_release(atomic64_fetch_xor, __VA_ARGS__)
#endif
#ifndef atomic64_fetch_xor
#define atomic64_fetch_xor(...) \
	__atomic_op_fence(atomic64_fetch_xor, __VA_ARGS__)
#endif
#endif /* atomic64_fetch_xor_relaxed */

/* atomic64_xchg_relaxed */
#ifndef atomic64_xchg_relaxed
#define atomic64_xchg_relaxed atomic64_xchg
#define atomic64_xchg_acquire atomic64_xchg
#define atomic64_xchg_release atomic64_xchg
#else /* atomic64_xchg_relaxed */
#ifndef atomic64_xchg_acquire
#define atomic64_xchg_acquire(...) \
	__atomic_op_acquire(atomic64_xchg, __VA_ARGS__)
#endif
#ifndef atomic64_xchg_release
#define atomic64_xchg_release(...) \
	__atomic_op_release(atomic64_xchg, __VA_ARGS__)
#endif
#ifndef atomic64_xchg
#define atomic64_xchg(...) \
	__atomic_op_fence(atomic64_xchg, __VA_ARGS__)
#endif
#endif /* atomic64_xchg_relaxed */

/* atomic64_cmpxchg_relaxed */
#ifndef atomic64_cmpxchg_relaxed
#define atomic64_cmpxchg_relaxed atomic64_cmpxchg
#define atomic64_cmpxchg_acquire atomic64_cmpxchg
#define atomic64_cmpxchg_release atomic64_cmpxchg
#else /* atomic64_cmpxchg_relaxed */
#ifndef atomic64_cmpxchg_acquire
#define atomic64_cmpxchg_acquire(...) \
	__atomic_op_acquire(atomic64_cmpxchg, __VA_ARGS__)
#endif
#ifndef atomic64_cmpxchg_release
#define atomic64_cmpxchg_release(...) \
	__atomic_op_release(atomic64_cmpxchg, __VA_ARGS__)
#endif
#ifndef atomic64_cmpxchg
#define atomic64_cmpxchg(...) \
	__atomic_op_fence(atomic64_cmpxchg, __VA_ARGS__)
#endif
#endif /* atomic64_cmpxchg_relaxed */

#ifndef atomic64_try_cmpxchg
#define __atomic64_try_cmpxchg(type, _p, _po, _n) \
({ \
	typeof(_po) __po = (_po); \
	typeof(*(_po)) __r, __o = *__po; \
	__r = atomic64_cmpxchg##type((_p), __o, (_n)); \
	if (unlikely(__r != __o)) \
		*__po = __r; \
	likely(__r == __o); \
})
#define atomic64_try_cmpxchg(_p, _po, _n) __atomic64_try_cmpxchg(, _p, _po, _n)
#define atomic64_try_cmpxchg_relaxed(_p, _po, _n) __atomic64_try_cmpxchg(_relaxed, _p, _po, _n)
#define atomic64_try_cmpxchg_acquire(_p, _po, _n) __atomic64_try_cmpxchg(_acquire, _p, _po, _n)
#define atomic64_try_cmpxchg_release(_p, _po, _n) __atomic64_try_cmpxchg(_release, _p, _po, _n)
#else /* atomic64_try_cmpxchg */
#define atomic64_try_cmpxchg_relaxed atomic64_try_cmpxchg
#define atomic64_try_cmpxchg_acquire atomic64_try_cmpxchg
#define atomic64_try_cmpxchg_release atomic64_try_cmpxchg
#endif /* atomic64_try_cmpxchg */

#ifndef atomic64_andnot
static inline void atomic64_andnot(long long i, atomic64_t *v)
{
	atomic64_and(~i, v);
}

static inline long long atomic64_fetch_andnot(long long i, atomic64_t *v)
{
	return atomic64_fetch_and(~i, v);
}

static inline long long atomic64_fetch_andnot_relaxed(long long i, atomic64_t *v)
{
	return atomic64_fetch_and_relaxed(~i, v);
}

static inline long long atomic64_fetch_andnot_acquire(long long i, atomic64_t *v)
{
	return atomic64_fetch_and_acquire(~i, v);
}

static inline long long atomic64_fetch_andnot_release(long long i, atomic64_t *v)
{
	return atomic64_fetch_and_release(~i, v);
}
#endif

#include <asm-generic/atomic-long.h>

#endif /* _LINUX_ATOMIC_H */