atomic.h
/*
 * Atomic operations that C can't guarantee us. Useful for
 * resource counting etc.
 *
 * But use these as seldom as possible since they are much slower
 * than regular operations.
 *
 * This file is subject to the terms and conditions of the GNU General Public
 * License. See the file "COPYING" in the main directory of this archive
 * for more details.
 *
 * Copyright (C) 1996, 97, 99, 2000, 03, 04, 06 by Ralf Baechle
 */
#ifndef _ASM_ATOMIC_H
#define _ASM_ATOMIC_H

#include <linux/irqflags.h>
#include <linux/types.h>

#include <asm/barrier.h>
#include <asm/compiler.h>
#include <asm/cpu-features.h>
#include <asm/cmpxchg.h>
#include <asm/war.h>

#define ATOMIC_INIT(i)	{ (i) }

/*
 * atomic_read - read atomic variable
 * @v: pointer of type atomic_t
 *
 * Atomically reads the value of @v.
 */
#define atomic_read(v)	READ_ONCE((v)->counter)

/*
 * atomic_set - set atomic variable
 * @v: pointer of type atomic_t
 * @i: required value
 *
 * Atomically sets the value of @v to @i.
 */
#define atomic_set(v, i)	WRITE_ONCE((v)->counter, (i))
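/*
 * Usage sketch (illustrative; the counter name and helper below are
 * hypothetical, not part of this header): an atomic_t is declared with
 * ATOMIC_INIT() and accessed with atomic_read()/atomic_set(). Both are
 * plain, unordered single-word accesses; they give atomicity of the word,
 * not memory barriers, and the read followed by the store below is two
 * separate atomic accesses, not one atomic exchange (use atomic_xchg(),
 * defined later in this file, for that).
 *
 *	static atomic_t foo_count = ATOMIC_INIT(0);
 *
 *	static int foo_reset_count(void)
 *	{
 *		int old = atomic_read(&foo_count);
 *
 *		atomic_set(&foo_count, 0);
 *		return old;
 *	}
 */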
#define ATOMIC_OP(op, c_op, asm_op) \
static __inline__ void atomic_##op(int i, atomic_t * v) \
{ \
	if (kernel_uses_llsc && R10000_LLSC_WAR) { \
		int temp; \
\
		__asm__ __volatile__( \
		" .set arch=r4000 \n" \
		"1: ll %0, %1 # atomic_" #op " \n" \
		" " #asm_op " %0, %2 \n" \
		" sc %0, %1 \n" \
		" beqzl %0, 1b \n" \
		" .set mips0 \n" \
		: "=&r" (temp), "+" GCC_OFF_SMALL_ASM() (v->counter) \
		: "Ir" (i)); \
	} else if (kernel_uses_llsc) { \
		int temp; \
\
		do { \
			__asm__ __volatile__( \
			" .set "MIPS_ISA_LEVEL" \n" \
			" ll %0, %1 # atomic_" #op "\n" \
			" " #asm_op " %0, %2 \n" \
			" sc %0, %1 \n" \
			" .set mips0 \n" \
			: "=&r" (temp), "+" GCC_OFF_SMALL_ASM() (v->counter) \
			: "Ir" (i)); \
		} while (unlikely(!temp)); \
	} else { \
		unsigned long flags; \
\
		raw_local_irq_save(flags); \
		v->counter c_op i; \
		raw_local_irq_restore(flags); \
	} \
}

#define ATOMIC_OP_RETURN(op, c_op, asm_op) \
static __inline__ int atomic_##op##_return(int i, atomic_t * v) \
{ \
	int result; \
\
	smp_mb__before_llsc(); \
\
	if (kernel_uses_llsc && R10000_LLSC_WAR) { \
		int temp; \
\
		__asm__ __volatile__( \
		" .set arch=r4000 \n" \
		"1: ll %1, %2 # atomic_" #op "_return \n" \
		" " #asm_op " %0, %1, %3 \n" \
		" sc %0, %2 \n" \
		" beqzl %0, 1b \n" \
		" " #asm_op " %0, %1, %3 \n" \
		" .set mips0 \n" \
		: "=&r" (result), "=&r" (temp), \
		  "+" GCC_OFF_SMALL_ASM() (v->counter) \
		: "Ir" (i)); \
	} else if (kernel_uses_llsc) { \
		int temp; \
\
		do { \
			__asm__ __volatile__( \
			" .set "MIPS_ISA_LEVEL" \n" \
			" ll %1, %2 # atomic_" #op "_return \n" \
			" " #asm_op " %0, %1, %3 \n" \
			" sc %0, %2 \n" \
			" .set mips0 \n" \
			: "=&r" (result), "=&r" (temp), \
			  "+" GCC_OFF_SMALL_ASM() (v->counter) \
			: "Ir" (i)); \
		} while (unlikely(!result)); \
\
		result = temp; result c_op i; \
	} else { \
		unsigned long flags; \
\
		raw_local_irq_save(flags); \
		result = v->counter; \
		result c_op i; \
		v->counter = result; \
		raw_local_irq_restore(flags); \
	} \
\
	smp_llsc_mb(); \
\
	return result; \
}

#define ATOMIC_OPS(op, c_op, asm_op) \
	ATOMIC_OP(op, c_op, asm_op) \
	ATOMIC_OP_RETURN(op, c_op, asm_op)

ATOMIC_OPS(add, +=, addu)
ATOMIC_OPS(sub, -=, subu)

ATOMIC_OP(and, &=, and)
ATOMIC_OP(or, |=, or)
ATOMIC_OP(xor, ^=, xor)

#undef ATOMIC_OPS
#undef ATOMIC_OP_RETURN
#undef ATOMIC_OP
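/*
 * For reference, the expansions above generate this interface (sketch;
 * the bodies come from ATOMIC_OP/ATOMIC_OP_RETURN):
 *
 *	static __inline__ void atomic_add(int i, atomic_t *v);
 *	static __inline__ int  atomic_add_return(int i, atomic_t *v);
 *	static __inline__ void atomic_sub(int i, atomic_t *v);
 *	static __inline__ int  atomic_sub_return(int i, atomic_t *v);
 *	static __inline__ void atomic_and(int i, atomic_t *v);
 *	static __inline__ void atomic_or(int i, atomic_t *v);
 *	static __inline__ void atomic_xor(int i, atomic_t *v);
 *
 * On LL/SC-capable CPUs each operation is a load-linked/store-conditional
 * loop that retries until the sc succeeds; the *_return forms are
 * additionally bracketed by smp_mb__before_llsc()/smp_llsc_mb(), so
 * atomic_add_return(i, v) behaves like a fully ordered, single atomic
 * step equivalent of "v->counter += i; return v->counter;".
 */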
/*
 * atomic_sub_if_positive - conditionally subtract integer from atomic variable
 * @i: integer value to subtract
 * @v: pointer of type atomic_t
 *
 * Atomically test @v and subtract @i if @v is greater than or equal to @i.
 * The function returns the old value of @v minus @i.
 */
static __inline__ int atomic_sub_if_positive(int i, atomic_t * v)
{
	int result;

	smp_mb__before_llsc();

	if (kernel_uses_llsc && R10000_LLSC_WAR) {
		int temp;

		__asm__ __volatile__(
		" .set arch=r4000 \n"
		"1: ll %1, %2 # atomic_sub_if_positive\n"
		" subu %0, %1, %3 \n"
		" bltz %0, 1f \n"
		" sc %0, %2 \n"
		" .set noreorder \n"
		" beqzl %0, 1b \n"
		" subu %0, %1, %3 \n"
		" .set reorder \n"
		"1: \n"
		" .set mips0 \n"
		: "=&r" (result), "=&r" (temp),
		  "+" GCC_OFF_SMALL_ASM() (v->counter)
		: "Ir" (i), GCC_OFF_SMALL_ASM() (v->counter)
		: "memory");
	} else if (kernel_uses_llsc) {
		int temp;

		__asm__ __volatile__(
		" .set "MIPS_ISA_LEVEL" \n"
		"1: ll %1, %2 # atomic_sub_if_positive\n"
		" subu %0, %1, %3 \n"
		" bltz %0, 1f \n"
		" sc %0, %2 \n"
		" .set noreorder \n"
		" beqz %0, 1b \n"
		" subu %0, %1, %3 \n"
		" .set reorder \n"
		"1: \n"
		" .set mips0 \n"
		: "=&r" (result), "=&r" (temp),
		  "+" GCC_OFF_SMALL_ASM() (v->counter)
		: "Ir" (i));
	} else {
		unsigned long flags;

		raw_local_irq_save(flags);
		result = v->counter;
		result -= i;
		if (result >= 0)
			v->counter = result;
		raw_local_irq_restore(flags);
	}

	smp_llsc_mb();

	return result;
}
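/*
 * Usage sketch (illustrative; "tokens" and foo_try_take() are hypothetical
 * names): atomic_sub_if_positive() commits the subtraction only when the
 * result would stay non-negative, and returns that would-be result either
 * way, so a negative return value means nothing was subtracted.
 *
 *	static atomic_t tokens = ATOMIC_INIT(4);
 *
 *	static bool foo_try_take(int n)
 *	{
 *		return atomic_sub_if_positive(n, &tokens) >= 0;
 *	}
 */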
#define atomic_cmpxchg(v, o, n) (cmpxchg(&((v)->counter), (o), (n)))
#define atomic_xchg(v, new) (xchg(&((v)->counter), (new)))

/**
 * __atomic_add_unless - add unless the number is a given value
 * @v: pointer of type atomic_t
 * @a: the amount to add to v...
 * @u: ...unless v is equal to u.
 *
 * Atomically adds @a to @v, so long as it was not @u.
 * Returns the old value of @v.
 */
static __inline__ int __atomic_add_unless(atomic_t *v, int a, int u)
{
	int c, old;

	c = atomic_read(v);
	for (;;) {
		if (unlikely(c == (u)))
			break;
		old = atomic_cmpxchg((v), c, c + (a));
		if (likely(old == c))
			break;
		c = old;
	}
	return c;
}
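/*
 * Usage sketch (illustrative; foo_get() is a hypothetical helper):
 * __atomic_add_unless() returns the value @v held before the attempt, so
 * the add actually happened iff the return value differs from @u. The
 * usual "take a reference unless the count already hit zero" pattern:
 *
 *	static bool foo_get(atomic_t *refs)
 *	{
 *		return __atomic_add_unless(refs, 1, 0) != 0;
 *	}
 */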
#define atomic_dec_return(v) atomic_sub_return(1, (v))
#define atomic_inc_return(v) atomic_add_return(1, (v))

/*
 * atomic_sub_and_test - subtract value from variable and test result
 * @i: integer value to subtract
 * @v: pointer of type atomic_t
 *
 * Atomically subtracts @i from @v and returns
 * true if the result is zero, or false for all
 * other cases.
 */
#define atomic_sub_and_test(i, v) (atomic_sub_return((i), (v)) == 0)

/*
 * atomic_inc_and_test - increment and test
 * @v: pointer of type atomic_t
 *
 * Atomically increments @v by 1
 * and returns true if the result is zero, or false for all
 * other cases.
 */
#define atomic_inc_and_test(v) (atomic_inc_return(v) == 0)

/*
 * atomic_dec_and_test - decrement by 1 and test
 * @v: pointer of type atomic_t
 *
 * Atomically decrements @v by 1 and
 * returns true if the result is 0, or false for all other
 * cases.
 */
#define atomic_dec_and_test(v) (atomic_sub_return(1, (v)) == 0)

/*
 * atomic_dec_if_positive - decrement by 1 if old value positive
 * @v: pointer of type atomic_t
 */
#define atomic_dec_if_positive(v) atomic_sub_if_positive(1, v)

/*
 * atomic_inc - increment atomic variable
 * @v: pointer of type atomic_t
 *
 * Atomically increments @v by 1.
 */
#define atomic_inc(v) atomic_add(1, (v))
/*
 * atomic_dec - decrement atomic variable
 * @v: pointer of type atomic_t
 *
 * Atomically decrements @v by 1.
 */
#define atomic_dec(v) atomic_sub(1, (v))
/*
 * atomic_add_negative - add and test if negative
 * @v: pointer of type atomic_t
 * @i: integer value to add
 *
 * Atomically adds @i to @v and returns true
 * if the result is negative, or false when
 * result is greater than or equal to zero.
 */
#define atomic_add_negative(i, v) (atomic_add_return(i, (v)) < 0)
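/*
 * Usage sketch (illustrative; struct foo, foo_put() and foo_release() are
 * hypothetical): the *_and_test helpers test the value returned by the
 * atomic operation itself, so "was this the final decrement?" is decided
 * in a single atomic step. This is the usual reference-count teardown:
 *
 *	struct foo {
 *		atomic_t refcnt;
 *	};
 *
 *	static void foo_put(struct foo *f)
 *	{
 *		if (atomic_dec_and_test(&f->refcnt))
 *			foo_release(f);
 *	}
 */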
#ifdef CONFIG_64BIT

#define ATOMIC64_INIT(i)	{ (i) }

/*
 * atomic64_read - read atomic variable
 * @v: pointer of type atomic64_t
 *
 * Atomically reads the value of @v.
 */
#define atomic64_read(v)	READ_ONCE((v)->counter)

/*
 * atomic64_set - set atomic variable
 * @v: pointer of type atomic64_t
 * @i: required value
 *
 * Atomically sets the value of @v to @i.
 */
#define atomic64_set(v, i)	WRITE_ONCE((v)->counter, (i))
#define ATOMIC64_OP(op, c_op, asm_op) \
static __inline__ void atomic64_##op(long i, atomic64_t * v) \
{ \
	if (kernel_uses_llsc && R10000_LLSC_WAR) { \
		long temp; \
\
		__asm__ __volatile__( \
		" .set arch=r4000 \n" \
		"1: lld %0, %1 # atomic64_" #op " \n" \
		" " #asm_op " %0, %2 \n" \
		" scd %0, %1 \n" \
		" beqzl %0, 1b \n" \
		" .set mips0 \n" \
		: "=&r" (temp), "+" GCC_OFF_SMALL_ASM() (v->counter) \
		: "Ir" (i)); \
	} else if (kernel_uses_llsc) { \
		long temp; \
\
		do { \
			__asm__ __volatile__( \
			" .set "MIPS_ISA_LEVEL" \n" \
			" lld %0, %1 # atomic64_" #op "\n" \
			" " #asm_op " %0, %2 \n" \
			" scd %0, %1 \n" \
			" .set mips0 \n" \
			: "=&r" (temp), "+" GCC_OFF_SMALL_ASM() (v->counter) \
			: "Ir" (i)); \
		} while (unlikely(!temp)); \
	} else { \
		unsigned long flags; \
\
		raw_local_irq_save(flags); \
		v->counter c_op i; \
		raw_local_irq_restore(flags); \
	} \
}

#define ATOMIC64_OP_RETURN(op, c_op, asm_op) \
static __inline__ long atomic64_##op##_return(long i, atomic64_t * v) \
{ \
	long result; \
\
	smp_mb__before_llsc(); \
\
	if (kernel_uses_llsc && R10000_LLSC_WAR) { \
		long temp; \
\
		__asm__ __volatile__( \
		" .set arch=r4000 \n" \
		"1: lld %1, %2 # atomic64_" #op "_return\n" \
		" " #asm_op " %0, %1, %3 \n" \
		" scd %0, %2 \n" \
		" beqzl %0, 1b \n" \
		" " #asm_op " %0, %1, %3 \n" \
		" .set mips0 \n" \
		: "=&r" (result), "=&r" (temp), \
		  "+" GCC_OFF_SMALL_ASM() (v->counter) \
		: "Ir" (i)); \
	} else if (kernel_uses_llsc) { \
		long temp; \
\
		do { \
			__asm__ __volatile__( \
			" .set "MIPS_ISA_LEVEL" \n" \
			" lld %1, %2 # atomic64_" #op "_return\n" \
			" " #asm_op " %0, %1, %3 \n" \
			" scd %0, %2 \n" \
			" .set mips0 \n" \
			: "=&r" (result), "=&r" (temp), \
			  "=" GCC_OFF_SMALL_ASM() (v->counter) \
			: "Ir" (i), GCC_OFF_SMALL_ASM() (v->counter) \
			: "memory"); \
		} while (unlikely(!result)); \
\
		result = temp; result c_op i; \
	} else { \
		unsigned long flags; \
\
		raw_local_irq_save(flags); \
		result = v->counter; \
		result c_op i; \
		v->counter = result; \
		raw_local_irq_restore(flags); \
	} \
\
	smp_llsc_mb(); \
\
	return result; \
}

#define ATOMIC64_OPS(op, c_op, asm_op) \
	ATOMIC64_OP(op, c_op, asm_op) \
	ATOMIC64_OP_RETURN(op, c_op, asm_op)

ATOMIC64_OPS(add, +=, daddu)
ATOMIC64_OPS(sub, -=, dsubu)

ATOMIC64_OP(and, &=, and)
ATOMIC64_OP(or, |=, or)
ATOMIC64_OP(xor, ^=, xor)

#undef ATOMIC64_OPS
#undef ATOMIC64_OP_RETURN
#undef ATOMIC64_OP
/*
 * atomic64_sub_if_positive - conditionally subtract integer from atomic
 *                            variable
 * @i: integer value to subtract
 * @v: pointer of type atomic64_t
 *
 * Atomically test @v and subtract @i if @v is greater than or equal to @i.
 * The function returns the old value of @v minus @i.
 */
static __inline__ long atomic64_sub_if_positive(long i, atomic64_t * v)
{
	long result;

	smp_mb__before_llsc();

	if (kernel_uses_llsc && R10000_LLSC_WAR) {
		long temp;

		__asm__ __volatile__(
		" .set arch=r4000 \n"
		"1: lld %1, %2 # atomic64_sub_if_positive\n"
		" dsubu %0, %1, %3 \n"
		" bltz %0, 1f \n"
		" scd %0, %2 \n"
		" .set noreorder \n"
		" beqzl %0, 1b \n"
		" dsubu %0, %1, %3 \n"
		" .set reorder \n"
		"1: \n"
		" .set mips0 \n"
		: "=&r" (result), "=&r" (temp),
		  "=" GCC_OFF_SMALL_ASM() (v->counter)
		: "Ir" (i), GCC_OFF_SMALL_ASM() (v->counter)
		: "memory");
	} else if (kernel_uses_llsc) {
		long temp;

		__asm__ __volatile__(
		" .set "MIPS_ISA_LEVEL" \n"
		"1: lld %1, %2 # atomic64_sub_if_positive\n"
		" dsubu %0, %1, %3 \n"
		" bltz %0, 1f \n"
		" scd %0, %2 \n"
		" .set noreorder \n"
		" beqz %0, 1b \n"
		" dsubu %0, %1, %3 \n"
		" .set reorder \n"
		"1: \n"
		" .set mips0 \n"
		: "=&r" (result), "=&r" (temp),
		  "+" GCC_OFF_SMALL_ASM() (v->counter)
		: "Ir" (i));
	} else {
		unsigned long flags;

		raw_local_irq_save(flags);
		result = v->counter;
		result -= i;
		if (result >= 0)
			v->counter = result;
		raw_local_irq_restore(flags);
	}

	smp_llsc_mb();

	return result;
}
#define atomic64_cmpxchg(v, o, n) \
	((__typeof__((v)->counter))cmpxchg(&((v)->counter), (o), (n)))
#define atomic64_xchg(v, new) (xchg(&((v)->counter), (new)))

/**
 * atomic64_add_unless - add unless the number is a given value
 * @v: pointer of type atomic64_t
 * @a: the amount to add to v...
 * @u: ...unless v is equal to u.
 *
 * Atomically adds @a to @v, so long as it was not @u.
 * Returns true iff @v was not @u.
 */
static __inline__ int atomic64_add_unless(atomic64_t *v, long a, long u)
{
	long c, old;

	c = atomic64_read(v);
	for (;;) {
		if (unlikely(c == (u)))
			break;
		old = atomic64_cmpxchg((v), c, c + (a));
		if (likely(old == c))
			break;
		c = old;
	}
	return c != (u);
}
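/*
 * Note the asymmetry with the 32-bit helper above: atomic64_add_unless()
 * returns a boolean (non-zero iff the add was performed) rather than the
 * old value. atomic64_inc_not_zero() below relies on exactly that, e.g.
 * (sketch, with a hypothetical foo64_get()):
 *
 *	static bool foo64_get(atomic64_t *refs)
 *	{
 *		return atomic64_inc_not_zero(refs);
 *	}
 */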
#define atomic64_inc_not_zero(v) atomic64_add_unless((v), 1, 0)

#define atomic64_dec_return(v) atomic64_sub_return(1, (v))
#define atomic64_inc_return(v) atomic64_add_return(1, (v))

/*
 * atomic64_sub_and_test - subtract value from variable and test result
 * @i: integer value to subtract
 * @v: pointer of type atomic64_t
 *
 * Atomically subtracts @i from @v and returns
 * true if the result is zero, or false for all
 * other cases.
 */
#define atomic64_sub_and_test(i, v) (atomic64_sub_return((i), (v)) == 0)

/*
 * atomic64_inc_and_test - increment and test
 * @v: pointer of type atomic64_t
 *
 * Atomically increments @v by 1
 * and returns true if the result is zero, or false for all
 * other cases.
 */
#define atomic64_inc_and_test(v) (atomic64_inc_return(v) == 0)

/*
 * atomic64_dec_and_test - decrement by 1 and test
 * @v: pointer of type atomic64_t
 *
 * Atomically decrements @v by 1 and
 * returns true if the result is 0, or false for all other
 * cases.
 */
#define atomic64_dec_and_test(v) (atomic64_sub_return(1, (v)) == 0)

/*
 * atomic64_dec_if_positive - decrement by 1 if old value positive
 * @v: pointer of type atomic64_t
 */
#define atomic64_dec_if_positive(v) atomic64_sub_if_positive(1, v)

/*
 * atomic64_inc - increment atomic variable
 * @v: pointer of type atomic64_t
 *
 * Atomically increments @v by 1.
 */
#define atomic64_inc(v) atomic64_add(1, (v))
/*
 * atomic64_dec - decrement atomic variable
 * @v: pointer of type atomic64_t
 *
 * Atomically decrements @v by 1.
 */
#define atomic64_dec(v) atomic64_sub(1, (v))
/*
 * atomic64_add_negative - add and test if negative
 * @v: pointer of type atomic64_t
 * @i: integer value to add
 *
 * Atomically adds @i to @v and returns true
 * if the result is negative, or false when
 * result is greater than or equal to zero.
 */
#define atomic64_add_negative(i, v) (atomic64_add_return(i, (v)) < 0)

#endif /* CONFIG_64BIT */

#endif /* _ASM_ATOMIC_H */