#ifndef _ASM_POWERPC_ATOMIC_H_
#define _ASM_POWERPC_ATOMIC_H_

/*
 * PowerPC atomic operations
 */

typedef struct { volatile int counter; } atomic_t;

#ifdef __KERNEL__
#include <asm/synch.h>
#include <asm/asm-compat.h>

#define ATOMIC_INIT(i)		{ (i) }
#define atomic_read(v)		((v)->counter)
#define atomic_set(v,i)		(((v)->counter) = (i))
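
/*
 * Implementation note: every operation below is a load-reserve /
 * store-conditional retry loop.  lwarx loads the counter and sets a
 * reservation, stwcx. stores the new value only if the reservation still
 * holds (recording success in CR0), and bne- retries on failure.  The
 * value-returning variants are wrapped in EIEIO_ON_SMP before the loop
 * and ISYNC_ON_SMP after it (both from <asm/synch.h>; on non-SMP builds
 * they expand to nothing), which is what gives them barrier semantics;
 * the void variants imply no ordering.  PPC405_ERR77() emits a workaround
 * instruction before stwcx. on kernels built for IBM PowerPC 405 cores
 * affected by erratum 77, and expands to nothing otherwise.
 */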

static __inline__ void atomic_add(int a, atomic_t *v)
{
	int t;

	__asm__ __volatile__(
"1:	lwarx	%0,0,%3		# atomic_add\n\
	add	%0,%2,%0\n"
	PPC405_ERR77(0,%3)
"	stwcx.	%0,0,%3 \n\
	bne-	1b"
	: "=&r" (t), "=m" (v->counter)
	: "r" (a), "r" (&v->counter), "m" (v->counter)
	: "cc");
}

static __inline__ int atomic_add_return(int a, atomic_t *v)
{
	int t;

	__asm__ __volatile__(
	EIEIO_ON_SMP
"1:	lwarx	%0,0,%2		# atomic_add_return\n\
	add	%0,%1,%0\n"
	PPC405_ERR77(0,%2)
"	stwcx.	%0,0,%2 \n\
	bne-	1b"
	ISYNC_ON_SMP
	: "=&r" (t)
	: "r" (a), "r" (&v->counter)
	: "cc", "memory");

	return t;
}

#define atomic_add_negative(a, v)	(atomic_add_return((a), (v)) < 0)

static __inline__ void atomic_sub(int a, atomic_t *v)
{
	int t;

	__asm__ __volatile__(
"1:	lwarx	%0,0,%3		# atomic_sub\n\
	subf	%0,%2,%0\n"
	PPC405_ERR77(0,%3)
"	stwcx.	%0,0,%3 \n\
	bne-	1b"
	: "=&r" (t), "=m" (v->counter)
	: "r" (a), "r" (&v->counter), "m" (v->counter)
	: "cc");
}

static __inline__ int atomic_sub_return(int a, atomic_t *v)
{
	int t;

	__asm__ __volatile__(
	EIEIO_ON_SMP
"1:	lwarx	%0,0,%2		# atomic_sub_return\n\
	subf	%0,%1,%0\n"
	PPC405_ERR77(0,%2)
"	stwcx.	%0,0,%2 \n\
	bne-	1b"
	ISYNC_ON_SMP
	: "=&r" (t)
	: "r" (a), "r" (&v->counter)
	: "cc", "memory");

	return t;
}

static __inline__ void atomic_inc(atomic_t *v)
{
	int t;

	__asm__ __volatile__(
"1:	lwarx	%0,0,%2		# atomic_inc\n\
	addic	%0,%0,1\n"
	PPC405_ERR77(0,%2)
"	stwcx.	%0,0,%2 \n\
	bne-	1b"
	: "=&r" (t), "=m" (v->counter)
	: "r" (&v->counter), "m" (v->counter)
	: "cc");
}

static __inline__ int atomic_inc_return(atomic_t *v)
{
	int t;

	__asm__ __volatile__(
	EIEIO_ON_SMP
"1:	lwarx	%0,0,%1		# atomic_inc_return\n\
	addic	%0,%0,1\n"
	PPC405_ERR77(0,%1)
"	stwcx.	%0,0,%1 \n\
	bne-	1b"
	ISYNC_ON_SMP
	: "=&r" (t)
	: "r" (&v->counter)
	: "cc", "memory");

	return t;
}

/*
 * atomic_inc_and_test - increment and test
 * @v: pointer of type atomic_t
 *
 * Atomically increments @v by 1
 * and returns true if the result is zero, or false for all
 * other cases.
 */
#define atomic_inc_and_test(v) (atomic_inc_return(v) == 0)
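
/*
 * Hypothetical usage sketch (illustrative only, not part of this header):
 * a counter initialised to -1 lets exactly one caller win a "first user"
 * race, since only that increment brings the value to zero.
 *
 *	static atomic_t first = ATOMIC_INIT(-1);
 *
 *	if (atomic_inc_and_test(&first))
 *		do_one_time_setup();	// assumed caller-defined helper
 */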

static __inline__ void atomic_dec(atomic_t *v)
{
	int t;

	__asm__ __volatile__(
"1:	lwarx	%0,0,%2		# atomic_dec\n\
	addic	%0,%0,-1\n"
	PPC405_ERR77(0,%2)
"	stwcx.	%0,0,%2\n\
	bne-	1b"
	: "=&r" (t), "=m" (v->counter)
	: "r" (&v->counter), "m" (v->counter)
	: "cc");
}

static __inline__ int atomic_dec_return(atomic_t *v)
{
	int t;

	__asm__ __volatile__(
	EIEIO_ON_SMP
"1:	lwarx	%0,0,%1		# atomic_dec_return\n\
	addic	%0,%0,-1\n"
	PPC405_ERR77(0,%1)
"	stwcx.	%0,0,%1\n\
	bne-	1b"
	ISYNC_ON_SMP
	: "=&r" (t)
	: "r" (&v->counter)
	: "cc", "memory");

	return t;
}

#define atomic_sub_and_test(a, v)	(atomic_sub_return((a), (v)) == 0)
#define atomic_dec_and_test(v)		(atomic_dec_return((v)) == 0)
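
/*
 * Hypothetical usage sketch (illustrative only): the classic reference
 * count release pattern, where the thread that drops the count to zero
 * frees the object.  'obj', its 'refcnt' field and free_object() are
 * assumed names, not defined by this header.
 *
 *	if (atomic_dec_and_test(&obj->refcnt))
 *		free_object(obj);
 */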

/*
 * Atomically test *v and decrement if it is greater than 0.
 * The function returns the old value of *v minus 1.
 */
static __inline__ int atomic_dec_if_positive(atomic_t *v)
{
	int t;

	__asm__ __volatile__(
	EIEIO_ON_SMP
"1:	lwarx	%0,0,%1		# atomic_dec_if_positive\n\
	addic.	%0,%0,-1\n\
	blt-	2f\n"
	PPC405_ERR77(0,%1)
"	stwcx.	%0,0,%1\n\
	bne-	1b"
	ISYNC_ON_SMP
	"\n\
2:"	: "=&r" (t)
	: "r" (&v->counter)
	: "cc", "memory");

	return t;
}
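
/*
 * Hypothetical usage sketch (illustrative only): taking one token from a
 * counting resource only when one is available.  'pool', 'available' and
 * use_token() are assumed names.  The return value is the old count minus
 * 1, so it is >= 0 exactly when a token was actually consumed.
 *
 *	if (atomic_dec_if_positive(&pool->available) >= 0)
 *		use_token(pool);
 */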

#define smp_mb__before_atomic_dec()	smp_mb()
#define smp_mb__after_atomic_dec()	smp_mb()
#define smp_mb__before_atomic_inc()	smp_mb()
#define smp_mb__after_atomic_inc()	smp_mb()
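
/*
 * Note: plain atomic_inc()/atomic_dec() above do not act as memory
 * barriers, so callers that need ordering around them use these macros;
 * on this architecture they are full smp_mb() barriers rather than
 * no-ops.
 */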

#ifdef __powerpc64__

typedef struct { volatile long counter; } atomic64_t;

#define ATOMIC64_INIT(i)	{ (i) }
#define atomic64_read(v)	((v)->counter)
#define atomic64_set(v,i)	(((v)->counter) = (i))
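
/*
 * Note: the 64-bit operations below mirror the 32-bit ones, using the
 * doubleword reservation pair ldarx/stdcx. on a long counter.  They carry
 * no PPC405_ERR77() workaround because that erratum concerns the 32-bit
 * 405 core, which never runs this 64-bit code.
 */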

static __inline__ void atomic64_add(long a, atomic64_t *v)
{
	long t;

	__asm__ __volatile__(
"1:	ldarx	%0,0,%3		# atomic64_add\n\
	add	%0,%2,%0\n\
	stdcx.	%0,0,%3 \n\
	bne-	1b"
	: "=&r" (t), "=m" (v->counter)
	: "r" (a), "r" (&v->counter), "m" (v->counter)
	: "cc");
}

static __inline__ long atomic64_add_return(long a, atomic64_t *v)
{
	long t;

	__asm__ __volatile__(
	EIEIO_ON_SMP
"1:	ldarx	%0,0,%2		# atomic64_add_return\n\
	add	%0,%1,%0\n\
	stdcx.	%0,0,%2 \n\
	bne-	1b"
	ISYNC_ON_SMP
	: "=&r" (t)
	: "r" (a), "r" (&v->counter)
	: "cc", "memory");

	return t;
}

#define atomic64_add_negative(a, v)	(atomic64_add_return((a), (v)) < 0)

static __inline__ void atomic64_sub(long a, atomic64_t *v)
{
	long t;

	__asm__ __volatile__(
"1:	ldarx	%0,0,%3		# atomic64_sub\n\
	subf	%0,%2,%0\n\
	stdcx.	%0,0,%3 \n\
	bne-	1b"
	: "=&r" (t), "=m" (v->counter)
	: "r" (a), "r" (&v->counter), "m" (v->counter)
	: "cc");
}

static __inline__ long atomic64_sub_return(long a, atomic64_t *v)
{
	long t;

	__asm__ __volatile__(
	EIEIO_ON_SMP
"1:	ldarx	%0,0,%2		# atomic64_sub_return\n\
	subf	%0,%1,%0\n\
	stdcx.	%0,0,%2 \n\
	bne-	1b"
	ISYNC_ON_SMP
	: "=&r" (t)
	: "r" (a), "r" (&v->counter)
	: "cc", "memory");

	return t;
}

static __inline__ void atomic64_inc(atomic64_t *v)
{
	long t;

	__asm__ __volatile__(
"1:	ldarx	%0,0,%2		# atomic64_inc\n\
	addic	%0,%0,1\n\
	stdcx.	%0,0,%2 \n\
	bne-	1b"
	: "=&r" (t), "=m" (v->counter)
	: "r" (&v->counter), "m" (v->counter)
	: "cc");
}

static __inline__ long atomic64_inc_return(atomic64_t *v)
{
	long t;

	__asm__ __volatile__(
	EIEIO_ON_SMP
"1:	ldarx	%0,0,%1		# atomic64_inc_return\n\
	addic	%0,%0,1\n\
	stdcx.	%0,0,%1 \n\
	bne-	1b"
	ISYNC_ON_SMP
	: "=&r" (t)
	: "r" (&v->counter)
	: "cc", "memory");

	return t;
}

/*
 * atomic64_inc_and_test - increment and test
 * @v: pointer of type atomic64_t
 *
 * Atomically increments @v by 1
 * and returns true if the result is zero, or false for all
 * other cases.
 */
#define atomic64_inc_and_test(v) (atomic64_inc_return(v) == 0)

static __inline__ void atomic64_dec(atomic64_t *v)
{
	long t;

	__asm__ __volatile__(
"1:	ldarx	%0,0,%2		# atomic64_dec\n\
	addic	%0,%0,-1\n\
	stdcx.	%0,0,%2\n\
	bne-	1b"
	: "=&r" (t), "=m" (v->counter)
	: "r" (&v->counter), "m" (v->counter)
	: "cc");
}

static __inline__ long atomic64_dec_return(atomic64_t *v)
{
	long t;

	__asm__ __volatile__(
	EIEIO_ON_SMP
"1:	ldarx	%0,0,%1		# atomic64_dec_return\n\
	addic	%0,%0,-1\n\
	stdcx.	%0,0,%1\n\
	bne-	1b"
	ISYNC_ON_SMP
	: "=&r" (t)
	: "r" (&v->counter)
	: "cc", "memory");

	return t;
}

#define atomic64_sub_and_test(a, v)	(atomic64_sub_return((a), (v)) == 0)
#define atomic64_dec_and_test(v)	(atomic64_dec_return((v)) == 0)

/*
 * Atomically test *v and decrement if it is greater than 0.
 * The function returns the old value of *v minus 1.
 */
static __inline__ long atomic64_dec_if_positive(atomic64_t *v)
{
	long t;

	__asm__ __volatile__(
	EIEIO_ON_SMP
"1:	ldarx	%0,0,%1		# atomic64_dec_if_positive\n\
	addic.	%0,%0,-1\n\
	blt-	2f\n\
	stdcx.	%0,0,%1\n\
	bne-	1b"
	ISYNC_ON_SMP
	"\n\
2:"	: "=&r" (t)
	: "r" (&v->counter)
	: "cc", "memory");

	return t;
}

#endif /* __powerpc64__ */
#endif /* __KERNEL__ */
#endif /* _ASM_POWERPC_ATOMIC_H_ */