cmpxchg.h

/*
 * Based on arch/arm/include/asm/cmpxchg.h
 *
 * Copyright (C) 2012 ARM Ltd.
 *
 * This program is free software; you can redistribute it and/or modify
 * it under the terms of the GNU General Public License version 2 as
 * published by the Free Software Foundation.
 *
 * This program is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
 * GNU General Public License for more details.
 *
 * You should have received a copy of the GNU General Public License
 * along with this program.  If not, see <http://www.gnu.org/licenses/>.
 */
#ifndef __ASM_CMPXCHG_H
#define __ASM_CMPXCHG_H

#include <linux/bug.h>
#include <linux/mmdebug.h>

#include <asm/barrier.h>
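
/*
 * __xchg - atomically exchange the value at @ptr with @x and return the
 * previous value. The ldxr/stlxr (load-exclusive, store-release-exclusive)
 * loop retries until the store succeeds; together with the trailing
 * smp_mb() this gives the operation full barrier semantics. Only 1-, 2-,
 * 4- and 8-byte operands are supported; any other size is rejected at
 * compile time via BUILD_BUG().
 */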
static inline unsigned long __xchg(unsigned long x, volatile void *ptr, int size)
{
	unsigned long ret, tmp;

	switch (size) {
	case 1:
		asm volatile("// __xchg1\n"
		"1:	ldxrb	%w0, %2\n"
		"	stlxrb	%w1, %w3, %2\n"
		"	cbnz	%w1, 1b\n"
			: "=&r" (ret), "=&r" (tmp), "+Q" (*(u8 *)ptr)
			: "r" (x)
			: "memory");
		break;
	case 2:
		asm volatile("// __xchg2\n"
		"1:	ldxrh	%w0, %2\n"
		"	stlxrh	%w1, %w3, %2\n"
		"	cbnz	%w1, 1b\n"
			: "=&r" (ret), "=&r" (tmp), "+Q" (*(u16 *)ptr)
			: "r" (x)
			: "memory");
		break;
	case 4:
		asm volatile("// __xchg4\n"
		"1:	ldxr	%w0, %2\n"
		"	stlxr	%w1, %w3, %2\n"
		"	cbnz	%w1, 1b\n"
			: "=&r" (ret), "=&r" (tmp), "+Q" (*(u32 *)ptr)
			: "r" (x)
			: "memory");
		break;
	case 8:
		asm volatile("// __xchg8\n"
		"1:	ldxr	%0, %2\n"
		"	stlxr	%w1, %3, %2\n"
		"	cbnz	%w1, 1b\n"
			: "=&r" (ret), "=&r" (tmp), "+Q" (*(u64 *)ptr)
			: "r" (x)
			: "memory");
		break;
	default:
		BUILD_BUG();
	}

	smp_mb();
	return ret;
}
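
/*
 * xchg() - type-generic wrapper around __xchg(). The operand size is taken
 * from sizeof(*(ptr)) and the old value is cast back to the pointee type.
 */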
#define xchg(ptr,x) \
({ \
	__typeof__(*(ptr)) __ret; \
	__ret = (__typeof__(*(ptr))) \
		__xchg((unsigned long)(x), (ptr), sizeof(*(ptr))); \
	__ret; \
})
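
/*
 * __cmpxchg - atomically compare the value at @ptr with @old and, if they
 * match, store @new. The value observed at @ptr is returned either way, so
 * callers detect success by comparing the return value with @old. The
 * exclusive-load/store loop only retries when the reservation is lost; no
 * barriers are implied, making this the unordered ("local") variant.
 */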
static inline unsigned long __cmpxchg(volatile void *ptr, unsigned long old,
				      unsigned long new, int size)
{
	unsigned long oldval = 0, res;

	switch (size) {
	case 1:
		do {
			asm volatile("// __cmpxchg1\n"
			"	ldxrb	%w1, %2\n"
			"	mov	%w0, #0\n"
			"	cmp	%w1, %w3\n"
			"	b.ne	1f\n"
			"	stxrb	%w0, %w4, %2\n"
			"1:\n"
				: "=&r" (res), "=&r" (oldval), "+Q" (*(u8 *)ptr)
				: "Ir" (old), "r" (new)
				: "cc");
		} while (res);
		break;
	case 2:
		do {
			asm volatile("// __cmpxchg2\n"
			"	ldxrh	%w1, %2\n"
			"	mov	%w0, #0\n"
			"	cmp	%w1, %w3\n"
			"	b.ne	1f\n"
			"	stxrh	%w0, %w4, %2\n"
			"1:\n"
				: "=&r" (res), "=&r" (oldval), "+Q" (*(u16 *)ptr)
				: "Ir" (old), "r" (new)
				: "cc");
		} while (res);
		break;
	case 4:
		do {
			asm volatile("// __cmpxchg4\n"
			"	ldxr	%w1, %2\n"
			"	mov	%w0, #0\n"
			"	cmp	%w1, %w3\n"
			"	b.ne	1f\n"
			"	stxr	%w0, %w4, %2\n"
			"1:\n"
				: "=&r" (res), "=&r" (oldval), "+Q" (*(u32 *)ptr)
				: "Ir" (old), "r" (new)
				: "cc");
		} while (res);
		break;
	case 8:
		do {
			asm volatile("// __cmpxchg8\n"
			"	ldxr	%1, %2\n"
			"	mov	%w0, #0\n"
			"	cmp	%1, %3\n"
			"	b.ne	1f\n"
			"	stxr	%w0, %4, %2\n"
			"1:\n"
				: "=&r" (res), "=&r" (oldval), "+Q" (*(u64 *)ptr)
				: "Ir" (old), "r" (new)
				: "cc");
		} while (res);
		break;
	default:
		BUILD_BUG();
	}

	return oldval;
}
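
/*
 * Double-word compare-and-exchange. ldxp/stxp operate on two consecutive
 * 64-bit words, so @ptr1 and @ptr2 must be adjacent (checked by the
 * VM_BUG_ON below). The two eor instructions and the orr build a combined
 * mismatch flag, and the store is only attempted when both old values
 * match; the function returns non-zero on success and zero on failure.
 * system_has_cmpxchg_double() is a constant 1 since ldxp/stxp are part of
 * the base AArch64 instruction set.
 */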
#define system_has_cmpxchg_double()	1

static inline int __cmpxchg_double(volatile void *ptr1, volatile void *ptr2,
		unsigned long old1, unsigned long old2,
		unsigned long new1, unsigned long new2, int size)
{
	unsigned long loop, lost;

	switch (size) {
	case 8:
		VM_BUG_ON((unsigned long *)ptr2 - (unsigned long *)ptr1 != 1);
		do {
			asm volatile("// __cmpxchg_double8\n"
			"	ldxp	%0, %1, %2\n"
			"	eor	%0, %0, %3\n"
			"	eor	%1, %1, %4\n"
			"	orr	%1, %0, %1\n"
			"	mov	%w0, #0\n"
			"	cbnz	%1, 1f\n"
			"	stxp	%w0, %5, %6, %2\n"
			"1:\n"
				: "=&r"(loop), "=&r"(lost), "+Q" (*(u64 *)ptr1)
				: "r" (old1), "r"(old2), "r"(new1), "r"(new2));
		} while (loop);
		break;
	default:
		BUILD_BUG();
	}

	return !lost;
}
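
/*
 * The *_mb variants bracket the relaxed primitives above with smp_mb() so
 * the operation is fully ordered with respect to surrounding memory
 * accesses on other CPUs.
 */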
static inline int __cmpxchg_double_mb(volatile void *ptr1, volatile void *ptr2,
		unsigned long old1, unsigned long old2,
		unsigned long new1, unsigned long new2, int size)
{
	int ret;

	smp_mb();
	ret = __cmpxchg_double(ptr1, ptr2, old1, old2, new1, new2, size);
	smp_mb();

	return ret;
}

static inline unsigned long __cmpxchg_mb(volatile void *ptr, unsigned long old,
					 unsigned long new, int size)
{
	unsigned long ret;

	smp_mb();
	ret = __cmpxchg(ptr, old, new, size);
	smp_mb();

	return ret;
}
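
/*
 * cmpxchg() is the fully ordered interface (built on __cmpxchg_mb), while
 * cmpxchg_local() maps to the barrier-free __cmpxchg() and therefore only
 * guarantees atomicity, not ordering against accesses from other CPUs.
 *
 * Illustrative caller pattern (the 'counter' variable is hypothetical and
 * not part of this header): retry until the observed value was replaced.
 *
 *	unsigned long old, new;
 *	do {
 *		old = counter;
 *		new = old + 1;
 *	} while (cmpxchg(&counter, old, new) != old);
 */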
#define cmpxchg(ptr, o, n) \
({ \
	__typeof__(*(ptr)) __ret; \
	__ret = (__typeof__(*(ptr))) \
		__cmpxchg_mb((ptr), (unsigned long)(o), (unsigned long)(n), \
			     sizeof(*(ptr))); \
	__ret; \
})

#define cmpxchg_local(ptr, o, n) \
({ \
	__typeof__(*(ptr)) __ret; \
	__ret = (__typeof__(*(ptr))) \
		__cmpxchg((ptr), (unsigned long)(o), \
			  (unsigned long)(n), sizeof(*(ptr))); \
	__ret; \
})
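
/*
 * cmpxchg_double() / cmpxchg_double_local(): the same ordered/unordered
 * split for the two-word primitive. The operand size is taken from
 * *(ptr1); both return 1 on success and 0 on failure.
 */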
#define cmpxchg_double(ptr1, ptr2, o1, o2, n1, n2) \
({\
	int __ret;\
	__ret = __cmpxchg_double_mb((ptr1), (ptr2), (unsigned long)(o1), \
			(unsigned long)(o2), (unsigned long)(n1), \
			(unsigned long)(n2), sizeof(*(ptr1)));\
	__ret; \
})

#define cmpxchg_double_local(ptr1, ptr2, o1, o2, n1, n2) \
({\
	int __ret;\
	__ret = __cmpxchg_double((ptr1), (ptr2), (unsigned long)(o1), \
			(unsigned long)(o2), (unsigned long)(n1), \
			(unsigned long)(n2), sizeof(*(ptr1)));\
	__ret; \
})
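
/*
 * this_cpu_cmpxchg_*() operate on per-CPU data: preemption is disabled
 * around a barrier-free cmpxchg_local() on this CPU's instance of the
 * variable, so no cross-CPU ordering is required.
 */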
#define _protect_cmpxchg_local(pcp, o, n) \
({ \
	typeof(*raw_cpu_ptr(&(pcp))) __ret; \
	preempt_disable(); \
	__ret = cmpxchg_local(raw_cpu_ptr(&(pcp)), o, n); \
	preempt_enable(); \
	__ret; \
})

#define this_cpu_cmpxchg_1(ptr, o, n) _protect_cmpxchg_local(ptr, o, n)
#define this_cpu_cmpxchg_2(ptr, o, n) _protect_cmpxchg_local(ptr, o, n)
#define this_cpu_cmpxchg_4(ptr, o, n) _protect_cmpxchg_local(ptr, o, n)
#define this_cpu_cmpxchg_8(ptr, o, n) _protect_cmpxchg_local(ptr, o, n)
#define this_cpu_cmpxchg_double_8(ptr1, ptr2, o1, o2, n1, n2) \
({ \
	int __ret; \
	preempt_disable(); \
	__ret = cmpxchg_double_local(raw_cpu_ptr(&(ptr1)), \
				     raw_cpu_ptr(&(ptr2)), \
				     o1, o2, n1, n2); \
	preempt_enable(); \
	__ret; \
})
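
/*
 * cmpxchg() already handles 64-bit operands on arm64, so the cmpxchg64*()
 * interfaces simply alias the generic ones. Note that cmpxchg64_relaxed()
 * maps to cmpxchg_local(), i.e. the variant without barriers.
 */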
#define cmpxchg64(ptr,o,n)		cmpxchg((ptr),(o),(n))
#define cmpxchg64_local(ptr,o,n)	cmpxchg_local((ptr),(o),(n))
#define cmpxchg64_relaxed(ptr,o,n)	cmpxchg_local((ptr),(o),(n))

#endif	/* __ASM_CMPXCHG_H */