/* SPDX-License-Identifier: GPL-2.0 */
#ifndef _ASM_M32R_CMPXCHG_H
#define _ASM_M32R_CMPXCHG_H

/*
 * M32R version:
 * Copyright (C) 2001, 2002  Hitoshi Yamamoto
 * Copyright (C) 2004  Hirokazu Takata <takata at linux-m32r.org>
 */

#include <linux/irqflags.h>
#include <asm/assembler.h>
#include <asm/dcache_clear.h>

extern void __xchg_called_with_bad_pointer(void);
/*
 * Atomically exchange the 1-, 2- or 4-byte object at *ptr with x and
 * return the previous value.
 *
 * On UP builds (!CONFIG_SMP) a plain load/store pair is made atomic
 * with respect to this CPU by disabling interrupts around it.  On SMP
 * only the 4-byte case is provided, using the m32r lock/unlock
 * instruction pair.  An unsupported size calls
 * __xchg_called_with_bad_pointer(), which is declared but never
 * defined, so an invalid xchg() fails at link time.
 */
static __always_inline unsigned long
__xchg(unsigned long x, volatile void *ptr, int size)
{
	unsigned long flags;
	unsigned long tmp = 0;

	/* IRQs off: the load/store pair below must not be interrupted. */
	local_irq_save(flags);

	switch (size) {
#ifndef CONFIG_SMP
	case 1:
		__asm__ __volatile__ (
			"ldb %0, @%2 \n\t"
			"stb %1, @%2 \n\t"
			: "=&r" (tmp) : "r" (x), "r" (ptr) : "memory");
		break;
	case 2:
		__asm__ __volatile__ (
			"ldh %0, @%2 \n\t"
			"sth %1, @%2 \n\t"
			: "=&r" (tmp) : "r" (x), "r" (ptr) : "memory");
		break;
	case 4:
		__asm__ __volatile__ (
			"ld %0, @%2 \n\t"
			"st %1, @%2 \n\t"
			: "=&r" (tmp) : "r" (x), "r" (ptr) : "memory");
		break;
#else  /* CONFIG_SMP */
	case 4:
		/* lock/unlock: locked load + store for SMP atomicity. */
		__asm__ __volatile__ (
			DCACHE_CLEAR("%0", "r4", "%2")
			"lock %0, @%2; \n\t"
			"unlock %1, @%2; \n\t"
			: "=&r" (tmp) : "r" (x), "r" (ptr)
			: "memory"
#ifdef CONFIG_CHIP_M32700_TS1
			/* presumably DCACHE_CLEAR uses r4 on TS1 silicon,
			   hence the extra clobber — see dcache_clear.h */
			, "r4"
#endif /* CONFIG_CHIP_M32700_TS1 */
		);
		break;
#endif /* CONFIG_SMP */
	default:
		__xchg_called_with_bad_pointer();
	}

	local_irq_restore(flags);

	return (tmp);
}
/*
 * xchg(ptr, x): atomically swap x into *ptr and return the old value,
 * cast back to the pointed-to type.
 */
#define xchg(ptr, x) ({ \
	((__typeof__(*(ptr)))__xchg((unsigned long)(x), (ptr), \
			sizeof(*(ptr)))); \
})
/*
 * Exchange *ptr with x, atomic only with respect to the current CPU
 * (no SMP bus locking): a plain load/store pair executed with local
 * interrupts disabled.  Supports 1-, 2- and 4-byte objects; any other
 * size resolves to the undefined __xchg_called_with_bad_pointer() and
 * fails at link time.
 */
static __always_inline unsigned long
__xchg_local(unsigned long x, volatile void *ptr, int size)
{
	unsigned long flags;
	unsigned long tmp = 0;

	local_irq_save(flags);

	switch (size) {
	case 1:
		__asm__ __volatile__ (
			"ldb %0, @%2 \n\t"
			"stb %1, @%2 \n\t"
			: "=&r" (tmp) : "r" (x), "r" (ptr) : "memory");
		break;
	case 2:
		__asm__ __volatile__ (
			"ldh %0, @%2 \n\t"
			"sth %1, @%2 \n\t"
			: "=&r" (tmp) : "r" (x), "r" (ptr) : "memory");
		break;
	case 4:
		__asm__ __volatile__ (
			"ld %0, @%2 \n\t"
			"st %1, @%2 \n\t"
			: "=&r" (tmp) : "r" (x), "r" (ptr) : "memory");
		break;
	default:
		__xchg_called_with_bad_pointer();
	}

	local_irq_restore(flags);

	return (tmp);
}
/* CPU-local xchg(): see __xchg_local(); no SMP guarantees. */
#define xchg_local(ptr, x) \
	((__typeof__(*(ptr)))__xchg_local((unsigned long)(x), (ptr), \
			sizeof(*(ptr))))
/*
 * Atomic 32-bit compare-and-exchange.
 *
 * Loads *p with a locked load; if it equals 'old', stores 'new',
 * otherwise stores the value just read back again — both paths end in
 * an unlock-store (presumably required to release the bus lock taken
 * by M32R_LOCK; confirm against the chip manual).  Returns the value
 * that was read, so the caller compares it with 'old' to tell whether
 * the swap took place.
 */
static inline unsigned long
__cmpxchg_u32(volatile unsigned int *p, unsigned int old, unsigned int new)
{
	unsigned long flags;
	unsigned int retval;

	local_irq_save(flags);
	__asm__ __volatile__ (
			DCACHE_CLEAR("%0", "r4", "%1")
			M32R_LOCK" %0, @%1; \n"
		" bne %0, %2, 1f; \n"
			M32R_UNLOCK" %3, @%1; \n"
		" bra 2f; \n"
		" .fillinsn \n"
		"1:"
			M32R_UNLOCK" %0, @%1; \n"
		" .fillinsn \n"
		"2:"
			: "=&r" (retval)
			: "r" (p), "r" (old), "r" (new)
			: "cbit", "memory"	/* bne clobbers the condition bit */
#ifdef CONFIG_CHIP_M32700_TS1
			/* r4 is scratch for DCACHE_CLEAR on TS1 silicon */
			, "r4"
#endif /* CONFIG_CHIP_M32700_TS1 */
		);
	local_irq_restore(flags);

	return retval;
}
/*
 * 32-bit compare-and-exchange, atomic only with respect to the current
 * CPU: same branch structure as __cmpxchg_u32() but with plain ld/st
 * instead of the locked variants, relying solely on local_irq_save()
 * for atomicity.  Returns the value read from *p.
 */
static inline unsigned long
__cmpxchg_local_u32(volatile unsigned int *p, unsigned int old,
			unsigned int new)
{
	unsigned long flags;
	unsigned int retval;

	local_irq_save(flags);
	__asm__ __volatile__ (
			DCACHE_CLEAR("%0", "r4", "%1")
			"ld %0, @%1; \n"
		" bne %0, %2, 1f; \n"
			"st %3, @%1; \n"
		" bra 2f; \n"
		" .fillinsn \n"
		"1:"
			"st %0, @%1; \n"	/* no match: store old value back */
		" .fillinsn \n"
		"2:"
			: "=&r" (retval)
			: "r" (p), "r" (old), "r" (new)
			: "cbit", "memory"	/* bne clobbers the condition bit */
#ifdef CONFIG_CHIP_M32700_TS1
			/* r4 is scratch for DCACHE_CLEAR on TS1 silicon */
			, "r4"
#endif /* CONFIG_CHIP_M32700_TS1 */
		);
	local_irq_restore(flags);

	return retval;
}
/*
 * This function doesn't exist, so you'll get a linker error
 * if something tries to do an invalid cmpxchg().
 */
extern void __cmpxchg_called_with_bad_pointer(void);
  155. static inline unsigned long
  156. __cmpxchg(volatile void *ptr, unsigned long old, unsigned long new, int size)
  157. {
  158. switch (size) {
  159. case 4:
  160. return __cmpxchg_u32(ptr, old, new);
  161. #if 0 /* we don't have __cmpxchg_u64 */
  162. case 8:
  163. return __cmpxchg_u64(ptr, old, new);
  164. #endif /* 0 */
  165. }
  166. __cmpxchg_called_with_bad_pointer();
  167. return old;
  168. }
/*
 * cmpxchg(ptr, o, n): if *ptr == o, atomically set *ptr = n.  Returns
 * the previous *ptr value, cast back to the pointed-to type; the swap
 * happened iff the return value equals o.
 */
#define cmpxchg(ptr, o, n) ({ \
	((__typeof__(*(ptr))) \
		__cmpxchg((ptr), (unsigned long)(o), \
			(unsigned long)(n), \
			sizeof(*(ptr)))); \
})
#include <asm-generic/cmpxchg-local.h>
  176. static inline unsigned long __cmpxchg_local(volatile void *ptr,
  177. unsigned long old,
  178. unsigned long new, int size)
  179. {
  180. switch (size) {
  181. case 4:
  182. return __cmpxchg_local_u32(ptr, old, new);
  183. default:
  184. return __cmpxchg_local_generic(ptr, old, new, size);
  185. }
  186. return old;
  187. }
/*
 * cmpxchg_local and cmpxchg64_local are atomic wrt current CPU. Always make
 * them available.
 */
#define cmpxchg_local(ptr, o, n) \
	((__typeof__(*(ptr)))__cmpxchg_local((ptr), (unsigned long)(o), \
			(unsigned long)(n), sizeof(*(ptr))))

/* 64-bit local variant: only the generic implementation exists here. */
#define cmpxchg64_local(ptr, o, n) __cmpxchg64_local_generic((ptr), (o), (n))
#endif /* _ASM_M32R_CMPXCHG_H */