/*
 * Based on arch/arm/include/asm/barrier.h
 *
 * Copyright (C) 2012 ARM Ltd.
 *
 * This program is free software; you can redistribute it and/or modify
 * it under the terms of the GNU General Public License version 2 as
 * published by the Free Software Foundation.
 *
 * This program is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
 * GNU General Public License for more details.
 *
 * You should have received a copy of the GNU General Public License
 * along with this program.  If not, see <http://www.gnu.org/licenses/>.
 */
#ifndef __ASM_BARRIER_H
#define __ASM_BARRIER_H

#ifndef __ASSEMBLY__
#define __nops(n)	".rept " #n "\nnop\n.endr\n"
#define nops(n)		asm volatile(__nops(n))

#define sev()		asm volatile("sev" : : : "memory")
#define wfe()		asm volatile("wfe" : : : "memory")
#define wfi()		asm volatile("wfi" : : : "memory")

#define isb()		asm volatile("isb" : : : "memory")
#define dmb(opt)	asm volatile("dmb " #opt : : : "memory")
#define dsb(opt)	asm volatile("dsb " #opt : : : "memory")
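
/*
 * Note: PSB CSYNC (profiling synchronization barrier, ARMv8.2 SPE) and
 * CSDB (consumption of speculative data barrier) are emitted below via
 * their HINT-space encodings (#17 and #20) rather than by mnemonic, so
 * that assemblers which predate these instructions still accept them;
 * CPUs without the features execute the hints as NOPs.
 */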
#define psb_csync()	asm volatile("hint #17" : : : "memory")
#define csdb()		asm volatile("hint #20" : : : "memory")

#define mb()		dsb(sy)
#define rmb()		dsb(ld)
#define wmb()		dsb(st)

#define dma_rmb()	dmb(oshld)
#define dma_wmb()	dmb(oshst)
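
/*
 * The dma_*mb() variants use the outer-shareable domain because DMA
 * masters may sit outside the inner-shareable domain that dmb(ish)
 * covers.  A minimal usage sketch for dma_wmb() (the descriptor fields
 * and DEVICE_OWN flag are hypothetical, not part of this header):
 *
 *	desc->addr = buf_dma_addr;
 *	desc->len  = buf_len;
 *	dma_wmb();			// contents visible before ownership flips
 *	desc->status = DEVICE_OWN;	// device may now consume the descriptor
 */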

/*
 * Generate a mask for array_index_nospec() that is ~0UL when 0 <= idx < sz
 * and 0 otherwise.
 */
#define array_index_mask_nospec array_index_mask_nospec
static inline unsigned long array_index_mask_nospec(unsigned long idx,
						    unsigned long sz)
{
	unsigned long mask;

	asm volatile(
	"	cmp	%1, %2\n"
	"	sbc	%0, xzr, xzr\n"
	: "=r" (mask)
	: "r" (idx), "Ir" (sz)
	: "cc");

	csdb();
	return mask;
}
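
/*
 * The cmp sets the carry flag iff idx >= sz, so "sbc xzr, xzr" yields
 * ~0UL for an in-bounds index and 0 otherwise; csdb() then keeps the
 * mask from being consumed speculatively before it resolves.  A usage
 * sketch, mirroring what array_index_nospec() in <linux/nospec.h> does:
 *
 *	if (idx < sz) {
 *		idx &= array_index_mask_nospec(idx, sz);
 *		val = array[idx];	// no speculative out-of-bounds read
 *	}
 */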

#define __smp_mb()	dmb(ish)
#define __smp_rmb()	dmb(ishld)
#define __smp_wmb()	dmb(ishst)

#define __smp_store_release(p, v)					\
do {									\
	union { typeof(*p) __val; char __c[1]; } __u =			\
		{ .__val = (__force typeof(*p)) (v) };			\
	compiletime_assert_atomic_type(*p);				\
	switch (sizeof(*p)) {						\
	case 1:								\
		asm volatile ("stlrb %w1, %0"				\
				: "=Q" (*p)				\
				: "r" (*(__u8 *)__u.__c)		\
				: "memory");				\
		break;							\
	case 2:								\
		asm volatile ("stlrh %w1, %0"				\
				: "=Q" (*p)				\
				: "r" (*(__u16 *)__u.__c)		\
				: "memory");				\
		break;							\
	case 4:								\
		asm volatile ("stlr %w1, %0"				\
				: "=Q" (*p)				\
				: "r" (*(__u32 *)__u.__c)		\
				: "memory");				\
		break;							\
	case 8:								\
		asm volatile ("stlr %1, %0"				\
				: "=Q" (*p)				\
				: "r" (*(__u64 *)__u.__c)		\
				: "memory");				\
		break;							\
	}								\
} while (0)
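
/*
 * STLR is a store with release semantics: no load or store that precedes
 * it in program order can be reordered past it, so no separate dmb is
 * required.  The union exists so that (v) may have any type of the right
 * size (including pointers); its bytes are re-read through the matching
 * __uN type to form the asm operand.
 */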
#define __smp_load_acquire(p)						\
({									\
	union { typeof(*p) __val; char __c[1]; } __u;			\
	compiletime_assert_atomic_type(*p);				\
	switch (sizeof(*p)) {						\
	case 1:								\
		asm volatile ("ldarb %w0, %1"				\
			: "=r" (*(__u8 *)__u.__c)			\
			: "Q" (*p) : "memory");				\
		break;							\
	case 2:								\
		asm volatile ("ldarh %w0, %1"				\
			: "=r" (*(__u16 *)__u.__c)			\
			: "Q" (*p) : "memory");				\
		break;							\
	case 4:								\
		asm volatile ("ldar %w0, %1"				\
			: "=r" (*(__u32 *)__u.__c)			\
			: "Q" (*p) : "memory");				\
		break;							\
	case 8:								\
		asm volatile ("ldar %0, %1"				\
			: "=r" (*(__u64 *)__u.__c)			\
			: "Q" (*p) : "memory");				\
		break;							\
	}								\
	__u.__val;							\
})
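
/*
 * LDAR is the matching load-acquire: no access that follows it in
 * program order can be reordered before it.  A minimal message-passing
 * sketch pairing the two (data and ready are hypothetical variables):
 *
 *	// producer (CPU 0)
 *	WRITE_ONCE(data, 42);
 *	smp_store_release(&ready, 1);
 *
 *	// consumer (CPU 1)
 *	while (!smp_load_acquire(&ready))
 *		cpu_relax();
 *	r = READ_ONCE(data);	// guaranteed to observe 42
 */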
#define smp_cond_load_relaxed(ptr, cond_expr)				\
({									\
	typeof(ptr) __PTR = (ptr);					\
	typeof(*ptr) VAL;						\
	for (;;) {							\
		VAL = READ_ONCE(*__PTR);				\
		if (cond_expr)						\
			break;						\
		__cmpwait_relaxed(__PTR, VAL);				\
	}								\
	VAL;								\
})
#define smp_cond_load_acquire(ptr, cond_expr)				\
({									\
	typeof(ptr) __PTR = (ptr);					\
	typeof(*ptr) VAL;						\
	for (;;) {							\
		VAL = smp_load_acquire(__PTR);				\
		if (cond_expr)						\
			break;						\
		__cmpwait_relaxed(__PTR, VAL);				\
	}								\
	VAL;								\
})
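
/*
 * Rather than busy-polling, __cmpwait_relaxed() (from <asm/cmpxchg.h>)
 * re-reads *__PTR with a load-exclusive and executes wfe, so the CPU can
 * sleep until the exclusive monitor is cleared by another agent writing
 * the location.  The condition expression references the current value
 * through VAL, e.g. (flag is a hypothetical variable):
 *
 *	smp_cond_load_acquire(&flag, VAL != 0);	// wait for flag to become nonzero
 */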

#include <asm-generic/barrier.h>

#endif	/* __ASSEMBLY__ */

#endif	/* __ASM_BARRIER_H */