atomic.h

/*
 * Copyright (C) 2014 Stefan Kristiansson <stefan.kristiansson@saunalahti.fi>
 *
 * This file is licensed under the terms of the GNU General Public License
 * version 2. This program is licensed "as is" without any warranty of any
 * kind, whether express or implied.
 */
#ifndef __ASM_OPENRISC_ATOMIC_H
#define __ASM_OPENRISC_ATOMIC_H

#include <linux/types.h>
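
/*
 * Note: the op macros below all follow the same load-linked/store-conditional
 * pattern: l.lwa loads v->counter and sets a reservation, l.swa stores the
 * updated value only if that reservation is still valid (setting the flag on
 * success), and l.bnf ("branch if no flag") loops back to retry a failed
 * store; the trailing l.nop fills the branch delay slot.
 */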

/* Atomically perform op with v->counter and i */
#define ATOMIC_OP(op) \
static inline void atomic_##op(int i, atomic_t *v) \
{ \
        int tmp; \
 \
        __asm__ __volatile__( \
                "1: l.lwa %0,0(%1) \n" \
                " l." #op " %0,%0,%2 \n" \
                " l.swa 0(%1),%0 \n" \
                " l.bnf 1b \n" \
                " l.nop \n" \
                : "=&r"(tmp) \
                : "r"(&v->counter), "r"(i) \
                : "cc", "memory"); \
}

/* Atomically perform op with v->counter and i, return the result */
#define ATOMIC_OP_RETURN(op) \
static inline int atomic_##op##_return(int i, atomic_t *v) \
{ \
        int tmp; \
 \
        __asm__ __volatile__( \
                "1: l.lwa %0,0(%1) \n" \
                " l." #op " %0,%0,%2 \n" \
                " l.swa 0(%1),%0 \n" \
                " l.bnf 1b \n" \
                " l.nop \n" \
                : "=&r"(tmp) \
                : "r"(&v->counter), "r"(i) \
                : "cc", "memory"); \
 \
        return tmp; \
}

/* Atomically perform op with v->counter and i, return orig v->counter */
#define ATOMIC_FETCH_OP(op) \
static inline int atomic_fetch_##op(int i, atomic_t *v) \
{ \
        int tmp, old; \
 \
        __asm__ __volatile__( \
                "1: l.lwa %0,0(%2) \n" \
                " l." #op " %1,%0,%3 \n" \
                " l.swa 0(%2),%1 \n" \
                " l.bnf 1b \n" \
                " l.nop \n" \
                : "=&r"(old), "=&r"(tmp) \
                : "r"(&v->counter), "r"(i) \
                : "cc", "memory"); \
 \
        return old; \
}

ATOMIC_OP_RETURN(add)
ATOMIC_OP_RETURN(sub)

ATOMIC_FETCH_OP(add)
ATOMIC_FETCH_OP(sub)
ATOMIC_FETCH_OP(and)
ATOMIC_FETCH_OP(or)
ATOMIC_FETCH_OP(xor)

ATOMIC_OP(and)
ATOMIC_OP(or)
ATOMIC_OP(xor)

#undef ATOMIC_FETCH_OP
#undef ATOMIC_OP_RETURN
#undef ATOMIC_OP
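
/*
 * The self-referential defines below tell <asm-generic/atomic.h> which
 * operations this file already provides, so the generic header only fills in
 * the missing ones (for instance, plain atomic_add()/atomic_sub() are derived
 * there from the *_return variants defined above).
 */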
#define atomic_add_return atomic_add_return
#define atomic_sub_return atomic_sub_return
#define atomic_fetch_add atomic_fetch_add
#define atomic_fetch_sub atomic_fetch_sub
#define atomic_fetch_and atomic_fetch_and
#define atomic_fetch_or atomic_fetch_or
#define atomic_fetch_xor atomic_fetch_xor
#define atomic_and atomic_and
#define atomic_or atomic_or
#define atomic_xor atomic_xor
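
/*
 * Illustrative sketch (not part of this header) of how the operations above
 * behave; ATOMIC_INIT() comes from the generic atomic headers:
 *
 *     atomic_t v = ATOMIC_INIT(0);
 *
 *     atomic_add_return(5, &v);   v == 5, returns the new value (5)
 *     atomic_fetch_add(3, &v);    v == 8, returns the old value (5)
 *     atomic_and(6, &v);          v == 0 (8 & 6), returns nothing
 */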

/*
 * Atomically add a to v->counter as long as v is not already u.
 * Returns the original value at v->counter.
 *
 * This is often used through atomic_inc_not_zero()
 */
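/*
 * Flow: l.sfeq sets the flag when the loaded value equals u, and l.bf then
 * skips the store by branching to label 2 (the l.add in its delay slot only
 * writes tmp, so the returned value is unchanged).  With a = 1 and u = 0 this
 * gives atomic_inc_not_zero() semantics, since the generic atomic_add_unless()
 * helper simply checks that the value returned here differs from u.
 */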
static inline int atomic_fetch_add_unless(atomic_t *v, int a, int u)
{
        int old, tmp;

        __asm__ __volatile__(
                "1: l.lwa %0, 0(%2) \n"
                " l.sfeq %0, %4 \n"
                " l.bf 2f \n"
                " l.add %1, %0, %3 \n"
                " l.swa 0(%2), %1 \n"
                " l.bnf 1b \n"
                " l.nop \n"
                "2: \n"
                : "=&r"(old), "=&r" (tmp)
                : "r"(&v->counter), "r"(a), "r"(u)
                : "cc", "memory");

        return old;
}
#define atomic_fetch_add_unless atomic_fetch_add_unless

#include <asm-generic/atomic.h>

#endif /* __ASM_OPENRISC_ATOMIC_H */