atomic.h

#ifndef __ARCH_H8300_ATOMIC__
#define __ARCH_H8300_ATOMIC__

#include <linux/types.h>
#include <asm/cmpxchg.h>
/*
 * Atomic operations that C can't guarantee us.  Useful for
 * resource counting etc.
 */
#define ATOMIC_INIT(i)	{ (i) }

#define atomic_read(v)		ACCESS_ONCE((v)->counter)
#define atomic_set(v, i)	(((v)->counter) = (i))
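/*
 * A minimal usage sketch (illustrative only; "refs" and do_something()
 * are hypothetical, not part of this header):
 *
 *	static atomic_t refs = ATOMIC_INIT(1);
 *
 *	atomic_set(&refs, 2);
 *	if (atomic_read(&refs) == 2)
 *		do_something();
 */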
#include <linux/kernel.h>
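/*
 * The H8/300 port is uniprocessor-only, so the helpers below get their
 * atomicity by disabling interrupts around a plain read-modify-write
 * sequence rather than by using special instructions.
 */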
static inline int atomic_add_return(int i, atomic_t *v)
{
	h8300flags flags;
	int ret;

	flags = arch_local_irq_save();
	ret = v->counter += i;
	arch_local_irq_restore(flags);
	return ret;
}

#define atomic_add(i, v) atomic_add_return(i, v)
#define atomic_add_negative(a, v)	(atomic_add_return((a), (v)) < 0)

static inline int atomic_sub_return(int i, atomic_t *v)
{
	h8300flags flags;
	int ret;

	flags = arch_local_irq_save();
	ret = v->counter -= i;
	arch_local_irq_restore(flags);
	return ret;
}

#define atomic_sub(i, v) atomic_sub_return(i, v)
#define atomic_sub_and_test(i, v) (atomic_sub_return(i, v) == 0)

static inline int atomic_inc_return(atomic_t *v)
{
	h8300flags flags;
	int ret;

	flags = arch_local_irq_save();
	v->counter++;
	ret = v->counter;
	arch_local_irq_restore(flags);
	return ret;
}

#define atomic_inc(v) atomic_inc_return(v)
/**
 * atomic_inc_and_test - increment and test
 * @v: pointer of type atomic_t
 *
 * Atomically increments @v by 1
 * and returns true if the result is zero, or false for all
 * other cases.
 */
#define atomic_inc_and_test(v) (atomic_inc_return(v) == 0)

static inline int atomic_dec_return(atomic_t *v)
{
	h8300flags flags;
	int ret;

	flags = arch_local_irq_save();
	--v->counter;
	ret = v->counter;
	arch_local_irq_restore(flags);
	return ret;
}

#define atomic_dec(v) atomic_dec_return(v)

static inline int atomic_dec_and_test(atomic_t *v)
{
	h8300flags flags;
	int ret;

	flags = arch_local_irq_save();
	--v->counter;
	ret = v->counter;
	arch_local_irq_restore(flags);
	return ret == 0;
}
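/*
 * Sketch of the classic refcounting pattern these helpers support
 * ("obj", "users" and free_object() are hypothetical):
 *
 *	atomic_inc(&obj->users);		// take a reference
 *	...
 *	if (atomic_dec_and_test(&obj->users))	// drop it; true when it hits 0
 *		free_object(obj);
 */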
static inline int atomic_cmpxchg(atomic_t *v, int old, int new)
{
	int ret;
	h8300flags flags;

	flags = arch_local_irq_save();
	ret = v->counter;
	if (likely(ret == old))
		v->counter = new;	/* store only if the old value matched */
	arch_local_irq_restore(flags);
	return ret;			/* always return the value that was seen */
}
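/*
 * Callers typically loop until the value they computed from is the one
 * actually replaced, e.g. a hypothetical saturating increment of an
 * atomic_t "counter":
 *
 *	int old, new;
 *
 *	do {
 *		old = atomic_read(&counter);
 *		new = old < INT_MAX ? old + 1 : old;
 *	} while (atomic_cmpxchg(&counter, old, new) != old);
 */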
/* Add @a to @v, unless @v was @u; returns the old value of @v. */
static inline int __atomic_add_unless(atomic_t *v, int a, int u)
{
	int ret;
	h8300flags flags;

	flags = arch_local_irq_save();
	ret = v->counter;
	if (ret != u)
		v->counter += a;
	arch_local_irq_restore(flags);
	return ret;
}
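/*
 * This is the backend for the generic atomic_add_unless() /
 * atomic_inc_not_zero() helpers; e.g. "succeeded" below is true only
 * if "counter" was not already zero (hypothetical example):
 *
 *	int succeeded = __atomic_add_unless(&counter, 1, 0) != 0;
 */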
static inline void atomic_clear_mask(unsigned long mask, unsigned long *v)
{
	unsigned char ccr;
	unsigned long tmp;

	__asm__ __volatile__("stc ccr,%w3\n\t"		/* save CCR */
			     "orc #0x80,ccr\n\t"	/* set I bit: disable interrupts */
			     "mov.l %0,%1\n\t"
			     "and.l %2,%1\n\t"		/* tmp = *v & ~mask */
			     "mov.l %1,%0\n\t"
			     "ldc %w3,ccr"		/* restore CCR: re-enable interrupts */
			     : "=m"(*v), "=r"(tmp)
			     : "g"(~(mask)), "r"(ccr));
}
static inline void atomic_set_mask(unsigned long mask, unsigned long *v)
{
	unsigned char ccr;
	unsigned long tmp;

	__asm__ __volatile__("stc ccr,%w3\n\t"		/* save CCR */
			     "orc #0x80,ccr\n\t"	/* set I bit: disable interrupts */
			     "mov.l %0,%1\n\t"
			     "or.l %2,%1\n\t"		/* tmp = *v | mask */
			     "mov.l %1,%0\n\t"
			     "ldc %w3,ccr"		/* restore CCR: re-enable interrupts */
			     : "=m"(*v), "=r"(tmp)
			     : "g"(mask), "r"(ccr));	/* OR in mask itself, not ~mask */
}
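/*
 * Note that the mask helpers operate on a plain unsigned long, not on
 * atomic_t.  Illustrative only ("pending" is hypothetical; BIT() comes
 * from <linux/bitops.h>):
 *
 *	static unsigned long pending;
 *
 *	atomic_set_mask(BIT(3), &pending);	// atomically set bit 3
 *	atomic_clear_mask(BIT(3), &pending);	// atomically clear it
 */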
/* Atomic operations are already serializing */
#define smp_mb__before_atomic_dec()	barrier()
#define smp_mb__after_atomic_dec()	barrier()
#define smp_mb__before_atomic_inc()	barrier()
#define smp_mb__after_atomic_inc()	barrier()
#endif /* __ARCH_H8300_ATOMIC__ */