atomic_64.h

/* atomic.h: Thankfully the V9 is at least reasonable for this
 *           stuff.
 *
 * Copyright (C) 1996, 1997, 2000, 2012 David S. Miller (davem@redhat.com)
 */

#ifndef __ARCH_SPARC64_ATOMIC__
#define __ARCH_SPARC64_ATOMIC__

#include <linux/types.h>
#include <asm/cmpxchg.h>
#include <asm/barrier.h>

#define ATOMIC_INIT(i)		{ (i) }
#define ATOMIC64_INIT(i)	{ (i) }

#define atomic_read(v)		(*(volatile int *)&(v)->counter)
#define atomic64_read(v)	(*(volatile long *)&(v)->counter)

#define atomic_set(v, i)	(((v)->counter) = i)
#define atomic64_set(v, i)	(((v)->counter) = i)

/* These operations are implemented out of line; add/sub return void,
 * while the *_ret() variants return the new value of the counter.
 */
void atomic_add(int, atomic_t *);
void atomic64_add(long, atomic64_t *);
void atomic_sub(int, atomic_t *);
void atomic64_sub(long, atomic64_t *);

int atomic_add_ret(int, atomic_t *);
long atomic64_add_ret(long, atomic64_t *);
int atomic_sub_ret(int, atomic_t *);
long atomic64_sub_ret(long, atomic64_t *);
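/* Illustrative sketch only -- not the real implementation, which is
 * out of line.  The *_ret() routines behave as if they were a
 * compare-and-swap retry loop; the name below is hypothetical:
 */
static inline int __atomic_add_ret_sketch(int i, atomic_t *v)
{
	int old, sum;

	do {
		old = atomic_read(v);
		sum = old + i;
	} while (cmpxchg(&v->counter, old, sum) != old);

	return sum;	/* value of the counter after the addition */
}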
#define atomic_dec_return(v)		atomic_sub_ret(1, v)
#define atomic64_dec_return(v)		atomic64_sub_ret(1, v)

#define atomic_inc_return(v)		atomic_add_ret(1, v)
#define atomic64_inc_return(v)		atomic64_add_ret(1, v)

#define atomic_sub_return(i, v)		atomic_sub_ret(i, v)
#define atomic64_sub_return(i, v)	atomic64_sub_ret(i, v)

#define atomic_add_return(i, v)		atomic_add_ret(i, v)
#define atomic64_add_return(i, v)	atomic64_add_ret(i, v)
/*
 * atomic_inc_and_test - increment and test
 * @v: pointer of type atomic_t
 *
 * Atomically increments @v by 1 and returns true if the result is
 * zero, or false for all other cases.
 */
#define atomic_inc_and_test(v)		(atomic_inc_return(v) == 0)
#define atomic64_inc_and_test(v)	(atomic64_inc_return(v) == 0)

#define atomic_sub_and_test(i, v)	(atomic_sub_ret(i, v) == 0)
#define atomic64_sub_and_test(i, v)	(atomic64_sub_ret(i, v) == 0)

#define atomic_dec_and_test(v)		(atomic_sub_ret(1, v) == 0)
#define atomic64_dec_and_test(v)	(atomic64_sub_ret(1, v) == 0)
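/* Typical (illustrative) use of the *_and_test() operations is
 * reference counting: the modification and the zero test happen as a
 * single atomic step, so exactly one CPU observes the drop to zero.
 * free_object() below is a hypothetical helper, not part of this API:
 *
 *	if (atomic_dec_and_test(&obj->refcnt))
 *		free_object(obj);
 */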
#define atomic_inc(v)			atomic_add(1, v)
#define atomic64_inc(v)			atomic64_add(1, v)

#define atomic_dec(v)			atomic_sub(1, v)
#define atomic64_dec(v)			atomic64_sub(1, v)

#define atomic_add_negative(i, v)	(atomic_add_ret(i, v) < 0)
#define atomic64_add_negative(i, v)	(atomic64_add_ret(i, v) < 0)

#define atomic_cmpxchg(v, o, n)		(cmpxchg(&((v)->counter), (o), (n)))
#define atomic_xchg(v, new)		(xchg(&((v)->counter), new))
/* Atomically add @a to @v, so long as @v was not already @u.
 * Returns the old value of @v.
 */
static inline int __atomic_add_unless(atomic_t *v, int a, int u)
{
	int c, old;

	c = atomic_read(v);
	for (;;) {
		if (unlikely(c == (u)))
			break;
		old = atomic_cmpxchg((v), c, c + (a));
		if (likely(old == c))
			break;
		c = old;
	}
	return c;
}
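/* Because __atomic_add_unless() returns the value it observed before
 * any addition, "increment unless zero" can be built on top of it,
 * e.g. (illustrative, obj->refcnt being a hypothetical field):
 *
 *	if (__atomic_add_unless(&obj->refcnt, 1, 0) != 0)
 *		... got a reference ...
 */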
#define atomic64_cmpxchg(v, o, n) \
	((__typeof__((v)->counter))cmpxchg(&((v)->counter), (o), (n)))
#define atomic64_xchg(v, new) (xchg(&((v)->counter), new))
/* Atomically add @a to @v, so long as @v was not already @u.
 * Note the asymmetry with __atomic_add_unless() above: this variant
 * returns a boolean -- true if the addition was performed.
 */
static inline long atomic64_add_unless(atomic64_t *v, long a, long u)
{
	long c, old;

	c = atomic64_read(v);
	for (;;) {
		if (unlikely(c == (u)))
			break;
		old = atomic64_cmpxchg((v), c, c + (a));
		if (likely(old == c))
			break;
		c = old;
	}
	return c != (u);
}
#define atomic64_inc_not_zero(v)	atomic64_add_unless((v), 1, 0)
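/* Declared here, implemented out of line.  Its expected semantics,
 * following the kernel-wide dec_if_positive convention: @v is
 * decremented only if the result would be non-negative, and the
 * result of the subtraction is returned either way.
 */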
long atomic64_dec_if_positive(atomic64_t *v);

#endif /* !(__ARCH_SPARC64_ATOMIC__) */