atomic_64.S

/* atomic.S: These things are too big to do inline.
 *
 * Copyright (C) 1999, 2007, 2012 David S. Miller (davem@davemloft.net)
 */

#include <linux/linkage.h>
#include <asm/asi.h>
#include <asm/backoff.h>

	.text

	/* Two versions of the atomic routines, one that
	 * does not return a value and does not perform
	 * memory barriers, and a second which returns
	 * a value and does the barriers.
	 */
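
	/* Both versions are built around the same compare-and-swap
	 * retry loop.  As a rough C-style sketch (an illustration of
	 * the pattern, not the kernel's actual code):
	 *
	 *	do {
	 *		old = *ptr;			// lduw / ldx
	 *		new = old op increment;		// add / sub
	 *	} while (cmpxchg(ptr, old, new) != old);  // cas / casx
	 *
	 * When the cas fails (another CPU changed the value), the loop
	 * retries, backing off via BACKOFF_SPIN to reduce contention.
	 */
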
#define ATOMIC_OP(op)						\
ENTRY(atomic_##op) /* %o0 = increment, %o1 = atomic_ptr */	\
	BACKOFF_SETUP(%o2);					\
1:	lduw	[%o1], %g1;					\
	op	%g1, %o0, %g7;					\
	cas	[%o1], %g1, %g7;				\
	cmp	%g1, %g7;					\
	bne,pn	%icc, BACKOFF_LABEL(2f, 1b);			\
	 nop;							\
	retl;							\
	 nop;							\
2:	BACKOFF_SPIN(%o2, %o3, 1b);				\
ENDPROC(atomic_##op);
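
/* For reference, ATOMIC_OP(add) expands (modulo the semicolon
 * separators) to:
 *
 * ENTRY(atomic_add)		! %o0 = increment, %o1 = atomic_ptr
 *	BACKOFF_SETUP(%o2)
 * 1:	lduw	[%o1], %g1
 *	add	%g1, %o0, %g7
 *	cas	[%o1], %g1, %g7
 *	cmp	%g1, %g7
 *	bne,pn	%icc, BACKOFF_LABEL(2f, 1b)
 *	 nop
 *	retl
 *	 nop
 * 2:	BACKOFF_SPIN(%o2, %o3, 1b)
 * ENDPROC(atomic_add)
 */
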
#define ATOMIC_OP_RETURN(op)					\
ENTRY(atomic_##op##_return) /* %o0 = increment, %o1 = atomic_ptr */ \
	BACKOFF_SETUP(%o2);					\
1:	lduw	[%o1], %g1;					\
	op	%g1, %o0, %g7;					\
	cas	[%o1], %g1, %g7;				\
	cmp	%g1, %g7;					\
	bne,pn	%icc, BACKOFF_LABEL(2f, 1b);			\
	 op	%g1, %o0, %g1;					\
	retl;							\
	 sra	%g1, 0, %o0;					\
2:	BACKOFF_SPIN(%o2, %o3, 1b);				\
ENDPROC(atomic_##op##_return);
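
/* In the _return variant, the new value is recomputed in the branch
 * delay slot (op %g1, %o0, %g1 executes whether or not the branch is
 * taken; on the retry path %g1 is simply reloaded), and
 * sra %g1, 0, %o0 sign-extends the 32-bit result into the 64-bit
 * return register.
 */
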
#define ATOMIC_OPS(op) ATOMIC_OP(op) ATOMIC_OP_RETURN(op)

ATOMIC_OPS(add)
ATOMIC_OPS(sub)

#undef ATOMIC_OPS
#undef ATOMIC_OP_RETURN
#undef ATOMIC_OP
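
/* The C-visible prototypes for the routines generated above live in
 * arch/sparc/include/asm/atomic_64.h.  A hedged usage sketch, per
 * the generic atomic_t API:
 *
 *	atomic_t v = ATOMIC_INIT(0);
 *	int n;
 *
 *	atomic_add(4, &v);		// no return value, no barriers
 *	n = atomic_add_return(4, &v);	// returns new value, with barriers
 */
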
#define ATOMIC64_OP(op)						\
ENTRY(atomic64_##op) /* %o0 = increment, %o1 = atomic_ptr */	\
	BACKOFF_SETUP(%o2);					\
1:	ldx	[%o1], %g1;					\
	op	%g1, %o0, %g7;					\
	casx	[%o1], %g1, %g7;				\
	cmp	%g1, %g7;					\
	bne,pn	%xcc, BACKOFF_LABEL(2f, 1b);			\
	 nop;							\
	retl;							\
	 nop;							\
2:	BACKOFF_SPIN(%o2, %o3, 1b);				\
ENDPROC(atomic64_##op);

#define ATOMIC64_OP_RETURN(op)					\
ENTRY(atomic64_##op##_return) /* %o0 = increment, %o1 = atomic_ptr */ \
	BACKOFF_SETUP(%o2);					\
1:	ldx	[%o1], %g1;					\
	op	%g1, %o0, %g7;					\
	casx	[%o1], %g1, %g7;				\
	cmp	%g1, %g7;					\
	bne,pn	%xcc, BACKOFF_LABEL(2f, 1b);			\
	 nop;							\
	retl;							\
	 op	%g1, %o0, %o0;					\
2:	BACKOFF_SPIN(%o2, %o3, 1b);				\
ENDPROC(atomic64_##op##_return);
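
/* The 64-bit variants mirror the 32-bit ones, differing only in
 * width: ldx/casx/%xcc replace lduw/cas/%icc, and the result
 * (op %g1, %o0, %o0 in the retl delay slot) is already a full
 * 64-bit value, so no sra sign-extension is needed.
 */
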
#define ATOMIC64_OPS(op) ATOMIC64_OP(op) ATOMIC64_OP_RETURN(op)

ATOMIC64_OPS(add)
ATOMIC64_OPS(sub)

#undef ATOMIC64_OPS
#undef ATOMIC64_OP_RETURN
#undef ATOMIC64_OP

ENTRY(atomic64_dec_if_positive) /* %o0 = atomic_ptr */
	BACKOFF_SETUP(%o2)
1:	ldx	[%o0], %g1
	brlez,pn %g1, 3f
	 sub	%g1, 1, %g7
	casx	[%o0], %g1, %g7
	cmp	%g1, %g7
	bne,pn	%xcc, BACKOFF_LABEL(2f, 1b)
	 nop
3:	retl
	 sub	%g1, 1, %o0
2:	BACKOFF_SPIN(%o2, %o3, 1b)
ENDPROC(atomic64_dec_if_positive)
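
/* Roughly equivalent C for the routine above (a sketch, not the
 * kernel's implementation): decrement only while the value is
 * positive, and return the old value minus one either way.
 *
 *	long atomic64_dec_if_positive(atomic64_t *v)
 *	{
 *		long old, new;
 *
 *		do {
 *			old = v->counter;	// ldx
 *			new = old - 1;		// sub, in the brlez delay slot
 *			if (old <= 0)		// brlez,pn: skip the casx
 *				break;
 *		} while (cmpxchg(&v->counter, old, new) != old);  // casx
 *
 *		return old - 1;			// retl delay slot
 *	}
 */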