atomic_ops.h

/* SPDX-License-Identifier: GPL-2.0 */
/*
 * Low level functions for atomic operations
 *
 * Copyright IBM Corp. 1999, 2016
 */

#ifndef __ARCH_S390_ATOMIC_OPS__
#define __ARCH_S390_ATOMIC_OPS__

#ifdef CONFIG_HAVE_MARCH_Z196_FEATURES
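
/*
 * On machines with the z196 interlocked-access facility, each operation
 * is a single load-and-op instruction (e.g. "laa" = LOAD AND ADD) that
 * atomically updates *ptr and returns the old value.  The generated
 * *_barrier variants append "bcr 14,0", which acts as a fast
 * serialization point (full memory barrier) on these machines.
 */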
#define __ATOMIC_OP(op_name, op_type, op_string, op_barrier) \
static inline op_type op_name(op_type val, op_type *ptr) \
{ \
        op_type old; \
\
        asm volatile( \
                op_string " %[old],%[val],%[ptr]\n" \
                op_barrier \
                : [old] "=d" (old), [ptr] "+Q" (*ptr) \
                : [val] "d" (val) : "cc", "memory"); \
        return old; \
}

#define __ATOMIC_OPS(op_name, op_type, op_string) \
        __ATOMIC_OP(op_name, op_type, op_string, "\n") \
        __ATOMIC_OP(op_name##_barrier, op_type, op_string, "bcr 14,0\n")
__ATOMIC_OPS(__atomic_add, int, "laa")
__ATOMIC_OPS(__atomic_and, int, "lan")
__ATOMIC_OPS(__atomic_or, int, "lao")
__ATOMIC_OPS(__atomic_xor, int, "lax")

__ATOMIC_OPS(__atomic64_add, long, "laag")
__ATOMIC_OPS(__atomic64_and, long, "lang")
__ATOMIC_OPS(__atomic64_or, long, "laog")
__ATOMIC_OPS(__atomic64_xor, long, "laxg")

#undef __ATOMIC_OPS
#undef __ATOMIC_OP
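
/*
 * "asi" (ADD IMMEDIATE to storage) and its 64-bit form "agsi" add a
 * signed immediate operand directly to memory as an interlocked update;
 * no old value is returned, hence the helpers below are void.
 */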
#define __ATOMIC_CONST_OP(op_name, op_type, op_string, op_barrier) \
static inline void op_name(op_type val, op_type *ptr) \
{ \
        asm volatile( \
                op_string " %[ptr],%[val]\n" \
                op_barrier \
                : [ptr] "+Q" (*ptr) : [val] "i" (val) : "cc", "memory"); \
}

#define __ATOMIC_CONST_OPS(op_name, op_type, op_string) \
        __ATOMIC_CONST_OP(op_name, op_type, op_string, "\n") \
        __ATOMIC_CONST_OP(op_name##_barrier, op_type, op_string, "bcr 14,0\n")

__ATOMIC_CONST_OPS(__atomic_add_const, int, "asi")
__ATOMIC_CONST_OPS(__atomic64_add_const, long, "agsi")

#undef __ATOMIC_CONST_OPS
#undef __ATOMIC_CONST_OP

#else /* CONFIG_HAVE_MARCH_Z196_FEATURES */
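
/*
 * Pre-z196 fallback: emulate each operation with a compare-and-swap
 * loop.  The old value is loaded, the operation is applied to a copy,
 * and "cs" (COMPARE AND SWAP) publishes the result; "jl 0b" retries
 * whenever the compare fails because another CPU changed *ptr in the
 * meantime.  Since "cs" itself serializes, the *_barrier variants are
 * identical to the plain ones here.
 */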
#define __ATOMIC_OP(op_name, op_string) \
static inline int op_name(int val, int *ptr) \
{ \
        int old, new; \
\
        asm volatile( \
                "0: lr %[new],%[old]\n" \
                op_string " %[new],%[val]\n" \
                " cs %[old],%[new],%[ptr]\n" \
                " jl 0b" \
                : [old] "=d" (old), [new] "=&d" (new), [ptr] "+Q" (*ptr) \
                : [val] "d" (val), "0" (*ptr) : "cc", "memory"); \
        return old; \
}

#define __ATOMIC_OPS(op_name, op_string) \
        __ATOMIC_OP(op_name, op_string) \
        __ATOMIC_OP(op_name##_barrier, op_string)

__ATOMIC_OPS(__atomic_add, "ar")
__ATOMIC_OPS(__atomic_and, "nr")
__ATOMIC_OPS(__atomic_or, "or")
__ATOMIC_OPS(__atomic_xor, "xr")

#undef __ATOMIC_OPS
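
/* The same loop for 64-bit operands, using lgr/csg instead of lr/cs. */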
#define __ATOMIC64_OP(op_name, op_string) \
static inline long op_name(long val, long *ptr) \
{ \
        long old, new; \
\
        asm volatile( \
                "0: lgr %[new],%[old]\n" \
                op_string " %[new],%[val]\n" \
                " csg %[old],%[new],%[ptr]\n" \
                " jl 0b" \
                : [old] "=d" (old), [new] "=&d" (new), [ptr] "+Q" (*ptr) \
                : [val] "d" (val), "0" (*ptr) : "cc", "memory"); \
        return old; \
}

#define __ATOMIC64_OPS(op_name, op_string) \
        __ATOMIC64_OP(op_name, op_string) \
        __ATOMIC64_OP(op_name##_barrier, op_string)

__ATOMIC64_OPS(__atomic64_add, "agr")
__ATOMIC64_OPS(__atomic64_and, "ngr")
__ATOMIC64_OPS(__atomic64_or, "ogr")
__ATOMIC64_OPS(__atomic64_xor, "xgr")

#undef __ATOMIC64_OPS
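
/*
 * Without the z196 instructions there is no cheaper way to add a
 * constant: fall back to the CS-loop add and discard the old value.
 */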
#define __atomic_add_const(val, ptr)            __atomic_add(val, ptr)
#define __atomic_add_const_barrier(val, ptr)    __atomic_add(val, ptr)
#define __atomic64_add_const(val, ptr)          __atomic64_add(val, ptr)
#define __atomic64_add_const_barrier(val, ptr)  __atomic64_add(val, ptr)

#endif /* CONFIG_HAVE_MARCH_Z196_FEATURES */
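
/*
 * Compare-and-swap helpers built on the compiler's __sync builtins,
 * which s390 compilers emit as cs/csg.  The *_bool variants report
 * whether the swap happened instead of returning the old value.
 */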
static inline int __atomic_cmpxchg(int *ptr, int old, int new)
{
        return __sync_val_compare_and_swap(ptr, old, new);
}

static inline int __atomic_cmpxchg_bool(int *ptr, int old, int new)
{
        return __sync_bool_compare_and_swap(ptr, old, new);
}

static inline long __atomic64_cmpxchg(long *ptr, long old, long new)
{
        return __sync_val_compare_and_swap(ptr, old, new);
}

static inline long __atomic64_cmpxchg_bool(long *ptr, long old, long new)
{
        return __sync_bool_compare_and_swap(ptr, old, new);
}

#endif /* __ARCH_S390_ATOMIC_OPS__ */
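
/*
 * Usage sketch: the arch atomic implementation wraps these helpers,
 * roughly as in arch/s390/include/asm/atomic.h (simplified):
 *
 *      static inline void atomic_add(int i, atomic_t *v)
 *      {
 *              __atomic_add(i, &v->counter);
 *      }
 *
 *      static inline int atomic_add_return(int i, atomic_t *v)
 *      {
 *              return __atomic_add_barrier(i, &v->counter) + i;
 *      }
 */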