  1. /* SPDX-License-Identifier: GPL-2.0 */
  2. #ifndef TOOLS_ASM_X86_CMPXCHG_H
  3. #define TOOLS_ASM_X86_CMPXCHG_H
  4. #include <linux/compiler.h>
  5. /*
  6. * Non-existant functions to indicate usage errors at link time
  7. * (or compile-time if the compiler implements __compiletime_error().
  8. */
  9. extern void __cmpxchg_wrong_size(void)
  10. __compiletime_error("Bad argument size for cmpxchg");
  11. /*
  12. * Constants for operation sizes. On 32-bit, the 64-bit size it set to
  13. * -1 because sizeof will never return -1, thereby making those switch
  14. * case statements guaranteeed dead code which the compiler will
  15. * eliminate, and allowing the "missing symbol in the default case" to
  16. * indicate a usage error.
  17. */
  18. #define __X86_CASE_B 1
  19. #define __X86_CASE_W 2
  20. #define __X86_CASE_L 4
  21. #ifdef __x86_64__
  22. #define __X86_CASE_Q 8
  23. #else
  24. #define __X86_CASE_Q -1 /* sizeof will never return -1 */
  25. #endif
  26. /*
  27. * Atomic compare and exchange. Compare OLD with MEM, if identical,
  28. * store NEW in MEM. Return the initial value in MEM. Success is
  29. * indicated by comparing RETURN with OLD.
  30. */
  31. #define __raw_cmpxchg(ptr, old, new, size, lock) \
  32. ({ \
  33. __typeof__(*(ptr)) __ret; \
  34. __typeof__(*(ptr)) __old = (old); \
  35. __typeof__(*(ptr)) __new = (new); \
  36. switch (size) { \
  37. case __X86_CASE_B: \
  38. { \
  39. volatile u8 *__ptr = (volatile u8 *)(ptr); \
  40. asm volatile(lock "cmpxchgb %2,%1" \
  41. : "=a" (__ret), "+m" (*__ptr) \
  42. : "q" (__new), "0" (__old) \
  43. : "memory"); \
  44. break; \
  45. } \
  46. case __X86_CASE_W: \
  47. { \
  48. volatile u16 *__ptr = (volatile u16 *)(ptr); \
  49. asm volatile(lock "cmpxchgw %2,%1" \
  50. : "=a" (__ret), "+m" (*__ptr) \
  51. : "r" (__new), "0" (__old) \
  52. : "memory"); \
  53. break; \
  54. } \
  55. case __X86_CASE_L: \
  56. { \
  57. volatile u32 *__ptr = (volatile u32 *)(ptr); \
  58. asm volatile(lock "cmpxchgl %2,%1" \
  59. : "=a" (__ret), "+m" (*__ptr) \
  60. : "r" (__new), "0" (__old) \
  61. : "memory"); \
  62. break; \
  63. } \
  64. case __X86_CASE_Q: \
  65. { \
  66. volatile u64 *__ptr = (volatile u64 *)(ptr); \
  67. asm volatile(lock "cmpxchgq %2,%1" \
  68. : "=a" (__ret), "+m" (*__ptr) \
  69. : "r" (__new), "0" (__old) \
  70. : "memory"); \
  71. break; \
  72. } \
  73. default: \
  74. __cmpxchg_wrong_size(); \
  75. } \
  76. __ret; \
  77. })
  78. #define __cmpxchg(ptr, old, new, size) \
  79. __raw_cmpxchg((ptr), (old), (new), (size), LOCK_PREFIX)
  80. #define cmpxchg(ptr, old, new) \
  81. __cmpxchg(ptr, old, new, sizeof(*(ptr)))
  82. #endif /* TOOLS_ASM_X86_CMPXCHG_H */