/* arch/parisc/include/asm/cmpxchg.h */
/* SPDX-License-Identifier: GPL-2.0 */
/*
 * forked from parisc asm/atomic.h which was:
 * Copyright (C) 2000 Philipp Rumpf <prumpf@tux.org>
 * Copyright (C) 2006 Kyle McMartin <kyle@parisc-linux.org>
 */
#ifndef _ASM_PARISC_CMPXCHG_H_
#define _ASM_PARISC_CMPXCHG_H_

/*
 * This should get optimized out since it's never called.
 * Or get a link error if xchg is used "wrong".
 */
extern void __xchg_called_with_bad_pointer(void);

/* __xchg8/__xchg32 (and __xchg64 on 64-bit) live in arch/parisc/lib/bitops.c */
extern unsigned long __xchg8(char, char *);
extern unsigned long __xchg32(int, int *);
#ifdef CONFIG_64BIT
extern unsigned long __xchg64(unsigned long, unsigned long *);
#endif
  19. /* optimizer better get rid of switch since size is a constant */
  20. static inline unsigned long
  21. __xchg(unsigned long x, __volatile__ void *ptr, int size)
  22. {
  23. switch (size) {
  24. #ifdef CONFIG_64BIT
  25. case 8: return __xchg64(x, (unsigned long *) ptr);
  26. #endif
  27. case 4: return __xchg32((int) x, (int *) ptr);
  28. case 1: return __xchg8((char) x, (char *) ptr);
  29. }
  30. __xchg_called_with_bad_pointer();
  31. return x;
  32. }
/*
 * REVISIT - Abandoned use of LDCW in xchg() for now:
 * o need to test sizeof(*ptr) to avoid clearing adjacent bytes
 * o and while we are at it, could CONFIG_64BIT code use LDCD too?
 *
 * if (__builtin_constant_p(x) && (x == NULL))
 *	if (((unsigned long)p & 0xf) == 0)
 *		return __ldcw(p);
 */

/*
 * xchg(ptr, x): atomically store x in *ptr; evaluates to the previous
 * value of *ptr, cast back to its type.  Supported sizes are 1, 4 and
 * (on 64-bit) 8 bytes; any other size fails at link time via
 * __xchg_called_with_bad_pointer().
 */
#define xchg(ptr, x) \
	((__typeof__(*(ptr)))__xchg((unsigned long)(x), (ptr), sizeof(*(ptr))))
/* bug catcher for when unsupported size is used - won't link */
extern void __cmpxchg_called_with_bad_pointer(void);

/* __cmpxchg_u32/u64 defined in arch/parisc/lib/bitops.c */
extern unsigned long __cmpxchg_u32(volatile unsigned int *m, unsigned int old,
				   unsigned int new_);
extern u64 __cmpxchg_u64(volatile u64 *ptr, u64 old, u64 new_);
  50. /* don't worry...optimizer will get rid of most of this */
  51. static inline unsigned long
  52. __cmpxchg(volatile void *ptr, unsigned long old, unsigned long new_, int size)
  53. {
  54. switch (size) {
  55. #ifdef CONFIG_64BIT
  56. case 8: return __cmpxchg_u64((u64 *)ptr, old, new_);
  57. #endif
  58. case 4: return __cmpxchg_u32((unsigned int *)ptr,
  59. (unsigned int)old, (unsigned int)new_);
  60. }
  61. __cmpxchg_called_with_bad_pointer();
  62. return old;
  63. }
/*
 * cmpxchg(ptr, o, n): if *ptr == o, atomically store n in *ptr;
 * evaluates to the value *ptr held before the operation, cast back to
 * its type.  The _o_/_n_ temporaries ensure o and n are evaluated
 * exactly once.
 */
#define cmpxchg(ptr, o, n)						 \
({									 \
	__typeof__(*(ptr)) _o_ = (o);					 \
	__typeof__(*(ptr)) _n_ = (n);					 \
	(__typeof__(*(ptr))) __cmpxchg((ptr), (unsigned long)_o_,	 \
				    (unsigned long)_n_, sizeof(*(ptr))); \
})
  71. #include <asm-generic/cmpxchg-local.h>
  72. static inline unsigned long __cmpxchg_local(volatile void *ptr,
  73. unsigned long old,
  74. unsigned long new_, int size)
  75. {
  76. switch (size) {
  77. #ifdef CONFIG_64BIT
  78. case 8: return __cmpxchg_u64((u64 *)ptr, old, new_);
  79. #endif
  80. case 4: return __cmpxchg_u32(ptr, old, new_);
  81. default:
  82. return __cmpxchg_local_generic(ptr, old, new_, size);
  83. }
  84. }
/*
 * cmpxchg_local and cmpxchg64_local are atomic wrt current CPU. Always make
 * them available.
 */
#define cmpxchg_local(ptr, o, n)					\
	((__typeof__(*(ptr)))__cmpxchg_local((ptr), (unsigned long)(o),	\
			(unsigned long)(n), sizeof(*(ptr))))
#ifdef CONFIG_64BIT
/* 64-bit kernels: the operand must really be 8 bytes wide. */
#define cmpxchg64_local(ptr, o, n)					\
({									\
	BUILD_BUG_ON(sizeof(*(ptr)) != 8);				\
	cmpxchg_local((ptr), (o), (n));					\
})
#else
/* 32-bit kernels: 64-bit local cmpxchg goes through the generic helper. */
#define cmpxchg64_local(ptr, o, n) __cmpxchg64_local_generic((ptr), (o), (n))
#endif

/* Fully-atomic 64-bit cmpxchg, implemented in arch/parisc/lib/bitops.c. */
#define cmpxchg64(ptr, o, n) __cmpxchg_u64(ptr, o, n)
  102. #endif /* _ASM_PARISC_CMPXCHG_H_ */