atomic-ops.h

/* Copyright (c) 2013, Ben Noordhuis <[email protected]>
 *
 * Permission to use, copy, modify, and/or distribute this software for any
 * purpose with or without fee is hereby granted, provided that the above
 * copyright notice and this permission notice appear in all copies.
 *
 * THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
 * WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
 * MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
 * ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
 * WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
 * ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF
 * OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
 */

#ifndef UV_ATOMIC_OPS_H_
#define UV_ATOMIC_OPS_H_

#include "internal.h"  /* UV_UNUSED */

#if defined(__SUNPRO_C) || defined(__SUNPRO_CC)
#include <atomic.h>
#define __sync_val_compare_and_swap(p, o, n) atomic_cas_ptr(p, o, n)
#endif

UV_UNUSED(static int cmpxchgi(int* ptr, int oldval, int newval));
UV_UNUSED(static long cmpxchgl(long* ptr, long oldval, long newval));
UV_UNUSED(static void cpu_relax(void));

/* Prefer hand-rolled assembly over the gcc builtins because the latter also
 * issue full memory barriers.
 */
UV_UNUSED(static int cmpxchgi(int* ptr, int oldval, int newval)) {
#if defined(__i386__) || defined(__x86_64__)
  int out;
  __asm__ __volatile__ ("lock; cmpxchg %2, %1;"
                        : "=a" (out), "+m" (*(volatile int*) ptr)
                        : "r" (newval), "0" (oldval)
                        : "memory");
  return out;
#elif defined(_AIX) && defined(__xlC__)
  const int out = (*(volatile int*) ptr);
  __compare_and_swap(ptr, &oldval, newval);
  return out;
#elif defined(__MVS__)
  unsigned int op4;
  if (__plo_CSST(ptr, (unsigned int*) &oldval, newval,
                 (unsigned int*) ptr, *ptr, &op4))
    return oldval;
  else
    return op4;
#else
  return __sync_val_compare_and_swap(ptr, oldval, newval);
#endif
}

UV_UNUSED(static long cmpxchgl(long* ptr, long oldval, long newval)) {
#if defined(__i386__) || defined(__x86_64__)
  long out;
  __asm__ __volatile__ ("lock; cmpxchg %2, %1;"
                        : "=a" (out), "+m" (*(volatile long*) ptr)
                        : "r" (newval), "0" (oldval)
                        : "memory");
  return out;
#elif defined(_AIX) && defined(__xlC__)
  const long out = (*(volatile long*) ptr);
# if defined(__64BIT__)
  __compare_and_swaplp(ptr, &oldval, newval);
# else
  __compare_and_swap(ptr, &oldval, newval);
# endif /* if defined(__64BIT__) */
  return out;
#elif defined(__MVS__)
#ifdef _LP64
  unsigned long long op4;
  if (__plo_CSSTGR(ptr, (unsigned long long*) &oldval, newval,
                   (unsigned long long*) ptr, *ptr, &op4))
#else
  unsigned long op4;
  if (__plo_CSST(ptr, (unsigned int*) &oldval, newval,
                 (unsigned int*) ptr, *ptr, &op4))
#endif
    return oldval;
  else
    return op4;
#else
  return __sync_val_compare_and_swap(ptr, oldval, newval);
#endif
}

UV_UNUSED(static void cpu_relax(void)) {
#if defined(__i386__) || defined(__x86_64__)
  __asm__ __volatile__ ("rep; nop");  /* a.k.a. PAUSE */
#endif
}

#endif  /* UV_ATOMIC_OPS_H_ */
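
A minimal usage sketch, not part of the header itself: cmpxchgi() returns the value that was in *ptr before the operation, so pairing it with cpu_relax() is enough to build a simple test-and-set spinlock. The demo_spinlock_* names below are hypothetical, chosen only for this illustration, and the sketch assumes it is compiled inside the same source tree so that "internal.h" and UV_UNUSED resolve.

/* Illustrative example only; the demo_spinlock_* identifiers are hypothetical. */
#include "atomic-ops.h"

typedef struct {
  int lock;  /* 0 = unlocked, 1 = locked */
} demo_spinlock_t;

static void demo_spinlock_init(demo_spinlock_t* s) {
  s->lock = 0;
}

static void demo_spinlock_lock(demo_spinlock_t* s) {
  /* The swap succeeds only when the previous value equals the expected
   * old value (0); otherwise spin and hint the CPU that we are waiting.
   */
  while (cmpxchgi(&s->lock, 0, 1) != 0)
    cpu_relax();
}

static void demo_spinlock_unlock(demo_spinlock_t* s) {
  /* Swap 1 -> 0; the x86 path clobbers "memory", which also stops the
   * compiler from moving stores past the unlock.
   */
  cmpxchgi(&s->lock, 1, 0);
}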