/* s390.c */
/* Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements.  See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License.  You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
#include "apr_arch_atomic.h"

#ifdef USE_ATOMICS_S390
  18. APR_DECLARE(apr_status_t) apr_atomic_init(apr_pool_t *p)
  19. {
  20. return APR_SUCCESS;
  21. }
  22. APR_DECLARE(apr_uint32_t) apr_atomic_read32(volatile apr_uint32_t *mem)
  23. {
  24. return *mem;
  25. }
  26. APR_DECLARE(void) apr_atomic_set32(volatile apr_uint32_t *mem, apr_uint32_t val)
  27. {
  28. *mem = val;
  29. }
/*
 * Atomically add 'val' to '*mem' using a CS (compare-and-swap) retry
 * loop, returning the value '*mem' held just before the successful swap
 * (i.e. the OLD value -- contrast atomic_sub(), which returns the new one).
 *
 * Loop body: copy the expected value into a scratch register, add the
 * operand, then CS the result in.  If another CPU modified '*mem' in
 * the meantime, CS reloads 'prev' with the fresh contents and sets the
 * condition code so 'jl' branches back and the add is retried.
 */
static APR_INLINE apr_uint32_t atomic_add(volatile apr_uint32_t *mem, apr_uint32_t val)
{
    apr_uint32_t prev = *mem, temp;

    asm volatile ("loop_%=:\n"      /* %= makes the label unique per expansion */
                  " lr %1,%0\n"     /* temp = prev */
                  " alr %1,%3\n"    /* temp += val */
                  " cs %0,%1,%2\n"  /* if (*mem == prev) *mem = temp; else prev = *mem */
                  " jl loop_%=\n"   /* CS failed (cc "low") -> retry */
                  : "+d" (prev), "+d" (temp), "=Q" (*mem)
                  : "d" (val), "m" (*mem)
                  : "cc", "memory");

    return prev;
}
  43. APR_DECLARE(apr_uint32_t) apr_atomic_add32(volatile apr_uint32_t *mem, apr_uint32_t val)
  44. {
  45. return atomic_add(mem, val);
  46. }
  47. APR_DECLARE(apr_uint32_t) apr_atomic_inc32(volatile apr_uint32_t *mem)
  48. {
  49. return atomic_add(mem, 1);
  50. }
/*
 * Atomically subtract 'val' from '*mem' with a CS (compare-and-swap)
 * retry loop.  Note this returns 'temp' -- the NEW value of '*mem'
 * after the subtraction -- unlike atomic_add(), which returns the old
 * one.  apr_atomic_dec32() relies on getting the new value back so it
 * can report whether the counter reached zero.
 */
static APR_INLINE apr_uint32_t atomic_sub(volatile apr_uint32_t *mem, apr_uint32_t val)
{
    apr_uint32_t prev = *mem, temp;

    asm volatile ("loop_%=:\n"      /* %= makes the label unique per expansion */
                  " lr %1,%0\n"     /* temp = prev */
                  " slr %1,%3\n"    /* temp -= val */
                  " cs %0,%1,%2\n"  /* if (*mem == prev) *mem = temp; else prev = *mem */
                  " jl loop_%=\n"   /* CS failed (cc "low") -> retry */
                  : "+d" (prev), "+d" (temp), "=Q" (*mem)
                  : "d" (val), "m" (*mem)
                  : "cc", "memory");

    return temp;
}
  64. APR_DECLARE(void) apr_atomic_sub32(volatile apr_uint32_t *mem, apr_uint32_t val)
  65. {
  66. atomic_sub(mem, val);
  67. }
  68. APR_DECLARE(int) apr_atomic_dec32(volatile apr_uint32_t *mem)
  69. {
  70. return atomic_sub(mem, 1);
  71. }
/*
 * Atomic compare-and-swap: if '*mem' equals 'cmp', store 'with' into
 * '*mem'.  Returns the value '*mem' held before the operation -- equal
 * to 'cmp' exactly when the swap took place.  A single CS instruction
 * suffices: on mismatch, CS loads the current contents of '*mem' into
 * the 'cmp' register, which is what we return.
 */
APR_DECLARE(apr_uint32_t) apr_atomic_cas32(volatile apr_uint32_t *mem, apr_uint32_t with,
                                           apr_uint32_t cmp)
{
    asm volatile (" cs %0,%2,%1\n"  /* if (*mem == cmp) *mem = with; else cmp = *mem */
                  : "+d" (cmp), "=Q" (*mem)
                  : "d" (with), "m" (*mem)
                  : "cc", "memory");

    return cmp;
}
/*
 * Atomically exchange '*mem' with 'val', returning the previous value.
 * The plain read seeds the expected value; CS then swaps 'val' in,
 * retrying (CS refreshes 'prev' on failure) until no other CPU changed
 * '*mem' between the read and the swap.
 */
APR_DECLARE(apr_uint32_t) apr_atomic_xchg32(volatile apr_uint32_t *mem, apr_uint32_t val)
{
    apr_uint32_t prev = *mem;

    asm volatile ("loop_%=:\n"      /* %= makes the label unique per expansion */
                  " cs %0,%2,%1\n"  /* if (*mem == prev) *mem = val; else prev = *mem */
                  " jl loop_%=\n"   /* CS failed (cc "low") -> retry */
                  : "+d" (prev), "=Q" (*mem)
                  : "d" (val), "m" (*mem)
                  : "cc", "memory");

    return prev;
}
/*
 * Pointer-width compare-and-swap: if '*mem' equals 'cmp', store 'with'
 * into '*mem'.  Returns the previous contents of '*mem' (== 'cmp' when
 * the swap succeeded).  The 32-bit CS or 64-bit CSG instruction is
 * selected to match the platform pointer size; on mismatch the
 * instruction loads the current value into 'prev'.
 */
APR_DECLARE(void*) apr_atomic_casptr(volatile void **mem, void *with, const void *cmp)
{
    void *prev = (void *) cmp;
#if APR_SIZEOF_VOIDP == 4
    asm volatile (" cs %0,%2,%1\n"   /* 32-bit compare and swap */
                  : "+d" (prev), "=Q" (*mem)
                  : "d" (with), "m" (*mem)
                  : "cc", "memory");
#elif APR_SIZEOF_VOIDP == 8
    asm volatile (" csg %0,%2,%1\n"  /* 64-bit compare and swap */
                  : "+d" (prev), "=Q" (*mem)
                  : "d" (with), "m" (*mem)
                  : "cc", "memory");
#else
#error APR_SIZEOF_VOIDP value not supported
#endif
    return prev;
}
/*
 * Atomically exchange the pointer at '*mem' with 'with', returning the
 * previous pointer.  Same retry-loop scheme as apr_atomic_xchg32(),
 * with CS or CSG chosen to match the platform pointer size; the
 * compare-and-swap refreshes 'prev' on failure so the loop converges.
 */
APR_DECLARE(void*) apr_atomic_xchgptr(volatile void **mem, void *with)
{
    void *prev = (void *) *mem;
#if APR_SIZEOF_VOIDP == 4
    asm volatile ("loop_%=:\n"       /* %= makes the label unique per expansion */
                  " cs %0,%2,%1\n"   /* 32-bit: if (*mem == prev) *mem = with; else prev = *mem */
                  " jl loop_%=\n"    /* CS failed -> retry */
                  : "+d" (prev), "=Q" (*mem)
                  : "d" (with), "m" (*mem)
                  : "cc", "memory");
#elif APR_SIZEOF_VOIDP == 8
    asm volatile ("loop_%=:\n"
                  " csg %0,%2,%1\n"  /* 64-bit: if (*mem == prev) *mem = with; else prev = *mem */
                  " jl loop_%=\n"    /* CSG failed -> retry */
                  : "+d" (prev), "=Q" (*mem)
                  : "d" (with), "m" (*mem)
                  : "cc", "memory");
#else
#error APR_SIZEOF_VOIDP value not supported
#endif
    return prev;
}
#endif /* USE_ATOMICS_S390 */