ppc.c

/* Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License. You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#include "apr_arch_atomic.h"

#ifdef USE_ATOMICS_PPC

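/* When PPC405_ERRATA is defined, PPC405_ERR77_SYNC emits a sync
 * immediately before each stwcx. as a workaround for PPC405 erratum 77;
 * on other processors it expands to nothing. */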
#ifdef PPC405_ERRATA
# define PPC405_ERR77_SYNC " sync\n"
#else
# define PPC405_ERR77_SYNC
#endif

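/* Nothing to set up for the lwarx/stwcx. based implementation. */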
APR_DECLARE(apr_status_t) apr_atomic_init(apr_pool_t *p)
{
    return APR_SUCCESS;
}

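/* Plain volatile loads and stores suffice for read32/set32: aligned
 * 32-bit accesses are atomic on PowerPC.  No memory ordering is implied. */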
APR_DECLARE(apr_uint32_t) apr_atomic_read32(volatile apr_uint32_t *mem)
{
    return *mem;
}

APR_DECLARE(void) apr_atomic_set32(volatile apr_uint32_t *mem, apr_uint32_t val)
{
    *mem = val;
}

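/* Atomically add val to *mem and return the previous value.  The
 * lwarx/stwcx. pair retries the update whenever the reservation is lost. */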
APR_DECLARE(apr_uint32_t) apr_atomic_add32(volatile apr_uint32_t *mem, apr_uint32_t val)
{
    apr_uint32_t prev, temp;
    asm volatile ("loop_%=:\n"                   /* lost reservation  */
                  "    lwarx   %0,0,%3\n"        /* load and reserve  */
                  "    add     %1,%0,%4\n"       /* add val and prev  */
                  PPC405_ERR77_SYNC              /* ppc405 Erratum 77 */
                  "    stwcx.  %1,0,%3\n"        /* store new value   */
                  "    bne-    loop_%=\n"        /* loop if lost      */
                  : "=&r" (prev), "=&r" (temp), "=m" (*mem)
                  : "b" (mem), "r" (val)
                  : "cc", "memory");
    return prev;
}

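/* Atomically subtract val from *mem; the old value is not returned. */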
APR_DECLARE(void) apr_atomic_sub32(volatile apr_uint32_t *mem, apr_uint32_t val)
{
    apr_uint32_t temp;
    asm volatile ("loop_%=:\n"                   /* lost reservation  */
                  "    lwarx   %0,0,%2\n"        /* load and reserve  */
                  "    subf    %0,%3,%0\n"       /* subtract val      */
                  PPC405_ERR77_SYNC              /* ppc405 Erratum 77 */
                  "    stwcx.  %0,0,%2\n"        /* store new value   */
                  "    bne-    loop_%=\n"        /* loop if lost      */
                  : "=&r" (temp), "=m" (*mem)
                  : "b" (mem), "r" (val)
                  : "cc", "memory");
}

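/* Atomically increment *mem and return the previous value; the final
 * subi recovers the old value from the register after the store. */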
APR_DECLARE(apr_uint32_t) apr_atomic_inc32(volatile apr_uint32_t *mem)
{
    apr_uint32_t prev;
    asm volatile ("loop_%=:\n"                   /* lost reservation   */
                  "    lwarx   %0,0,%2\n"        /* load and reserve   */
                  "    addi    %0,%0,1\n"        /* add immediate      */
                  PPC405_ERR77_SYNC              /* ppc405 Erratum 77  */
                  "    stwcx.  %0,0,%2\n"        /* store new value    */
                  "    bne-    loop_%=\n"        /* loop if lost       */
                  "    subi    %0,%0,1\n"        /* return old value   */
                  : "=&b" (prev), "=m" (*mem)
                  : "b" (mem), "m" (*mem)
                  : "cc", "memory");
    return prev;
}

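/* Atomically decrement *mem and return the new value, so the result is
 * zero exactly when the counter reaches zero. */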
APR_DECLARE(int) apr_atomic_dec32(volatile apr_uint32_t *mem)
{
    apr_uint32_t prev;
    asm volatile ("loop_%=:\n"                   /* lost reservation    */
                  "    lwarx   %0,0,%2\n"        /* load and reserve    */
                  "    subi    %0,%0,1\n"        /* subtract immediate  */
                  PPC405_ERR77_SYNC              /* ppc405 Erratum 77   */
                  "    stwcx.  %0,0,%2\n"        /* store new value     */
                  "    bne-    loop_%=\n"        /* loop if lost        */
                  : "=&b" (prev), "=m" (*mem)
                  : "b" (mem), "m" (*mem)
                  : "cc", "memory");
    return prev;
}

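/* Compare-and-swap: store 'with' into *mem only if it currently equals
 * 'cmp'; in either case return the value seen by the reserved load. */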
APR_DECLARE(apr_uint32_t) apr_atomic_cas32(volatile apr_uint32_t *mem, apr_uint32_t with,
                                           apr_uint32_t cmp)
{
    apr_uint32_t prev;
    asm volatile ("loop_%=:\n"                   /* lost reservation   */
                  "    lwarx   %0,0,%1\n"        /* load and reserve   */
                  "    cmpw    %0,%3\n"          /* compare operands   */
                  "    bne-    exit_%=\n"        /* skip if not equal  */
                  PPC405_ERR77_SYNC              /* ppc405 Erratum 77  */
                  "    stwcx.  %2,0,%1\n"        /* store new value    */
                  "    bne-    loop_%=\n"        /* loop if lost       */
                  "exit_%=:\n"                   /* not equal          */
                  : "=&r" (prev)
                  : "b" (mem), "r" (with), "r" (cmp)
                  : "cc", "memory");
    return prev;
}

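/* Atomically exchange *mem with val and return the previous value. */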
APR_DECLARE(apr_uint32_t) apr_atomic_xchg32(volatile apr_uint32_t *mem, apr_uint32_t val)
{
    apr_uint32_t prev;
    asm volatile ("loop_%=:\n"                   /* lost reservation  */
                  "    lwarx   %0,0,%1\n"        /* load and reserve  */
                  PPC405_ERR77_SYNC              /* ppc405 Erratum 77 */
                  "    stwcx.  %2,0,%1\n"        /* store new value   */
                  "    bne-    loop_%="          /* loop if lost      */
                  : "=&r" (prev)
                  : "b" (mem), "r" (val)
                  : "cc", "memory");
    return prev;
}

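/* Pointer-sized compare-and-swap: lwarx/stwcx. on 32-bit targets,
 * ldarx/stdcx. on 64-bit targets, selected by APR_SIZEOF_VOIDP. */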
APR_DECLARE(void*) apr_atomic_casptr(volatile void **mem, void *with, const void *cmp)
{
    void *prev;
#if APR_SIZEOF_VOIDP == 4
    asm volatile ("loop_%=:\n"                   /* lost reservation   */
                  "    lwarx   %0,0,%1\n"        /* load and reserve   */
                  "    cmpw    %0,%3\n"          /* compare operands   */
                  "    bne-    exit_%=\n"        /* skip if not equal  */
                  PPC405_ERR77_SYNC              /* ppc405 Erratum 77  */
                  "    stwcx.  %2,0,%1\n"        /* store new value    */
                  "    bne-    loop_%=\n"        /* loop if lost       */
                  "exit_%=:\n"                   /* not equal          */
                  : "=&r" (prev)
                  : "b" (mem), "r" (with), "r" (cmp)
                  : "cc", "memory");
#elif APR_SIZEOF_VOIDP == 8
    asm volatile ("loop_%=:\n"                   /* lost reservation   */
                  "    ldarx   %0,0,%1\n"        /* load and reserve   */
                  "    cmpd    %0,%3\n"          /* compare operands   */
                  "    bne-    exit_%=\n"        /* skip if not equal  */
                  PPC405_ERR77_SYNC              /* ppc405 Erratum 77  */
                  "    stdcx.  %2,0,%1\n"        /* store new value    */
                  "    bne-    loop_%=\n"        /* loop if lost       */
                  "exit_%=:\n"                   /* not equal          */
                  : "=&r" (prev)
                  : "b" (mem), "r" (with), "r" (cmp)
                  : "cc", "memory");
#else
#error APR_SIZEOF_VOIDP value not supported
#endif
    return prev;
}

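/* Pointer-sized exchange returning the previous value; the trailing
 * isync serves as the memory barrier noted in the comments below. */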
APR_DECLARE(void*) apr_atomic_xchgptr(volatile void **mem, void *with)
{
    void *prev;
#if APR_SIZEOF_VOIDP == 4
    asm volatile ("loop_%=:\n"                   /* lost reservation  */
                  "    lwarx   %0,0,%1\n"        /* load and reserve  */
                  PPC405_ERR77_SYNC              /* ppc405 Erratum 77 */
                  "    stwcx.  %2,0,%1\n"        /* store new value   */
                  "    bne-    loop_%=\n"        /* loop if lost      */
                  "    isync\n"                  /* memory barrier    */
                  : "=&r" (prev)
                  : "b" (mem), "r" (with)
                  : "cc", "memory");
#elif APR_SIZEOF_VOIDP == 8
    asm volatile ("loop_%=:\n"                   /* lost reservation  */
                  "    ldarx   %0,0,%1\n"        /* load and reserve  */
                  PPC405_ERR77_SYNC              /* ppc405 Erratum 77 */
                  "    stdcx.  %2,0,%1\n"        /* store new value   */
                  "    bne-    loop_%=\n"        /* loop if lost      */
                  "    isync\n"                  /* memory barrier    */
                  : "=&r" (prev)
                  : "b" (mem), "r" (with)
                  : "cc", "memory");
#else
#error APR_SIZEOF_VOIDP value not supported
#endif
    return prev;
}

#endif /* USE_ATOMICS_PPC */