020-mcfv4e_inline_memory_params.patch 5.1 KB

From 3f698a1cf08cc02911cdb2ca3217be77eeba794b Mon Sep 17 00:00:00 2001
From: Kurt Mahan <[email protected]>
Date: Tue, 27 Nov 2007 23:17:53 -0700
Subject: [PATCH] Change inline assembly memory params.

For various routines change how the assembly memory pointer
is passed in.

LTIBName: mcfv4e-inline-memory-params
Signed-off-by: Kurt Mahan <[email protected]>
---
 include/asm-m68k/bitops.h |   68 ++++++++++++++++++++++++++++++++++++++++++++-
 1 files changed, 67 insertions(+), 1 deletions(-)

--- a/include/asm-m68k/bitops.h
+++ b/include/asm-m68k/bitops.h
@@ -465,7 +465,7 @@ static inline int ext2_find_next_bit(con
 	__constant_coldfire_test_and_set_bit(nr, vaddr) : \
 	__generic_coldfire_test_and_set_bit(nr, vaddr))
 
-
+#if 0
 static __inline__ int __constant_coldfire_test_and_set_bit(int nr,
 		volatile void *vaddr)
 {
@@ -477,6 +477,17 @@ static __inline__ int __constant_coldfir
 		: "di" (nr & 7));
 	return retval;
 }
+#else
+static __inline__ int __constant_coldfire_test_and_set_bit(int nr,volatile void * vaddr)
+{
+	char retval;
+	volatile char *p = &((volatile char *)vaddr)[(nr^31) >> 3];
+	__asm__ __volatile__ ("bset %2,(%4); sne %0"
+		: "=d" (retval), "=m" (*p)
+		: "di" (nr & 7), "m" (*p), "a" (p));
+	return retval;
+}
+#endif
 
 static __inline__ int __generic_coldfire_test_and_set_bit(int nr,
 		volatile void *vaddr)
@@ -496,6 +507,7 @@ static __inline__ int __generic_coldfire
 	__constant_coldfire_set_bit(nr, vaddr) : \
 	__generic_coldfire_set_bit(nr, vaddr))
 
+#if 0
 static __inline__ void __constant_coldfire_set_bit(int nr,
 		volatile void *vaddr)
 {
@@ -503,6 +515,14 @@ static __inline__ void __constant_coldfi
 	__asm__ __volatile__ ("bset %1,%0"
 		: "+QUd" (*p) : "di" (nr & 7));
 }
+#else
+static __inline__ void __constant_coldfire_set_bit(int nr, volatile void * vaddr)
+{
+	volatile char *p = &((volatile char *)vaddr)[(nr^31) >> 3];
+	__asm__ __volatile__ ("bset %1,(%3)"
+		: "=m" (*p) : "di" (nr & 7), "m" (*p), "a" (p));
+}
+#endif
 
 static __inline__ void __generic_coldfire_set_bit(int nr, volatile void *vaddr)
 {
@@ -518,6 +538,7 @@ static __inline__ void __generic_coldfir
 	__constant_coldfire_test_and_clear_bit(nr, vaddr) : \
 	__generic_coldfire_test_and_clear_bit(nr, vaddr))
 
+#if 0
 static __inline__ int __constant_coldfire_test_and_clear_bit(int nr,
 		volatile void *vaddr)
 {
@@ -530,6 +551,19 @@ static __inline__ int __constant_coldfir
 
 	return retval;
 }
+#else
+static __inline__ int __constant_coldfire_test_and_clear_bit(int nr, volatile void *vaddr)
+{
+	char retval;
+	volatile char *p = &((volatile char *)vaddr)[(nr^31) >> 3];
+
+	__asm__ __volatile__ ("bclr %2,(%4); sne %0"
+		: "=d" (retval), "=m" (*p)
+		: "id" (nr & 7), "m" (*p), "a" (p));
+
+	return retval;
+}
+#endif
 
 static __inline__ int __generic_coldfire_test_and_clear_bit(int nr,
 		volatile void *vaddr)
@@ -556,6 +590,7 @@ static __inline__ int __generic_coldfire
 	__constant_coldfire_clear_bit(nr, vaddr) : \
 	__generic_coldfire_clear_bit(nr, vaddr))
 
+#if 0
 static __inline__ void __constant_coldfire_clear_bit(int nr,
 		volatile void *vaddr)
 {
@@ -563,6 +598,14 @@ static __inline__ void __constant_coldfi
 	__asm__ __volatile__ ("bclr %1,%0"
 		: "+QUd" (*p) : "id" (nr & 7));
 }
+#else
+static __inline__ void __constant_coldfire_clear_bit(int nr, volatile void * vaddr)
+{
+	volatile char *p = &((volatile char *)vaddr)[(nr^31) >> 3];
+	__asm__ __volatile__ ("bclr %1,(%3)"
+		: "=m" (*p) : "id" (nr & 7), "m" (*p), "a" (p));
+}
+#endif
 
 static __inline__ void __generic_coldfire_clear_bit(int nr,
 		volatile void *vaddr)
@@ -579,6 +622,7 @@ static __inline__ void __generic_coldfir
 	__constant_coldfire_test_and_change_bit(nr, vaddr) : \
 	__generic_coldfire_test_and_change_bit(nr, vaddr))
 
+#if 0
 static __inline__ int __constant_coldfire_test_and_change_bit(int nr,
 		volatile void *vaddr)
 {
@@ -591,6 +635,19 @@ static __inline__ int __constant_coldfir
 
 	return retval;
 }
+#else
+static __inline__ int __constant_coldfire_test_and_change_bit(int nr, volatile void * vaddr)
+{
+	char retval;
+	volatile char *p = &((volatile char *)vaddr)[(nr^31) >> 3];
+
+	__asm__ __volatile__ ("bchg %2,(%4); sne %0"
+		: "=d" (retval), "=m" (*p)
+		: "id" (nr & 7), "m" (*p), "a" (p));
+
+	return retval;
+}
+#endif
 
 static __inline__ int __generic_coldfire_test_and_change_bit(int nr,
 		volatile void *vaddr)
@@ -612,6 +669,7 @@ static __inline__ int __generic_coldfire
 	__constant_coldfire_change_bit(nr, vaddr) : \
 	__generic_coldfire_change_bit(nr, vaddr))
 
+#if 0
 static __inline__ void __constant_coldfire_change_bit(int nr,
 		volatile void *vaddr)
 {
@@ -619,6 +677,14 @@ static __inline__ void __constant_coldfi
 	__asm__ __volatile__ ("bchg %1,%0"
 		: "+QUd" (*p) : "id" (nr & 7));
 }
+#else
+static __inline__ void __constant_coldfire_change_bit(int nr, volatile void * vaddr)
+{
+	volatile char *p = &((volatile char *)vaddr)[(nr^31) >> 3];
+	__asm__ __volatile__ ("bchg %1,(%3)"
+		: "=m" (*p) : "id" (nr & 7), "m" (*p), "a" (p));
+}
+#endif
 
 static __inline__ void __generic_coldfire_change_bit(int nr,
 		volatile void *vaddr)
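For reference (not part of the patch): the operand-passing change described in the
commit message, shown in isolation as a standalone sketch. It assumes an
m68k/ColdFire GCC target; the function names are illustrative only, and both
variants reuse the pointer computation from the new code, since the old function
bodies are only partly visible in the hunk context.

/* Old style: the target byte is a single read-write operand ("+QUd"),
 * substituted directly into the template as %0.
 */
static inline void sketch_set_bit_old(int nr, volatile void *vaddr)
{
	volatile char *p = &((volatile char *)vaddr)[(nr ^ 31) >> 3];
	__asm__ __volatile__ ("bset %1,%0"
		: "+QUd" (*p) : "di" (nr & 7));
}

/* New style: the pointer itself is passed in an address register ("a")
 * and dereferenced explicitly in the template as (%3), while the
 * "=m"/"m" operands on *p still tell GCC that the byte is written and
 * read by the instruction.
 */
static inline void sketch_set_bit_new(int nr, volatile void *vaddr)
{
	volatile char *p = &((volatile char *)vaddr)[(nr ^ 31) >> 3];
	__asm__ __volatile__ ("bset %1,(%3)"
		: "=m" (*p) : "di" (nr & 7), "m" (*p), "a" (p));
}

The new form forces an address-register-indirect access, keeping the addressing
mode under the template's control, at the cost of listing the byte twice (as an
"=m" output and an "m" input) so the compiler still tracks the memory dependency.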