#ifndef _ALPHA_CMPXCHG_H
#error Do not include xchg.h directly!
#else
/*
 * xchg/xchg_local and cmpxchg/cmpxchg_local share the same code
 * except that the local versions do not have the expensive memory barrier.
 * So this file is included twice from asm/cmpxchg.h.
 */
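
/*
 * Illustrative sketch (added commentary, not part of this file): asm/cmpxchg.h
 * is expected to define the ____xchg()/____cmpxchg() name-hook macros and then
 * include this header twice, roughly along these lines (the exact macro bodies
 * may differ between kernel versions):
 *
 *	#define ____xchg(type, args...)		__xchg ## type ## _local(args)
 *	#define ____cmpxchg(type, args...)	__cmpxchg ## type ## _local(args)
 *	#include <asm/xchg.h>
 *
 *	#undef ____xchg
 *	#undef ____cmpxchg
 *	#define ____xchg(type, args...)		__xchg ## type(args)
 *	#define ____cmpxchg(type, args...)	__cmpxchg ## type(args)
 *	#include <asm/xchg.h>
 *
 * so each ____xchg(_u8, ...) definition below expands to a differently
 * named function on each pass.
 */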

/*
 * Atomic exchange.
 * Since it can be used to implement critical sections
 * it must clobber "memory" (also for interrupts in UP).
 */

static inline unsigned long
____xchg(_u8, volatile char *m, unsigned long val)
{
	unsigned long ret, tmp, addr64;

	__asm__ __volatile__(
	"	andnot	%4,7,%3\n"
	"	insbl	%1,%4,%1\n"
	"1:	ldq_l	%2,0(%3)\n"
	"	extbl	%2,%4,%0\n"
	"	mskbl	%2,%4,%2\n"
	"	or	%1,%2,%2\n"
	"	stq_c	%2,0(%3)\n"
	"	beq	%2,2f\n"
		__ASM__MB
	".subsection 2\n"
	"2:	br	1b\n"
	".previous"
	: "=&r" (ret), "=&r" (val), "=&r" (tmp), "=&r" (addr64)
	: "r" ((long)m), "1" (val) : "memory");

	return ret;
}
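
/*
 * Note (added commentary): the _u8/_u16 variants cannot use a byte-sized
 * locked load, so they operate on the aligned quadword containing the value:
 * "andnot %4,7,%3" masks off the low address bits, insbl/inswl shifts the new
 * value into its byte lane, and after ldq_l the old lane is extracted
 * (extbl/extwl), cleared (mskbl/mskwl), OR-ed with the new lane, and written
 * back with stq_c.  A non-atomic C sketch of the byte case, for illustration
 * only:
 *
 *	unsigned long *q = (unsigned long *)((long)m & ~7UL);
 *	int shift = ((long)m & 7) * 8;
 *	unsigned long old_q = *q;
 *	unsigned long ret = (old_q >> shift) & 0xff;
 *	*q = (old_q & ~(0xffUL << shift)) | ((val & 0xff) << shift);
 */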

static inline unsigned long
____xchg(_u16, volatile short *m, unsigned long val)
{
	unsigned long ret, tmp, addr64;

	__asm__ __volatile__(
	"	andnot	%4,7,%3\n"
	"	inswl	%1,%4,%1\n"
	"1:	ldq_l	%2,0(%3)\n"
	"	extwl	%2,%4,%0\n"
	"	mskwl	%2,%4,%2\n"
	"	or	%1,%2,%2\n"
	"	stq_c	%2,0(%3)\n"
	"	beq	%2,2f\n"
		__ASM__MB
	".subsection 2\n"
	"2:	br	1b\n"
	".previous"
	: "=&r" (ret), "=&r" (val), "=&r" (tmp), "=&r" (addr64)
	: "r" ((long)m), "1" (val) : "memory");

	return ret;
}

static inline unsigned long
____xchg(_u32, volatile int *m, unsigned long val)
{
	unsigned long dummy;

	__asm__ __volatile__(
	"1:	ldl_l %0,%4\n"
	"	bis $31,%3,%1\n"
	"	stl_c %1,%2\n"
	"	beq %1,2f\n"
		__ASM__MB
	".subsection 2\n"
	"2:	br 1b\n"
	".previous"
	: "=&r" (val), "=&r" (dummy), "=m" (*m)
	: "rI" (val), "m" (*m) : "memory");

	return val;
}

static inline unsigned long
____xchg(_u64, volatile long *m, unsigned long val)
{
	unsigned long dummy;

	__asm__ __volatile__(
	"1:	ldq_l %0,%4\n"
	"	bis $31,%3,%1\n"
	"	stq_c %1,%2\n"
	"	beq %1,2f\n"
		__ASM__MB
	".subsection 2\n"
	"2:	br 1b\n"
	".previous"
	: "=&r" (val), "=&r" (dummy), "=m" (*m)
	: "rI" (val), "m" (*m) : "memory");

	return val;
}
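
/*
 * Note (added commentary): the 32- and 64-bit variants need no byte-lane
 * manipulation; ldl_l/ldq_l load the old value directly, "bis $31,%3,%1"
 * copies the new value into a scratch register ($31 reads as zero, so this is
 * a plain register move), and stl_c/stq_c attempt the conditional store,
 * retrying via the out-of-line branch in .subsection 2 if the reservation
 * was lost.
 */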

/* This function doesn't exist, so you'll get a linker error
   if something tries to do an invalid xchg().  */
extern void __xchg_called_with_bad_pointer(void);

static __always_inline unsigned long
____xchg(, volatile void *ptr, unsigned long x, int size)
{
	switch (size) {
	case 1:
		return ____xchg(_u8, ptr, x);
	case 2:
		return ____xchg(_u16, ptr, x);
	case 4:
		return ____xchg(_u32, ptr, x);
	case 8:
		return ____xchg(_u64, ptr, x);
	}
	__xchg_called_with_bad_pointer();
	return x;
}
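
/*
 * Illustrative usage (added commentary): callers do not invoke ____xchg()
 * directly; they go through the xchg() wrapper that asm/cmpxchg.h is expected
 * to build on top of this dispatcher, e.g.
 *
 *	unsigned long lock_word = 0;
 *	unsigned long prev = xchg(&lock_word, 1UL);
 *
 * prev == 0 here means this caller flipped the word from 0 to 1; the
 * sizeof()-based dispatch above selects the _u64 variant at compile time.
 */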

/*
 * Atomic compare and exchange.  Compare OLD with MEM, if identical,
 * store NEW in MEM.  Return the initial value in MEM.  Success is
 * indicated by comparing RETURN with OLD.
 *
 * The memory barrier should be placed in SMP only when we actually
 * make the change.  If we don't change anything (i.e. the returned
 * prev is not equal to old) then we aren't acquiring anything new and
 * we don't need any memory barrier as far as I can tell.
 */
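
/*
 * Added commentary: the semantics described above correspond to this
 * non-atomic C sketch, shown for illustration only:
 *
 *	prev = *m;
 *	if (prev == old)
 *		*m = new;
 *	return prev;
 *
 * The variants below perform the load, compare, and conditional store as one
 * atomic sequence via ldq_l/stq_c (or ldl_l/stl_c).
 */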

static inline unsigned long
____cmpxchg(_u8, volatile char *m, unsigned char old, unsigned char new)
{
	unsigned long prev, tmp, cmp, addr64;

	__asm__ __volatile__(
	"	andnot	%5,7,%4\n"
	"	insbl	%1,%5,%1\n"
	"1:	ldq_l	%2,0(%4)\n"
	"	extbl	%2,%5,%0\n"
	"	cmpeq	%0,%6,%3\n"
	"	beq	%3,2f\n"
	"	mskbl	%2,%5,%2\n"
	"	or	%1,%2,%2\n"
	"	stq_c	%2,0(%4)\n"
	"	beq	%2,3f\n"
		__ASM__MB
	"2:\n"
	".subsection 2\n"
	"3:	br	1b\n"
	".previous"
	: "=&r" (prev), "=&r" (new), "=&r" (tmp), "=&r" (cmp), "=&r" (addr64)
	: "r" ((long)m), "Ir" (old), "1" (new) : "memory");

	return prev;
}
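
/*
 * Note (added commentary): in all the ____cmpxchg() variants a failed compare
 * ("beq %3,2f" / "beq %1,2f") branches forward past __ASM__MB to label 2, so
 * the barrier is only executed when the store actually happens, exactly as the
 * comment above prescribes; a lost store-conditional reservation instead
 * branches to label 3 in .subsection 2 and retries from label 1.
 */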

static inline unsigned long
____cmpxchg(_u16, volatile short *m, unsigned short old, unsigned short new)
{
	unsigned long prev, tmp, cmp, addr64;

	__asm__ __volatile__(
	"	andnot	%5,7,%4\n"
	"	inswl	%1,%5,%1\n"
	"1:	ldq_l	%2,0(%4)\n"
	"	extwl	%2,%5,%0\n"
	"	cmpeq	%0,%6,%3\n"
	"	beq	%3,2f\n"
	"	mskwl	%2,%5,%2\n"
	"	or	%1,%2,%2\n"
	"	stq_c	%2,0(%4)\n"
	"	beq	%2,3f\n"
		__ASM__MB
	"2:\n"
	".subsection 2\n"
	"3:	br	1b\n"
	".previous"
	: "=&r" (prev), "=&r" (new), "=&r" (tmp), "=&r" (cmp), "=&r" (addr64)
	: "r" ((long)m), "Ir" (old), "1" (new) : "memory");

	return prev;
}

static inline unsigned long
____cmpxchg(_u32, volatile int *m, int old, int new)
{
	unsigned long prev, cmp;

	__asm__ __volatile__(
	"1:	ldl_l %0,%5\n"
	"	cmpeq %0,%3,%1\n"
	"	beq %1,2f\n"
	"	mov %4,%1\n"
	"	stl_c %1,%2\n"
	"	beq %1,3f\n"
		__ASM__MB
	"2:\n"
	".subsection 2\n"
	"3:	br 1b\n"
	".previous"
	: "=&r"(prev), "=&r"(cmp), "=m"(*m)
	: "r"((long) old), "r"(new), "m"(*m) : "memory");

	return prev;
}

static inline unsigned long
____cmpxchg(_u64, volatile long *m, unsigned long old, unsigned long new)
{
	unsigned long prev, cmp;

	__asm__ __volatile__(
	"1:	ldq_l %0,%5\n"
	"	cmpeq %0,%3,%1\n"
	"	beq %1,2f\n"
	"	mov %4,%1\n"
	"	stq_c %1,%2\n"
	"	beq %1,3f\n"
		__ASM__MB
	"2:\n"
	".subsection 2\n"
	"3:	br 1b\n"
	".previous"
	: "=&r"(prev), "=&r"(cmp), "=m"(*m)
	: "r"((long) old), "r"(new), "m"(*m) : "memory");

	return prev;
}

/* This function doesn't exist, so you'll get a linker error
   if something tries to do an invalid cmpxchg().  */
extern void __cmpxchg_called_with_bad_pointer(void);

static __always_inline unsigned long
____cmpxchg(, volatile void *ptr, unsigned long old, unsigned long new,
	    int size)
{
	switch (size) {
	case 1:
		return ____cmpxchg(_u8, ptr, old, new);
	case 2:
		return ____cmpxchg(_u16, ptr, old, new);
	case 4:
		return ____cmpxchg(_u32, ptr, old, new);
	case 8:
		return ____cmpxchg(_u64, ptr, old, new);
	}
	__cmpxchg_called_with_bad_pointer();
	return old;
}
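
/*
 * Illustrative usage (added commentary): as with xchg(), callers go through
 * the cmpxchg() wrapper expected from asm/cmpxchg.h rather than calling
 * ____cmpxchg() directly.  A typical lock-free update loop looks like:
 *
 *	unsigned long old, new;
 *	do {
 *		old = counter;
 *		new = old + 1;
 *	} while (cmpxchg(&counter, old, new) != old);
 *
 * Here "counter" is a hypothetical unsigned long; the loop retries until no
 * other CPU modified the value between the read and the cmpxchg().
 */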

#endif