#ifndef _ALPHA_CMPXCHG_H
#error Do not include xchg.h directly!
#else
/*
 * xchg/xchg_local and cmpxchg/cmpxchg_local share the same code,
 * except that the local versions do not have the expensive memory
 * barrier.  So this file is included twice from asm/cmpxchg.h.
 */
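
/*
 * Note on naming: ____xchg() and ____cmpxchg() are not oddly named
 * functions; they are macros provided by the including asm/cmpxchg.h,
 * which paste a variant suffix onto the first argument to form the real
 * function names (e.g. __xchg_u8() for the ordered variant and
 * __xchg_u8_local() for the local one).  Likewise, __ASM__MB is expected
 * to expand to a real memory-barrier instruction only for the fully
 * ordered SMP variant and to nothing for the local variant.
 */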

/*
 * Atomic exchange.
 * Since it can be used to implement critical sections
 * it must clobber "memory" (also for interrupts in UP).
 *
 * The leading and the trailing memory barriers guarantee that these
 * operations are fully ordered.
 */
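
/*
 * Alpha has no byte- or word-sized load-locked/store-conditional, so the
 * _u8 and _u16 variants below operate on the aligned quadword containing
 * the target: andnot masks the address down to an 8-byte boundary,
 * insbl/inswl shifts the new value into its lane, extbl/extwl pulls the
 * old value out, and mskbl/mskwl clears the lane before merging.  If
 * stq_c fails, the out-of-line "2:" branch retries from the ldq_l.
 */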

static inline unsigned long
____xchg(_u8, volatile char *m, unsigned long val)
{
	unsigned long ret, tmp, addr64;

	smp_mb();
	__asm__ __volatile__(
	"	andnot	%4,7,%3\n"
	"	insbl	%1,%4,%1\n"
	"1:	ldq_l	%2,0(%3)\n"
	"	extbl	%2,%4,%0\n"
	"	mskbl	%2,%4,%2\n"
	"	or	%1,%2,%2\n"
	"	stq_c	%2,0(%3)\n"
	"	beq	%2,2f\n"
		__ASM__MB
	".subsection 2\n"
	"2:	br	1b\n"
	".previous"
	: "=&r" (ret), "=&r" (val), "=&r" (tmp), "=&r" (addr64)
	: "r" ((long)m), "1" (val) : "memory");

	return ret;
}

static inline unsigned long
____xchg(_u16, volatile short *m, unsigned long val)
{
	unsigned long ret, tmp, addr64;

	smp_mb();
	__asm__ __volatile__(
	"	andnot	%4,7,%3\n"
	"	inswl	%1,%4,%1\n"
	"1:	ldq_l	%2,0(%3)\n"
	"	extwl	%2,%4,%0\n"
	"	mskwl	%2,%4,%2\n"
	"	or	%1,%2,%2\n"
	"	stq_c	%2,0(%3)\n"
	"	beq	%2,2f\n"
		__ASM__MB
	".subsection 2\n"
	"2:	br	1b\n"
	".previous"
	: "=&r" (ret), "=&r" (val), "=&r" (tmp), "=&r" (addr64)
	: "r" ((long)m), "1" (val) : "memory");

	return ret;
}
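
/*
 * The 32- and 64-bit variants are naturally aligned, so they can use
 * ldl_l/stl_c and ldq_l/stq_c directly on *m with no lane manipulation.
 */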

static inline unsigned long
____xchg(_u32, volatile int *m, unsigned long val)
{
	unsigned long dummy;

	smp_mb();
	__asm__ __volatile__(
	"1:	ldl_l %0,%4\n"
	"	bis $31,%3,%1\n"
	"	stl_c %1,%2\n"
	"	beq %1,2f\n"
		__ASM__MB
	".subsection 2\n"
	"2:	br 1b\n"
	".previous"
	: "=&r" (val), "=&r" (dummy), "=m" (*m)
	: "rI" (val), "m" (*m) : "memory");

	return val;
}

static inline unsigned long
____xchg(_u64, volatile long *m, unsigned long val)
{
	unsigned long dummy;

	smp_mb();
	__asm__ __volatile__(
	"1:	ldq_l %0,%4\n"
	"	bis $31,%3,%1\n"
	"	stq_c %1,%2\n"
	"	beq %1,2f\n"
		__ASM__MB
	".subsection 2\n"
	"2:	br 1b\n"
	".previous"
	: "=&r" (val), "=&r" (dummy), "=m" (*m)
	: "rI" (val), "m" (*m) : "memory");

	return val;
}

/* This function doesn't exist, so you'll get a linker error
   if something tries to do an invalid xchg().  */
extern void __xchg_called_with_bad_pointer(void);

static __always_inline unsigned long
____xchg(, volatile void *ptr, unsigned long x, int size)
{
	switch (size) {
	case 1:
		return ____xchg(_u8, ptr, x);
	case 2:
		return ____xchg(_u16, ptr, x);
	case 4:
		return ____xchg(_u32, ptr, x);
	case 8:
		return ____xchg(_u64, ptr, x);
	}
	__xchg_called_with_bad_pointer();
	return x;
}
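
/*
 * Because this helper is __always_inline and the size argument comes from
 * a sizeof() in the xchg() wrapper in asm/cmpxchg.h, the switch folds away
 * at compile time and the __xchg_called_with_bad_pointer() call survives
 * only for unsupported sizes, where it then fails at link time.
 *
 * A minimal, illustrative sketch of how the resulting xchg() is typically
 * used (not part of this header; "my_flag" is a hypothetical flag word):
 *
 *	static unsigned long my_flag;
 *
 *	while (xchg(&my_flag, 1) != 0)	// atomically set, observe old value
 *		cpu_relax();		// someone else holds it; spin
 *	// ... critical section ...
 *	smp_mb();			// order the section before the release
 *	WRITE_ONCE(my_flag, 0);		// release
 *
 * The fully ordered xchg() provides the acquire side of this pattern via
 * the leading and trailing barriers described above.
 */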

/*
 * Atomic compare and exchange.  Compare OLD with MEM, if identical,
 * store NEW in MEM.  Return the initial value in MEM.  Success is
 * indicated by comparing RETURN with OLD.
 *
 * The leading and the trailing memory barriers guarantee that these
 * operations are fully ordered.
 *
 * The trailing memory barrier is placed in SMP unconditionally, in
 * order to guarantee that dependency ordering is preserved when a
 * dependency is headed by an unsuccessful operation.
 */
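
/*
 * Structure of the asm below: if cmpeq finds a mismatch, beq skips the
 * store and falls through to label "2:", so the trailing barrier runs on
 * failure as well (see the note above).  If the store-conditional loses
 * its reservation, the out-of-line "3:" branch in .subsection 2 retries
 * from the load-locked, keeping the common path free of taken branches.
 */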

static inline unsigned long
____cmpxchg(_u8, volatile char *m, unsigned char old, unsigned char new)
{
	unsigned long prev, tmp, cmp, addr64;

	smp_mb();
	__asm__ __volatile__(
	"	andnot	%5,7,%4\n"
	"	insbl	%1,%5,%1\n"
	"1:	ldq_l	%2,0(%4)\n"
	"	extbl	%2,%5,%0\n"
	"	cmpeq	%0,%6,%3\n"
	"	beq	%3,2f\n"
	"	mskbl	%2,%5,%2\n"
	"	or	%1,%2,%2\n"
	"	stq_c	%2,0(%4)\n"
	"	beq	%2,3f\n"
	"2:\n"
		__ASM__MB
	".subsection 2\n"
	"3:	br	1b\n"
	".previous"
	: "=&r" (prev), "=&r" (new), "=&r" (tmp), "=&r" (cmp), "=&r" (addr64)
	: "r" ((long)m), "Ir" (old), "1" (new) : "memory");

	return prev;
}

static inline unsigned long
____cmpxchg(_u16, volatile short *m, unsigned short old, unsigned short new)
{
	unsigned long prev, tmp, cmp, addr64;

	smp_mb();
	__asm__ __volatile__(
	"	andnot	%5,7,%4\n"
	"	inswl	%1,%5,%1\n"
	"1:	ldq_l	%2,0(%4)\n"
	"	extwl	%2,%5,%0\n"
	"	cmpeq	%0,%6,%3\n"
	"	beq	%3,2f\n"
	"	mskwl	%2,%5,%2\n"
	"	or	%1,%2,%2\n"
	"	stq_c	%2,0(%4)\n"
	"	beq	%2,3f\n"
	"2:\n"
		__ASM__MB
	".subsection 2\n"
	"3:	br	1b\n"
	".previous"
	: "=&r" (prev), "=&r" (new), "=&r" (tmp), "=&r" (cmp), "=&r" (addr64)
	: "r" ((long)m), "Ir" (old), "1" (new) : "memory");

	return prev;
}

static inline unsigned long
____cmpxchg(_u32, volatile int *m, int old, int new)
{
	unsigned long prev, cmp;

	smp_mb();
	__asm__ __volatile__(
	"1:	ldl_l %0,%5\n"
	"	cmpeq %0,%3,%1\n"
	"	beq %1,2f\n"
	"	mov %4,%1\n"
	"	stl_c %1,%2\n"
	"	beq %1,3f\n"
	"2:\n"
		__ASM__MB
	".subsection 2\n"
	"3:	br 1b\n"
	".previous"
	: "=&r"(prev), "=&r"(cmp), "=m"(*m)
	: "r"((long) old), "r"(new), "m"(*m) : "memory");

	return prev;
}

static inline unsigned long
____cmpxchg(_u64, volatile long *m, unsigned long old, unsigned long new)
{
	unsigned long prev, cmp;

	smp_mb();
	__asm__ __volatile__(
	"1:	ldq_l %0,%5\n"
	"	cmpeq %0,%3,%1\n"
	"	beq %1,2f\n"
	"	mov %4,%1\n"
	"	stq_c %1,%2\n"
	"	beq %1,3f\n"
	"2:\n"
		__ASM__MB
	".subsection 2\n"
	"3:	br 1b\n"
	".previous"
	: "=&r"(prev), "=&r"(cmp), "=m"(*m)
	: "r"((long) old), "r"(new), "m"(*m) : "memory");

	return prev;
}

/* This function doesn't exist, so you'll get a linker error
   if something tries to do an invalid cmpxchg().  */
extern void __cmpxchg_called_with_bad_pointer(void);

static __always_inline unsigned long
____cmpxchg(, volatile void *ptr, unsigned long old, unsigned long new,
	    int size)
{
	switch (size) {
	case 1:
		return ____cmpxchg(_u8, ptr, old, new);
	case 2:
		return ____cmpxchg(_u16, ptr, old, new);
	case 4:
		return ____cmpxchg(_u32, ptr, old, new);
	case 8:
		return ____cmpxchg(_u64, ptr, old, new);
	}
	__cmpxchg_called_with_bad_pointer();
	return old;
}
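
/*
 * A minimal, illustrative sketch of the usual cmpxchg() retry loop (not
 * part of this header; "counter" is a hypothetical variable).  The caller
 * keeps retrying until cmpxchg() returns the value it passed as "old",
 * which is how success is detected per the comment above:
 *
 *	unsigned long old, new;
 *
 *	do {
 *		old = READ_ONCE(counter);
 *		new = old + 1;		// any update computed from old
 *	} while (cmpxchg(&counter, old, new) != old);
 */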

#endif