atomic64_cx8_32.S

/*
 * atomic64_t for 586+
 *
 * Copyright © 2010 Luca Barbieri
 *
 * This program is free software; you can redistribute it and/or modify
 * it under the terms of the GNU General Public License as published by
 * the Free Software Foundation; either version 2 of the License, or
 * (at your option) any later version.
 */

#include <linux/linkage.h>
#include <asm/alternative-asm.h>
#include <asm/dwarf2.h>

.macro SAVE reg
	pushl_cfi %\reg
	CFI_REL_OFFSET \reg, 0
.endm

.macro RESTORE reg
	popl_cfi %\reg
	CFI_RESTORE \reg
.endm
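
/*
 * read64: atomically load the 64-bit value at (\reg) into %edx:%eax.
 * Roughly equivalent C (a sketch, assuming the kernel's cmpxchg64()
 * helper; not part of the original source):
 *
 *	// The compare value is whatever junk happens to be in %ebx:%ecx;
 *	// on a match the same value is stored back, on a mismatch nothing
 *	// changes and the current value is returned.  Either way memory is
 *	// left intact and the 64-bit read is atomic.
 *	old = cmpxchg64(p, junk, junk);
 */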
.macro read64 reg
	movl %ebx, %eax
	movl %ecx, %edx
/*
 * We need LOCK_PREFIX: cmpxchg8b performs a write cycle even when the
 * compare fails (the old value is stored back), so without the lock that
 * write could clobber a concurrent update from another CPU.
 */
	LOCK_PREFIX
	cmpxchg8b (\reg)
.endm
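
/*
 * atomic64_read_cx8: return the 64-bit value at (%ecx) in %edx:%eax.
 */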
ENTRY(atomic64_read_cx8)
	CFI_STARTPROC

	read64 %ecx
	ret
	CFI_ENDPROC
ENDPROC(atomic64_read_cx8)
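
/*
 * atomic64_set_cx8: store the 64-bit value in %ebx:%ecx to (%esi).  Two
 * separate 32-bit stores would not be atomic, so the store is done with
 * cmpxchg8b: a mismatch loads the current value into %edx:%eax, and the
 * retry then succeeds unless another CPU intervenes.
 */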
ENTRY(atomic64_set_cx8)
	CFI_STARTPROC

1:
/* we don't need LOCK_PREFIX since aligned 64-bit writes
 * are atomic on 586 and newer */
	cmpxchg8b (%esi)
	jne 1b

	ret
	CFI_ENDPROC
ENDPROC(atomic64_set_cx8)
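
/*
 * atomic64_xchg_cx8: atomically replace the value at (%esi) with
 * %ebx:%ecx and return the previous value in %edx:%eax.  The locked
 * cmpxchg8b is retried until the compare value matches, i.e. until the
 * location was not changed by another CPU between read and swap.
 */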
ENTRY(atomic64_xchg_cx8)
	CFI_STARTPROC

1:
	LOCK_PREFIX
	cmpxchg8b (%esi)
	jne 1b

	ret
	CFI_ENDPROC
ENDPROC(atomic64_xchg_cx8)
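
/*
 * addsub_return generates atomic64_add_return_cx8 and
 * atomic64_sub_return_cx8: apply the 64-bit delta in %edx:%eax to the
 * atomic64_t at (%ecx) and return the new value in %edx:%eax.
 * Equivalent C (a sketch, assuming the kernel's cmpxchg64() helper):
 *
 *	old = *p;				// read64
 *	for (;;) {
 *		new = old + delta;		// or old - delta
 *		prev = cmpxchg64(p, old, new);
 *		if (prev == old)
 *			return new;
 *		old = prev;			// cmpxchg left it in %edx:%eax
 *	}
 */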
.macro addsub_return func ins insc
ENTRY(atomic64_\func\()_return_cx8)
	CFI_STARTPROC
	SAVE ebp
	SAVE ebx
	SAVE esi
	SAVE edi

	movl %eax, %esi
	movl %edx, %edi
	movl %ecx, %ebp

	read64 %ecx
1:
	movl %eax, %ebx
	movl %edx, %ecx
	\ins\()l %esi, %ebx
	\insc\()l %edi, %ecx
	LOCK_PREFIX
	cmpxchg8b (%ebp)
	jne 1b

10:
	movl %ebx, %eax
	movl %ecx, %edx
	RESTORE edi
	RESTORE esi
	RESTORE ebx
	RESTORE ebp
	ret
	CFI_ENDPROC
ENDPROC(atomic64_\func\()_return_cx8)
.endm

addsub_return add add adc
addsub_return sub sub sbb
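
/*
 * incdec_return generates atomic64_inc_return_cx8 and
 * atomic64_dec_return_cx8: the same compare-and-exchange loop as above,
 * with a fixed delta of +1 or -1, the atomic64_t pointer in %esi, and
 * the new value returned in %edx:%eax.
 */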
.macro incdec_return func ins insc
ENTRY(atomic64_\func\()_return_cx8)
	CFI_STARTPROC
	SAVE ebx

	read64 %esi
1:
	movl %eax, %ebx
	movl %edx, %ecx
	\ins\()l $1, %ebx
	\insc\()l $0, %ecx
	LOCK_PREFIX
	cmpxchg8b (%esi)
	jne 1b

10:
	movl %ebx, %eax
	movl %ecx, %edx
	RESTORE ebx
	ret
	CFI_ENDPROC
ENDPROC(atomic64_\func\()_return_cx8)
.endm

incdec_return inc add adc
incdec_return dec sub sbb
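
/*
 * atomic64_dec_if_positive_cx8: decrement the value at (%esi) unless the
 * result would be negative; return the (possibly unstored) decremented
 * value in %edx:%eax.  A C sketch (assuming the kernel's cmpxchg64()):
 *
 *	old = *p;				// read64
 *	for (;;) {
 *		new = old - 1;
 *		if (new < 0)
 *			break;			// js 2f: don't store
 *		prev = cmpxchg64(p, old, new);
 *		if (prev == old)
 *			break;
 *		old = prev;
 *	}
 *	return new;
 */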
ENTRY(atomic64_dec_if_positive_cx8)
	CFI_STARTPROC
	SAVE ebx

	read64 %esi
1:
	movl %eax, %ebx
	movl %edx, %ecx
	subl $1, %ebx
	sbbl $0, %ecx
	js 2f
	LOCK_PREFIX
	cmpxchg8b (%esi)
	jne 1b

2:
	movl %ebx, %eax
	movl %ecx, %edx
	RESTORE ebx
	ret
	CFI_ENDPROC
ENDPROC(atomic64_dec_if_positive_cx8)
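
/*
 * atomic64_add_unless_cx8: add %edx:%eax to the value at (%esi) unless
 * that value equals %edi:%ecx; return 1 in %eax if the add was done,
 * 0 otherwise.  A C sketch (assuming the kernel's cmpxchg64()):
 *
 *	old = *p;				// read64
 *	for (;;) {
 *		if (old == u)
 *			return 0;
 *		prev = cmpxchg64(p, old, old + a);
 *		if (prev == old)
 *			return 1;
 *		old = prev;
 *	}
 */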
ENTRY(atomic64_add_unless_cx8)
	CFI_STARTPROC
	SAVE ebp
	SAVE ebx
/* these two SAVEs just park the 'unless' value (%edi:%ecx) on the stack;
 * it is discarded with the addl $8, %esp below, not restored */
	SAVE edi
	SAVE ecx

	movl %eax, %ebp
	movl %edx, %edi

	read64 %esi
1:
	cmpl %eax, 0(%esp)
	je 4f
2:
	movl %eax, %ebx
	movl %edx, %ecx
	addl %ebp, %ebx
	adcl %edi, %ecx
	LOCK_PREFIX
	cmpxchg8b (%esi)
	jne 1b

	movl $1, %eax
3:
	addl $8, %esp
	CFI_ADJUST_CFA_OFFSET -8
	RESTORE ebx
	RESTORE ebp
	ret
4:
	cmpl %edx, 4(%esp)
	jne 2b
	xorl %eax, %eax
	jmp 3b
	CFI_ENDPROC
ENDPROC(atomic64_add_unless_cx8)
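
/*
 * atomic64_inc_not_zero_cx8: increment the value at (%esi) unless it is
 * zero; return 1 in %eax if the increment was done, 0 otherwise (on the
 * zero path %eax, the low half of the value read, is already 0).
 * A C sketch (assuming the kernel's cmpxchg64()):
 *
 *	old = *p;				// read64
 *	for (;;) {
 *		if (old == 0)
 *			return 0;
 *		prev = cmpxchg64(p, old, old + 1);
 *		if (prev == old)
 *			return 1;
 *		old = prev;
 *	}
 */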
ENTRY(atomic64_inc_not_zero_cx8)
	CFI_STARTPROC
	SAVE ebx

	read64 %esi
1:
	movl %eax, %ecx
	orl %edx, %ecx
	jz 3f
	movl %eax, %ebx
	xorl %ecx, %ecx
	addl $1, %ebx
	adcl %edx, %ecx
	LOCK_PREFIX
	cmpxchg8b (%esi)
	jne 1b

	movl $1, %eax
3:
	RESTORE ebx
	ret
	CFI_ENDPROC
ENDPROC(atomic64_inc_not_zero_cx8)