/* atomic64_cx8_32.S */
  1. /*
  2. * atomic64_t for 586+
  3. *
  4. * Copied from arch/x86/lib/atomic64_cx8_32.S
  5. *
  6. * Copyright © 2010 Luca Barbieri
  7. *
  8. * This program is free software; you can redistribute it and/or modify
  9. * it under the terms of the GNU General Public License as published by
  10. * the Free Software Foundation; either version 2 of the License, or
  11. * (at your option) any later version.
  12. *
  13. */
  14. #include <linux/linkage.h>
  15. #include <asm/alternative-asm.h>
  16. #include <asm/dwarf2.h>
/* Push a callee-saved register and annotate the DWARF CFI so unwinders
 * can find its saved copy at the new top of stack. */
.macro SAVE reg
	pushl_cfi %\reg
	CFI_REL_OFFSET \reg, 0
.endm
/* Pop a register saved with SAVE and retire its CFI annotation. */
.macro RESTORE reg
	popl_cfi %\reg
	CFI_RESTORE \reg
.endm
/*
 * Atomically read the 64-bit value at (\reg) into %edx:%eax.
 *
 * %edx:%eax is seeded from %ecx:%ebx so that either outcome of the
 * cmpxchg8b leaves %edx:%eax holding the current memory contents:
 * on a match the same %ecx:%ebx value is stored back (no visible
 * change), on a mismatch the old value is loaded into %edx:%eax.
 * Clobbers: %eax, %edx, flags.
 */
.macro read64 reg
	movl %ebx, %eax
	movl %ecx, %edx
/* we need LOCK_PREFIX since otherwise cmpxchg8b always does the write */
	LOCK_PREFIX
	cmpxchg8b (\reg)
.endm
  32. ENTRY(atomic64_read_cx8)
  33. CFI_STARTPROC
  34. read64 %ecx
  35. ret
  36. CFI_ENDPROC
  37. ENDPROC(atomic64_read_cx8)
  38. ENTRY(atomic64_set_cx8)
  39. CFI_STARTPROC
  40. 1:
  41. /* we don't need LOCK_PREFIX since aligned 64-bit writes
  42. * are atomic on 586 and newer */
  43. cmpxchg8b (%esi)
  44. jne 1b
  45. ret
  46. CFI_ENDPROC
  47. ENDPROC(atomic64_set_cx8)
  48. ENTRY(atomic64_xchg_cx8)
  49. CFI_STARTPROC
  50. movl %ebx, %eax
  51. movl %ecx, %edx
  52. 1:
  53. LOCK_PREFIX
  54. cmpxchg8b (%esi)
  55. jne 1b
  56. ret
  57. CFI_ENDPROC
  58. ENDPROC(atomic64_xchg_cx8)
/*
 * Emit atomic64_\func\()_return_cx8: atomically apply \ins/\insc
 * (the low/high-word instruction pair, e.g. add/adc or sub/sbb) of
 * %edx:%eax to the atomic64_t at %ecx, returning the new value in
 * %edx:%eax.
 */
.macro addsub_return func ins insc
ENTRY(atomic64_\func\()_return_cx8)
	CFI_STARTPROC
	SAVE ebp
	SAVE ebx
	SAVE esi
	SAVE edi
	/* park the operand in %edi:%esi and the pointer in %ebp, freeing
	 * %edx:%eax (comparand) and %ecx:%ebx (new value) for cmpxchg8b */
	movl %eax, %esi
	movl %edx, %edi
	movl %ecx, %ebp
	read64 %ebp
1:
	/* new value = observed value (%edx:%eax) op operand (%edi:%esi) */
	movl %eax, %ebx
	movl %edx, %ecx
	\ins\()l %esi, %ebx
	\insc\()l %edi, %ecx
	LOCK_PREFIX
	cmpxchg8b (%ebp)
	jne 1b	/* lost the race; %edx:%eax was reloaded, recompute */
10:	/* NOTE(review): label 10 has no referencing jump in this view;
	 * presumably kept for an overflow-checking variant -- confirm */
	movl %ebx, %eax
	movl %ecx, %edx
	RESTORE edi
	RESTORE esi
	RESTORE ebx
	RESTORE ebp
	ret
	CFI_ENDPROC
ENDPROC(atomic64_\func\()_return_cx8)
.endm
/* Instantiate atomic64_add_return_cx8 and atomic64_sub_return_cx8. */
addsub_return add add adc
addsub_return sub sub sbb
/*
 * Emit atomic64_\func\()_return_cx8 for inc/dec: atomically apply
 * \ins/\insc (add/adc or sub/sbb) of the constant 1 to the atomic64_t
 * at %esi, returning the new value in %edx:%eax.
 */
.macro incdec_return func ins insc
ENTRY(atomic64_\func\()_return_cx8)
	CFI_STARTPROC
	SAVE ebx
	read64 %esi
1:
	/* new value in %ecx:%ebx = observed value +/- 1 */
	movl %eax, %ebx
	movl %edx, %ecx
	\ins\()l $1, %ebx
	\insc\()l $0, %ecx
	LOCK_PREFIX
	cmpxchg8b (%esi)
	jne 1b	/* raced with another writer; %edx:%eax reloaded, retry */
10:	/* NOTE(review): label 10 has no referencing jump in this view;
	 * presumably kept for symmetry with addsub_return -- confirm */
	movl %ebx, %eax
	movl %ecx, %edx
	RESTORE ebx
	ret
	CFI_ENDPROC
ENDPROC(atomic64_\func\()_return_cx8)
.endm
/* Instantiate atomic64_inc_return_cx8 and atomic64_dec_return_cx8. */
incdec_return inc add adc
incdec_return dec sub sbb
/*
 * Atomically decrement the atomic64_t at %esi unless the result would
 * be negative. Returns the decremented value in %edx:%eax either way;
 * the store is skipped when that value is negative (sign bit of the
 * high word set), leaving memory untouched.
 */
ENTRY(atomic64_dec_if_positive_cx8)
	CFI_STARTPROC
	SAVE ebx
	read64 %esi
1:
	/* candidate result in %ecx:%ebx = observed value - 1 */
	movl %eax, %ebx
	movl %edx, %ecx
	subl $1, %ebx
	sbb $0, %ecx
	js 2f	/* went negative: return it without storing */
	LOCK_PREFIX
	cmpxchg8b (%esi)
	jne 1b	/* raced; %edx:%eax reloaded by cmpxchg8b, retry */
2:
	movl %ebx, %eax
	movl %ecx, %edx
	RESTORE ebx
	ret
	CFI_ENDPROC
ENDPROC(atomic64_dec_if_positive_cx8)
/*
 * Atomically add %edx:%eax to the atomic64_t at %ecx unless its
 * current value equals the comparand passed in %edi:%esi.
 * Returns %eax = 1 if the add was performed, 0 if *v == comparand.
 */
ENTRY(atomic64_add_unless_cx8)
	CFI_STARTPROC
	SAVE ebp
	SAVE ebx
/* these just push these two parameters on the stack */
	SAVE edi	/* high word of comparand -> 4(%esp) after next push */
	SAVE esi	/* low word of comparand  -> 0(%esp) */
	/* pointer to %ebp; addend to %edi:%esi, freeing %edx:%eax and
	 * %ecx:%ebx for the cmpxchg8b loop */
	movl %ecx, %ebp
	movl %eax, %esi
	movl %edx, %edi
	read64 %ebp
1:
	cmpl %eax, 0(%esp)	/* low words equal? maybe *v == comparand */
	je 4f
2:
	/* new value in %ecx:%ebx = observed value + addend */
	movl %eax, %ebx
	movl %edx, %ecx
	addl %esi, %ebx
	adcl %edi, %ecx
	LOCK_PREFIX
	cmpxchg8b (%ebp)
	jne 1b	/* raced; %edx:%eax reloaded, recheck and retry */
	movl $1, %eax	/* added: return 1 */
3:
	/* drop the two comparand words pushed above */
	addl $8, %esp
	CFI_ADJUST_CFA_OFFSET -8
	RESTORE ebx
	RESTORE ebp
	ret
4:
	cmpl %edx, 4(%esp)	/* high words too? */
	jne 2b	/* no: values differ after all, go do the add */
	xorl %eax, %eax	/* *v == comparand: return 0 without storing */
	jmp 3b
	CFI_ENDPROC
ENDPROC(atomic64_add_unless_cx8)
/*
 * Atomically increment the atomic64_t at %esi unless it is zero.
 * Returns %eax = 1 if incremented, 0 if the value was zero.
 */
ENTRY(atomic64_inc_not_zero_cx8)
	CFI_STARTPROC
	SAVE ebx
	read64 %esi
1:
	testl %eax, %eax	/* low word zero? whole value may be zero */
	je 4f
2:
	/* new value in %ecx:%ebx = observed value + 1 */
	movl %eax, %ebx
	movl %edx, %ecx
	addl $1, %ebx
	adcl $0, %ecx
	LOCK_PREFIX
	cmpxchg8b (%esi)
	jne 1b	/* raced; %edx:%eax reloaded, recheck and retry */
	movl $1, %eax	/* incremented: return 1 */
3:
	RESTORE ebx
	ret
4:
	testl %edx, %edx	/* high word zero too? */
	jne 2b	/* no: value nonzero, go increment */
	/* here %eax is already 0 (tested at 1:), the return value */
	jmp 3b
	CFI_ENDPROC
ENDPROC(atomic64_inc_not_zero_cx8)