/* atomic64_386_32.S */
/*
 * atomic64_t for 386/486
 *
 * Copyright © 2010 Luca Barbieri
 *
 * This program is free software; you can redistribute it and/or modify
 * it under the terms of the GNU General Public License as published by
 * the Free Software Foundation; either version 2 of the License, or
 * (at your option) any later version.
 */
  11. #include <linux/linkage.h>
  12. #include <asm/alternative-asm.h>
  13. #include <asm/dwarf2.h>
  14. /* if you want SMP support, implement these with real spinlocks */
  15. .macro LOCK reg
  16. pushfl_cfi
  17. cli
  18. .endm
  19. .macro UNLOCK reg
  20. popfl_cfi
  21. .endm
  22. #define BEGIN(op) \
  23. .macro endp; \
  24. CFI_ENDPROC; \
  25. ENDPROC(atomic64_##op##_386); \
  26. .purgem endp; \
  27. .endm; \
  28. ENTRY(atomic64_##op##_386); \
  29. CFI_STARTPROC; \
  30. LOCK v;
  31. #define ENDP endp
  32. #define RET \
  33. UNLOCK v; \
  34. ret
  35. #define RET_ENDP \
  36. RET; \
  37. ENDP
  38. #define v %ecx
  39. BEGIN(read)
  40. movl (v), %eax
  41. movl 4(v), %edx
  42. RET_ENDP
  43. #undef v
  44. #define v %esi
  45. BEGIN(set)
  46. movl %ebx, (v)
  47. movl %ecx, 4(v)
  48. RET_ENDP
  49. #undef v
  50. #define v %esi
  51. BEGIN(xchg)
  52. movl (v), %eax
  53. movl 4(v), %edx
  54. movl %ebx, (v)
  55. movl %ecx, 4(v)
  56. RET_ENDP
  57. #undef v
  58. #define v %ecx
  59. BEGIN(add)
  60. addl %eax, (v)
  61. adcl %edx, 4(v)
  62. RET_ENDP
  63. #undef v
  64. #define v %ecx
  65. BEGIN(add_return)
  66. addl (v), %eax
  67. adcl 4(v), %edx
  68. movl %eax, (v)
  69. movl %edx, 4(v)
  70. RET_ENDP
  71. #undef v
  72. #define v %ecx
  73. BEGIN(sub)
  74. subl %eax, (v)
  75. sbbl %edx, 4(v)
  76. RET_ENDP
  77. #undef v
  78. #define v %ecx
  79. BEGIN(sub_return)
  80. negl %edx
  81. negl %eax
  82. sbbl $0, %edx
  83. addl (v), %eax
  84. adcl 4(v), %edx
  85. movl %eax, (v)
  86. movl %edx, 4(v)
  87. RET_ENDP
  88. #undef v
  89. #define v %esi
  90. BEGIN(inc)
  91. addl $1, (v)
  92. adcl $0, 4(v)
  93. RET_ENDP
  94. #undef v
  95. #define v %esi
  96. BEGIN(inc_return)
  97. movl (v), %eax
  98. movl 4(v), %edx
  99. addl $1, %eax
  100. adcl $0, %edx
  101. movl %eax, (v)
  102. movl %edx, 4(v)
  103. RET_ENDP
  104. #undef v
  105. #define v %esi
  106. BEGIN(dec)
  107. subl $1, (v)
  108. sbbl $0, 4(v)
  109. RET_ENDP
  110. #undef v
  111. #define v %esi
  112. BEGIN(dec_return)
  113. movl (v), %eax
  114. movl 4(v), %edx
  115. subl $1, %eax
  116. sbbl $0, %edx
  117. movl %eax, (v)
  118. movl %edx, 4(v)
  119. RET_ENDP
  120. #undef v
  121. #define v %esi
  122. BEGIN(add_unless)
  123. addl %eax, %ecx
  124. adcl %edx, %edi
  125. addl (v), %eax
  126. adcl 4(v), %edx
  127. cmpl %eax, %ecx
  128. je 3f
  129. 1:
  130. movl %eax, (v)
  131. movl %edx, 4(v)
  132. movl $1, %eax
  133. 2:
  134. RET
  135. 3:
  136. cmpl %edx, %edi
  137. jne 1b
  138. xorl %eax, %eax
  139. jmp 2b
  140. ENDP
  141. #undef v
  142. #define v %esi
  143. BEGIN(inc_not_zero)
  144. movl (v), %eax
  145. movl 4(v), %edx
  146. testl %eax, %eax
  147. je 3f
  148. 1:
  149. addl $1, %eax
  150. adcl $0, %edx
  151. movl %eax, (v)
  152. movl %edx, 4(v)
  153. movl $1, %eax
  154. 2:
  155. RET
  156. 3:
  157. testl %edx, %edx
  158. jne 1b
  159. jmp 2b
  160. ENDP
  161. #undef v
  162. #define v %esi
  163. BEGIN(dec_if_positive)
  164. movl (v), %eax
  165. movl 4(v), %edx
  166. subl $1, %eax
  167. sbbl $0, %edx
  168. js 1f
  169. movl %eax, (v)
  170. movl %edx, 4(v)
  171. 1:
  172. RET_ENDP
  173. #undef v