/* atomic.S: These things are too big to do inline.
 *
 * Copyright (C) 1999, 2007 David S. Miller (davem@davemloft.net)
 */

#include <asm/asi.h>
#include <asm/backoff.h>

	.text

	/* Two versions of the atomic routines, one that
	 * does not return a value and does not perform
	 * memory barriers, and a second which returns
	 * a value and does the barriers.
	 */
  13. .globl atomic_add
  14. .type atomic_add,#function
  15. atomic_add: /* %o0 = increment, %o1 = atomic_ptr */
  16. BACKOFF_SETUP(%o2)
  17. 1: lduw [%o1], %g1
  18. add %g1, %o0, %g7
  19. cas [%o1], %g1, %g7
  20. cmp %g1, %g7
  21. bne,pn %icc, BACKOFF_LABEL(2f, 1b)
  22. nop
  23. retl
  24. nop
  25. 2: BACKOFF_SPIN(%o2, %o3, 1b)
  26. .size atomic_add, .-atomic_add
  27. .globl atomic_sub
  28. .type atomic_sub,#function
  29. atomic_sub: /* %o0 = decrement, %o1 = atomic_ptr */
  30. BACKOFF_SETUP(%o2)
  31. 1: lduw [%o1], %g1
  32. sub %g1, %o0, %g7
  33. cas [%o1], %g1, %g7
  34. cmp %g1, %g7
  35. bne,pn %icc, BACKOFF_LABEL(2f, 1b)
  36. nop
  37. retl
  38. nop
  39. 2: BACKOFF_SPIN(%o2, %o3, 1b)
  40. .size atomic_sub, .-atomic_sub
  41. .globl atomic_add_ret
  42. .type atomic_add_ret,#function
  43. atomic_add_ret: /* %o0 = increment, %o1 = atomic_ptr */
  44. BACKOFF_SETUP(%o2)
  45. 1: lduw [%o1], %g1
  46. add %g1, %o0, %g7
  47. cas [%o1], %g1, %g7
  48. cmp %g1, %g7
  49. bne,pn %icc, BACKOFF_LABEL(2f, 1b)
  50. add %g1, %o0, %g1
  51. retl
  52. sra %g1, 0, %o0
  53. 2: BACKOFF_SPIN(%o2, %o3, 1b)
  54. .size atomic_add_ret, .-atomic_add_ret
  55. .globl atomic_sub_ret
  56. .type atomic_sub_ret,#function
  57. atomic_sub_ret: /* %o0 = decrement, %o1 = atomic_ptr */
  58. BACKOFF_SETUP(%o2)
  59. 1: lduw [%o1], %g1
  60. sub %g1, %o0, %g7
  61. cas [%o1], %g1, %g7
  62. cmp %g1, %g7
  63. bne,pn %icc, BACKOFF_LABEL(2f, 1b)
  64. sub %g1, %o0, %g1
  65. retl
  66. sra %g1, 0, %o0
  67. 2: BACKOFF_SPIN(%o2, %o3, 1b)
  68. .size atomic_sub_ret, .-atomic_sub_ret
  69. .globl atomic64_add
  70. .type atomic64_add,#function
  71. atomic64_add: /* %o0 = increment, %o1 = atomic_ptr */
  72. BACKOFF_SETUP(%o2)
  73. 1: ldx [%o1], %g1
  74. add %g1, %o0, %g7
  75. casx [%o1], %g1, %g7
  76. cmp %g1, %g7
  77. bne,pn %xcc, BACKOFF_LABEL(2f, 1b)
  78. nop
  79. retl
  80. nop
  81. 2: BACKOFF_SPIN(%o2, %o3, 1b)
  82. .size atomic64_add, .-atomic64_add
  83. .globl atomic64_sub
  84. .type atomic64_sub,#function
  85. atomic64_sub: /* %o0 = decrement, %o1 = atomic_ptr */
  86. BACKOFF_SETUP(%o2)
  87. 1: ldx [%o1], %g1
  88. sub %g1, %o0, %g7
  89. casx [%o1], %g1, %g7
  90. cmp %g1, %g7
  91. bne,pn %xcc, BACKOFF_LABEL(2f, 1b)
  92. nop
  93. retl
  94. nop
  95. 2: BACKOFF_SPIN(%o2, %o3, 1b)
  96. .size atomic64_sub, .-atomic64_sub
  97. .globl atomic64_add_ret
  98. .type atomic64_add_ret,#function
  99. atomic64_add_ret: /* %o0 = increment, %o1 = atomic_ptr */
  100. BACKOFF_SETUP(%o2)
  101. 1: ldx [%o1], %g1
  102. add %g1, %o0, %g7
  103. casx [%o1], %g1, %g7
  104. cmp %g1, %g7
  105. bne,pn %xcc, BACKOFF_LABEL(2f, 1b)
  106. nop
  107. retl
  108. add %g1, %o0, %o0
  109. 2: BACKOFF_SPIN(%o2, %o3, 1b)
  110. .size atomic64_add_ret, .-atomic64_add_ret
  111. .globl atomic64_sub_ret
  112. .type atomic64_sub_ret,#function
  113. atomic64_sub_ret: /* %o0 = decrement, %o1 = atomic_ptr */
  114. BACKOFF_SETUP(%o2)
  115. 1: ldx [%o1], %g1
  116. sub %g1, %o0, %g7
  117. casx [%o1], %g1, %g7
  118. cmp %g1, %g7
  119. bne,pn %xcc, BACKOFF_LABEL(2f, 1b)
  120. nop
  121. retl
  122. sub %g1, %o0, %o0
  123. 2: BACKOFF_SPIN(%o2, %o3, 1b)
  124. .size atomic64_sub_ret, .-atomic64_sub_ret