/* atomic_64.S: These things are too big to do inline.
 *
 * Copyright (C) 1999, 2007, 2012 David S. Miller (davem@davemloft.net)
 */
  5. #include <linux/linkage.h>
  6. #include <asm/asi.h>
  7. #include <asm/backoff.h>
  8. #include <asm/export.h>
  9. .text
	/* Three versions of the atomic routines: one that does not
	 * return a value and performs no memory barriers, and two
	 * which return a value (the new and the old value,
	 * respectively) and do perform the barriers.
	 */
  16. #define ATOMIC_OP(op) \
  17. ENTRY(atomic_##op) /* %o0 = increment, %o1 = atomic_ptr */ \
  18. BACKOFF_SETUP(%o2); \
  19. 1: lduw [%o1], %g1; \
  20. op %g1, %o0, %g7; \
  21. cas [%o1], %g1, %g7; \
  22. cmp %g1, %g7; \
  23. bne,pn %icc, BACKOFF_LABEL(2f, 1b); \
  24. nop; \
  25. retl; \
  26. nop; \
  27. 2: BACKOFF_SPIN(%o2, %o3, 1b); \
  28. ENDPROC(atomic_##op); \
  29. EXPORT_SYMBOL(atomic_##op);
  30. #define ATOMIC_OP_RETURN(op) \
  31. ENTRY(atomic_##op##_return) /* %o0 = increment, %o1 = atomic_ptr */ \
  32. BACKOFF_SETUP(%o2); \
  33. 1: lduw [%o1], %g1; \
  34. op %g1, %o0, %g7; \
  35. cas [%o1], %g1, %g7; \
  36. cmp %g1, %g7; \
  37. bne,pn %icc, BACKOFF_LABEL(2f, 1b); \
  38. op %g1, %o0, %g1; \
  39. retl; \
  40. sra %g1, 0, %o0; \
  41. 2: BACKOFF_SPIN(%o2, %o3, 1b); \
  42. ENDPROC(atomic_##op##_return); \
  43. EXPORT_SYMBOL(atomic_##op##_return);
  44. #define ATOMIC_FETCH_OP(op) \
  45. ENTRY(atomic_fetch_##op) /* %o0 = increment, %o1 = atomic_ptr */ \
  46. BACKOFF_SETUP(%o2); \
  47. 1: lduw [%o1], %g1; \
  48. op %g1, %o0, %g7; \
  49. cas [%o1], %g1, %g7; \
  50. cmp %g1, %g7; \
  51. bne,pn %icc, BACKOFF_LABEL(2f, 1b); \
  52. nop; \
  53. retl; \
  54. sra %g1, 0, %o0; \
  55. 2: BACKOFF_SPIN(%o2, %o3, 1b); \
  56. ENDPROC(atomic_fetch_##op); \
  57. EXPORT_SYMBOL(atomic_fetch_##op);
  58. #define ATOMIC_OPS(op) ATOMIC_OP(op) ATOMIC_OP_RETURN(op) ATOMIC_FETCH_OP(op)
  59. ATOMIC_OPS(add)
  60. ATOMIC_OPS(sub)
  61. #undef ATOMIC_OPS
  62. #define ATOMIC_OPS(op) ATOMIC_OP(op) ATOMIC_FETCH_OP(op)
  63. ATOMIC_OPS(and)
  64. ATOMIC_OPS(or)
  65. ATOMIC_OPS(xor)
  66. #undef ATOMIC_OPS
  67. #undef ATOMIC_FETCH_OP
  68. #undef ATOMIC_OP_RETURN
  69. #undef ATOMIC_OP
  70. #define ATOMIC64_OP(op) \
  71. ENTRY(atomic64_##op) /* %o0 = increment, %o1 = atomic_ptr */ \
  72. BACKOFF_SETUP(%o2); \
  73. 1: ldx [%o1], %g1; \
  74. op %g1, %o0, %g7; \
  75. casx [%o1], %g1, %g7; \
  76. cmp %g1, %g7; \
  77. bne,pn %xcc, BACKOFF_LABEL(2f, 1b); \
  78. nop; \
  79. retl; \
  80. nop; \
  81. 2: BACKOFF_SPIN(%o2, %o3, 1b); \
  82. ENDPROC(atomic64_##op); \
  83. EXPORT_SYMBOL(atomic64_##op);
  84. #define ATOMIC64_OP_RETURN(op) \
  85. ENTRY(atomic64_##op##_return) /* %o0 = increment, %o1 = atomic_ptr */ \
  86. BACKOFF_SETUP(%o2); \
  87. 1: ldx [%o1], %g1; \
  88. op %g1, %o0, %g7; \
  89. casx [%o1], %g1, %g7; \
  90. cmp %g1, %g7; \
  91. bne,pn %xcc, BACKOFF_LABEL(2f, 1b); \
  92. nop; \
  93. retl; \
  94. op %g1, %o0, %o0; \
  95. 2: BACKOFF_SPIN(%o2, %o3, 1b); \
  96. ENDPROC(atomic64_##op##_return); \
  97. EXPORT_SYMBOL(atomic64_##op##_return);
  98. #define ATOMIC64_FETCH_OP(op) \
  99. ENTRY(atomic64_fetch_##op) /* %o0 = increment, %o1 = atomic_ptr */ \
  100. BACKOFF_SETUP(%o2); \
  101. 1: ldx [%o1], %g1; \
  102. op %g1, %o0, %g7; \
  103. casx [%o1], %g1, %g7; \
  104. cmp %g1, %g7; \
  105. bne,pn %xcc, BACKOFF_LABEL(2f, 1b); \
  106. nop; \
  107. retl; \
  108. mov %g1, %o0; \
  109. 2: BACKOFF_SPIN(%o2, %o3, 1b); \
  110. ENDPROC(atomic64_fetch_##op); \
  111. EXPORT_SYMBOL(atomic64_fetch_##op);
  112. #define ATOMIC64_OPS(op) ATOMIC64_OP(op) ATOMIC64_OP_RETURN(op) ATOMIC64_FETCH_OP(op)
  113. ATOMIC64_OPS(add)
  114. ATOMIC64_OPS(sub)
  115. #undef ATOMIC64_OPS
  116. #define ATOMIC64_OPS(op) ATOMIC64_OP(op) ATOMIC64_FETCH_OP(op)
  117. ATOMIC64_OPS(and)
  118. ATOMIC64_OPS(or)
  119. ATOMIC64_OPS(xor)
  120. #undef ATOMIC64_OPS
  121. #undef ATOMIC64_FETCH_OP
  122. #undef ATOMIC64_OP_RETURN
  123. #undef ATOMIC64_OP
  124. ENTRY(atomic64_dec_if_positive) /* %o0 = atomic_ptr */
  125. BACKOFF_SETUP(%o2)
  126. 1: ldx [%o0], %g1
  127. brlez,pn %g1, 3f
  128. sub %g1, 1, %g7
  129. casx [%o0], %g1, %g7
  130. cmp %g1, %g7
  131. bne,pn %xcc, BACKOFF_LABEL(2f, 1b)
  132. nop
  133. 3: retl
  134. sub %g1, 1, %o0
  135. 2: BACKOFF_SPIN(%o2, %o3, 1b)
  136. ENDPROC(atomic64_dec_if_positive)
  137. EXPORT_SYMBOL(atomic64_dec_if_positive)