/* atomic.S: These things are too big to do inline.
 *
 * Copyright (C) 1999, 2007 David S. Miller (davem@davemloft.net)
 */

#include <asm/asi.h>
#include <asm/backoff.h>
	.text

	/* Two versions of the atomic routines, one that
	 * does not return a value and does not perform
	 * memory barriers, and a second which returns
	 * a value and does the barriers.
	 */
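
	/* Every routine below implements the same lockless pattern:
	 * load the current value, compute old +/- %o0 into %g7, then
	 * try to publish it with compare-and-swap (cas/casx), which
	 * stores only if memory still holds the value we loaded and
	 * always returns the prior memory contents in %g7.  If another
	 * CPU raced in, the loop retries from the load.  A rough
	 * C-level sketch of the semantics (illustrative only, not the
	 * kernel's implementation; cmpxchg() returns the prior memory
	 * contents):
	 *
	 *	do {
	 *		old = *ptr;
	 *		new = old + inc;
	 *	} while (cmpxchg(ptr, old, new) != old);
	 */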

	.globl	atomic_add
	.type	atomic_add,#function
atomic_add: /* %o0 = increment, %o1 = atomic_ptr */
	BACKOFF_SETUP(%o2)
1:	lduw	[%o1], %g1		/* load current 32-bit value */
	add	%g1, %o0, %g7		/* compute new value into %g7 */
	cas	[%o1], %g1, %g7		/* store iff memory still == %g1 */
	cmp	%g1, %g7		/* %g7 now holds the prior contents */
	bne,pn	%icc, 2f		/* mismatch: another CPU raced us */
	 nop
	retl
	 nop
2:	BACKOFF_SPIN(%o2, %o3, 1b)
	.size	atomic_add, .-atomic_add
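
	/* BACKOFF_SETUP/BACKOFF_SPIN come from <asm/backoff.h>.  Broadly
	 * (an assumption about that header, which is not shown here): on
	 * SMP builds they implement a bounded exponential backoff between
	 * failed CAS attempts to reduce contention, while on UP builds
	 * they reduce to a plain retry branch.
	 */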

	.globl	atomic_sub
	.type	atomic_sub,#function
atomic_sub: /* %o0 = decrement, %o1 = atomic_ptr */
	BACKOFF_SETUP(%o2)
1:	lduw	[%o1], %g1
	sub	%g1, %o0, %g7
	cas	[%o1], %g1, %g7
	cmp	%g1, %g7
	bne,pn	%icc, 2f
	 nop
	retl
	 nop
2:	BACKOFF_SPIN(%o2, %o3, 1b)
	.size	atomic_sub, .-atomic_sub

	.globl	atomic_add_ret
	.type	atomic_add_ret,#function
atomic_add_ret: /* %o0 = increment, %o1 = atomic_ptr */
	BACKOFF_SETUP(%o2)
1:	lduw	[%o1], %g1
	add	%g1, %o0, %g7
	cas	[%o1], %g1, %g7
	cmp	%g1, %g7
	bne,pn	%icc, 2f
	 add	%g7, %o0, %g7	/* delay slot: old value + %o0 = new value */
	sra	%g7, 0, %o0	/* sign-extend the 32-bit result for return */
	retl
	 nop
2:	BACKOFF_SPIN(%o2, %o3, 1b)
	.size	atomic_add_ret, .-atomic_add_ret

	.globl	atomic_sub_ret
	.type	atomic_sub_ret,#function
atomic_sub_ret: /* %o0 = decrement, %o1 = atomic_ptr */
	BACKOFF_SETUP(%o2)
1:	lduw	[%o1], %g1
	sub	%g1, %o0, %g7
	cas	[%o1], %g1, %g7
	cmp	%g1, %g7
	bne,pn	%icc, 2f
	 sub	%g7, %o0, %g7
	sra	%g7, 0, %o0
	retl
	 nop
2:	BACKOFF_SPIN(%o2, %o3, 1b)
	.size	atomic_sub_ret, .-atomic_sub_ret
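
	/* The 64-bit variants below are identical in structure; they
	 * differ only in using ldx/casx on a 64-bit word, testing the
	 * 64-bit condition codes (%xcc instead of %icc), and returning
	 * the full 64-bit result with mov rather than sign-extending a
	 * 32-bit one with sra.
	 */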

	.globl	atomic64_add
	.type	atomic64_add,#function
atomic64_add: /* %o0 = increment, %o1 = atomic_ptr */
	BACKOFF_SETUP(%o2)
1:	ldx	[%o1], %g1
	add	%g1, %o0, %g7
	casx	[%o1], %g1, %g7
	cmp	%g1, %g7
	bne,pn	%xcc, 2f
	 nop
	retl
	 nop
2:	BACKOFF_SPIN(%o2, %o3, 1b)
	.size	atomic64_add, .-atomic64_add

	.globl	atomic64_sub
	.type	atomic64_sub,#function
atomic64_sub: /* %o0 = decrement, %o1 = atomic_ptr */
	BACKOFF_SETUP(%o2)
1:	ldx	[%o1], %g1
	sub	%g1, %o0, %g7
	casx	[%o1], %g1, %g7
	cmp	%g1, %g7
	bne,pn	%xcc, 2f
	 nop
	retl
	 nop
2:	BACKOFF_SPIN(%o2, %o3, 1b)
	.size	atomic64_sub, .-atomic64_sub

	.globl	atomic64_add_ret
	.type	atomic64_add_ret,#function
atomic64_add_ret: /* %o0 = increment, %o1 = atomic_ptr */
	BACKOFF_SETUP(%o2)
1:	ldx	[%o1], %g1
	add	%g1, %o0, %g7
	casx	[%o1], %g1, %g7
	cmp	%g1, %g7
	bne,pn	%xcc, 2f
	 add	%g7, %o0, %g7
	mov	%g7, %o0
	retl
	 nop
2:	BACKOFF_SPIN(%o2, %o3, 1b)
	.size	atomic64_add_ret, .-atomic64_add_ret

	.globl	atomic64_sub_ret
	.type	atomic64_sub_ret,#function
atomic64_sub_ret: /* %o0 = decrement, %o1 = atomic_ptr */
	BACKOFF_SETUP(%o2)
1:	ldx	[%o1], %g1
	sub	%g1, %o0, %g7
	casx	[%o1], %g1, %g7
	cmp	%g1, %g7
	bne,pn	%xcc, 2f
	 sub	%g7, %o0, %g7
	mov	%g7, %o0
	retl
	 nop
2:	BACKOFF_SPIN(%o2, %o3, 1b)
	.size	atomic64_sub_ret, .-atomic64_sub_ret
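
	/* For reference, the C prototypes these routines are assumed to
	 * back in this era's sparc64 <asm/atomic.h> (inferred from the
	 * register usage above, not declared by this file; likewise for
	 * the _sub variants):
	 *
	 *	void atomic_add(int i, atomic_t *v);
	 *	int  atomic_add_ret(int i, atomic_t *v);
	 *	void atomic64_add(int i, atomic64_t *v);
	 *	long atomic64_add_ret(int i, atomic64_t *v);
	 */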