/* atomic.S: These things are too big to do inline.
 *
 * Copyright (C) 1999, 2007 David S. Miller (davem@davemloft.net)
 */

#include <asm/asi.h>
#include <asm/backoff.h>

	.text
	/* Two versions of the atomic routines, one that
	 * does not return a value and does not perform
	 * memory barriers, and a second which returns
	 * a value and does the barriers.
	 */
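	/* All of the routines below share the same compare-and-swap
	 * retry loop: load the current value into %g1, compute the
	 * new value into %g7, then cas/casx it back.  cas stores
	 * %g7 only if [%o1] still holds %g1, and always returns the
	 * old memory contents in %g7, so %g1 != %g7 afterwards means
	 * another cpu raced us and we must retry from 1b.
	 *
	 * From C these are reached through the atomic_t wrappers in
	 * asm/atomic.h; a sketch of the call-level split (wrapper
	 * usage, not part of this file):
	 *
	 *	atomic_add(i, v);		 no result, no barriers
	 *	n = atomic_add_ret(i, v);	 new value, with barriers
	 */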
	.globl	atomic_add
	.type	atomic_add,#function
atomic_add: /* %o0 = increment, %o1 = atomic_ptr */
	BACKOFF_SETUP(%o2)
1:	lduw	[%o1], %g1
	add	%g1, %o0, %g7
	cas	[%o1], %g1, %g7
	cmp	%g1, %g7
	bne,pn	%icc, 2f
	 nop
	retl
	 nop
2:	BACKOFF_SPIN(%o2, %o3, 1b)
	.size	atomic_add, .-atomic_add
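	/* BACKOFF_SETUP and BACKOFF_SPIN come from asm/backoff.h;
	 * a sketch of their intent (see that header for the real
	 * bodies): %o2 starts as a small spin count, and each cas
	 * failure reaching 2: busy-waits roughly %o2 iterations,
	 * doubles %o2 up to a fixed limit, then branches back to
	 * 1b to retry.  The exponential backoff keeps many cpus
	 * hammering the same atomic_t from livelocking each other.
	 */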
	.globl	atomic_sub
	.type	atomic_sub,#function
atomic_sub: /* %o0 = decrement, %o1 = atomic_ptr */
	BACKOFF_SETUP(%o2)
1:	lduw	[%o1], %g1
	sub	%g1, %o0, %g7
	cas	[%o1], %g1, %g7
	cmp	%g1, %g7
	bne,pn	%icc, 2f
	 nop
	retl
	 nop
2:	BACKOFF_SPIN(%o2, %o3, 1b)
	.size	atomic_sub, .-atomic_sub
	/* On SMP we need to use memory barriers to ensure
	 * correct memory operation ordering, nop these out
	 * for uniprocessor.
	 */
#ifdef CONFIG_SMP

#define ATOMIC_PRE_BARRIER	membar #StoreLoad | #LoadLoad;
#define ATOMIC_POST_BARRIER	\
	ba,pt %xcc, 80b;	\
	membar #StoreLoad | #StoreStore

80:	retl
	 nop
#else
#define ATOMIC_PRE_BARRIER
#define ATOMIC_POST_BARRIER
#endif
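	/* Note the trick in ATOMIC_POST_BARRIER above: on SMP it
	 * branches to the shared return sequence at local label 80
	 * with the membar executing in the branch delay slot, so
	 * the retl/nop that textually follows the macro in each
	 * *_ret routine below is only ever reached in the
	 * uniprocessor build, where both macros expand to nothing.
	 */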
	.globl	atomic_add_ret
	.type	atomic_add_ret,#function
atomic_add_ret: /* %o0 = increment, %o1 = atomic_ptr */
	BACKOFF_SETUP(%o2)
	ATOMIC_PRE_BARRIER
1:	lduw	[%o1], %g1
	add	%g1, %o0, %g7
	cas	[%o1], %g1, %g7
	cmp	%g1, %g7
	bne,pn	%icc, 2f
	 add	%g7, %o0, %g7
	sra	%g7, 0, %o0
	ATOMIC_POST_BARRIER
	retl
	 nop
2:	BACKOFF_SPIN(%o2, %o3, 1b)
	.size	atomic_add_ret, .-atomic_add_ret
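	/* Two details worth noting above: the add in the delay slot
	 * of the bne executes on both paths, and on the success
	 * path it turns %g7 (the old value returned by cas) into
	 * the new value.  The sra by 0 then sign-extends the low
	 * 32 bits into the full 64-bit %o0, which is how a 32-bit
	 * int result is returned on sparc64.  On the failure path
	 * %g7 is simply recomputed on the next trip through 1b.
	 */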
	.globl	atomic_sub_ret
	.type	atomic_sub_ret,#function
atomic_sub_ret: /* %o0 = decrement, %o1 = atomic_ptr */
	BACKOFF_SETUP(%o2)
	ATOMIC_PRE_BARRIER
1:	lduw	[%o1], %g1
	sub	%g1, %o0, %g7
	cas	[%o1], %g1, %g7
	cmp	%g1, %g7
	bne,pn	%icc, 2f
	 sub	%g7, %o0, %g7
	sra	%g7, 0, %o0
	ATOMIC_POST_BARRIER
	retl
	 nop
2:	BACKOFF_SPIN(%o2, %o3, 1b)
	.size	atomic_sub_ret, .-atomic_sub_ret
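	/* The atomic64_* variants below repeat the same loops for
	 * 64-bit counters: ldx/casx instead of lduw/cas, branches
	 * on the 64-bit %xcc condition codes instead of %icc, and
	 * a plain mov to return the result, since no sign
	 * extension is needed for a full 64-bit value.
	 */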
	.globl	atomic64_add
	.type	atomic64_add,#function
atomic64_add: /* %o0 = increment, %o1 = atomic_ptr */
	BACKOFF_SETUP(%o2)
1:	ldx	[%o1], %g1
	add	%g1, %o0, %g7
	casx	[%o1], %g1, %g7
	cmp	%g1, %g7
	bne,pn	%xcc, 2f
	 nop
	retl
	 nop
2:	BACKOFF_SPIN(%o2, %o3, 1b)
	.size	atomic64_add, .-atomic64_add
	.globl	atomic64_sub
	.type	atomic64_sub,#function
atomic64_sub: /* %o0 = decrement, %o1 = atomic_ptr */
	BACKOFF_SETUP(%o2)
1:	ldx	[%o1], %g1
	sub	%g1, %o0, %g7
	casx	[%o1], %g1, %g7
	cmp	%g1, %g7
	bne,pn	%xcc, 2f
	 nop
	retl
	 nop
2:	BACKOFF_SPIN(%o2, %o3, 1b)
	.size	atomic64_sub, .-atomic64_sub
	.globl	atomic64_add_ret
	.type	atomic64_add_ret,#function
atomic64_add_ret: /* %o0 = increment, %o1 = atomic_ptr */
	BACKOFF_SETUP(%o2)
	ATOMIC_PRE_BARRIER
1:	ldx	[%o1], %g1
	add	%g1, %o0, %g7
	casx	[%o1], %g1, %g7
	cmp	%g1, %g7
	bne,pn	%xcc, 2f
	 add	%g7, %o0, %g7
	mov	%g7, %o0
	ATOMIC_POST_BARRIER
	retl
	 nop
2:	BACKOFF_SPIN(%o2, %o3, 1b)
	.size	atomic64_add_ret, .-atomic64_add_ret
	.globl	atomic64_sub_ret
	.type	atomic64_sub_ret,#function
atomic64_sub_ret: /* %o0 = decrement, %o1 = atomic_ptr */
	BACKOFF_SETUP(%o2)
	ATOMIC_PRE_BARRIER
1:	ldx	[%o1], %g1
	sub	%g1, %o0, %g7
	casx	[%o1], %g1, %g7
	cmp	%g1, %g7
	bne,pn	%xcc, 2f
	 sub	%g7, %o0, %g7
	mov	%g7, %o0
	ATOMIC_POST_BARRIER
	retl
	 nop
2:	BACKOFF_SPIN(%o2, %o3, 1b)
	.size	atomic64_sub_ret, .-atomic64_sub_ret