/* $Id: atomic.S,v 1.4 2001/11/18 00:12:56 davem Exp $
 * atomic.S: These things are too big to do inline.
 *
 * Copyright (C) 1999 David S. Miller (davem@redhat.com)
 */

#include <asm/asi.h>

	.text

	/* Two versions of the atomic routines, one that
	 * does not return a value and does not perform
	 * memory barriers, and a second which returns
	 * a value and does the barriers.
	 */
  13. .globl atomic_add
  14. .type atomic_add,#function
  15. atomic_add: /* %o0 = increment, %o1 = atomic_ptr */
  16. 1: lduw [%o1], %g1
  17. add %g1, %o0, %g7
  18. cas [%o1], %g1, %g7
  19. cmp %g1, %g7
  20. bne,pn %icc, 1b
  21. nop
  22. retl
  23. nop
  24. .size atomic_add, .-atomic_add
  25. .globl atomic_sub
  26. .type atomic_sub,#function
  27. atomic_sub: /* %o0 = decrement, %o1 = atomic_ptr */
  28. 1: lduw [%o1], %g1
  29. sub %g1, %o0, %g7
  30. cas [%o1], %g1, %g7
  31. cmp %g1, %g7
  32. bne,pn %icc, 1b
  33. nop
  34. retl
  35. nop
  36. .size atomic_sub, .-atomic_sub
  37. /* On SMP we need to use memory barriers to ensure
  38. * correct memory operation ordering, nop these out
  39. * for uniprocessor.
  40. */
  41. #ifdef CONFIG_SMP
  42. #define ATOMIC_PRE_BARRIER membar #StoreLoad | #LoadLoad;
  43. #define ATOMIC_POST_BARRIER \
  44. ba,pt %xcc, 80b; \
  45. membar #StoreLoad | #StoreStore
  46. 80: retl
  47. nop
  48. #else
  49. #define ATOMIC_PRE_BARRIER
  50. #define ATOMIC_POST_BARRIER
  51. #endif
  52. .globl atomic_add_ret
  53. .type atomic_add_ret,#function
  54. atomic_add_ret: /* %o0 = increment, %o1 = atomic_ptr */
  55. ATOMIC_PRE_BARRIER
  56. 1: lduw [%o1], %g1
  57. add %g1, %o0, %g7
  58. cas [%o1], %g1, %g7
  59. cmp %g1, %g7
  60. bne,pn %icc, 1b
  61. add %g7, %o0, %g7
  62. sra %g7, 0, %o0
  63. ATOMIC_POST_BARRIER
  64. retl
  65. nop
  66. .size atomic_add_ret, .-atomic_add_ret
  67. .globl atomic_sub_ret
  68. .type atomic_sub_ret,#function
  69. atomic_sub_ret: /* %o0 = decrement, %o1 = atomic_ptr */
  70. ATOMIC_PRE_BARRIER
  71. 1: lduw [%o1], %g1
  72. sub %g1, %o0, %g7
  73. cas [%o1], %g1, %g7
  74. cmp %g1, %g7
  75. bne,pn %icc, 1b
  76. sub %g7, %o0, %g7
  77. sra %g7, 0, %o0
  78. ATOMIC_POST_BARRIER
  79. retl
  80. nop
  81. .size atomic_sub_ret, .-atomic_sub_ret
  82. .globl atomic64_add
  83. .type atomic64_add,#function
  84. atomic64_add: /* %o0 = increment, %o1 = atomic_ptr */
  85. 1: ldx [%o1], %g1
  86. add %g1, %o0, %g7
  87. casx [%o1], %g1, %g7
  88. cmp %g1, %g7
  89. bne,pn %xcc, 1b
  90. nop
  91. retl
  92. nop
  93. .size atomic64_add, .-atomic64_add
  94. .globl atomic64_sub
  95. .type atomic64_sub,#function
  96. atomic64_sub: /* %o0 = decrement, %o1 = atomic_ptr */
  97. 1: ldx [%o1], %g1
  98. sub %g1, %o0, %g7
  99. casx [%o1], %g1, %g7
  100. cmp %g1, %g7
  101. bne,pn %xcc, 1b
  102. nop
  103. retl
  104. nop
  105. .size atomic64_sub, .-atomic64_sub
  106. .globl atomic64_add_ret
  107. .type atomic64_add_ret,#function
  108. atomic64_add_ret: /* %o0 = increment, %o1 = atomic_ptr */
  109. ATOMIC_PRE_BARRIER
  110. 1: ldx [%o1], %g1
  111. add %g1, %o0, %g7
  112. casx [%o1], %g1, %g7
  113. cmp %g1, %g7
  114. bne,pn %xcc, 1b
  115. add %g7, %o0, %g7
  116. mov %g7, %o0
  117. ATOMIC_POST_BARRIER
  118. retl
  119. nop
  120. .size atomic64_add_ret, .-atomic64_add_ret
  121. .globl atomic64_sub_ret
  122. .type atomic64_sub_ret,#function
  123. atomic64_sub_ret: /* %o0 = decrement, %o1 = atomic_ptr */
  124. ATOMIC_PRE_BARRIER
  125. 1: ldx [%o1], %g1
  126. sub %g1, %o0, %g7
  127. casx [%o1], %g1, %g7
  128. cmp %g1, %g7
  129. bne,pn %xcc, 1b
  130. sub %g7, %o0, %g7
  131. mov %g7, %o0
  132. ATOMIC_POST_BARRIER
  133. retl
  134. nop
  135. .size atomic64_sub_ret, .-atomic64_sub_ret