atomic.S

/* $Id: atomic.S,v 1.4 2001/11/18 00:12:56 davem Exp $
 * atomic.S: These things are too big to do inline.
 *
 * Copyright (C) 1999 David S. Miller (davem@redhat.com)
 */

#include <linux/config.h>
#include <asm/asi.h>

        .text

        /* Two versions of the atomic routines, one that
         * does not return a value and does not perform
         * memory barriers, and a second which returns
         * a value and does the barriers.
         */
        .globl  atomic_add
        .type   atomic_add,#function
atomic_add: /* %o0 = increment, %o1 = atomic_ptr */
1:      lduw    [%o1], %g1
        add     %g1, %o0, %g7
        cas     [%o1], %g1, %g7
        cmp     %g1, %g7
        bne,pn  %icc, 1b
         nop
        retl
         nop
        .size   atomic_add, .-atomic_add

        .globl  atomic_sub
        .type   atomic_sub,#function
atomic_sub: /* %o0 = decrement, %o1 = atomic_ptr */
1:      lduw    [%o1], %g1
        sub     %g1, %o0, %g7
        cas     [%o1], %g1, %g7
        cmp     %g1, %g7
        bne,pn  %icc, 1b
         nop
        retl
         nop
        .size   atomic_sub, .-atomic_sub
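
The barrier-free atomic_add/atomic_sub above are the classic compare-and-swap retry loop: load the counter, compute the new value, and let cas store it only if the word in memory still matches what was loaded, retrying otherwise. The following is a minimal C sketch of that loop, illustrative only and not part of atomic.S; it uses GCC's __sync_val_compare_and_swap builtin as a stand-in for the cas instruction, and the name atomic_add_sketch is invented for the example.

#include <stdio.h>

/* Hypothetical helper, not from the kernel: mirrors atomic_add above. */
static void atomic_add_sketch(int increment, volatile int *ptr)
{
        int old, new;

        do {
                old = *ptr;                /* lduw  [%o1], %g1     */
                new = old + increment;     /* add   %g1, %o0, %g7  */
                /* cas [%o1], %g1, %g7 stores new only if the word is
                 * still old, and hands back what was really there.  */
        } while (__sync_val_compare_and_swap(ptr, old, new) != old);
                                           /* cmp %g1, %g7; bne 1b */
}

int main(void)
{
        volatile int counter = 40;

        atomic_add_sketch(2, &counter);
        printf("counter = %d\n", counter); /* prints 42 */
        return 0;
}
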
        /* On SMP we need to use memory barriers to ensure
         * correct memory operation ordering, nop these out
         * for uniprocessor.
         */
#ifdef CONFIG_SMP
#define ATOMIC_PRE_BARRIER      membar #StoreLoad | #LoadLoad;
#define ATOMIC_POST_BARRIER     \
        ba,pt %xcc, 80b;        \
        membar #StoreLoad | #StoreStore

        /* The post-barrier branches back to this shared return stub:
         * the membar sits in the delay slot of the ba, so each _ret
         * routine issues its barrier and then returns from here.  The
         * retl/nop written after ATOMIC_POST_BARRIER in the routines
         * below is only reached in the uniprocessor build, where the
         * macros expand to nothing.
         */
80:     retl
         nop
#else
#define ATOMIC_PRE_BARRIER
#define ATOMIC_POST_BARRIER
#endif
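
On SMP, wrapping the loop in ATOMIC_PRE_BARRIER/ATOMIC_POST_BARRIER makes the value-returning routines below order the read-modify-write against memory accesses on both sides, roughly a full barrier, while the plain atomic_add/atomic_sub give no such guarantee. As a rough C analogue (illustrative only, not the kernel's implementation), GCC's __sync_add_and_fetch builtin is a full-barrier add that returns the new value, much like atomic_add_ret:

#include <stdio.h>

static int ready;                    /* ordinary shared data        */
static volatile int refcount = 1;    /* counter updated atomically  */

int main(void)
{
        ready = 1;                   /* earlier store ...           */
        /* Full-barrier read-modify-write returning the new value: the
         * store above may not be reordered past it, and later accesses
         * may not be reordered before it. */
        int newval = __sync_add_and_fetch(&refcount, 1);

        printf("refcount = %d, ready = %d\n", newval, ready);
        return 0;
}
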
        .globl  atomic_add_ret
        .type   atomic_add_ret,#function
atomic_add_ret: /* %o0 = increment, %o1 = atomic_ptr */
        ATOMIC_PRE_BARRIER
1:      lduw    [%o1], %g1
        add     %g1, %o0, %g7
        cas     [%o1], %g1, %g7
        cmp     %g1, %g7
        bne,pn  %icc, 1b
         add    %g7, %o0, %g7
        sra     %g7, 0, %o0
        ATOMIC_POST_BARRIER
        retl
         nop
        .size   atomic_add_ret, .-atomic_add_ret

        .globl  atomic_sub_ret
        .type   atomic_sub_ret,#function
atomic_sub_ret: /* %o0 = decrement, %o1 = atomic_ptr */
        ATOMIC_PRE_BARRIER
1:      lduw    [%o1], %g1
        sub     %g1, %o0, %g7
        cas     [%o1], %g1, %g7
        cmp     %g1, %g7
        bne,pn  %icc, 1b
         sub    %g7, %o0, %g7
        sra     %g7, 0, %o0
        ATOMIC_POST_BARRIER
        retl
         nop
        .size   atomic_sub_ret, .-atomic_sub_ret
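
One detail worth noting in the two routines above: the result is returned with sra %g7, 0, %o0 rather than a plain mov. On SPARC V9 a shift-right-arithmetic by zero sign-extends the low 32 bits of %g7 into the full 64-bit %o0, keeping the register consistent with a signed 32-bit return value; the 64-bit atomic64_*_ret routines below can use mov because the value is already the right width. The C analogue (illustrative only, assuming an LP64 target) is converting through a signed 32-bit type:

#include <stdio.h>
#include <stdint.h>

int main(void)
{
        uint64_t reg  = 0xffffffffUL;          /* low 32 bits hold -1          */
        long moved    = (long)reg;             /* mov-style copy: 4294967295   */
        long extended = (long)(int32_t)reg;    /* sra-style sign extension: -1 */

        printf("mov-style: %ld, sra-style: %ld\n", moved, extended);
        return 0;
}
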
        .globl  atomic64_add
        .type   atomic64_add,#function
atomic64_add: /* %o0 = increment, %o1 = atomic_ptr */
1:      ldx     [%o1], %g1
        add     %g1, %o0, %g7
        casx    [%o1], %g1, %g7
        cmp     %g1, %g7
        bne,pn  %xcc, 1b
         nop
        retl
         nop
        .size   atomic64_add, .-atomic64_add

        .globl  atomic64_sub
        .type   atomic64_sub,#function
atomic64_sub: /* %o0 = decrement, %o1 = atomic_ptr */
1:      ldx     [%o1], %g1
        sub     %g1, %o0, %g7
        casx    [%o1], %g1, %g7
        cmp     %g1, %g7
        bne,pn  %xcc, 1b
         nop
        retl
         nop
        .size   atomic64_sub, .-atomic64_sub

        .globl  atomic64_add_ret
        .type   atomic64_add_ret,#function
atomic64_add_ret: /* %o0 = increment, %o1 = atomic_ptr */
        ATOMIC_PRE_BARRIER
1:      ldx     [%o1], %g1
        add     %g1, %o0, %g7
        casx    [%o1], %g1, %g7
        cmp     %g1, %g7
        bne,pn  %xcc, 1b
         add    %g7, %o0, %g7
        mov     %g7, %o0
        ATOMIC_POST_BARRIER
        retl
         nop
        .size   atomic64_add_ret, .-atomic64_add_ret

        .globl  atomic64_sub_ret
        .type   atomic64_sub_ret,#function
atomic64_sub_ret: /* %o0 = decrement, %o1 = atomic_ptr */
        ATOMIC_PRE_BARRIER
1:      ldx     [%o1], %g1
        sub     %g1, %o0, %g7
        casx    [%o1], %g1, %g7
        cmp     %g1, %g7
        bne,pn  %xcc, 1b
         sub    %g7, %o0, %g7
        mov     %g7, %o0
        ATOMIC_POST_BARRIER
        retl
         nop
        .size   atomic64_sub_ret, .-atomic64_sub_ret
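
Taken together, the file exports eight entry points. The register comments (%o0 = increment/decrement, %o1 = atomic_ptr) correspond to a first integer argument and a second pointer argument in C. Below is an illustrative, portable C rendering of the same interface using GCC __sync builtins, with a short usage example; the atomic_t/atomic64_t layouts are assumptions modelled on the sparc64 headers of this era, and these definitions are stand-ins, not the kernel's implementation.

#include <stdio.h>

typedef struct { volatile int counter; } atomic_t;        /* assumed layout */
typedef struct { volatile long counter; } atomic64_t;     /* assumed layout */

/* Stand-in definitions with the same shape as the assembly routines. */
static void atomic_add(int i, atomic_t *v)          { __sync_fetch_and_add(&v->counter, i); }
static void atomic_sub(int i, atomic_t *v)          { __sync_fetch_and_sub(&v->counter, i); }
static int  atomic_add_ret(int i, atomic_t *v)      { return __sync_add_and_fetch(&v->counter, i); }
static int  atomic_sub_ret(int i, atomic_t *v)      { return __sync_sub_and_fetch(&v->counter, i); }
static void atomic64_add(long i, atomic64_t *v)     { __sync_fetch_and_add(&v->counter, i); }
static void atomic64_sub(long i, atomic64_t *v)     { __sync_fetch_and_sub(&v->counter, i); }
static long atomic64_add_ret(long i, atomic64_t *v) { return __sync_add_and_fetch(&v->counter, i); }
static long atomic64_sub_ret(long i, atomic64_t *v) { return __sync_sub_and_fetch(&v->counter, i); }

int main(void)
{
        atomic_t a = { 0 };
        atomic64_t b = { 0 };

        atomic_add(5, &a);
        atomic_sub(1, &a);
        atomic64_add(5, &b);
        atomic64_sub(1, &b);
        printf("%d %ld\n", atomic_add_ret(0, &a), atomic64_add_ret(0, &b)); /* 4 4 */
        printf("%d %ld\n", atomic_sub_ret(4, &a), atomic64_sub_ret(4, &b)); /* 0 0 */
        return 0;
}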