/* arch/x86/lib/atomic64_cx8_32.S */
/*
 * atomic64_t for 586+
 *
 * Copyright © 2010 Luca Barbieri
 *
 * This program is free software; you can redistribute it and/or modify
 * it under the terms of the GNU General Public License as published by
 * the Free Software Foundation; either version 2 of the License, or
 * (at your option) any later version.
 */
#include <linux/linkage.h>
#include <asm/alternative-asm.h>
#include <asm/dwarf2.h>
/*
 * SAVE reg: push a callee-saved register and emit the matching DWARF
 * CFI annotations (CFA grows by 4; \reg is saved at the new CFA) so the
 * unwinder stays correct across the push.
 */
.macro SAVE reg
	pushl %\reg
	CFI_ADJUST_CFA_OFFSET 4
	CFI_REL_OFFSET \reg, 0
.endm
/*
 * RESTORE reg: pop a register pushed by SAVE and undo its CFI
 * annotations.
 */
.macro RESTORE reg
	popl %\reg
	CFI_ADJUST_CFA_OFFSET -4
	CFI_RESTORE \reg
.endm
/*
 * read64 reg: atomically load the 64-bit value at (\reg) into %edx:%eax.
 *
 * cmpxchg8b compares %edx:%eax against the memory operand; by copying
 * %ebx:%ecx into %eax:%edx first, a match writes back the unchanged value
 * and a mismatch loads the current memory value into %edx:%eax.  Either
 * way %edx:%eax ends up holding the 64-bit value.  Clobbers flags.
 */
.macro read64 reg
	movl %ebx, %eax
	movl %ecx, %edx
/* we need LOCK_PREFIX since otherwise cmpxchg8b always does the write */
	LOCK_PREFIX
	cmpxchg8b (\reg)
.endm
/*
 * long long atomic64_read_cx8(const atomic64_t *v)
 *
 * In:  %ecx = pointer to the 64-bit value
 * Out: %edx:%eax = value, read atomically via read64
 */
ENTRY(atomic64_read_cx8)
	CFI_STARTPROC

	read64 %ecx
	ret
	CFI_ENDPROC
ENDPROC(atomic64_read_cx8)
/*
 * void atomic64_set_cx8(atomic64_t *v, long long n)
 *
 * In:  %esi = pointer, %ebx:%ecx = new value (low:high)
 *
 * Loops until cmpxchg8b replaces the value in memory with %ebx:%ecx.
 * %edx:%eax need not be preloaded: a failed compare loads the current
 * memory value into %edx:%eax, so the next iteration's compare matches
 * (barring a concurrent write).
 */
ENTRY(atomic64_set_cx8)
	CFI_STARTPROC

1:
/* we don't need LOCK_PREFIX since aligned 64-bit writes
 * are atomic on 586 and newer */
	cmpxchg8b (%esi)
	jne 1b

	ret
	CFI_ENDPROC
ENDPROC(atomic64_set_cx8)
/*
 * long long atomic64_xchg_cx8(atomic64_t *v, long long n)
 *
 * In:  %esi = pointer, %ebx:%ecx = new value (low:high)
 * Out: %edx:%eax = value previously in memory
 */
ENTRY(atomic64_xchg_cx8)
	CFI_STARTPROC

	/* seed %edx:%eax with a guess; a failed cmpxchg8b reloads the
	 * actual old value into %edx:%eax and we retry */
	movl %ebx, %eax
	movl %ecx, %edx
1:
	LOCK_PREFIX
	cmpxchg8b (%esi)
	jne 1b

	ret
	CFI_ENDPROC
ENDPROC(atomic64_xchg_cx8)
/*
 * addsub_return func ins insc:
 * expand to atomic64_<func>_return_cx8, which computes *v = *v <ins> i
 * atomically and returns the new value.  \ins/\insc are the low/high
 * word instructions (add/adc or sub/sbb).
 *
 * In:  %eax:%edx (low:high) = i, %ecx = pointer to v
 * Out: %edx:%eax = new 64-bit value
 */
.macro addsub_return func ins insc
ENTRY(atomic64_\func\()_return_cx8)
	CFI_STARTPROC
	SAVE ebp
	SAVE ebx
	SAVE esi
	SAVE edi

	/* move the operand and pointer out of the registers that
	 * cmpxchg8b needs (%eax/%edx/%ebx/%ecx) */
	movl %eax, %esi
	movl %edx, %edi
	movl %ecx, %ebp

	read64 %ebp
1:
	/* candidate new value in %ecx:%ebx = old (%edx:%eax) <ins> i */
	movl %eax, %ebx
	movl %edx, %ecx
	\ins\()l %esi, %ebx
	\insc\()l %edi, %ecx
	LOCK_PREFIX
	cmpxchg8b (%ebp)
	jne 1b

10:
	/* return the value we stored */
	movl %ebx, %eax
	movl %ecx, %edx
	RESTORE edi
	RESTORE esi
	RESTORE ebx
	RESTORE ebp
	ret
	CFI_ENDPROC
ENDPROC(atomic64_\func\()_return_cx8)
.endm

addsub_return add add adc
addsub_return sub sub sbb
/*
 * incdec_return func ins insc:
 * expand to atomic64_<func>_return_cx8, which computes *v = *v +/- 1
 * atomically and returns the new value.  \ins/\insc are add/adc or
 * sub/sbb for the low/high words.
 *
 * In:  %esi = pointer to v
 * Out: %edx:%eax = new 64-bit value
 */
.macro incdec_return func ins insc
ENTRY(atomic64_\func\()_return_cx8)
	CFI_STARTPROC
	SAVE ebx

	read64 %esi
1:
	/* candidate new value in %ecx:%ebx = old +/- 1, carry/borrow
	 * propagated into the high word */
	movl %eax, %ebx
	movl %edx, %ecx
	\ins\()l $1, %ebx
	\insc\()l $0, %ecx
	LOCK_PREFIX
	cmpxchg8b (%esi)
	jne 1b

10:
	/* return the value we stored */
	movl %ebx, %eax
	movl %ecx, %edx
	RESTORE ebx
	ret
	CFI_ENDPROC
ENDPROC(atomic64_\func\()_return_cx8)
.endm

incdec_return inc add adc
incdec_return dec sub sbb
  113. ENTRY(atomic64_dec_if_positive_cx8)
  114. CFI_STARTPROC
  115. SAVE ebx
  116. read64 %esi
  117. 1:
  118. movl %eax, %ebx
  119. movl %edx, %ecx
  120. subl $1, %ebx
  121. sbb $0, %ecx
  122. js 2f
  123. LOCK_PREFIX
  124. cmpxchg8b (%esi)
  125. jne 1b
  126. 2:
  127. movl %ebx, %eax
  128. movl %ecx, %edx
  129. RESTORE ebx
  130. ret
  131. CFI_ENDPROC
  132. ENDPROC(atomic64_dec_if_positive_cx8)
/*
 * int atomic64_add_unless_cx8(atomic64_t *v, long long a, long long u)
 *
 * Add a to *v, unless *v == u.
 *
 * In:  %ecx = pointer to v, %eax:%edx (low:high) = a,
 *      %esi:%edi (low:high) = u
 * Out: %eax = 1 if the add was performed, 0 if *v equalled u
 */
ENTRY(atomic64_add_unless_cx8)
	CFI_STARTPROC
	SAVE ebp
	SAVE ebx
/* these just push these two parameters on the stack */
	SAVE edi
	SAVE esi

	/* %esi/%edi are reused for a; u stays at 0(%esp)/4(%esp) */
	movl %ecx, %ebp
	movl %eax, %esi
	movl %edx, %edi

	read64 %ebp
1:
	cmpl %eax, 0(%esp)	/* old low word == u low word? */
	je 4f			/* maybe equal: check the high word too */
2:
	/* candidate new value in %ecx:%ebx = old + a */
	movl %eax, %ebx
	movl %edx, %ecx
	addl %esi, %ebx
	adcl %edi, %ecx
	LOCK_PREFIX
	cmpxchg8b (%ebp)
	jne 1b

	movl $1, %eax		/* success */
3:
	addl $8, %esp		/* drop the saved u words */
	CFI_ADJUST_CFA_OFFSET -8
	RESTORE ebx
	RESTORE ebp
	ret
4:
	cmpl %edx, 4(%esp)	/* old high word == u high word? */
	jne 2b			/* only the low words matched: do the add */
	xorl %eax, %eax		/* *v == u: no add, return 0 */
	jmp 3b
	CFI_ENDPROC
ENDPROC(atomic64_add_unless_cx8)
/*
 * int atomic64_inc_not_zero_cx8(atomic64_t *v)
 *
 * Increment *v, unless it is zero.
 *
 * In:  %esi = pointer to v
 * Out: %eax = 1 if the increment was performed, 0 if *v was zero
 */
ENTRY(atomic64_inc_not_zero_cx8)
	CFI_STARTPROC
	SAVE ebx

	read64 %esi
1:
	testl %eax, %eax
	je 4f			/* low word is zero: must check the high word */
2:
	/* candidate new value in %ecx:%ebx = old + 1 */
	movl %eax, %ebx
	movl %edx, %ecx
	addl $1, %ebx
	adcl $0, %ecx
	LOCK_PREFIX
	cmpxchg8b (%esi)
	jne 1b

	movl $1, %eax		/* success */
3:
	RESTORE ebx
	ret
4:
	testl %edx, %edx
	jne 2b			/* only the low word was zero: do the increment */
	/* whole value is zero: %eax already holds 0, return it */
	jmp 3b
	CFI_ENDPROC
ENDPROC(atomic64_inc_not_zero_cx8)