atomic64_cx8_32.S

/*
 * atomic64_t for 586+
 *
 * Copyright © 2010 Luca Barbieri
 *
 * This program is free software; you can redistribute it and/or modify
 * it under the terms of the GNU General Public License as published by
 * the Free Software Foundation; either version 2 of the License, or
 * (at your option) any later version.
 */

#include <linux/linkage.h>
#include <asm/alternative-asm.h>
#include <asm/dwarf2.h>
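
/*
 * SAVE pushes a register and records the save in the DWARF unwind state
 * (pushl_cfi adjusts the CFA offset, CFI_REL_OFFSET notes where the
 * register now lives on the stack); RESTORE pops it and marks the
 * register as restored, keeping the unwind info accurate throughout.
 */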
.macro SAVE reg
	pushl_cfi %\reg
	CFI_REL_OFFSET \reg, 0
.endm

.macro RESTORE reg
	popl_cfi %\reg
	CFI_RESTORE \reg
.endm
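
/*
 * read64 leaves the 64-bit value at (\reg) in %edx:%eax.  Seeding
 * %eax/%edx from %ebx/%ecx means that if cmpxchg8b's compare succeeds
 * it rewrites the same value, and if it fails it loads the current
 * value into %edx:%eax; either way we get an atomic 64-bit snapshot.
 */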
.macro read64 reg
	movl %ebx, %eax
	movl %ecx, %edx
/* we need LOCK_PREFIX since otherwise cmpxchg8b always does the write */
	LOCK_PREFIX
	cmpxchg8b (\reg)
.endm
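
/*
 * None of these helpers is called with the normal C ABI: the inline-asm
 * wrappers (see <asm/atomic64_32.h>) pass arguments in fixed registers.
 * For atomic64_read_cx8:
 *   in:  %ecx = pointer to the atomic64_t
 *   out: %edx:%eax = its value
 */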
ENTRY(atomic64_read_cx8)
	CFI_STARTPROC

	read64 %ecx
	ret
	CFI_ENDPROC
ENDPROC(atomic64_read_cx8)
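
/*
 * atomic64_set_cx8:
 *   in: %esi = pointer, %ebx:%ecx = new value (low:high)
 * %edx:%eax start out stale, so the first cmpxchg8b typically fails and
 * reloads them with the current value; the retry then stores %ecx:%ebx.
 */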
ENTRY(atomic64_set_cx8)
	CFI_STARTPROC

1:
/* we don't need LOCK_PREFIX since aligned 64-bit writes
 * are atomic on 586 and newer */
	cmpxchg8b (%esi)
	jne 1b

	ret
	CFI_ENDPROC
ENDPROC(atomic64_set_cx8)
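
/*
 * atomic64_xchg_cx8:
 *   in:  %esi = pointer, %ebx:%ecx = new value (low:high)
 *   out: %edx:%eax = old value
 * The expected value is seeded with the new one, so if the memory
 * already holds it the first locked cmpxchg8b succeeds immediately.
 */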
ENTRY(atomic64_xchg_cx8)
	CFI_STARTPROC

	movl %ebx, %eax
	movl %ecx, %edx
1:
	LOCK_PREFIX
	cmpxchg8b (%esi)
	jne 1b

	ret
	CFI_ENDPROC
ENDPROC(atomic64_xchg_cx8)
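
/*
 * addsub_return expands to atomic64_add_return_cx8 and
 * atomic64_sub_return_cx8:
 *   in:  %edx:%eax = 64-bit operand, %ecx = pointer
 *   out: %edx:%eax = new value of the atomic64_t
 * \ins/\insc name the low/high-half instructions (add/adc or sub/sbb),
 * so the carry or borrow propagates into the high word.
 */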
.macro addsub_return func ins insc
ENTRY(atomic64_\func\()_return_cx8)
	CFI_STARTPROC
	SAVE ebp
	SAVE ebx
	SAVE esi
	SAVE edi

	movl %eax, %esi
	movl %edx, %edi
	movl %ecx, %ebp

	read64 %ebp
1:
	movl %eax, %ebx
	movl %edx, %ecx
	\ins\()l %esi, %ebx
	\insc\()l %edi, %ecx
	LOCK_PREFIX
	cmpxchg8b (%ebp)
	jne 1b

10:
	movl %ebx, %eax
	movl %ecx, %edx
	RESTORE edi
	RESTORE esi
	RESTORE ebx
	RESTORE ebp
	ret
	CFI_ENDPROC
ENDPROC(atomic64_\func\()_return_cx8)
.endm

addsub_return add add adc
addsub_return sub sub sbb
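
/*
 * incdec_return expands to atomic64_inc_return_cx8 and
 * atomic64_dec_return_cx8; same idea with a constant operand of 1:
 *   in:  %esi = pointer
 *   out: %edx:%eax = new value of the atomic64_t
 */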
.macro incdec_return func ins insc
ENTRY(atomic64_\func\()_return_cx8)
	CFI_STARTPROC
	SAVE ebx

	read64 %esi
1:
	movl %eax, %ebx
	movl %edx, %ecx
	\ins\()l $1, %ebx
	\insc\()l $0, %ecx
	LOCK_PREFIX
	cmpxchg8b (%esi)
	jne 1b

10:
	movl %ebx, %eax
	movl %ecx, %edx
	RESTORE ebx
	ret
	CFI_ENDPROC
ENDPROC(atomic64_\func\()_return_cx8)
.endm

incdec_return inc add adc
incdec_return dec sub sbb
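
/*
 * atomic64_dec_if_positive_cx8:
 *   in:  %esi = pointer
 *   out: %edx:%eax = old value minus one
 * The decremented value is written back only when it is not negative;
 * the js skips the locked cmpxchg8b once the sign bit is set.
 */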
ENTRY(atomic64_dec_if_positive_cx8)
	CFI_STARTPROC
	SAVE ebx

	read64 %esi
1:
	movl %eax, %ebx
	movl %edx, %ecx
	subl $1, %ebx
	sbbl $0, %ecx
	js 2f
	LOCK_PREFIX
	cmpxchg8b (%esi)
	jne 1b

2:
	movl %ebx, %eax
	movl %ecx, %edx
	RESTORE ebx
	ret
	CFI_ENDPROC
ENDPROC(atomic64_dec_if_positive_cx8)
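
/*
 * atomic64_add_unless_cx8: add 'a' to the atomic64_t unless it equals 'u'.
 *   in:  %edx:%eax = a, %ecx = pointer, %esi:%edi = u (low:high)
 *   out: %eax = 1 if the add was performed, 0 if the value equalled u
 * u is parked on the stack so %esi/%edi can be reused to hold a while
 * %eax/%edx cycle through cmpxchg8b.
 */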
ENTRY(atomic64_add_unless_cx8)
	CFI_STARTPROC
	SAVE ebp
	SAVE ebx
/* these two SAVEs just park the low/high halves of 'u' on the stack */
	SAVE edi
	SAVE esi

	movl %ecx, %ebp
	movl %eax, %esi
	movl %edx, %edi

	read64 %ebp
1:
	cmpl %eax, 0(%esp)
	je 4f
2:
	movl %eax, %ebx
	movl %edx, %ecx
	addl %esi, %ebx
	adcl %edi, %ecx
	LOCK_PREFIX
	cmpxchg8b (%ebp)
	jne 1b

	movl $1, %eax
3:
	addl $8, %esp
	CFI_ADJUST_CFA_OFFSET -8
	RESTORE ebx
	RESTORE ebp
	ret
4:
	cmpl %edx, 4(%esp)
	jne 2b
	xorl %eax, %eax
	jmp 3b
	CFI_ENDPROC
ENDPROC(atomic64_add_unless_cx8)
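
/*
 * atomic64_inc_not_zero_cx8: increment the atomic64_t unless it is zero.
 *   in:  %esi = pointer
 *   out: %eax = 1 if the increment was performed, 0 otherwise
 * The zero test is split across the halves: a zero low word (%eax)
 * only aborts if the high word (%edx), checked at 4:, is zero too.
 */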
ENTRY(atomic64_inc_not_zero_cx8)
	CFI_STARTPROC
	SAVE ebx

	read64 %esi
1:
	testl %eax, %eax
	je 4f
2:
	movl %eax, %ebx
	movl %edx, %ecx
	addl $1, %ebx
	adcl $0, %ecx
	LOCK_PREFIX
	cmpxchg8b (%esi)
	jne 1b

	movl $1, %eax
3:
	RESTORE ebx
	ret
4:
	testl %edx, %edx
	jne 2b
	jmp 3b
	CFI_ENDPROC
ENDPROC(atomic64_inc_not_zero_cx8)