
/*
 * Copyright 2008 Vitaly Mayatskikh <vmayatsk@redhat.com>
 * Copyright 2002 Andi Kleen, SuSE Labs.
 * Subject to the GNU Public License v2.
 *
 * Functions to copy from and to user space.
 */

#include <linux/linkage.h>
#include <asm/dwarf2.h>

#define FIX_ALIGNMENT 1

#include <asm/current.h>
#include <asm/asm-offsets.h>
#include <asm/thread_info.h>
#include <asm/asm.h>
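
/*
 * ALIGN_DESTINATION: byte-copy until %rdi is 8-byte aligned, adjusting the
 * remaining count in %edx.  A fault during the alignment copy goes to the
 * .fixup code, which hands the leftover count to copy_user_handle_tail.
 * With FIX_ALIGNMENT undefined the macro body compiles to nothing.
 */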
	.macro ALIGN_DESTINATION
#ifdef FIX_ALIGNMENT
	/* check for bad alignment of destination */
	movl %edi,%ecx
	andl $7,%ecx
	jz 102f				/* already aligned */
	subl $8,%ecx
	negl %ecx
	subl %ecx,%edx
100:	movb (%rsi),%al
101:	movb %al,(%rdi)
	incq %rsi
	incq %rdi
	decl %ecx
	jnz 100b
102:
	.section .fixup,"ax"
103:	addl %ecx,%edx			/* ecx is zerorest also */
	jmp copy_user_handle_tail
	.previous

	_ASM_EXTABLE(100b,103b)
	_ASM_EXTABLE(101b,103b)
#endif
	.endm

/*
 * copy_user_nocache - Uncached memory copy with exception handling
 * This will force destination/source out of cache for more performance.
 */
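/*
 * Register usage (standard x86-64 calling convention):
 *   %rdi - destination, %rsi - source, %edx - byte count.
 * Returns 0 in %eax on success.  On a fault, the .fixup code below computes
 * the number of bytes still to copy in %edx and jumps to
 * copy_user_handle_tail to deal with the remainder.
 */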
ENTRY(__copy_user_nocache)
	CFI_STARTPROC
	cmpl $8,%edx
	jb 20f		/* less than 8 bytes, go to byte copy loop */
	ALIGN_DESTINATION
	movl %edx,%ecx
	andl $63,%edx
	shrl $6,%ecx
	jz 17f
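	/*
	 * Main loop: copy 64 bytes per iteration.  %ecx holds the number of
	 * 64-byte blocks, %edx the leftover byte count.  The movnti stores
	 * are non-temporal, so the destination data bypasses the cache.
	 */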
1:	movq (%rsi),%r8
2:	movq 1*8(%rsi),%r9
3:	movq 2*8(%rsi),%r10
4:	movq 3*8(%rsi),%r11
5:	movnti %r8,(%rdi)
6:	movnti %r9,1*8(%rdi)
7:	movnti %r10,2*8(%rdi)
8:	movnti %r11,3*8(%rdi)
9:	movq 4*8(%rsi),%r8
10:	movq 5*8(%rsi),%r9
11:	movq 6*8(%rsi),%r10
12:	movq 7*8(%rsi),%r11
13:	movnti %r8,4*8(%rdi)
14:	movnti %r9,5*8(%rdi)
15:	movnti %r10,6*8(%rdi)
16:	movnti %r11,7*8(%rdi)
	leaq 64(%rsi),%rsi
	leaq 64(%rdi),%rdi
	decl %ecx
	jnz 1b
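	/* copy any remaining whole quadwords, 8 bytes at a time */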
17:	movl %edx,%ecx
	andl $7,%edx
	shrl $3,%ecx
	jz 20f
18:	movq (%rsi),%r8
19:	movnti %r8,(%rdi)
	leaq 8(%rsi),%rsi
	leaq 8(%rdi),%rdi
	decl %ecx
	jnz 18b
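	/*
	 * Copy the trailing 1-7 bytes; the sfence makes the weakly ordered
	 * non-temporal stores globally visible before returning 0.
	 */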
20:	andl %edx,%edx
	jz 23f
	movl %edx,%ecx
21:	movb (%rsi),%al
22:	movb %al,(%rdi)
	incq %rsi
	incq %rdi
	decl %ecx
	jnz 21b
23:	xorl %eax,%eax
	sfence
	ret
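
	/*
	 * Fault fixups: recompute the number of bytes left in %edx
	 * (30: faulted in the 64-byte loop, 40: in the 8-byte loop,
	 * 50: in the byte loop), fence the non-temporal stores and let
	 * copy_user_handle_tail handle the rest.
	 */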
	.section .fixup,"ax"
30:	shll $6,%ecx
	addl %ecx,%edx
	jmp 60f
40:	lea (%rdx,%rcx,8),%rdx
	jmp 60f
50:	movl %ecx,%edx
60:	sfence
	jmp copy_user_handle_tail
	.previous
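	/* exception table: map each faulting load/store above to its fixup */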
	_ASM_EXTABLE(1b,30b)
	_ASM_EXTABLE(2b,30b)
	_ASM_EXTABLE(3b,30b)
	_ASM_EXTABLE(4b,30b)
	_ASM_EXTABLE(5b,30b)
	_ASM_EXTABLE(6b,30b)
	_ASM_EXTABLE(7b,30b)
	_ASM_EXTABLE(8b,30b)
	_ASM_EXTABLE(9b,30b)
	_ASM_EXTABLE(10b,30b)
	_ASM_EXTABLE(11b,30b)
	_ASM_EXTABLE(12b,30b)
	_ASM_EXTABLE(13b,30b)
	_ASM_EXTABLE(14b,30b)
	_ASM_EXTABLE(15b,30b)
	_ASM_EXTABLE(16b,30b)
	_ASM_EXTABLE(18b,40b)
	_ASM_EXTABLE(19b,40b)
	_ASM_EXTABLE(21b,50b)
	_ASM_EXTABLE(22b,50b)
	CFI_ENDPROC
ENDPROC(__copy_user_nocache)