copy_user_nocache_64.S

/*
 * Copyright 2008 Vitaly Mayatskikh <vmayatsk@redhat.com>
 * Copyright 2002 Andi Kleen, SuSE Labs.
 * Subject to the GNU Public License v2.
 *
 * Functions to copy from and to user space.
 */

#include <linux/linkage.h>
#include <asm/dwarf2.h>

#define FIX_ALIGNMENT 1

#include <asm/current.h>
#include <asm/asm-offsets.h>
#include <asm/thread_info.h>
#include <asm/asm.h>
#include <asm/smap.h>
	.macro ALIGN_DESTINATION
#ifdef FIX_ALIGNMENT
	/* check for bad alignment of destination */
	movl %edi,%ecx
	andl $7,%ecx
	jz 102f				/* already aligned */
	subl $8,%ecx
	negl %ecx			/* ecx = bytes needed to reach 8-byte alignment */
	subl %ecx,%edx			/* edx = bytes left after the alignment copy */
100:	movb (%rsi),%al
101:	movb %al,(%rdi)
	incq %rsi
	incq %rdi
	decl %ecx
	jnz 100b
102:
	.section .fixup,"ax"
103:	addl %ecx,%edx			/* ecx is zerorest also */
	jmp copy_user_handle_tail
	.previous

	_ASM_EXTABLE(100b,103b)
	_ASM_EXTABLE(101b,103b)
#endif
	.endm
/*
 * copy_user_nocache - Uncached memory copy with exception handling
 * This will force destination/source out of cache for more performance.
 */
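/*
 * Register use, as visible from the code below:
 *   rdi - destination, rsi - source, edx - byte count.
 * Returns 0 in eax on success; on a fault, copy_user_handle_tail
 * takes over and returns the number of bytes that were not copied.
 */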
ENTRY(__copy_user_nocache)
	CFI_STARTPROC
	ASM_STAC
	cmpl $8,%edx
	jb 20f		/* less than 8 bytes, go to byte copy loop */
	ALIGN_DESTINATION
	movl %edx,%ecx
	andl $63,%edx		/* edx = bytes left over after the 64-byte blocks */
	shrl $6,%ecx		/* ecx = number of 64-byte blocks */
	jz 17f
1:	movq (%rsi),%r8
2:	movq 1*8(%rsi),%r9
3:	movq 2*8(%rsi),%r10
4:	movq 3*8(%rsi),%r11
5:	movnti %r8,(%rdi)
6:	movnti %r9,1*8(%rdi)
7:	movnti %r10,2*8(%rdi)
8:	movnti %r11,3*8(%rdi)
9:	movq 4*8(%rsi),%r8
10:	movq 5*8(%rsi),%r9
11:	movq 6*8(%rsi),%r10
12:	movq 7*8(%rsi),%r11
13:	movnti %r8,4*8(%rdi)
14:	movnti %r9,5*8(%rdi)
15:	movnti %r10,6*8(%rdi)
16:	movnti %r11,7*8(%rdi)
	leaq 64(%rsi),%rsi
	leaq 64(%rdi),%rdi
	decl %ecx
	jnz 1b
17:	movl %edx,%ecx
	andl $7,%edx		/* edx = trailing bytes after the quadwords */
	shrl $3,%ecx		/* ecx = number of remaining quadwords */
	jz 20f
18:	movq (%rsi),%r8
19:	movnti %r8,(%rdi)
	leaq 8(%rsi),%rsi
	leaq 8(%rdi),%rdi
	decl %ecx
	jnz 18b
20:	andl %edx,%edx		/* any trailing bytes left? */
	jz 23f
	movl %edx,%ecx
21:	movb (%rsi),%al
22:	movb %al,(%rdi)
	incq %rsi
	incq %rdi
	decl %ecx
	jnz 21b
23:	xorl %eax,%eax
	ASM_CLAC
	sfence
	ret
	.section .fixup,"ax"
30:	shll $6,%ecx		/* ecx was the 64-byte block count: convert to bytes */
	addl %ecx,%edx		/* edx = total bytes not yet copied */
	jmp 60f
40:	lea (%rdx,%rcx,8),%rdx	/* ecx was the quadword count: rdx += rcx*8 */
	jmp 60f
50:	movl %ecx,%edx		/* ecx was the remaining byte count */
60:	sfence
	jmp copy_user_handle_tail
	.previous
	_ASM_EXTABLE(1b,30b)
	_ASM_EXTABLE(2b,30b)
	_ASM_EXTABLE(3b,30b)
	_ASM_EXTABLE(4b,30b)
	_ASM_EXTABLE(5b,30b)
	_ASM_EXTABLE(6b,30b)
	_ASM_EXTABLE(7b,30b)
	_ASM_EXTABLE(8b,30b)
	_ASM_EXTABLE(9b,30b)
	_ASM_EXTABLE(10b,30b)
	_ASM_EXTABLE(11b,30b)
	_ASM_EXTABLE(12b,30b)
	_ASM_EXTABLE(13b,30b)
	_ASM_EXTABLE(14b,30b)
	_ASM_EXTABLE(15b,30b)
	_ASM_EXTABLE(16b,30b)
	_ASM_EXTABLE(18b,40b)
	_ASM_EXTABLE(19b,40b)
	_ASM_EXTABLE(21b,50b)
	_ASM_EXTABLE(22b,50b)
	CFI_ENDPROC
ENDPROC(__copy_user_nocache)
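
For reference, a minimal C-side sketch of how this routine is typically reached, assuming the prototype with a zerorest argument used by kernels of this generation; the wrapper name example_read_nocache is purely illustrative and not part of this file:

/* Hypothetical usage sketch, for illustration only. */
extern long __copy_user_nocache(void *dst, const void __user *src,
				unsigned size, int zerorest);

static inline long
example_read_nocache(void *dst, const void __user *src, unsigned size)
{
	/*
	 * Returns 0 on success, or the number of bytes that could not be
	 * copied; the non-temporal movnti stores bypass the cache and the
	 * asm routine issues an sfence before returning.
	 */
	return __copy_user_nocache(dst, src, size, 1 /* zero the uncopied tail */);
}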