/* suspend_asm.S -- x86-64 swsusp (software suspend) low-level suspend/resume */
/* Copyright 2004,2005 Pavel Machek <pavel@suse.cz>, Andi Kleen <ak@suse.de>, Rafael J. Wysocki <rjw@sisk.pl>
 *
 * Distribute under GPLv2.
 *
 * swsusp_arch_resume may not use any stack, nor any variable that is
 * not "NoSave" during copying pages:
 *
 * It's rewriting one kernel image with another. What is stack in the "old"
 * image could very well be a data page in the "new" image, and overwriting
 * your own stack under you is a bad idea.
 */
.text
#include <linux/linkage.h>
#include <asm/segment.h>
#include <asm/page.h>
#include <asm/offset.h>
  17. ENTRY(swsusp_arch_suspend)
  18. movq %rsp, saved_context_esp(%rip)
  19. movq %rax, saved_context_eax(%rip)
  20. movq %rbx, saved_context_ebx(%rip)
  21. movq %rcx, saved_context_ecx(%rip)
  22. movq %rdx, saved_context_edx(%rip)
  23. movq %rbp, saved_context_ebp(%rip)
  24. movq %rsi, saved_context_esi(%rip)
  25. movq %rdi, saved_context_edi(%rip)
  26. movq %r8, saved_context_r08(%rip)
  27. movq %r9, saved_context_r09(%rip)
  28. movq %r10, saved_context_r10(%rip)
  29. movq %r11, saved_context_r11(%rip)
  30. movq %r12, saved_context_r12(%rip)
  31. movq %r13, saved_context_r13(%rip)
  32. movq %r14, saved_context_r14(%rip)
  33. movq %r15, saved_context_r15(%rip)
  34. pushfq ; popq saved_context_eflags(%rip)
  35. call swsusp_save
  36. ret
  37. ENTRY(swsusp_arch_resume)
  38. /* set up cr3 */
  39. leaq init_level4_pgt(%rip),%rax
  40. subq $__START_KERNEL_map,%rax
  41. movq %rax,%cr3
  42. movq mmu_cr4_features(%rip), %rax
  43. movq %rax, %rdx
  44. andq $~(1<<7), %rdx # PGE
  45. movq %rdx, %cr4; # turn off PGE
  46. movq %cr3, %rcx; # flush TLB
  47. movq %rcx, %cr3;
  48. movq %rax, %cr4; # turn PGE back on
  49. movq pagedir_nosave(%rip), %rdx
  50. loop:
  51. testq %rdx, %rdx
  52. jz done
  53. /* get addresses from the pbe and copy the page */
  54. movq pbe_address(%rdx), %rsi
  55. movq pbe_orig_address(%rdx), %rdi
  56. movq $512, %rcx
  57. rep
  58. movsq
  59. /* progress to the next pbe */
  60. movq pbe_next(%rdx), %rdx
  61. jmp loop
  62. done:
  63. /* Flush TLB, including "global" things (vmalloc) */
  64. movq mmu_cr4_features(%rip), %rax
  65. movq %rax, %rdx
  66. andq $~(1<<7), %rdx; # PGE
  67. movq %rdx, %cr4; # turn off PGE
  68. movq %cr3, %rcx; # flush TLB
  69. movq %rcx, %cr3
  70. movq %rax, %cr4; # turn PGE back on
  71. movl $24, %eax
  72. movl %eax, %ds
  73. movq saved_context_esp(%rip), %rsp
  74. movq saved_context_ebp(%rip), %rbp
  75. /* Don't restore %rax, it must be 0 anyway */
  76. movq saved_context_ebx(%rip), %rbx
  77. movq saved_context_ecx(%rip), %rcx
  78. movq saved_context_edx(%rip), %rdx
  79. movq saved_context_esi(%rip), %rsi
  80. movq saved_context_edi(%rip), %rdi
  81. movq saved_context_r08(%rip), %r8
  82. movq saved_context_r09(%rip), %r9
  83. movq saved_context_r10(%rip), %r10
  84. movq saved_context_r11(%rip), %r11
  85. movq saved_context_r12(%rip), %r12
  86. movq saved_context_r13(%rip), %r13
  87. movq saved_context_r14(%rip), %r14
  88. movq saved_context_r15(%rip), %r15
  89. pushq saved_context_eflags(%rip) ; popfq
  90. xorq %rax, %rax
  91. ret