efi_stub_64.S

/*
 * Function calling ABI conversion from Linux to EFI for x86_64
 *
 * Copyright (C) 2007 Intel Corp
 *      Bibo Mao <bibo.mao@intel.com>
 *      Huang Ying <ying.huang@intel.com>
 */
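
/*
 * A sketch of the C-side declarations these stubs are expected to back
 * (illustrative only; the exact prototypes come from the kernel's EFI
 * headers):
 *
 *      extern u64 efi_call0(void *fp);
 *      extern u64 efi_call1(void *fp, u64 arg1);
 *      ...
 *      extern u64 efi_call6(void *fp, u64 arg1, u64 arg2, u64 arg3,
 *                           u64 arg4, u64 arg5, u64 arg6);
 *
 * Linux follows the System V AMD64 convention, so the function pointer
 * arrives in %rdi and the arguments in %rsi, %rdx, %rcx, %r8, %r9 and
 * then the stack.  UEFI firmware follows the Microsoft x64 convention,
 * which expects the arguments in %rcx, %rdx, %r8, %r9 and the stack,
 * with 32 bytes of shadow space reserved by the caller.  Each stub
 * below re-marshals the registers and calls through the pointer in %rdi.
 */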

#include <linux/linkage.h>
#define SAVE_XMM                        \
        mov %rsp, %rax;                 \
        subq $0x70, %rsp;               \
        and $~0xf, %rsp;                \
        mov %rax, (%rsp);               \
        mov %cr0, %rax;                 \
        clts;                           \
        mov %rax, 0x8(%rsp);            \
        movaps %xmm0, 0x60(%rsp);       \
        movaps %xmm1, 0x50(%rsp);       \
        movaps %xmm2, 0x40(%rsp);       \
        movaps %xmm3, 0x30(%rsp);       \
        movaps %xmm4, 0x20(%rsp);       \
        movaps %xmm5, 0x10(%rsp)

#define RESTORE_XMM                     \
        movaps 0x60(%rsp), %xmm0;       \
        movaps 0x50(%rsp), %xmm1;       \
        movaps 0x40(%rsp), %xmm2;       \
        movaps 0x30(%rsp), %xmm3;       \
        movaps 0x20(%rsp), %xmm4;       \
        movaps 0x10(%rsp), %xmm5;       \
        mov 0x8(%rsp), %rsi;            \
        mov %rsi, %cr0;                 \
        mov (%rsp), %rsp
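
/*
 * The Microsoft x64 ABI makes the caller reserve 32 bytes of shadow
 * space (home slots for the four register arguments) just above the
 * callee's return address, so every stub drops %rsp by at least 32
 * bytes around the indirect call.
 */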
ENTRY(efi_call0)
        SAVE_XMM
        subq $32, %rsp
        call *%rdi
        addq $32, %rsp
        RESTORE_XMM
        ret
ENDPROC(efi_call0)

ENTRY(efi_call1)
        SAVE_XMM
        subq $32, %rsp
        mov %rsi, %rcx
        call *%rdi
        addq $32, %rsp
        RESTORE_XMM
        ret
ENDPROC(efi_call1)

ENTRY(efi_call2)
        SAVE_XMM
        subq $32, %rsp
        mov %rsi, %rcx
        call *%rdi
        addq $32, %rsp
        RESTORE_XMM
        ret
ENDPROC(efi_call2)
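
/*
 * efi_call3 and efi_call4 copy the argument registers highest-first
 * (%r8 to %r9, then %rcx to %r8, then %rsi to %rcx) so that no incoming
 * value is clobbered before it has been read; %rdx holds the second
 * argument in both conventions and stays where it is.
 */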
ENTRY(efi_call3)
        SAVE_XMM
        subq $32, %rsp
        mov %rcx, %r8
        mov %rsi, %rcx
        call *%rdi
        addq $32, %rsp
        RESTORE_XMM
        ret
ENDPROC(efi_call3)

ENTRY(efi_call4)
        SAVE_XMM
        subq $32, %rsp
        mov %r8, %r9
        mov %rcx, %r8
        mov %rsi, %rcx
        call *%rdi
        addq $32, %rsp
        RESTORE_XMM
        ret
ENDPROC(efi_call4)
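
/*
 * With five arguments the fifth must go on the stack above the shadow
 * space, so efi_call5 reserves 48 bytes (32 of shadow space, 8 for the
 * argument, 8 of padding to keep %rsp 16-byte aligned) and spills the
 * incoming %r9 to 32(%rsp).
 */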
ENTRY(efi_call5)
        SAVE_XMM
        subq $48, %rsp
        mov %r9, 32(%rsp)
        mov %r8, %r9
        mov %rcx, %r8
        mov %rsi, %rcx
        call *%rdi
        addq $48, %rsp
        RESTORE_XMM
        ret
ENDPROC(efi_call5)
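
/*
 * efi_call6 must also fetch the sixth EFI argument, which the Linux
 * caller passed on its own stack.  SAVE_XMM stored the original %rsp at
 * (%rsp); the slot just above the return address in that old frame,
 * 8(%rax), holds the seventh System V argument, and it is copied into
 * 40(%rsp), the second stack slot above the shadow space.
 */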
ENTRY(efi_call6)
        SAVE_XMM
        mov (%rsp), %rax
        mov 8(%rax), %rax
        subq $48, %rsp
        mov %r9, 32(%rsp)
        mov %rax, 40(%rsp)
        mov %r8, %r9
        mov %rcx, %r8
        mov %rsi, %rcx
        call *%rdi
        addq $48, %rsp
        RESTORE_XMM
        ret
ENDPROC(efi_call6)