efi_stub_64.S

/*
 * Function calling ABI conversion from Linux to EFI for x86_64
 *
 * Copyright (C) 2007 Intel Corp
 *	Bibo Mao <bibo.mao@intel.com>
 *	Huang Ying <ying.huang@intel.com>
 */

#include <linux/linkage.h>
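
/*
 * The kernel calls these thunks with the System V AMD64 convention
 * (arguments in %rdi, %rsi, %rdx, %rcx, %r8, %r9); EFI runtime services
 * follow the Microsoft x64 convention (arguments in %rcx, %rdx, %r8,
 * %r9, then the stack, with a 32-byte register "shadow" area reserved
 * by the caller).  %rdi carries the EFI function pointer; the remaining
 * Linux arguments are rotated into the EFI argument registers before
 * the indirect call.
 *
 * SAVE_XMM: remember the caller's %rsp, carve out 0x70 bytes and align
 * the stack to 16 bytes, save %cr0 and clear CR0.TS (clts) so the
 * movaps instructions below cannot fault, then spill %xmm0-%xmm5, which
 * the firmware may clobber.
 */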
#define SAVE_XMM                        \
        mov %rsp, %rax;                 \
        subq $0x70, %rsp;               \
        and $~0xf, %rsp;                \
        mov %rax, (%rsp);               \
        mov %cr0, %rax;                 \
        clts;                           \
        mov %rax, 0x8(%rsp);            \
        movaps %xmm0, 0x60(%rsp);       \
        movaps %xmm1, 0x50(%rsp);       \
        movaps %xmm2, 0x40(%rsp);       \
        movaps %xmm3, 0x30(%rsp);       \
        movaps %xmm4, 0x20(%rsp);       \
        movaps %xmm5, 0x10(%rsp)
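
/*
 * RESTORE_XMM: undo SAVE_XMM - reload %xmm0-%xmm5, restore the saved
 * %cr0 (bringing back the original TS bit) and switch back to the
 * caller's stack pointer.
 */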
#define RESTORE_XMM                     \
        movaps 0x60(%rsp), %xmm0;       \
        movaps 0x50(%rsp), %xmm1;       \
        movaps 0x40(%rsp), %xmm2;       \
        movaps 0x30(%rsp), %xmm3;       \
        movaps 0x20(%rsp), %xmm4;       \
        movaps 0x10(%rsp), %xmm5;       \
        mov 0x8(%rsp), %rsi;            \
        mov %rsi, %cr0;                 \
        mov (%rsp), %rsp
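
/*
 * efi_callN(fn, arg1, ..., argN): fn arrives in %rdi and arg1..argN in
 * %rsi, %rdx, %rcx, %r8, %r9 (plus the stack for efi_call6).  Each thunk
 * shuffles them into the Microsoft x64 argument registers - arg1 into
 * %rcx, arg2 already sits in %rdx, arg3 into %r8, arg4 into %r9 -
 * reserves the 32-byte shadow area, and calls the firmware entry point.
 * The moves are done in reverse order so no incoming register is
 * overwritten before it has been copied.
 */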
ENTRY(efi_call0)
        SAVE_XMM
        subq $32, %rsp
        call *%rdi
        addq $32, %rsp
        RESTORE_XMM
        ret

ENTRY(efi_call1)
        SAVE_XMM
        subq $32, %rsp
        mov %rsi, %rcx
        call *%rdi
        addq $32, %rsp
        RESTORE_XMM
        ret

ENTRY(efi_call2)
        SAVE_XMM
        subq $32, %rsp
        mov %rsi, %rcx
        call *%rdi
        addq $32, %rsp
        RESTORE_XMM
        ret

ENTRY(efi_call3)
        SAVE_XMM
        subq $32, %rsp
        mov %rcx, %r8
        mov %rsi, %rcx
        call *%rdi
        addq $32, %rsp
        RESTORE_XMM
        ret

ENTRY(efi_call4)
        SAVE_XMM
        subq $32, %rsp
        mov %r8, %r9
        mov %rcx, %r8
        mov %rsi, %rcx
        call *%rdi
        addq $32, %rsp
        RESTORE_XMM
        ret
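
/*
 * From five arguments on, the extra EFI arguments go on the stack above
 * the 32-byte shadow area: 48 bytes are reserved so that arg5 (Linux
 * %r9) lands at 32(%rsp) and, in efi_call6, arg6 at 40(%rsp), while the
 * stack stays 16-byte aligned at the call.
 */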
ENTRY(efi_call5)
        SAVE_XMM
        subq $48, %rsp
        mov %r9, 32(%rsp)
        mov %r8, %r9
        mov %rcx, %r8
        mov %rsi, %rcx
        call *%rdi
        addq $48, %rsp
        RESTORE_XMM
        ret
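
/*
 * efi_call6: the sixth EFI argument is the seventh Linux argument and
 * therefore arrives on the caller's stack.  SAVE_XMM stored the original
 * %rsp at (%rsp), so the first mov recovers it and 8(%rax) picks up the
 * argument just above the return address; it is then placed at 40(%rsp)
 * as the EFI stack argument.
 */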
ENTRY(efi_call6)
        SAVE_XMM
        mov (%rsp), %rax
        mov 8(%rax), %rax
        subq $48, %rsp
        mov %r9, 32(%rsp)
        mov %rax, 40(%rsp)
        mov %r8, %r9
        mov %rcx, %r8
        mov %rsi, %rcx
        call *%rdi
        addq $48, %rsp
        RESTORE_XMM
        ret