calling.h

/*
 * Some macros to handle stack frames in assembly.
 */

#include <linux/config.h>
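
/*
 * Byte offsets of the saved registers in the stack frame built by the
 * macros below (they mirror the x86-64 struct pt_regs layout).
 */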
#define R15 0
#define R14 8
#define R13 16
#define R12 24
#define RBP 32
#define RBX 40
/* arguments: interrupts/non-tracing syscalls only save up to here */
#define R11 48
#define R10 56
#define R9 64
#define R8 72
#define RAX 80
#define RCX 88
#define RDX 96
#define RSI 104
#define RDI 112
#define ORIG_RAX 120	/* + error_code */
/* end of arguments */
/* cpu exception frame or undefined in case of fast syscall. */
#define RIP 128
#define CS 136
#define EFLAGS 144
#define RSP 152
#define SS 160

#define ARGOFFSET R11
#define SWFRAME ORIG_RAX
	.macro SAVE_ARGS addskip=0,norcx=0,nor891011=0
	subq	$9*8+\addskip,%rsp
	CFI_ADJUST_CFA_OFFSET	9*8+\addskip
	movq	%rdi,8*8(%rsp)
	CFI_REL_OFFSET	rdi,8*8
	movq	%rsi,7*8(%rsp)
	CFI_REL_OFFSET	rsi,7*8
	movq	%rdx,6*8(%rsp)
	CFI_REL_OFFSET	rdx,6*8
	.if \norcx
	.else
	movq	%rcx,5*8(%rsp)
	CFI_REL_OFFSET	rcx,5*8
	.endif
	movq	%rax,4*8(%rsp)
	CFI_REL_OFFSET	rax,4*8
	.if \nor891011
	.else
	movq	%r8,3*8(%rsp)
	CFI_REL_OFFSET	r8,3*8
	movq	%r9,2*8(%rsp)
	CFI_REL_OFFSET	r9,2*8
	movq	%r10,1*8(%rsp)
	CFI_REL_OFFSET	r10,1*8
	movq	%r11,(%rsp)
	CFI_REL_OFFSET	r11,0*8
	.endif
	.endm
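
/*
 * Illustrative sketch (not part of the original header): SAVE_ARGS and
 * RESTORE_ARGS are meant to bracket code that may clobber the caller-saved
 * argument registers.  The label and C function below are hypothetical.
 *
 *	example_stub:
 *		SAVE_ARGS
 *		call	example_c_handler	# hypothetical; may clobber arg regs
 *		RESTORE_ARGS
 *		ret
 */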

#define ARG_SKIP 9*8

	.macro RESTORE_ARGS skiprax=0,addskip=0,skiprcx=0,skipr11=0,skipr8910=0,skiprdx=0
	.if \skipr11
	.else
	movq	(%rsp),%r11
	.endif
	.if \skipr8910
	.else
	movq	1*8(%rsp),%r10
	movq	2*8(%rsp),%r9
	movq	3*8(%rsp),%r8
	.endif
	.if \skiprax
	.else
	movq	4*8(%rsp),%rax
	.endif
	.if \skiprcx
	.else
	movq	5*8(%rsp),%rcx
	.endif
	.if \skiprdx
	.else
	movq	6*8(%rsp),%rdx
	.endif
	movq	7*8(%rsp),%rsi
	movq	8*8(%rsp),%rdi
	.if ARG_SKIP+\addskip > 0
	addq	$ARG_SKIP+\addskip,%rsp
	CFI_ADJUST_CFA_OFFSET	-(ARG_SKIP+\addskip)
	.endif
	.endm
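
/*
 * LOAD_ARGS refills the argument registers from a saved frame whose R11
 * slot sits \offset bytes above %rsp; note that %rax is reloaded from the
 * ORIG_RAX slot rather than from the saved RAX slot.
 */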
	.macro LOAD_ARGS offset
	movq	\offset(%rsp),%r11
	movq	\offset+8(%rsp),%r10
	movq	\offset+16(%rsp),%r9
	movq	\offset+24(%rsp),%r8
	movq	\offset+40(%rsp),%rcx
	movq	\offset+48(%rsp),%rdx
	movq	\offset+56(%rsp),%rsi
	movq	\offset+64(%rsp),%rdi
	movq	\offset+72(%rsp),%rax
	.endm

#define REST_SKIP 6*8

	.macro SAVE_REST
	subq	$REST_SKIP,%rsp
	CFI_ADJUST_CFA_OFFSET	REST_SKIP
	movq	%rbx,5*8(%rsp)
	CFI_REL_OFFSET	rbx,5*8
	movq	%rbp,4*8(%rsp)
	CFI_REL_OFFSET	rbp,4*8
	movq	%r12,3*8(%rsp)
	CFI_REL_OFFSET	r12,3*8
	movq	%r13,2*8(%rsp)
	CFI_REL_OFFSET	r13,2*8
	movq	%r14,1*8(%rsp)
	CFI_REL_OFFSET	r14,1*8
	movq	%r15,(%rsp)
	CFI_REL_OFFSET	r15,0*8
	.endm

	.macro RESTORE_REST
	movq	(%rsp),%r15
	movq	1*8(%rsp),%r14
	movq	2*8(%rsp),%r13
	movq	3*8(%rsp),%r12
	movq	4*8(%rsp),%rbp
	movq	5*8(%rsp),%rbx
	addq	$REST_SKIP,%rsp
	CFI_ADJUST_CFA_OFFSET	-(REST_SKIP)
	.endm

	.macro SAVE_ALL
	SAVE_ARGS
	SAVE_REST
	.endm

	.macro RESTORE_ALL addskip=0
	RESTORE_REST
	RESTORE_ARGS 0,\addskip
	.endm
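
/*
 * icebp emits the one-byte 0xf1 opcode (the undocumented INT1/ICEBP
 * instruction, which raises a debug trap); a macro is used here
 * presumably because the assembler lacks a mnemonic for it.
 */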
	.macro icebp
	.byte 0xf1
	.endm
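
/*
 * With CONFIG_FRAME_POINTER, ENTER/LEAVE expand to the enter/leave
 * instructions so that assembly code keeps an %rbp frame; otherwise
 * they expand to nothing.
 */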
#ifdef CONFIG_FRAME_POINTER
#define ENTER enter
#define LEAVE leave
#else
#define ENTER
#define LEAVE
#endif