ftrace.h

#ifndef _ASM_X86_FTRACE_H
#define _ASM_X86_FTRACE_H

#ifdef __ASSEMBLY__

	/* skip is set if the stack was already partially adjusted */
	.macro MCOUNT_SAVE_FRAME skip=0
	/*
	 * We add enough stack to save all regs.
	 */
	subq $(SS+8-\skip), %rsp
	movq %rax, RAX(%rsp)
	movq %rcx, RCX(%rsp)
	movq %rdx, RDX(%rsp)
	movq %rsi, RSI(%rsp)
	movq %rdi, RDI(%rsp)
	movq %r8, R8(%rsp)
	movq %r9, R9(%rsp)
	/* Move RIP to its proper location */
	movq SS+8(%rsp), %rdx
	movq %rdx, RIP(%rsp)
	.endm
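
	/*
	 * Note: the register names used as offsets above (RAX, RCX, ...,
	 * RIP, SS) are the byte offsets of the corresponding fields of
	 * struct pt_regs, so SS+8 is the size of a full pt_regs frame.
	 * After the subq, SS+8(%rsp) is the return address pushed by the
	 * call to mcount, i.e. an address inside the traced function,
	 * which is copied into the RIP slot of the frame.
	 */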

	.macro MCOUNT_RESTORE_FRAME skip=0
	movq R9(%rsp), %r9
	movq R8(%rsp), %r8
	movq RDI(%rsp), %rdi
	movq RSI(%rsp), %rsi
	movq RDX(%rsp), %rdx
	movq RCX(%rsp), %rcx
	movq RAX(%rsp), %rax
	addq $(SS+8-\skip), %rsp
	.endm

#endif /* __ASSEMBLY__ */

#ifdef CONFIG_FUNCTION_TRACER
#define MCOUNT_ADDR ((long)(mcount))
#define MCOUNT_INSN_SIZE 5 /* sizeof mcount call */
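
/*
 * The mcount call emitted at each function entry is a single 5-byte
 * instruction (a 1-byte call opcode plus a 32-bit relative
 * displacement); dynamic ftrace patches exactly these 5 bytes when it
 * enables or disables tracing of a function.
 */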

#ifdef CONFIG_DYNAMIC_FTRACE
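/*
 * These tell the core ftrace code that the x86 trampolines can pass
 * the ftrace_ops pointer and, on request, a full pt_regs to the
 * registered callback.
 */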
#define ARCH_SUPPORTS_FTRACE_OPS 1
#define ARCH_SUPPORTS_FTRACE_SAVE_REGS
#endif

#ifndef __ASSEMBLY__
extern void mcount(void);
extern atomic_t modifying_ftrace_code;
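/*
 * modifying_ftrace_code is non-zero while ftrace is rewriting mcount
 * call sites; the int3 trap path checks it before handing a breakpoint
 * hit to ftrace_int3_handler() below.
 */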

static inline unsigned long ftrace_call_adjust(unsigned long addr)
{
	/*
	 * addr is the address of the mcount call instruction.
	 * recordmcount does the necessary offset calculation.
	 */
	return addr;
}

#ifdef CONFIG_DYNAMIC_FTRACE

struct dyn_arch_ftrace {
	/* No extra data needed for x86 */
};
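
/*
 * Dynamic ftrace rewrites call sites live by first planting an int3
 * breakpoint on the instruction being modified; ftrace_int3_handler()
 * lets a CPU that hits that breakpoint skip past the half-patched
 * instruction instead of executing a torn instruction.
 */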
int ftrace_int3_handler(struct pt_regs *regs);
#endif /* CONFIG_DYNAMIC_FTRACE */
#endif /* __ASSEMBLY__ */
#endif /* CONFIG_FUNCTION_TRACER */

#endif /* _ASM_X86_FTRACE_H */
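
/*
 * Usage sketch (illustrative only, not part of this header, and not the
 * kernel's actual trampoline): the arch assembly trampoline that patched
 * call sites jump to pairs the two macros around the call into the
 * tracer, roughly:
 *
 *	MCOUNT_SAVE_FRAME
 *	movq RIP(%rsp), %rdi		# return address inside traced function
 *	subq $MCOUNT_INSN_SIZE, %rdi	# back up to the mcount call site itself
 *	call ftrace_stub		# rewritten at runtime to the tracer
 *	MCOUNT_RESTORE_FRAME
 *	retq
 */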