mcount64.S

/*
 * Copyright IBM Corp. 2008,2009
 *
 * Author(s): Heiko Carstens <heiko.carstens@de.ibm.com>
 */

#include <asm/asm-offsets.h>
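
# ftrace_stub is the default tracer: a single branch back to the
# caller, installed whenever function tracing is switched off.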
	.globl	ftrace_stub
ftrace_stub:
	br	%r14
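
# _mcount is called at the start of every function built with -pg.
# On entry %r14 holds the address within the instrumented function;
# gcc's call sequence saves the function's own return address on the
# stack (8(%r15) on 64 bit) before clobbering %r14, which is where
# the parent address is read back below.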
	.globl	_mcount
_mcount:
#ifdef CONFIG_DYNAMIC_FTRACE
	br	%r14

	.data
	.globl	ftrace_dyn_func
ftrace_dyn_func:
	.quad	ftrace_stub
	.previous

	.globl	ftrace_caller
ftrace_caller:
#endif
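	# Bail out immediately if function tracing has been stopped.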
	larl	%r1,function_trace_stop
	icm	%r1,0xf,0(%r1)
	bnzr	%r14
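	# Save the argument registers and our return address, then
	# allocate a standard 160 byte stack frame and chain it.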
	stmg	%r2,%r5,32(%r15)
	stg	%r14,112(%r15)
	lgr	%r1,%r15
	aghi	%r15,-160
	stg	%r1,__SF_BACKCHAIN(%r15)
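	# Tracer arguments: %r2 = address within the instrumented
	# function, %r3 = its caller, read from the word 8 bytes above
	# the old stack pointer (168 = 160 + 8 off the new one).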
	lgr	%r2,%r14
	lg	%r3,168(%r15)
#ifdef CONFIG_DYNAMIC_FTRACE
	larl	%r14,ftrace_dyn_func
#else
	larl	%r14,ftrace_trace_function
#endif
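	# Load the current tracer function pointer and call it.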
	lg	%r14,0(%r14)
	basr	%r14,%r14
#ifdef CONFIG_FUNCTION_GRAPH_TRACER
#ifdef CONFIG_DYNAMIC_FTRACE
	.globl	ftrace_graph_caller
ftrace_graph_caller:
	# This unconditional branch gets runtime patched. Change only if
	# you know what you are doing. See ftrace_enable_graph_caller().
	j	0f
#endif
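	# Call prepare_ftrace_return() with the saved %r14 (stored at
	# 112 above the old stack pointer, i.e. 272(%r15)) and the real
	# return address. The result is either that address or
	# return_to_handler; writing it back into the return address
	# slot makes the instrumented function return through the
	# graph tracer.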
	lg	%r2,272(%r15)
	lg	%r3,168(%r15)
	brasl	%r14,prepare_ftrace_return
	stg	%r2,168(%r15)
0:
#endif
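	# Drop the stack frame, restore the saved registers and return.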
	aghi	%r15,160
	lmg	%r2,%r5,32(%r15)
	lg	%r14,112(%r15)
	br	%r14

#ifdef CONFIG_FUNCTION_GRAPH_TRACER
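# Instrumented functions hijacked by the graph tracer return here
# instead of to their caller. ftrace_return_to_handler() yields the
# original return address in %r2; move it to %r14 and branch there,
# with %r2-%r5 saved around the call to keep the return value intact.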
	.globl	return_to_handler
return_to_handler:
	stmg	%r2,%r5,32(%r15)
	lgr	%r1,%r15
	aghi	%r15,-160
	stg	%r1,__SF_BACKCHAIN(%r15)
	brasl	%r14,ftrace_return_to_handler
	aghi	%r15,160
	lgr	%r14,%r2
	lmg	%r2,%r5,32(%r15)
	br	%r14
#endif