/* arch/s390/kernel/mcount64.S */
/*
 * Copyright IBM Corp. 2008, 2009
 *
 * Author(s): Heiko Carstens <heiko.carstens@de.ibm.com>
 */
  7. #include <asm/asm-offsets.h>
  8. .section .kprobes.text, "ax"
  9. .globl ftrace_stub
  10. ftrace_stub:
  11. br %r14
  12. .globl _mcount
  13. _mcount:
  14. #ifdef CONFIG_DYNAMIC_FTRACE
  15. br %r14
  16. .data
  17. .globl ftrace_dyn_func
  18. ftrace_dyn_func:
  19. .quad ftrace_stub
  20. .previous
  21. .globl ftrace_caller
  22. ftrace_caller:
  23. #endif
  24. larl %r1,function_trace_stop
  25. icm %r1,0xf,0(%r1)
  26. bnzr %r14
  27. stmg %r2,%r5,32(%r15)
  28. stg %r14,112(%r15)
  29. lgr %r1,%r15
  30. aghi %r15,-160
  31. stg %r1,__SF_BACKCHAIN(%r15)
  32. lgr %r2,%r14
  33. lg %r3,168(%r15)
  34. #ifdef CONFIG_DYNAMIC_FTRACE
  35. larl %r14,ftrace_dyn_func
  36. #else
  37. larl %r14,ftrace_trace_function
  38. #endif
  39. lg %r14,0(%r14)
  40. basr %r14,%r14
  41. #ifdef CONFIG_FUNCTION_GRAPH_TRACER
  42. #ifdef CONFIG_DYNAMIC_FTRACE
  43. .globl ftrace_graph_caller
  44. ftrace_graph_caller:
  45. # This unconditional branch gets runtime patched. Change only if
  46. # you know what you are doing. See ftrace_enable_graph_caller().
  47. j 0f
  48. #endif
  49. lg %r2,272(%r15)
  50. lg %r3,168(%r15)
  51. brasl %r14,prepare_ftrace_return
  52. stg %r2,168(%r15)
  53. 0:
  54. #endif
  55. aghi %r15,160
  56. lmg %r2,%r5,32(%r15)
  57. lg %r14,112(%r15)
  58. br %r14
  59. #ifdef CONFIG_FUNCTION_GRAPH_TRACER
  60. .globl return_to_handler
  61. return_to_handler:
  62. stmg %r2,%r5,32(%r15)
  63. lgr %r1,%r15
  64. aghi %r15,-160
  65. stg %r1,__SF_BACKCHAIN(%r15)
  66. brasl %r14,ftrace_return_to_handler
  67. aghi %r15,160
  68. lgr %r14,%r2
  69. lmg %r2,%r5,32(%r15)
  70. br %r14
  71. #endif