/*
 * arch/sh/lib/mcount.S
 *
 * Copyright (C) 2008 Paul Mundt
 * Copyright (C) 2008, 2009 Matt Fleming
 *
 * This file is subject to the terms and conditions of the GNU General Public
 * License. See the file "COPYING" in the main directory of this archive
 * for more details.
 */
#include <asm/ftrace.h>
#include <asm/thread_info.h>
#include <asm/asm-offsets.h>
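
/*
 * MCOUNT_ENTER()/MCOUNT_LEAVE() bracket each call out to C code.
 * The argument registers r4-r7 and pr are preserved across the call.
 * Five words are pushed, so the word the mcount call sequence left on
 * top of the stack (the caller's return address) ends up at r15 + 20;
 * it is loaded into r4, and pr (the call site in the instrumented
 * function) into r5 - the two arguments the tracer expects.
 * MCOUNT_LEAVE() unwinds in reverse order, with the final pop riding
 * in the rts delay slot.
 */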
#define MCOUNT_ENTER()		\
	mov.l	r4, @-r15;	\
	mov.l	r5, @-r15;	\
	mov.l	r6, @-r15;	\
	mov.l	r7, @-r15;	\
	sts.l	pr, @-r15;	\
				\
	mov.l	@(20,r15),r4;	\
	sts	pr, r5

#define MCOUNT_LEAVE()		\
	lds.l	@r15+, pr;	\
	mov.l	@r15+, r7;	\
	mov.l	@r15+, r6;	\
	mov.l	@r15+, r5;	\
	rts;			\
	 mov.l	@r15+, r4

#ifdef CONFIG_STACK_DEBUG
/*
 * Perform diagnostic checks on the state of the kernel stack.
 *
 * Check for stack overflow. If there is less than 1KB free
 * then it has overflowed.
 *
 * Make sure the stack pointer contains a valid address. Valid
 * addresses for kernel stacks are anywhere after the bss
 * (after _ebss) and anywhere in init_thread_union (init_stack).
 */
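/*
 * The arithmetic below: r0 is rebuilt into THREAD_SIZE (shll8 + shll2
 * undo the ">> 10"), r1 becomes sp's offset within its
 * THREAD_SIZE-aligned block, and r2 becomes roughly
 * TI_SIZE + STACK_WARN. The stack grows down towards thread_info, so
 * an offset at or below that watermark means sp is about to run
 * into it.
 */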
#define STACK_CHECK()					\
	mov	#(THREAD_SIZE >> 10), r0;		\
	shll8	r0;					\
	shll2	r0;					\
							\
	/* r1 = sp & (THREAD_SIZE - 1) */		\
	mov	#-1, r1;				\
	add	r0, r1;					\
	and	r15, r1;				\
							\
	mov	#TI_SIZE, r3;				\
	mov	#(STACK_WARN >> 8), r2;			\
	shll8	r2;					\
	add	r3, r2;					\
							\
	/* Is the stack overflowing? */			\
	cmp/hi	r2, r1;					\
	bf	stack_panic;				\
							\
	/* If sp > _ebss then we're OK. */		\
	mov.l	.L_ebss, r1;				\
	cmp/hi	r1, r15;				\
	bt	1f;					\
							\
	/* If sp < init_stack, we're not OK. */		\
	mov.l	.L_init_thread_union, r1;		\
	cmp/hs	r1, r15;				\
	bf	stack_panic;				\
							\
	/* If sp >= init_stack + THREAD_SIZE, it is past the	\
	 * end of init_thread_union yet below _ebss: not OK. */	\
	add	r0, r1;					\
	cmp/hs	r1, r15;				\
	bt	stack_panic;				\
1:
#else
#define STACK_CHECK()
#endif /* CONFIG_STACK_DEBUG */
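
/*
 * Every function built with -pg calls mcount on entry. When tracing
 * is stopped, or no real tracer is installed, we want to get back out
 * with as little overhead as possible.
 */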
	.align 2
	.globl	_mcount
	.type	_mcount,@function
	.globl	mcount
	.type	mcount,@function
_mcount:
mcount:
#ifndef CONFIG_DYNAMIC_FTRACE
	mov.l	.Lfunction_trace_stop, r0
	mov.l	@r0, r0
	tst	r0, r0
	bf	ftrace_stub
#endif

	STACK_CHECK()
	MCOUNT_ENTER()

#ifdef CONFIG_DYNAMIC_FTRACE
	.globl	mcount_call
mcount_call:
	mov.l	.Lftrace_stub, r6
#else
	/* Skip the call when the installed tracer is just ftrace_stub. */
	mov.l	.Lftrace_trace_function, r6
	mov.l	@r6, r6
	mov.l	.Lftrace_stub, r7
	cmp/eq	r6, r7
	bt	skip_trace
#endif

	jsr	@r6
	 nop

#ifdef CONFIG_FUNCTION_GRAPH_TRACER
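	/*
	 * Also divert into ftrace_graph_caller if either of the graph
	 * hooks has been registered, i.e. no longer points at its stub.
	 */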
	mov.l	.Lftrace_graph_return, r6
	mov.l	@r6, r6
	mov.l	.Lftrace_stub, r7
	cmp/eq	r6, r7
	bt	1f

	mov.l	.Lftrace_graph_caller, r0
	jmp	@r0
	 nop

1:
	mov.l	.Lftrace_graph_entry, r6
	mov.l	@r6, r6
	mov.l	.Lftrace_graph_entry_stub, r7
	cmp/eq	r6, r7
	bt	skip_trace

	mov.l	.Lftrace_graph_caller, r0
	jmp	@r0
	 nop

	.align 2
.Lftrace_graph_return:
	.long	ftrace_graph_return
.Lftrace_graph_entry:
	.long	ftrace_graph_entry
.Lftrace_graph_entry_stub:
	.long	ftrace_graph_entry_stub
.Lftrace_graph_caller:
	.long	ftrace_graph_caller
#endif /* CONFIG_FUNCTION_GRAPH_TRACER */

	.globl	skip_trace
skip_trace:
	MCOUNT_LEAVE()

	.align 2
.Lftrace_trace_function:
	.long	ftrace_trace_function

#ifdef CONFIG_DYNAMIC_FTRACE
#ifdef CONFIG_FUNCTION_GRAPH_TRACER
/*
 * NOTE: Do not move either ftrace_graph_call or ftrace_caller
 * as this will affect the calculation of GRAPH_INSN_OFFSET.
 */
	.globl	ftrace_graph_call
ftrace_graph_call:
	mov.l	.Lskip_trace, r0
	jmp	@r0
	 nop

	.align 2
.Lskip_trace:
	.long	skip_trace
#endif /* CONFIG_FUNCTION_GRAPH_TRACER */
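
/*
 * Entry point for dynamic ftrace: mcount call sites are patched at
 * runtime to call ftrace_caller, and the code at ftrace_call below is
 * itself updated so that the jsr reaches the registered tracer rather
 * than ftrace_stub.
 */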
	.globl	ftrace_caller
ftrace_caller:
	mov.l	.Lfunction_trace_stop, r0
	mov.l	@r0, r0
	tst	r0, r0
	bf	ftrace_stub

	STACK_CHECK()
	MCOUNT_ENTER()

	.globl	ftrace_call
ftrace_call:
	mov.l	.Lftrace_stub, r6
	jsr	@r6
	 nop

#ifdef CONFIG_FUNCTION_GRAPH_TRACER
	bra	ftrace_graph_call
	 nop
#else
	MCOUNT_LEAVE()
#endif /* CONFIG_FUNCTION_GRAPH_TRACER */
#endif /* CONFIG_DYNAMIC_FTRACE */

	.align 2
.Lfunction_trace_stop:
	.long	function_trace_stop

/*
 * NOTE: From here on the locations of the .Lftrace_stub label and
 * ftrace_stub itself are fixed. Adding additional data here will skew
 * the displacement for the memory table and break the block replacement.
 * Place new labels either after the ftrace_stub body, or before
 * ftrace_caller. You have been warned.
 */
.Lftrace_stub:
	.long	ftrace_stub

	.globl	ftrace_stub
ftrace_stub:
	rts
	 nop

#ifdef CONFIG_STACK_DEBUG
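/*
 * Reached from STACK_CHECK() when the stack pointer looks bad: dump
 * the stack, then panic. The mov.l in the jsr delay slot loads
 * panic's format-string argument before the call is taken.
 */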
	.globl	stack_panic
stack_panic:
	mov.l	.Ldump_stack, r0
	jsr	@r0
	 nop

	mov.l	.Lpanic, r0
	jsr	@r0
	 mov.l	.Lpanic_s, r4

	rts
	 nop

	.align 2
.L_ebss:
	.long	_ebss
.L_init_thread_union:
	.long	init_thread_union
.Lpanic:
	.long	panic
.Lpanic_s:
	.long	.Lpanic_str
.Ldump_stack:
	.long	dump_stack

	.section	.rodata
	.align 2
.Lpanic_str:
	.string "Stack error"
#endif /* CONFIG_STACK_DEBUG */

#ifdef CONFIG_FUNCTION_GRAPH_TRACER
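/*
 * Entered with MCOUNT_ENTER() state still on the stack. Bail out
 * through skip_trace (3f) if function_trace_stop (2f) is set;
 * otherwise pass the address of the saved return slot to
 * prepare_ftrace_return() so the traced function's exit can be
 * hooked.
 */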
	.globl	ftrace_graph_caller
ftrace_graph_caller:
	mov.l	2f, r0
	mov.l	@r0, r0
	tst	r0, r0
	bt	1f

	mov.l	3f, r1
	jmp	@r1
	 nop
1:
	/*
	 * MCOUNT_ENTER() pushed 5 registers onto the stack, so
	 * the stack address containing our return address is
	 * r15 + 20.
	 */
	mov	#20, r0
	add	r15, r0
	mov	r0, r4

	mov.l	.Lprepare_ftrace_return, r0
	jsr	@r0
	 nop

	MCOUNT_LEAVE()

	.align 2
2:	.long	function_trace_stop
3:	.long	skip_trace
.Lprepare_ftrace_return:
	.long	prepare_ftrace_return
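
/*
 * prepare_ftrace_return() replaces the traced function's return
 * address with return_to_handler, so control arrives here when the
 * function returns; ftrace_return_to_handler() gives back the
 * original address to resume at.
 */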
	.globl	return_to_handler
return_to_handler:
	/*
	 * Save the return values.
	 */
	mov.l	r0, @-r15
	mov.l	r1, @-r15

	/* r4 = 0: no frame pointer to hand to ftrace_return_to_handler. */
	mov	#0, r4

	mov.l	.Lftrace_return_to_handler, r0
	jsr	@r0
	 nop
	/*
	 * The return value from ftrace_return_to_handler is the real
	 * address that we should return to.
	 */
	lds	r0, pr
	mov.l	@r15+, r1
	rts
	 mov.l	@r15+, r0

	.align 2
.Lftrace_return_to_handler:
	.long	ftrace_return_to_handler
#endif /* CONFIG_FUNCTION_GRAPH_TRACER */