/* arch/sh/lib/mcount.S — SuperH mcount/ftrace entry trampolines */
  1. /*
  2. * arch/sh/lib/mcount.S
  3. *
  4. * Copyright (C) 2008, 2009 Paul Mundt
  5. * Copyright (C) 2008, 2009 Matt Fleming
  6. *
  7. * This file is subject to the terms and conditions of the GNU General Public
  8. * License. See the file "COPYING" in the main directory of this archive
  9. * for more details.
  10. */
  11. #include <asm/ftrace.h>
  12. #include <asm/thread_info.h>
  13. #include <asm/asm-offsets.h>
/*
 * Trampoline entry: save the argument registers r4-r7 and pr (five
 * 32-bit words) on the stack, then load the two arguments handed to
 * the C trace function:
 *
 *   r4 = word that was at the top of the stack on entry (now at
 *        @(20,r15) after the five pushes) — presumably the parent's
 *        return address pushed by the instrumented function's
 *        mcount call sequence; TODO confirm against gcc's SH
 *        -pg prologue.
 *   r5 = pr, i.e. the call site inside the instrumented function.
 *
 * Must be paired with MCOUNT_LEAVE().
 */
#define MCOUNT_ENTER() \
	mov.l	r4, @-r15;	\
	mov.l	r5, @-r15;	\
	mov.l	r6, @-r15;	\
	mov.l	r7, @-r15;	\
	sts.l	pr, @-r15;	\
	\
	mov.l	@(20,r15),r4;	\
	sts	pr, r5
/*
 * Undo MCOUNT_ENTER(): restore pr and r7-r4 in reverse push order and
 * return to the instrumented function.  The final pop of r4 sits in
 * the rts delay slot, so it executes before the branch completes.
 */
#define MCOUNT_LEAVE() \
	lds.l	@r15+, pr;	\
	mov.l	@r15+, r7;	\
	mov.l	@r15+, r6;	\
	mov.l	@r15+, r5;	\
	rts;			\
	 mov.l	@r15+, r4
#ifdef CONFIG_STACK_DEBUG
/*
 * Perform diagnostic checks on the state of the kernel stack.
 *
 * Check for stack overflow. If there is less than 1KB free
 * then it has overflowed.
 *
 * Make sure the stack pointer contains a valid address. Valid
 * addresses for kernel stacks are anywhere after the bss
 * (after _ebss) and anywhere in init_thread_union (init_stack).
 *
 * Clobbers r0-r3; branches to stack_panic (which never returns)
 * on failure.
 */
#define STACK_CHECK() \
	/* r0 = THREAD_SIZE (low 10 bits rebuilt by shll8 + shll2) */ \
	mov	#(THREAD_SIZE >> 10), r0; \
	shll8	r0; \
	shll2	r0; \
	\
	/* r1 = sp & (THREAD_SIZE - 1) = sp's offset within its stack */ \
	mov	#-1, r1; \
	add	r0, r1; \
	and	r15, r1; \
	\
	/* r2 = TI_SIZE + STACK_WARN (assumes STACK_WARN's low 8 bits are 0) */ \
	mov	#TI_SIZE, r3; \
	mov	#(STACK_WARN >> 8), r2; \
	shll8	r2; \
	add	r3, r2; \
	\
	/* Is the stack overflowing?  Panic unless offset > threshold. */ \
	cmp/hi	r2, r1; \
	bf	stack_panic; \
	\
	/* If sp > _ebss then we're OK. */ \
	mov.l	.L_ebss, r1; \
	cmp/hi	r1, r15; \
	bt	1f; \
	\
	/* If sp < init_stack, we're not OK. */ \
	mov.l	.L_init_thread_union, r1; \
	cmp/hs	r1, r15; \
	bf	stack_panic; \
	\
	/* If sp > init_stack && sp < _ebss, not OK. */ \
	/* (r1 becomes init_thread_union + THREAD_SIZE here) */ \
	add	r0, r1; \
	cmp/hs	r1, r15; \
	bt	stack_panic; \
1:
#else
#define STACK_CHECK()
#endif /* CONFIG_STACK_DEBUG */
/*
 * _mcount / mcount — entry point emitted by the compiler at the start
 * of every instrumented function (-pg).  Validates the stack, bails
 * out fast when tracing is disabled, otherwise calls the registered
 * trace function and, when enabled, hands off to the function-graph
 * tracer.
 */
	.align 2
	.globl	_mcount
	.type	_mcount,@function
	.globl	mcount
	.type	mcount,@function
_mcount:
mcount:
	STACK_CHECK()

#ifndef CONFIG_FUNCTION_TRACER
	/* Tracer compiled out: return immediately (nop in delay slot). */
	rts
	 nop
#else
#ifndef CONFIG_DYNAMIC_FTRACE
	/* Static ftrace: skip everything while function_trace_stop != 0. */
	mov.l	.Lfunction_trace_stop, r0
	mov.l	@r0, r0
	tst	r0, r0
	bf	ftrace_stub
#endif

	MCOUNT_ENTER()

#ifdef CONFIG_DYNAMIC_FTRACE
	/*
	 * Dynamic ftrace: this literal load is live-patched at runtime to
	 * point at the real trace function instead of ftrace_stub.
	 */
	.globl	mcount_call
mcount_call:
	mov.l	.Lftrace_stub, r6
#else
	/*
	 * Static ftrace: skip the call when no trace function is
	 * registered (ftrace_trace_function still equals ftrace_stub).
	 *
	 * NOTE(review): r6 is the *address* of the ftrace_trace_function
	 * variable, while `mov.l ftrace_stub, r7` loads the word stored
	 * at the ftrace_stub label itself, not its address — this
	 * comparison looks like it is missing a dereference; confirm
	 * against upstream arch/sh/lib/mcount.S.
	 */
	mov.l	.Lftrace_trace_function, r6
	mov.l	ftrace_stub, r7
	cmp/eq	r6, r7
	bt	skip_trace
	mov.l	@r6, r6
#endif

	/* Call the trace function: r4/r5 were set up by MCOUNT_ENTER(). */
	jsr	@r6
	 nop

#ifdef CONFIG_FUNCTION_GRAPH_TRACER
	/*
	 * Hand off to ftrace_graph_caller when either graph hook is
	 * registered.
	 *
	 * NOTE(review): as above, these compare literal-pool values
	 * (&ftrace_graph_return vs. &ftrace_stub, &ftrace_graph_entry
	 * vs. &ftrace_graph_entry_stub) without dereferencing the
	 * variables first — verify the intent against upstream.
	 */
	mov.l	.Lftrace_graph_return, r6
	mov.l	.Lftrace_stub, r7
	cmp/eq	r6, r7
	bt	1f

	mov.l	.Lftrace_graph_caller, r0
	jmp	@r0
	 nop

1:
	mov.l	.Lftrace_graph_entry, r6
	mov.l	.Lftrace_graph_entry_stub, r7
	cmp/eq	r6, r7
	bt	skip_trace

	mov.l	.Lftrace_graph_caller, r0
	jmp	@r0
	 nop

	/* Literal pool for the PC-relative mov.l loads above. */
	.align 2
.Lftrace_graph_return:
	.long	ftrace_graph_return
.Lftrace_graph_entry:
	.long	ftrace_graph_entry
.Lftrace_graph_entry_stub:
	.long	ftrace_graph_entry_stub
.Lftrace_graph_caller:
	.long	ftrace_graph_caller
#endif /* CONFIG_FUNCTION_GRAPH_TRACER */

	/* Common exit: restore registers and return to the caller. */
	.globl	skip_trace
skip_trace:
	MCOUNT_LEAVE()

	.align 2
.Lftrace_trace_function:
	.long	ftrace_trace_function
#ifdef CONFIG_DYNAMIC_FTRACE
#ifdef CONFIG_FUNCTION_GRAPH_TRACER
/*
 * NOTE: Do not move either ftrace_graph_call or ftrace_caller
 * as this will affect the calculation of GRAPH_INSN_OFFSET.
 */
	/*
	 * ftrace_graph_call — patch site: by default jumps straight to
	 * skip_trace; dynamic ftrace rewrites it to enter the graph
	 * tracer when graph tracing is enabled.
	 */
	.globl	ftrace_graph_call
ftrace_graph_call:
	mov.l	.Lskip_trace, r0
	jmp	@r0
	 nop

	.align 2
.Lskip_trace:
	.long	skip_trace
#endif /* CONFIG_FUNCTION_GRAPH_TRACER */

/*
 * ftrace_caller — dynamic-ftrace trampoline that call sites are
 * patched to invoke instead of mcount once tracing is enabled.
 */
	.globl	ftrace_caller
ftrace_caller:
	/* Bail out to the stub while function_trace_stop != 0. */
	mov.l	.Lfunction_trace_stop, r0
	mov.l	@r0, r0
	tst	r0, r0
	bf	ftrace_stub

	MCOUNT_ENTER()

	/*
	 * ftrace_call — patch site: the literal load below is rewritten
	 * at runtime to reference the active trace function.
	 */
	.globl	ftrace_call
ftrace_call:
	mov.l	.Lftrace_stub, r6
	jsr	@r6
	 nop

#ifdef CONFIG_FUNCTION_GRAPH_TRACER
	bra	ftrace_graph_call
	 nop
#else
	MCOUNT_LEAVE()
#endif /* CONFIG_FUNCTION_GRAPH_TRACER */
#endif /* CONFIG_DYNAMIC_FTRACE */
/* Literal pool shared by the trampolines above. */
	.align 2
.Lfunction_trace_stop:
	.long	function_trace_stop

/*
 * NOTE: From here on the locations of the .Lftrace_stub label and
 * ftrace_stub itself are fixed. Adding additional data here will skew
 * the displacement for the memory table and break the block replacement.
 * Place new labels either after the ftrace_stub body, or before
 * ftrace_caller. You have been warned.
 */
.Lftrace_stub:
	.long	ftrace_stub

	/* ftrace_stub — default no-op trace function: just return. */
	.globl	ftrace_stub
ftrace_stub:
	rts
	 nop
#ifdef CONFIG_FUNCTION_GRAPH_TRACER
/*
 * ftrace_graph_caller — entered from the mcount/ftrace_caller paths
 * (registers still saved by MCOUNT_ENTER()).  Hooks the current
 * function's return by handing the stacked return-address slot to
 * prepare_ftrace_return().
 */
	.globl	ftrace_graph_caller
ftrace_graph_caller:
	/* If function_trace_stop != 0, skip graph tracing entirely. */
	mov.l	2f, r0
	mov.l	@r0, r0
	tst	r0, r0
	bt	1f

	mov.l	3f, r1
	jmp	@r1
	 nop
1:
	/*
	 * MCOUNT_ENTER() pushed 5 registers onto the stack, so
	 * the stack address containing our return address is
	 * r15 + 20.
	 */
	mov	#20, r0
	add	r15, r0
	mov	r0, r4		/* r4 = &return-address slot (arg 0) */

	mov.l	.Lprepare_ftrace_return, r0
	jsr	@r0
	 nop

	MCOUNT_LEAVE()

	.align 2
2:	.long	function_trace_stop
3:	.long	skip_trace
.Lprepare_ftrace_return:
	.long	prepare_ftrace_return
/*
 * return_to_handler — the address prepare_ftrace_return() substitutes
 * for a traced function's real return address.  Control arrives here
 * when the traced function returns.
 */
	.globl	return_to_handler
return_to_handler:
	/*
	 * Save the return values.
	 */
	mov.l	r0, @-r15
	mov.l	r1, @-r15

	mov	#0, r4		/* arg 0 = 0; NOTE(review): presumably an
				 * unused frame-pointer argument — confirm
				 * against ftrace_return_to_handler(). */

	mov.l	.Lftrace_return_to_handler, r0
	jsr	@r0
	 nop

	/*
	 * The return value from ftrace_return_handler has the real
	 * address that we should return to.
	 */
	lds	r0, pr
	mov.l	@r15+, r1
	rts
	 mov.l	@r15+, r0	/* restore r0 in the delay slot */

	.align 2
.Lftrace_return_to_handler:
	.long	ftrace_return_to_handler
#endif /* CONFIG_FUNCTION_GRAPH_TRACER */
#endif /* CONFIG_FUNCTION_TRACER */
#ifdef CONFIG_STACK_DEBUG
/*
 * stack_panic — failure path for STACK_CHECK(): dump the stack, then
 * panic("Stack error").  Does not return.
 */
	.globl	stack_panic
stack_panic:
	mov.l	.Ldump_stack, r0
	jsr	@r0
	 nop

	mov.l	.Lpanic, r0
	jsr	@r0
	 mov.l	.Lpanic_s, r4	/* delay slot: r4 = panic() format string */

	/* Not reached: panic() does not return. */
	rts
	 nop

	/* Literal pool for STACK_CHECK() and stack_panic. */
	.align 2
.L_ebss:
	.long	_ebss
.L_init_thread_union:
	.long	init_thread_union
.Lpanic:
	.long	panic
.Lpanic_s:
	.long	.Lpanic_str
.Ldump_stack:
	.long	dump_stack

	.section .rodata
	.align 2
.Lpanic_str:
	.string	"Stack error"
#endif /* CONFIG_STACK_DEBUG */