mcount.S

/*
 * Copyright (C) 2000 Anton Blanchard (anton@linuxcare.com)
 *
 * This file implements mcount(), which is used to collect profiling data.
 * This can also be tweaked for kernel stack overflow detection.
 */

#include <linux/linkage.h>

/*
 * This is the main variant and is called by C code.  GCC's -pg option
 * automatically instruments every C function with a call to this.
 */
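
/*
 * Illustrative sketch only, not part of the original source: with -pg,
 * the compiler typically emits the profiling call in each function's
 * prologue, after the register window has been set up, roughly:
 *
 *      foo:
 *              save    %sp, -<framesize>, %sp
 *              call    _mcount
 *               nop
 *              ...
 *
 * so on entry here %o7 holds an address inside the instrumented
 * function and %i7 holds that function's caller's return address.
 * Exact code generation varies by compiler version.
 */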

        .text
        .align          32
        .globl          _mcount
        .type           _mcount,#function
        .globl          mcount
        .type           mcount,#function
_mcount:
mcount:
#ifdef CONFIG_FUNCTION_TRACER
#ifdef CONFIG_DYNAMIC_FTRACE
        /* Do nothing, the retl/nop below is all we need.  */
#else
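        /*
         * If function_trace_stop is non-zero, tracing has been
         * disabled; branch to 2: below and simply return.
         */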
        sethi           %hi(function_trace_stop), %g1
        lduw            [%g1 + %lo(function_trace_stop)], %g2
        brnz,pn         %g2, 2f
         sethi          %hi(ftrace_trace_function), %g1
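        /*
         * Compare the registered tracer against the default
         * ftrace_stub.  If they are equal no function tracer is
         * installed, so skip ahead to the graph-tracer checks at 1:.
         */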
        sethi           %hi(ftrace_stub), %g2
        ldx             [%g1 + %lo(ftrace_trace_function)], %g1
        or              %g2, %lo(ftrace_stub), %g2
        cmp             %g1, %g2
        be,pn           %icc, 1f
         mov            %i7, %g3
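        /*
         * A tracer is registered: open a new register window with a
         * minimal 64-bit stack frame (176 bytes) and call it with
         * %o0 = address inside the traced function (old %o7, which
         * becomes %i7 after the save) and %o1 = the traced function's
         * caller (its %i7, saved in %g3 above).
         */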
        save            %sp, -176, %sp
        mov             %g3, %o1
        jmpl            %g1, %o7
         mov            %i7, %o0
        ret
         restore
        /* not reached */

1:
#ifdef CONFIG_FUNCTION_GRAPH_TRACER
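        /*
         * %g2 still holds the address of ftrace_stub from the check
         * above.  If either ftrace_graph_return or ftrace_graph_entry
         * has been changed from its stub default, the graph tracer is
         * active and we go to 5:; otherwise return via 2:.
         */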
        sethi           %hi(ftrace_graph_return), %g1
        ldx             [%g1 + %lo(ftrace_graph_return)], %g3
        cmp             %g2, %g3
        bne,pn          %xcc, 5f
         sethi          %hi(ftrace_graph_entry_stub), %g2
        sethi           %hi(ftrace_graph_entry), %g1
        or              %g2, %lo(ftrace_graph_entry_stub), %g2
        ldx             [%g1 + %lo(ftrace_graph_entry)], %g1
        cmp             %g1, %g2
        be,pt           %xcc, 2f
         nop
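        /*
         * Graph tracing is enabled: carry the parent's return address
         * (%i7) and the frame pointer (%fp) across a new register
         * window in %l0/%l1 and hand off to ftrace_graph_caller below.
         */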
5:      mov             %i7, %g2
        mov             %fp, %g3
        save            %sp, -176, %sp
        mov             %g2, %l0
        ba,pt           %xcc, ftrace_graph_caller
         mov            %g3, %l1
#endif
2:
#endif
#endif
        retl
         nop
        .size           _mcount,.-_mcount
        .size           mcount,.-mcount

#ifdef CONFIG_FUNCTION_TRACER
        .globl          ftrace_stub
        .type           ftrace_stub,#function
ftrace_stub:
        retl
         nop
        .size           ftrace_stub,.-ftrace_stub

#ifdef CONFIG_DYNAMIC_FTRACE
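        /*
         * With dynamic ftrace, mcount call sites are patched to nops
         * at boot and rewritten to call ftrace_caller only while
         * tracing is enabled.  The "call ftrace_stub" instructions at
         * ftrace_call and ftrace_graph_call below are the locations
         * the ftrace core patches at runtime to reach the active
         * tracer.
         */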
        .globl          ftrace_caller
        .type           ftrace_caller,#function
ftrace_caller:
        sethi           %hi(function_trace_stop), %g1
        mov             %i7, %g2
        lduw            [%g1 + %lo(function_trace_stop)], %g1
        brnz,pn         %g1, ftrace_stub
         mov            %fp, %g3
        save            %sp, -176, %sp
        mov             %g2, %o1
        mov             %g2, %l0
        mov             %g3, %l1
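        /*
         * Runtime patch site: rewritten to call the current tracer.
         * Arguments match the static case above: %o0 = address inside
         * the traced function, %o1 = its caller's return address.
         */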
        .globl          ftrace_call
ftrace_call:
        call            ftrace_stub
         mov            %i7, %o0
#ifdef CONFIG_FUNCTION_GRAPH_TRACER
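        /*
         * Likewise patched at runtime: when the graph tracer is
         * active this becomes a call to ftrace_graph_caller.
         */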
        .globl          ftrace_graph_call
ftrace_graph_call:
        call            ftrace_stub
         nop
#endif
        ret
         restore
#ifdef CONFIG_FUNCTION_GRAPH_TRACER
        .size           ftrace_graph_call,.-ftrace_graph_call
#endif
        .size           ftrace_call,.-ftrace_call
        .size           ftrace_caller,.-ftrace_caller
#endif
#endif

#ifdef CONFIG_FUNCTION_GRAPH_TRACER
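/*
 * Entered from mcount/ftrace_caller with %l0 = the traced function's
 * caller (parent return address), %l1 = frame pointer, and %i7 = an
 * address inside the traced function.  prepare_ftrace_return() records
 * the real parent and returns the address to substitute for it; the
 * "restore %o0, -8, %i7" writes that value (minus the 8 the traced
 * function's own "ret" adds back) into the traced function's %i7, so
 * it returns through return_to_handler below instead of straight to
 * its caller.
 */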
ENTRY(ftrace_graph_caller)
        mov             %l0, %o0
        mov             %i7, %o1
        call            prepare_ftrace_return
         mov            %l1, %o2
        ret
         restore %o0, -8, %i7
END(ftrace_graph_caller)
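
/*
 * The traced function "returns" here.  ftrace_return_to_handler()
 * hands back the saved real return address in %o0; jump to it (plus
 * the usual 8-byte call offset), restoring the register window in the
 * delay slot.
 */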
ENTRY(return_to_handler)
        save            %sp, -176, %sp
        call            ftrace_return_to_handler
         mov            %fp, %o0
        jmpl            %o0 + 8, %g0
         restore
END(return_to_handler)
#endif