/*
 *  linux/arch/x86_64/mcount_64.S
 *
 *  Copyright (C) 2014  Steven Rostedt, Red Hat Inc
 */
#include <linux/linkage.h>
#include <asm/ptrace.h>
#include <asm/ftrace.h>

	.code64
	.section .entry.text, "ax"
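
/*
 * These trampolines are placed in .entry.text with the rest of the
 * low-level entry code (and so, like it, are kept out of reach of
 * kprobes).
 */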

#ifdef CONFIG_FUNCTION_TRACER
#ifdef CC_USING_FENTRY
# define function_hook __fentry__
#else
# define function_hook mcount
#endif
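
/*
 * With CC_USING_FENTRY (gcc -mfentry) the call to __fentry__ sits at
 * the very start of the function, before the frame is set up, so the
 * parent ip must be read from the stack.  Plain mcount is called after
 * the prologue, where 8(%rbp) holds the parent's return address.
 */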

#ifdef CONFIG_DYNAMIC_FTRACE

ENTRY(function_hook)
	retq
END(function_hook)
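
/*
 * With DYNAMIC_FTRACE the compiler-generated calls to function_hook
 * are turned into NOPs at boot.  When tracing is enabled, individual
 * call sites are patched to call ftrace_caller or ftrace_regs_caller
 * instead, so the default hook above only needs to return.
 */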

/* skip is set if stack has been adjusted */
.macro ftrace_caller_setup skip=0
	MCOUNT_SAVE_FRAME \skip

	/* Load the ftrace_ops into the 3rd parameter */
	movq function_trace_op(%rip), %rdx

	/* Load ip into the first parameter */
	movq RIP(%rsp), %rdi
	/* RIP holds the return address; back up to the call site itself */
	subq $MCOUNT_INSN_SIZE, %rdi
	/* Load the parent_ip into the second parameter */
#ifdef CC_USING_FENTRY
	movq SS+16(%rsp), %rsi
#else
	movq 8(%rbp), %rsi
#endif
.endm
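
/*
 * ftrace_caller is the trampoline that patched sites call when no
 * tracer needs a full pt_regs.  The ftrace_call label below is itself
 * a patch site: the call to ftrace_stub there is rewritten to call the
 * active tracer.
 */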
ENTRY(ftrace_caller)
	/* Check if tracing was disabled (quick check) */
	cmpl $0, function_trace_stop
	jne  ftrace_stub

	ftrace_caller_setup
	/* regs go into 4th parameter (but make it NULL) */
	movq $0, %rcx

GLOBAL(ftrace_call)
	call ftrace_stub

	MCOUNT_RESTORE_FRAME
ftrace_return:
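
/*
 * Like ftrace_call, ftrace_graph_call is a patch site: when the
 * function graph tracer is enabled, the jmp below is rewritten to
 * jump to ftrace_graph_caller.
 */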
#ifdef CONFIG_FUNCTION_GRAPH_TRACER
GLOBAL(ftrace_graph_call)
	jmp ftrace_stub
#endif

GLOBAL(ftrace_stub)
	retq
END(ftrace_caller)
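
/*
 * ftrace_regs_caller is used when a tracer requests a full pt_regs
 * snapshot (FTRACE_OPS_FL_SAVE_REGS), e.g. for kprobes placed on
 * function entry.  On top of the frame saved by ftrace_caller_setup
 * it fills in the remaining registers, the flags and the segment
 * values; the handler may modify the saved RIP to change where we
 * return to.
 */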
ENTRY(ftrace_regs_caller)
	/* Save the current flags before compare (in SS location) */
	pushfq

	/* Check if tracing was disabled (quick check) */
	cmpl $0, function_trace_stop
	jne  ftrace_restore_flags

	/* skip=8 to skip flags saved in SS */
	ftrace_caller_setup 8

	/* Save the rest of pt_regs */
	movq %r15, R15(%rsp)
	movq %r14, R14(%rsp)
	movq %r13, R13(%rsp)
	movq %r12, R12(%rsp)
	movq %r11, R11(%rsp)
	movq %r10, R10(%rsp)
	movq %rbp, RBP(%rsp)
	movq %rbx, RBX(%rsp)
	/* Copy saved flags */
	movq SS(%rsp), %rcx
	movq %rcx, EFLAGS(%rsp)
	/* Kernel segments */
	movq $__KERNEL_DS, %rcx
	movq %rcx, SS(%rsp)
	movq $__KERNEL_CS, %rcx
	movq %rcx, CS(%rsp)
	/* Stack - skipping return address */
	leaq SS+16(%rsp), %rcx
	movq %rcx, RSP(%rsp)

	/* regs go into 4th parameter */
	leaq (%rsp), %rcx
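
/* Like ftrace_call, ftrace_regs_call is patched to call the tracer */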
GLOBAL(ftrace_regs_call)
	call ftrace_stub

	/* Copy flags back to SS, to restore them */
	movq EFLAGS(%rsp), %rax
	movq %rax, SS(%rsp)

	/* Handlers can change the RIP */
	movq RIP(%rsp), %rax
	movq %rax, SS+8(%rsp)

	/* restore the rest of pt_regs */
	movq R15(%rsp), %r15
	movq R14(%rsp), %r14
	movq R13(%rsp), %r13
	movq R12(%rsp), %r12
	movq R10(%rsp), %r10
	movq RBP(%rsp), %rbp
	movq RBX(%rsp), %rbx

	/* skip=8 to skip flags saved in SS */
	MCOUNT_RESTORE_FRAME 8

	/* Restore flags */
	popfq

	jmp ftrace_return

ftrace_restore_flags:
	popfq
	jmp  ftrace_stub

END(ftrace_regs_caller)

#else /* ! CONFIG_DYNAMIC_FTRACE */

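/*
 * Without DYNAMIC_FTRACE every call site calls straight into
 * function_hook, so the hook itself must check at run time whether
 * any tracer is registered before paying for the full save/restore.
 */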
ENTRY(function_hook)
	cmpl $0, function_trace_stop
	jne  ftrace_stub

	cmpq $ftrace_stub, ftrace_trace_function
	jnz trace

#ifdef CONFIG_FUNCTION_GRAPH_TRACER
	cmpq $ftrace_stub, ftrace_graph_return
	jnz ftrace_graph_caller

	cmpq $ftrace_graph_entry_stub, ftrace_graph_entry
	jnz ftrace_graph_caller
#endif

GLOBAL(ftrace_stub)
	retq
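
/* Slow path: a tracer is registered, so build the frame and call it */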
trace:
	MCOUNT_SAVE_FRAME

	movq RIP(%rsp), %rdi
#ifdef CC_USING_FENTRY
	movq SS+16(%rsp), %rsi
#else
	movq 8(%rbp), %rsi
#endif
	subq $MCOUNT_INSN_SIZE, %rdi

	call *ftrace_trace_function

	MCOUNT_RESTORE_FRAME

	jmp ftrace_stub
END(function_hook)
#endif /* CONFIG_DYNAMIC_FTRACE */
#endif /* CONFIG_FUNCTION_TRACER */

#ifdef CONFIG_FUNCTION_GRAPH_TRACER
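/*
 * ftrace_graph_caller hooks function entry for the function graph
 * tracer: prepare_ftrace_return() replaces the saved return address
 * with return_to_handler so that function exit can be traced too.
 */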
ENTRY(ftrace_graph_caller)
	MCOUNT_SAVE_FRAME

#ifdef CC_USING_FENTRY
	leaq SS+16(%rsp), %rdi
	movq $0, %rdx	/* No framepointers needed */
#else
	leaq 8(%rbp), %rdi
	movq (%rbp), %rdx
#endif
	movq RIP(%rsp), %rsi
	subq $MCOUNT_INSN_SIZE, %rsi

	call prepare_ftrace_return

	MCOUNT_RESTORE_FRAME

	retq
END(ftrace_graph_caller)
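
/*
 * return_to_handler runs in place of the traced function's original
 * return: ftrace_return_to_handler() records the exit and hands back
 * the real return address, which we then jump to.  %rax and %rdx are
 * preserved across the call because they may hold the function's
 * return value.
 */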
GLOBAL(return_to_handler)
	subq $24, %rsp

	/* Save the return values */
	movq %rax, (%rsp)
	movq %rdx, 8(%rsp)
	movq %rbp, %rdi

	call ftrace_return_to_handler

	movq %rax, %rdi
	movq 8(%rsp), %rdx
	movq (%rsp), %rax
	addq $24, %rsp
	jmp *%rdi
#endif