ftrace_64.S
/*
 * Copyright (C) 2014 Steven Rostedt, Red Hat Inc
 */
#include <linux/linkage.h>
#include <asm/ptrace.h>
#include <asm/ftrace.h>
#include <asm/export.h>

	.code64
	.section .entry.text, "ax"

#ifdef CC_USING_FENTRY
# define function_hook	__fentry__
EXPORT_SYMBOL(__fentry__)
#else
# define function_hook	mcount
EXPORT_SYMBOL(mcount)
#endif

/* All cases save the original rbp (8 bytes) */
#ifdef CONFIG_FRAME_POINTER
# ifdef CC_USING_FENTRY
/* Save parent and function stack frames (rip and rbp) */
#  define MCOUNT_FRAME_SIZE	(8+16*2)
# else
/* Save just function stack frame (rip and rbp) */
#  define MCOUNT_FRAME_SIZE	(8+16)
# endif
#else
/* No need to save a stack frame */
# define MCOUNT_FRAME_SIZE	8
#endif /* CONFIG_FRAME_POINTER */

/* Size of stack used to save mcount regs in save_mcount_regs */
#define MCOUNT_REG_SIZE		(SS+8 + MCOUNT_FRAME_SIZE)
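/*
 * SS is the offset of the last field of struct pt_regs, so SS+8 is
 * sizeof(struct pt_regs): MCOUNT_REG_SIZE therefore covers a full pt_regs
 * plus the frame saved above it.
 */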

/*
 * gcc -pg option adds a call to 'mcount' in most functions.
 * When -mfentry is used, the call is to '__fentry__' and not 'mcount'
 * and is done before the function's stack frame is set up.
 * They both require a set of regs to be saved before calling
 * any C code and restored before returning back to the function.
 *
 * On boot up, all these calls are converted into nops. When tracing
 * is enabled, the call can jump to either ftrace_caller or
 * ftrace_regs_caller. Callbacks (tracing functions) that require
 * ftrace_regs_caller (like kprobes) need to have pt_regs passed to
 * them. For this reason, the size of the pt_regs structure will be
 * allocated on the stack and the required mcount registers will
 * be saved in the locations that pt_regs has them in.
 */
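
/*
 * Illustration only (the function name below is made up): with -pg -mfentry
 * the compiler emits the call as the very first instruction of each traced
 * function:
 *
 *	<some_function>:
 *		call __fentry__		# with DYNAMIC_FTRACE this is patched
 *					# to a 5-byte NOP at boot
 *		pushq %rbp
 *		...
 *
 * With plain -pg, 'call mcount' is emitted after the prologue instead.
 */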

/*
 * @added: the amount of stack added before calling this
 *
 * After this is called, the following registers contain:
 *
 *  %rdi - holds the address that called the trampoline
 *  %rsi - holds the parent function (traced function's return address)
 *  %rdx - holds the original %rbp
 */
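
/*
 * Stack layout built by save_mcount_regs, as derived from the code below
 * (lowest address first):
 *
 *	%rsp -> struct pt_regs area (SS+8 bytes, partially filled)
 *		fake frame-pointer frame(s)  (only with CONFIG_FRAME_POINTER)
 *		original %rbp                (at MCOUNT_REG_SIZE-8(%rsp))
 *		\added bytes pushed by the caller of the macro
 *		return address into the traced function
 */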
.macro save_mcount_regs added=0
	/* Always save the original rbp */
	pushq %rbp

#ifdef CONFIG_FRAME_POINTER
	/*
	 * Stack traces will stop at the ftrace trampoline if the frame pointer
	 * is not set up properly. If fentry is used, we need to save a frame
	 * pointer for the parent as well as the function traced, because the
	 * fentry is called before the stack frame is set up, whereas mcount
	 * is called afterward.
	 */
#ifdef CC_USING_FENTRY
	/* Save the parent pointer (skip orig rbp and our return address) */
	pushq \added+8*2(%rsp)
	pushq %rbp
	movq %rsp, %rbp
	/* Save the return address (now skip orig rbp, rbp and parent) */
	pushq \added+8*3(%rsp)
#else
	/* Can't assume that rip is before this (unless added was zero) */
	pushq \added+8(%rsp)
#endif
	pushq %rbp
	movq %rsp, %rbp
#endif /* CONFIG_FRAME_POINTER */

	/*
	 * We add enough stack to save all regs.
	 */
	subq $(MCOUNT_REG_SIZE - MCOUNT_FRAME_SIZE), %rsp
	movq %rax, RAX(%rsp)
	movq %rcx, RCX(%rsp)
	movq %rdx, RDX(%rsp)
	movq %rsi, RSI(%rsp)
	movq %rdi, RDI(%rsp)
	movq %r8, R8(%rsp)
	movq %r9, R9(%rsp)
	/*
	 * Save the original RBP. Even though the mcount ABI does not
	 * require this, it helps out callers.
	 */
	movq MCOUNT_REG_SIZE-8(%rsp), %rdx
	movq %rdx, RBP(%rsp)

	/* Copy the parent address into %rsi (second parameter) */
#ifdef CC_USING_FENTRY
	movq MCOUNT_REG_SIZE+8+\added(%rsp), %rsi
#else
	/* %rdx contains original %rbp */
	movq 8(%rdx), %rsi
#endif

	/* Move RIP to its proper location */
	movq MCOUNT_REG_SIZE+\added(%rsp), %rdi
	movq %rdi, RIP(%rsp)

	/*
	 * Now %rdi (the first parameter) has the return address of
	 * where ftrace_call returns. But the callbacks expect the
	 * address of the call itself.
	 */
	subq $MCOUNT_INSN_SIZE, %rdi
	.endm

.macro restore_mcount_regs
	movq R9(%rsp), %r9
	movq R8(%rsp), %r8
	movq RDI(%rsp), %rdi
	movq RSI(%rsp), %rsi
	movq RDX(%rsp), %rdx
	movq RCX(%rsp), %rcx
	movq RAX(%rsp), %rax

	/* ftrace_regs_caller can modify %rbp */
	movq RBP(%rsp), %rbp

	addq $MCOUNT_REG_SIZE, %rsp
	.endm

#ifdef CONFIG_DYNAMIC_FTRACE
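
/*
 * With DYNAMIC_FTRACE the compiler-inserted calls to mcount/__fentry__ are
 * converted into NOPs at boot and later patched into direct calls to
 * ftrace_caller or ftrace_regs_caller when tracing is enabled, so the hook
 * itself only has to return.
 */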
ENTRY(function_hook)
	retq
END(function_hook)

ENTRY(ftrace_caller)
	/* save_mcount_regs fills in first two parameters */
	save_mcount_regs

GLOBAL(ftrace_caller_op_ptr)
	/* Load the ftrace_ops into the 3rd parameter */
	movq function_trace_op(%rip), %rdx

	/* regs go into 4th parameter (but make it NULL) */
	movq $0, %rcx

GLOBAL(ftrace_call)
	call ftrace_stub

	restore_mcount_regs

	/*
	 * The copied trampoline must call ftrace_epilogue as it
	 * still may need to call the function graph tracer.
	 *
	 * The code up to this label is copied into trampolines so
	 * think twice before adding any new code or changing the
	 * layout here.
	 */
GLOBAL(ftrace_epilogue)

#ifdef CONFIG_FUNCTION_GRAPH_TRACER
GLOBAL(ftrace_graph_call)
	jmp ftrace_stub
#endif

/* This is weak to keep gas from relaxing the jumps */
WEAK(ftrace_stub)
	retq
END(ftrace_caller)
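
/*
 * ftrace_regs_caller is used instead of ftrace_caller when a registered
 * callback (kprobes, for example) needs a full pt_regs: on top of the mcount
 * registers it saves the remaining general-purpose registers, flags, segments
 * and stack pointer, and passes the pt_regs pointer as the 4th parameter.
 */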
ENTRY(ftrace_regs_caller)
	/* Save the current flags before any operations that can change them */
	pushfq

	/* added 8 bytes to save flags */
	save_mcount_regs 8
	/* save_mcount_regs fills in first two parameters */

GLOBAL(ftrace_regs_caller_op_ptr)
	/* Load the ftrace_ops into the 3rd parameter */
	movq function_trace_op(%rip), %rdx

	/* Save the rest of pt_regs */
	movq %r15, R15(%rsp)
	movq %r14, R14(%rsp)
	movq %r13, R13(%rsp)
	movq %r12, R12(%rsp)
	movq %r11, R11(%rsp)
	movq %r10, R10(%rsp)
	movq %rbx, RBX(%rsp)
	/* Copy saved flags */
	movq MCOUNT_REG_SIZE(%rsp), %rcx
	movq %rcx, EFLAGS(%rsp)
	/* Kernel segments */
	movq $__KERNEL_DS, %rcx
	movq %rcx, SS(%rsp)
	movq $__KERNEL_CS, %rcx
	movq %rcx, CS(%rsp)
	/* Stack - skipping return address and flags */
	leaq MCOUNT_REG_SIZE+8*2(%rsp), %rcx
	movq %rcx, RSP(%rsp)

	/* regs go into 4th parameter */
	leaq (%rsp), %rcx

GLOBAL(ftrace_regs_call)
	call ftrace_stub

	/* Copy flags back to SS, to restore them */
	movq EFLAGS(%rsp), %rax
	movq %rax, MCOUNT_REG_SIZE(%rsp)

	/* Handlers can change the RIP */
	movq RIP(%rsp), %rax
	movq %rax, MCOUNT_REG_SIZE+8(%rsp)
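
	/*
	 * The (possibly modified) regs->ip was just written over the saved
	 * return address, so a callback that changed it redirects where this
	 * trampoline returns to.
	 */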

	/* restore the rest of pt_regs */
	movq R15(%rsp), %r15
	movq R14(%rsp), %r14
	movq R13(%rsp), %r13
	movq R12(%rsp), %r12
	movq R10(%rsp), %r10
	movq RBX(%rsp), %rbx

	restore_mcount_regs

	/* Restore flags */
	popfq

	/*
	 * As this jmp to ftrace_epilogue can be a short jump
	 * it must not be copied into the trampoline.
	 * The trampoline will add the code to jump
	 * to the return.
	 */
GLOBAL(ftrace_regs_caller_end)

	jmp ftrace_epilogue

END(ftrace_regs_caller)

#else /* ! CONFIG_DYNAMIC_FTRACE */
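
/*
 * Without DYNAMIC_FTRACE the mcount/__fentry__ call sites are never patched,
 * so every traced function enters here and the hook checks at run time
 * whether a function or function-graph tracer is registered before doing
 * any work.
 */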
ENTRY(function_hook)
	cmpq $ftrace_stub, ftrace_trace_function
	jnz trace

fgraph_trace:
#ifdef CONFIG_FUNCTION_GRAPH_TRACER
	cmpq $ftrace_stub, ftrace_graph_return
	jnz ftrace_graph_caller

	cmpq $ftrace_graph_entry_stub, ftrace_graph_entry
	jnz ftrace_graph_caller
#endif

GLOBAL(ftrace_stub)
	retq

trace:
	/* save_mcount_regs fills in first two parameters */
	save_mcount_regs

	/*
	 * When DYNAMIC_FTRACE is not defined, ARCH_SUPPORTS_FTRACE_OPS is not
	 * set (see include/asm/ftrace.h and include/linux/ftrace.h). Only the
	 * ip and parent ip are used and the list function is called when
	 * function tracing is enabled.
	 */
	call *ftrace_trace_function

	restore_mcount_regs

	jmp fgraph_trace
END(function_hook)
#endif /* CONFIG_DYNAMIC_FTRACE */

#ifdef CONFIG_FUNCTION_GRAPH_TRACER
ENTRY(ftrace_graph_caller)
	/* Saves rbp into %rdx and fills first parameter */
	save_mcount_regs

#ifdef CC_USING_FENTRY
	leaq MCOUNT_REG_SIZE+8(%rsp), %rsi
	movq $0, %rdx	/* No framepointers needed */
#else
	/* Save address of the return address of traced function */
	leaq 8(%rdx), %rsi
	/* ftrace does sanity checks against frame pointers */
	movq (%rdx), %rdx
#endif

	call prepare_ftrace_return

	restore_mcount_regs

	retq
END(ftrace_graph_caller)
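
/*
 * prepare_ftrace_return() replaces the traced function's return address with
 * return_to_handler, so when the function returns it lands here. The real
 * return address is recovered by ftrace_return_to_handler() and jumped to,
 * after preserving the function's return value in %rax/%rdx.
 */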
GLOBAL(return_to_handler)
	subq $24, %rsp

	/* Save the return values */
	movq %rax, (%rsp)
	movq %rdx, 8(%rsp)
	movq %rbp, %rdi

	call ftrace_return_to_handler

	movq %rax, %rdi
	movq 8(%rsp), %rdx
	movq (%rsp), %rax
	addq $24, %rsp
	jmp *%rdi
#endif