|
@@ -0,0 +1,175 @@
|
|
|
+/*
|
|
|
+ * arch/arm64/kernel/entry-ftrace.S
|
|
|
+ *
|
|
|
+ * Copyright (C) 2013 Linaro Limited
|
|
|
+ * Author: AKASHI Takahiro <takahiro.akashi@linaro.org>
|
|
|
+ *
|
|
|
+ * This program is free software; you can redistribute it and/or modify
|
|
|
+ * it under the terms of the GNU General Public License version 2 as
|
|
|
+ * published by the Free Software Foundation.
|
|
|
+ */
|
|
|
+
|
|
|
+#include <linux/linkage.h>
|
|
|
+#include <asm/ftrace.h>
|
|
|
+#include <asm/insn.h>
|
|
|
+
|
|
|
+/*
|
|
|
+ * Gcc with -pg will put the following code in the beginning of each function:
|
|
|
+ * mov x0, x30
|
|
|
+ * bl _mcount
|
|
|
+ * [function's body ...]
|
|
|
+ * "bl _mcount" may be replaced to "bl ftrace_caller" or NOP if dynamic
|
|
|
+ * ftrace is enabled.
|
|
|
+ *
|
|
|
+ * Please note that x0 as an argument will not be used here because we can
|
|
|
+ * get lr(x30) of instrumented function at any time by winding up call stack
|
|
|
+ * as long as the kernel is compiled without -fomit-frame-pointer.
|
|
|
+ * (or CONFIG_FRAME_POINTER, this is forced on arm64)
|
|
|
+ *
|
|
|
+ * stack layout after mcount_enter in _mcount():
|
|
|
+ *
|
|
|
+ * current sp/fp => 0:+-----+
|
|
|
+ * in _mcount() | x29 | -> instrumented function's fp
|
|
|
+ * +-----+
|
|
|
+ * | x30 | -> _mcount()'s lr (= instrumented function's pc)
|
|
|
+ * old sp => +16:+-----+
|
|
|
+ * when instrumented | |
|
|
|
+ * function calls | ... |
|
|
|
+ * _mcount() | |
|
|
|
+ * | |
|
|
|
+ * instrumented => +xx:+-----+
|
|
|
+ * function's fp | x29 | -> parent's fp
|
|
|
+ * +-----+
|
|
|
+ * | x30 | -> instrumented function's lr (= parent's pc)
|
|
|
+ * +-----+
|
|
|
+ * | ... |
|
|
|
+ */
|
|
|
+
|
|
|
	/*
	 * Build a minimal stack frame for _mcount() itself: push the
	 * {fp, lr} frame record and point x29 at it.  This is the frame
	 * described at "0:" in the stack-layout diagram above.
	 */
	.macro mcount_enter
	stp	x29, x30, [sp, #-16]!
	mov	x29, sp
	.endm
|
|
|
+
|
|
|
	/*
	 * Tear down the frame built by mcount_enter and return to the
	 * instrumented function.
	 */
	.macro mcount_exit
	ldp	x29, x30, [sp], #16
	ret
	.endm
|
|
|
+
|
|
|
	/*
	 * \rd = \rn - AARCH64_INSN_SIZE
	 * A saved lr points at the instruction *after* the "bl _mcount";
	 * back up one instruction so ftrace sees the call site itself.
	 */
	.macro mcount_adjust_addr rd, rn
	sub	\rd, \rn, #AARCH64_INSN_SIZE
	.endm
|
|
|
+
|
|
|
	/* for instrumented function's parent */

	/*
	 * \reg = parent's frame pointer: x29 points at _mcount()'s frame,
	 * whose first slot is the instrumented function's fp; that frame's
	 * first slot in turn is the parent's fp (see diagram above).
	 */
	.macro mcount_get_parent_fp reg
	ldr	\reg, [x29]		// \reg = instrumented function's fp
	ldr	\reg, [\reg]		// \reg = parent's fp
	.endm
|
|
|
+
|
|
|
	/* for instrumented function */

	/*
	 * \reg = instrumented function's pc, taken directly from x30
	 * (usable only before mcount_enter has pushed a frame).
	 */
	.macro mcount_get_pc0 reg
	mcount_adjust_addr	\reg, x30
	.endm
|
|
|
+
|
|
|
	/*
	 * \reg = instrumented function's pc, read from the lr slot of
	 * _mcount()'s own frame record (valid after mcount_enter).
	 */
	.macro mcount_get_pc reg
	ldr	\reg, [x29, #8]		// \reg = _mcount()'s saved lr
	mcount_adjust_addr	\reg, \reg
	.endm
|
|
|
+
|
|
|
	/*
	 * \reg = instrumented function's lr (= parent's pc): follow the
	 * saved fp into the instrumented function's frame record and read
	 * the lr stored at offset 8 there.
	 */
	.macro mcount_get_lr reg
	ldr	\reg, [x29]		// \reg = instrumented function's fp
	ldr	\reg, [\reg, #8]	// \reg = its saved lr
	mcount_adjust_addr	\reg, \reg
	.endm
|
|
|
+
|
|
|
	/*
	 * \reg = address of the instrumented function's saved-lr slot, so
	 * that ftrace_graph_caller can overwrite the return address in
	 * place (to divert the return path through return_to_handler).
	 */
	.macro mcount_get_lr_addr reg
	ldr	\reg, [x29]		// \reg = instrumented function's fp
	add	\reg, \reg, #8		// \reg = &(saved lr)
	.endm
|
|
|
+
|
|
|
/*
 * void _mcount(unsigned long return_address)
 * @return_address: return address to instrumented function
 *
 * This function makes calls, if enabled, to:
 *     - tracer function to probe instrumented function's entry,
 *     - ftrace_graph_caller to set up an exit hook
 */
ENTRY(_mcount)
#ifdef CONFIG_HAVE_FUNCTION_TRACE_MCOUNT_TEST
	/*
	 * BUGFIX: the original sequence fell straight through to "ret"
	 * with no test, returning unconditionally and disabling tracing
	 * whenever this config is set.  Return early only when
	 * ftrace_trace_stop is non-zero, as the intent comments say.
	 */
	ldr	x0, =ftrace_trace_stop
	ldr	x0, [x0]		// if (ftrace_trace_stop)
	cbz	x0, 1f			//	return;
	ret
1:
#endif
	mcount_enter

	ldr	x0, =ftrace_trace_function
	ldr	x2, [x0]
	adr	x0, ftrace_stub
	cmp	x0, x2			// if (ftrace_trace_function
	b.eq	skip_ftrace_call	//     != ftrace_stub) {

	mcount_get_pc	x0		//       function's pc
	mcount_get_lr	x1		//       function's lr (= parent's pc)
	blr	x2			//   (*ftrace_trace_function)(pc, lr);

#ifndef CONFIG_FUNCTION_GRAPH_TRACER
skip_ftrace_call:			//   return;
	mcount_exit			// }
#else
	mcount_exit			//   return;
					// }
skip_ftrace_call:
	// x0 still holds the address of ftrace_stub from the adr above.
	ldr	x1, =ftrace_graph_return
	ldr	x2, [x1]		// if ((ftrace_graph_return
	cmp	x0, x2			//      != ftrace_stub)
	b.ne	ftrace_graph_caller

	ldr	x1, =ftrace_graph_entry	//     || (ftrace_graph_entry
	ldr	x2, [x1]		//         != ftrace_graph_entry_stub))
	ldr	x0, =ftrace_graph_entry_stub
	cmp	x0, x2
	b.ne	ftrace_graph_caller	//     ftrace_graph_caller();

	mcount_exit
#endif /* CONFIG_FUNCTION_GRAPH_TRACER */
ENDPROC(_mcount)
|
|
|
+
|
|
|
/*
 * ftrace_stub: the default, do-nothing tracer callback.  Its address is
 * what _mcount() compares the tracer hooks against to detect "no tracer
 * installed".
 */
ENTRY(ftrace_stub)
	ret
ENDPROC(ftrace_stub)
|
|
|
+
|
|
|
#ifdef CONFIG_FUNCTION_GRAPH_TRACER
/*
 * void ftrace_graph_caller(void)
 *
 * Called from _mcount() or ftrace_caller() when function_graph tracer is
 * selected.
 * This function w/ prepare_ftrace_return() fakes link register's value on
 * the call stack in order to intercept instrumented function's return path
 * and run return_to_handler() later on its exit.
 *
 * Entered with _mcount()'s frame still on the stack, so the mcount_get_*
 * macros remain valid; mcount_exit below pops that frame and returns.
 */
ENTRY(ftrace_graph_caller)
	mcount_get_lr_addr	  x0	//     pointer to function's saved lr
	mcount_get_pc		  x1	//     function's pc
	mcount_get_parent_fp	  x2	//     parent's fp
	bl	prepare_ftrace_return	// prepare_ftrace_return(&lr, pc, fp)

	mcount_exit
ENDPROC(ftrace_graph_caller)
|
|
|
+
|
|
|
/*
 * void return_to_handler(void)
 *
 * Run ftrace_return_to_handler() before going back to parent.
 * @fp is checked against the value passed by ftrace_graph_caller()
 * only when CONFIG_FUNCTION_GRAPH_FP_TEST is enabled.
 *
 * NOTE(review): x0 is saved/restored around the C call below,
 * presumably because this code runs on the instrumented function's
 * return path where x0 carries its return value — confirm against
 * prepare_ftrace_return()'s lr-faking.
 */
ENTRY(return_to_handler)
	str	x0, [sp, #-16]!		// preserve x0 across the C call
	mov	x0, x29			// parent's fp
	bl	ftrace_return_to_handler // addr = ftrace_return_to_handler(fp);
	mov	x30, x0			// restore the original return address
	ldr	x0, [sp], #16
	ret
END(return_to_handler)
#endif /* CONFIG_FUNCTION_GRAPH_TRACER */
|