@@ -512,12 +512,44 @@ enum {
	 * can only be modified by current, we can reuse trace_recursion.
	 */
	TRACE_IRQ_BIT,
+
+	/* Set if the function is in the set_graph_function file */
+	TRACE_GRAPH_BIT,
+
+	/*
+	 * In the very unlikely case that an interrupt came in
+	 * at a start of graph tracing, and we want to trace
+	 * the function in that interrupt, the depth can be greater
+	 * than zero, because of the preempted start of a previous
+	 * trace. In an even more unlikely case, depth could be 2
+	 * if a softirq interrupted the start of graph tracing,
+	 * followed by an interrupt preempting a start of graph
+	 * tracing in the softirq, and depth can even be 3
+	 * if an NMI came in at the start of an interrupt function
+	 * that preempted a softirq start of a function that
+	 * preempted normal context!!!! Luckily, it can't be
+	 * greater than 3, so the next two bits are a mask
+	 * of what the depth is when we set TRACE_GRAPH_BIT
+	 */
+
+	TRACE_GRAPH_DEPTH_START_BIT,
+	TRACE_GRAPH_DEPTH_END_BIT,
 };
 
 #define trace_recursion_set(bit)	do { (current)->trace_recursion |= (1<<(bit)); } while (0)
 #define trace_recursion_clear(bit)	do { (current)->trace_recursion &= ~(1<<(bit)); } while (0)
 #define trace_recursion_test(bit)	((current)->trace_recursion & (1<<(bit)))
 
+#define trace_recursion_depth() \
+	(((current)->trace_recursion >> TRACE_GRAPH_DEPTH_START_BIT) & 3)
+#define trace_recursion_set_depth(depth) \
+	do {								\
+		current->trace_recursion &=				\
+			~(3 << TRACE_GRAPH_DEPTH_START_BIT);		\
+		current->trace_recursion |=				\
+			((depth) & 3) << TRACE_GRAPH_DEPTH_START_BIT;	\
+	} while (0)
+
 #define TRACE_CONTEXT_BITS	4
 
 #define TRACE_FTRACE_START	TRACE_FTRACE_BIT
@@ -843,8 +875,9 @@ extern void __trace_graph_return(struct trace_array *tr,
 extern struct ftrace_hash *ftrace_graph_hash;
 extern struct ftrace_hash *ftrace_graph_notrace_hash;
 
-static inline int ftrace_graph_addr(unsigned long addr)
+static inline int ftrace_graph_addr(struct ftrace_graph_ent *trace)
 {
+	unsigned long addr = trace->func;
 	int ret = 0;
 
 	preempt_disable_notrace();
@@ -855,6 +888,14 @@ static inline int ftrace_graph_addr(unsigned long addr)
 	}
 
 	if (ftrace_lookup_ip(ftrace_graph_hash, addr)) {
+
+		/*
+		 * This needs to be cleared on the return functions
+		 * when the depth is zero.
+		 */
+		trace_recursion_set(TRACE_GRAPH_BIT);
+		trace_recursion_set_depth(trace->depth);
+
 		/*
 		 * If no irqs are to be traced, but a set_graph_function
 		 * is set, and called by an interrupt handler, we still
@@ -872,6 +913,13 @@ out:
 	return ret;
 }
 
+static inline void ftrace_graph_addr_finish(struct ftrace_graph_ret *trace)
+{
+	if (trace_recursion_test(TRACE_GRAPH_BIT) &&
+	    trace->depth == trace_recursion_depth())
+		trace_recursion_clear(TRACE_GRAPH_BIT);
+}
+
 static inline int ftrace_graph_notrace_addr(unsigned long addr)
 {
 	int ret = 0;
|
|
return ret;
|
|
return ret;
|
|
}
|
|
}
|
|
#else
|
|
#else
|
|
-static inline int ftrace_graph_addr(unsigned long addr)
|
|
|
|
|
|
+static inline int ftrace_graph_addr(struct ftrace_graph_ent *trace)
|
|
{
|
|
{
|
|
return 1;
|
|
return 1;
|
|
}
|
|
}
|
|
@@ -894,6 +942,8 @@ static inline int ftrace_graph_notrace_addr(unsigned long addr)
 {
 	return 0;
 }
+static inline void ftrace_graph_addr_finish(struct ftrace_graph_ret *trace)
+{ }
 #endif /* CONFIG_DYNAMIC_FTRACE */
 
 extern unsigned int fgraph_max_depth;
@@ -901,7 +951,8 @@ extern unsigned int fgraph_max_depth;
 static inline bool ftrace_graph_ignore_func(struct ftrace_graph_ent *trace)
 {
 	/* trace it when it is-nested-in or is a function enabled. */
-	return !(trace->depth || ftrace_graph_addr(trace->func)) ||
+	return !(trace_recursion_test(TRACE_GRAPH_BIT) ||
+		 ftrace_graph_addr(trace)) ||
 		(trace->depth < 0) ||
 		(fgraph_max_depth && trace->depth >= fgraph_max_depth);
 }