// SPDX-License-Identifier: GPL-2.0-only
/*
 * Copyright (C) 2008 ARM Limited
 * Copyright (C) 2014 Regents of the University of California
 */

#include <linux/export.h>
#include <linux/kallsyms.h>
#include <linux/sched.h>
#include <linux/sched/debug.h>
#include <linux/sched/task_stack.h>
#include <linux/stacktrace.h>
#include <linux/ftrace.h>

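/*
 * Bind the name sp_in_global to the stack pointer register so the current
 * sp can be read as a plain C expression below, without writing inline
 * assembly at every call site.
 */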
register unsigned long sp_in_global __asm__("sp");

#ifdef CONFIG_FRAME_POINTER

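/*
 * Frame record pushed by a frame-pointer build: with the RISC-V calling
 * convention used here, fp (s0) points just above the record, the caller's
 * frame pointer is saved at fp - 16 and the return address at fp - 8,
 * which is exactly the layout walk_stackframe() reads back via
 * (struct stackframe *)fp - 1.
 */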
struct stackframe {
	unsigned long fp;
	unsigned long ra;
};

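/*
 * walk_stackframe() - walk a kernel stack by following frame records
 * @task: task to unwind; NULL or current means the running task
 * @regs: if non-NULL, start from the register state in @regs instead
 * @fn:   callback invoked for each return address found; returning true
 *        stops the walk
 * @arg:  opaque cookie passed through to @fn
 *
 * A blocked task is unwound from the callee-saved state left by
 * __switch_to; the running task is unwound from this function's own frame.
 *
 * Example (hypothetical caller counting frames):
 *
 *	static bool count_frame(unsigned long pc, void *arg)
 *	{
 *		(*(unsigned int *)arg)++;
 *		return false;	// keep walking
 *	}
 *	...
 *	unsigned int depth = 0;
 *	walk_stackframe(NULL, NULL, count_frame, &depth);
 */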
void notrace walk_stackframe(struct task_struct *task, struct pt_regs *regs,
			     bool (*fn)(unsigned long, void *), void *arg)
{
	unsigned long fp, sp, pc;

	if (regs) {
		fp = frame_pointer(regs);
		sp = user_stack_pointer(regs);
		pc = instruction_pointer(regs);
	} else if (task == NULL || task == current) {
		const register unsigned long current_sp = sp_in_global;
		fp = (unsigned long)__builtin_frame_address(0);
		sp = current_sp;
		pc = (unsigned long)walk_stackframe;
	} else {
		/* task blocked in __switch_to */
		fp = task->thread.s[0];
		sp = task->thread.sp;
		pc = task->thread.ra;
	}

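	/*
	 * Follow the chain of frame records: report each pc to fn(), check
	 * that the next frame pointer still lies within this thread's stack,
	 * then load the caller's fp/ra from the record below the current fp.
	 * ftrace_graph_ret_addr() maps a return address that the function
	 * graph tracer has redirected back to the real caller.
	 */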
	for (;;) {
		unsigned long low, high;
		struct stackframe *frame;

		if (unlikely(!__kernel_text_address(pc) || fn(pc, arg)))
			break;

		/* Validate frame pointer */
		low = sp + sizeof(struct stackframe);
		high = ALIGN(sp, THREAD_SIZE);
		if (unlikely(fp < low || fp > high || fp & 0x7))
			break;
		/* Unwind stack frame */
		frame = (struct stackframe *)fp - 1;
		sp = fp;
		fp = frame->fp;
		pc = ftrace_graph_ret_addr(current, NULL, frame->ra,
					   (unsigned long *)(fp - 8));
	}
}

#else /* !CONFIG_FRAME_POINTER */

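/*
 * Without frame pointers there is no reliable frame chain, so fall back to
 * scanning the raw kernel stack: every word that looks like a kernel text
 * address is reported, which can include stale data as well as genuine
 * return addresses.
 */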
void notrace walk_stackframe(struct task_struct *task,
	struct pt_regs *regs, bool (*fn)(unsigned long, void *), void *arg)
{
	unsigned long sp, pc;
	unsigned long *ksp;

	if (regs) {
		sp = user_stack_pointer(regs);
		pc = instruction_pointer(regs);
	} else if (task == NULL || task == current) {
		sp = sp_in_global;
		pc = (unsigned long)walk_stackframe;
	} else {
		/* task blocked in __switch_to */
		sp = task->thread.sp;
		pc = task->thread.ra;
	}

	if (unlikely(sp & 0x7))
		return;

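	/*
	 * Scan the stack word by word until kstack_end().  Addresses pulled
	 * from the stack are biased down by 4 bytes so they point into the
	 * calling instruction rather than at the return site.
	 */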
	ksp = (unsigned long *)sp;
	while (!kstack_end(ksp)) {
		if (__kernel_text_address(pc) && unlikely(fn(pc, arg)))
			break;
		pc = (*ksp++) - 0x4;
	}
}

#endif /* CONFIG_FRAME_POINTER */


static bool print_trace_address(unsigned long pc, void *arg)
{
	print_ip_sym(KERN_DEFAULT, pc);
	return false;
}

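/*
 * Dump a "Call Trace:" for @task (or the current task when @task is NULL).
 * The @sp argument is part of the generic show_stack() prototype but is not
 * used here; the walk always starts from the task's saved or live state.
 */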
void show_stack(struct task_struct *task, unsigned long *sp)
{
	pr_cont("Call Trace:\n");
	walk_stackframe(task, NULL, print_trace_address, NULL);
}


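/*
 * get_wchan() reports the first return address outside the scheduler for a
 * sleeping task, i.e. the place the task is waiting in.  save_wchan() is
 * the walk_stackframe() callback that implements this: it keeps walking
 * while the pc is inside scheduler code and stops at the first one that
 * is not.
 */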
static bool save_wchan(unsigned long pc, void *arg)
{
	if (!in_sched_functions(pc)) {
		unsigned long *p = arg;
		*p = pc;
		return true;
	}
	return false;
}

unsigned long get_wchan(struct task_struct *task)
{
	unsigned long pc = 0;

	if (likely(task && task != current && task->state != TASK_RUNNING))
		walk_stackframe(task, NULL, save_wchan, &pc);
	return pc;
}


#ifdef CONFIG_STACKTRACE

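/*
 * walk_stackframe() callback that fills a struct stack_trace: honour the
 * caller's skip count, optionally drop scheduler functions (nosched), and
 * stop once max_entries addresses have been recorded.
 */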
static bool __save_trace(unsigned long pc, void *arg, bool nosched)
{
	struct stack_trace *trace = arg;

	if (unlikely(nosched && in_sched_functions(pc)))
		return false;
	if (unlikely(trace->skip > 0)) {
		trace->skip--;
		return false;
	}

	trace->entries[trace->nr_entries++] = pc;
	return (trace->nr_entries >= trace->max_entries);
}

static bool save_trace(unsigned long pc, void *arg)
{
	return __save_trace(pc, arg, false);
}

/*
 * Save stack-backtrace addresses into a stack_trace buffer.
 */
void save_stack_trace_tsk(struct task_struct *tsk, struct stack_trace *trace)
{
	walk_stackframe(tsk, NULL, save_trace, trace);
}
EXPORT_SYMBOL_GPL(save_stack_trace_tsk);

void save_stack_trace(struct stack_trace *trace)
{
	save_stack_trace_tsk(NULL, trace);
}
EXPORT_SYMBOL_GPL(save_stack_trace);

#endif /* CONFIG_STACKTRACE */