blob: aaa64bf007f8202b24bbfc8ccc6025f14d24dffb [file] [log] [blame]
Thomas Gleixner1802d0b2019-05-27 08:55:21 +02001// SPDX-License-Identifier: GPL-2.0-only
Palmer Dabbelt5d8544e2017-07-10 18:03:19 -07002/*
3 * Copyright (C) 2008 ARM Limited
4 * Copyright (C) 2014 Regents of the University of California
Palmer Dabbelt5d8544e2017-07-10 18:03:19 -07005 */
6
7#include <linux/export.h>
8#include <linux/kallsyms.h>
9#include <linux/sched.h>
10#include <linux/sched/debug.h>
11#include <linux/sched/task_stack.h>
12#include <linux/stacktrace.h>
Alan Kaob785ec12018-02-13 13:13:21 +080013#include <linux/ftrace.h>
Palmer Dabbelt5d8544e2017-07-10 18:03:19 -070014
/*
 * GCC global register variable bound to the stack pointer register, so
 * the unwinder can read the current sp without resorting to inline asm.
 */
register unsigned long sp_in_global __asm__("sp");
16
Palmer Dabbelt5d8544e2017-07-10 18:03:19 -070017#ifdef CONFIG_FRAME_POINTER
18
/*
 * Two-word frame record saved at the top of each stack frame when the
 * kernel is built with frame pointers.  The frame pointer register
 * points just past this record, so the record itself lives at
 * fp - sizeof(struct stackframe) (see walk_stackframe()).
 */
struct stackframe {
	unsigned long fp;	/* caller's saved frame pointer */
	unsigned long ra;	/* saved return address */
};
23
Mao Handbeb90b2019-08-29 14:57:00 +080024void notrace walk_stackframe(struct task_struct *task, struct pt_regs *regs,
25 bool (*fn)(unsigned long, void *), void *arg)
Palmer Dabbelt5d8544e2017-07-10 18:03:19 -070026{
27 unsigned long fp, sp, pc;
28
29 if (regs) {
Christoph Hellwig6ab77af2019-04-15 11:14:40 +020030 fp = frame_pointer(regs);
31 sp = user_stack_pointer(regs);
32 pc = instruction_pointer(regs);
Palmer Dabbelt5d8544e2017-07-10 18:03:19 -070033 } else if (task == NULL || task == current) {
Palmer Dabbelt52e7c522020-02-27 11:07:28 -080034 const register unsigned long current_sp = sp_in_global;
Palmer Dabbelt5d8544e2017-07-10 18:03:19 -070035 fp = (unsigned long)__builtin_frame_address(0);
36 sp = current_sp;
37 pc = (unsigned long)walk_stackframe;
38 } else {
39 /* task blocked in __switch_to */
40 fp = task->thread.s[0];
41 sp = task->thread.sp;
42 pc = task->thread.ra;
43 }
44
45 for (;;) {
46 unsigned long low, high;
47 struct stackframe *frame;
48
49 if (unlikely(!__kernel_text_address(pc) || fn(pc, arg)))
50 break;
51
52 /* Validate frame pointer */
53 low = sp + sizeof(struct stackframe);
54 high = ALIGN(sp, THREAD_SIZE);
55 if (unlikely(fp < low || fp > high || fp & 0x7))
56 break;
57 /* Unwind stack frame */
58 frame = (struct stackframe *)fp - 1;
59 sp = fp;
60 fp = frame->fp;
Alan Kaob785ec12018-02-13 13:13:21 +080061 pc = ftrace_graph_ret_addr(current, NULL, frame->ra,
62 (unsigned long *)(fp - 8));
Palmer Dabbelt5d8544e2017-07-10 18:03:19 -070063 }
64}
65
66#else /* !CONFIG_FRAME_POINTER */
67
Kefeng Wang0502bee2020-05-11 10:19:53 +080068void notrace walk_stackframe(struct task_struct *task,
Palmer Dabbelt5d8544e2017-07-10 18:03:19 -070069 struct pt_regs *regs, bool (*fn)(unsigned long, void *), void *arg)
70{
71 unsigned long sp, pc;
72 unsigned long *ksp;
73
74 if (regs) {
Christoph Hellwig6ab77af2019-04-15 11:14:40 +020075 sp = user_stack_pointer(regs);
76 pc = instruction_pointer(regs);
Palmer Dabbelt5d8544e2017-07-10 18:03:19 -070077 } else if (task == NULL || task == current) {
Palmer Dabbelt52e7c522020-02-27 11:07:28 -080078 sp = sp_in_global;
Palmer Dabbelt5d8544e2017-07-10 18:03:19 -070079 pc = (unsigned long)walk_stackframe;
80 } else {
81 /* task blocked in __switch_to */
82 sp = task->thread.sp;
83 pc = task->thread.ra;
84 }
85
86 if (unlikely(sp & 0x7))
87 return;
88
89 ksp = (unsigned long *)sp;
90 while (!kstack_end(ksp)) {
91 if (__kernel_text_address(pc) && unlikely(fn(pc, arg)))
92 break;
93 pc = (*ksp++) - 0x4;
94 }
95}
96
97#endif /* CONFIG_FRAME_POINTER */
98
99
100static bool print_trace_address(unsigned long pc, void *arg)
101{
Dmitry Safonov0b3d4362020-06-08 21:31:17 -0700102 const char *loglvl = arg;
103
104 print_ip_sym(loglvl, pc);
Palmer Dabbelt5d8544e2017-07-10 18:03:19 -0700105 return false;
106}
107
Dmitry Safonov0b3d4362020-06-08 21:31:17 -0700108void show_stack_loglvl(struct task_struct *task, unsigned long *sp,
109 const char *loglvl)
110{
111 pr_cont("Call Trace:\n");
112 walk_stackframe(task, NULL, print_trace_address, (void *)loglvl);
113}
114
/* Arch hook for dumping a task's stack; always uses KERN_DEFAULT. */
void show_stack(struct task_struct *task, unsigned long *sp)
{
	show_stack_loglvl(task, sp, KERN_DEFAULT);
}
119
Palmer Dabbelt5d8544e2017-07-10 18:03:19 -0700120static bool save_wchan(unsigned long pc, void *arg)
121{
122 if (!in_sched_functions(pc)) {
123 unsigned long *p = arg;
124 *p = pc;
125 return true;
126 }
127 return false;
128}
129
130unsigned long get_wchan(struct task_struct *task)
131{
132 unsigned long pc = 0;
133
134 if (likely(task && task != current && task->state != TASK_RUNNING))
135 walk_stackframe(task, NULL, save_wchan, &pc);
136 return pc;
137}
138
139
140#ifdef CONFIG_STACKTRACE
141
142static bool __save_trace(unsigned long pc, void *arg, bool nosched)
143{
144 struct stack_trace *trace = arg;
145
146 if (unlikely(nosched && in_sched_functions(pc)))
147 return false;
148 if (unlikely(trace->skip > 0)) {
149 trace->skip--;
150 return false;
151 }
152
153 trace->entries[trace->nr_entries++] = pc;
154 return (trace->nr_entries >= trace->max_entries);
155}
156
/* walk_stackframe() callback: record every PC, scheduler included. */
static bool save_trace(unsigned long pc, void *arg)
{
	return __save_trace(pc, arg, false);
}
161
/*
 * Save stack-backtrace addresses of @tsk (or the current task when
 * NULL) into the given stack_trace buffer.
 */
void save_stack_trace_tsk(struct task_struct *tsk, struct stack_trace *trace)
{
	walk_stackframe(tsk, NULL, save_trace, trace);
}
EXPORT_SYMBOL_GPL(save_stack_trace_tsk);
170
/* Save a backtrace of the current task into @trace. */
void save_stack_trace(struct stack_trace *trace)
{
	save_stack_trace_tsk(NULL, trace);
}
EXPORT_SYMBOL_GPL(save_stack_trace);
176
177#endif /* CONFIG_STACKTRACE */