blob: 2b3e0cb90d789bec4bd0b84acadc0d91b4cb4cd2 [file] [log] [blame]
Thomas Gleixner1802d0b2019-05-27 08:55:21 +02001// SPDX-License-Identifier: GPL-2.0-only
Palmer Dabbelt5d8544e2017-07-10 18:03:19 -07002/*
3 * Copyright (C) 2008 ARM Limited
4 * Copyright (C) 2014 Regents of the University of California
Palmer Dabbelt5d8544e2017-07-10 18:03:19 -07005 */
6
7#include <linux/export.h>
8#include <linux/kallsyms.h>
9#include <linux/sched.h>
10#include <linux/sched/debug.h>
11#include <linux/sched/task_stack.h>
12#include <linux/stacktrace.h>
Alan Kaob785ec12018-02-13 13:13:21 +080013#include <linux/ftrace.h>
Palmer Dabbelt5d8544e2017-07-10 18:03:19 -070014
Kefeng Wang99c168f2020-11-13 14:42:21 +080015#include <asm/stacktrace.h>
16
/*
 * GCC/Clang global register variable pinned to the "sp" register:
 * reading sp_in_global yields the current stack pointer without
 * inline assembly.
 */
register unsigned long sp_in_global __asm__("sp");
Guenter Roeckaf2bdf82020-04-13 09:12:34 -070018
Palmer Dabbelt5d8544e2017-07-10 18:03:19 -070019#ifdef CONFIG_FRAME_POINTER
20
/*
 * Frame-pointer-based unwinder: call fn(arg, pc) for each return address
 * on the kernel stack until fn returns false, pc leaves kernel text, or
 * the frame chain fails validation.
 *
 * Starting state (fp/sp/pc) is taken, in order of preference, from the
 * trapped context @regs, from the current context (task NULL or current),
 * or from the switched-out context saved in @task->thread.
 *
 * notrace: the unwinder is reached from the ftrace path and must not be
 * traced itself.
 */
void notrace walk_stackframe(struct task_struct *task, struct pt_regs *regs,
			     bool (*fn)(void *, unsigned long), void *arg)
{
	unsigned long fp, sp, pc;

	if (regs) {
		/* Unwind from the exception/trap context. */
		fp = frame_pointer(regs);
		sp = user_stack_pointer(regs);
		pc = instruction_pointer(regs);
	} else if (task == NULL || task == current) {
		/* Unwind from right here. */
		fp = (unsigned long)__builtin_frame_address(0);
		sp = sp_in_global;
		pc = (unsigned long)walk_stackframe;
	} else {
		/* task blocked in __switch_to */
		fp = task->thread.s[0];
		sp = task->thread.sp;
		pc = task->thread.ra;
	}

	for (;;) {
		unsigned long low, high;
		struct stackframe *frame;

		if (unlikely(!__kernel_text_address(pc) || !fn(arg, pc)))
			break;

		/*
		 * Validate frame pointer: it must point above the current
		 * frame record, stay within this THREAD_SIZE-aligned stack,
		 * and be 8-byte aligned.
		 */
		low = sp + sizeof(struct stackframe);
		high = ALIGN(sp, THREAD_SIZE);
		if (unlikely(fp < low || fp > high || fp & 0x7))
			break;
		/* Unwind stack frame: the record sits just below fp. */
		frame = (struct stackframe *)fp - 1;
		sp = fp;
		if (regs && (regs->epc == pc) && (frame->fp & 0x7)) {
			/*
			 * The trapped function had not yet stored ra/fp on
			 * the stack (frame->fp is misaligned, so not a real
			 * frame pointer): take the return state from the
			 * trap registers instead.
			 */
			fp = frame->ra;
			pc = regs->ra;
		} else {
			fp = frame->fp;
			/*
			 * Translate back through any ftrace function-graph
			 * return trampoline to the real caller.
			 */
			pc = ftrace_graph_ret_addr(current, NULL, frame->ra,
						   (unsigned long *)(fp - 8));
		}

	}
}
67
68#else /* !CONFIG_FRAME_POINTER */
69
Kefeng Wang0502bee2020-05-11 10:19:53 +080070void notrace walk_stackframe(struct task_struct *task,
Kefeng Wang9dd97062020-11-13 14:42:22 +080071 struct pt_regs *regs, bool (*fn)(void *, unsigned long), void *arg)
Palmer Dabbelt5d8544e2017-07-10 18:03:19 -070072{
73 unsigned long sp, pc;
74 unsigned long *ksp;
75
76 if (regs) {
Christoph Hellwig6ab77af2019-04-15 11:14:40 +020077 sp = user_stack_pointer(regs);
78 pc = instruction_pointer(regs);
Palmer Dabbelt5d8544e2017-07-10 18:03:19 -070079 } else if (task == NULL || task == current) {
Palmer Dabbelt52e7c522020-02-27 11:07:28 -080080 sp = sp_in_global;
Palmer Dabbelt5d8544e2017-07-10 18:03:19 -070081 pc = (unsigned long)walk_stackframe;
82 } else {
83 /* task blocked in __switch_to */
84 sp = task->thread.sp;
85 pc = task->thread.ra;
86 }
87
88 if (unlikely(sp & 0x7))
89 return;
90
91 ksp = (unsigned long *)sp;
92 while (!kstack_end(ksp)) {
Kefeng Wang5cb00802020-11-13 14:42:23 +080093 if (__kernel_text_address(pc) && unlikely(!fn(arg, pc)))
Palmer Dabbelt5d8544e2017-07-10 18:03:19 -070094 break;
95 pc = (*ksp++) - 0x4;
96 }
97}
98
99#endif /* CONFIG_FRAME_POINTER */
100
Kefeng Wang5cb00802020-11-13 14:42:23 +0800101static bool print_trace_address(void *arg, unsigned long pc)
Palmer Dabbelt5d8544e2017-07-10 18:03:19 -0700102{
Dmitry Safonov0b3d4362020-06-08 21:31:17 -0700103 const char *loglvl = arg;
104
105 print_ip_sym(loglvl, pc);
Kefeng Wang5cb00802020-11-13 14:42:23 +0800106 return true;
Palmer Dabbelt5d8544e2017-07-10 18:03:19 -0700107}
108
/*
 * Print a "Call Trace:" header followed by the call trace recovered
 * from @regs (or, when @regs is NULL, from @task / the current
 * context), prefixing each line with @loglvl.
 */
void dump_backtrace(struct pt_regs *regs, struct task_struct *task,
		    const char *loglvl)
{
	pr_cont("%sCall Trace:\n", loglvl);
	walk_stackframe(task, regs, print_trace_address, (void *)loglvl);
}
115
/*
 * Arch hook for the generic show_stack(): dump @task's call trace.
 * @sp is accepted for interface compatibility but ignored — the walk
 * derives its own starting stack pointer.
 */
void show_stack(struct task_struct *task, unsigned long *sp, const char *loglvl)
{
	dump_backtrace(NULL, task, loglvl);
}
120
Kefeng Wang9dd97062020-11-13 14:42:22 +0800121static bool save_wchan(void *arg, unsigned long pc)
Palmer Dabbelt5d8544e2017-07-10 18:03:19 -0700122{
123 if (!in_sched_functions(pc)) {
124 unsigned long *p = arg;
125 *p = pc;
Kefeng Wang5cb00802020-11-13 14:42:23 +0800126 return false;
Palmer Dabbelt5d8544e2017-07-10 18:03:19 -0700127 }
Kefeng Wang5cb00802020-11-13 14:42:23 +0800128 return true;
Palmer Dabbelt5d8544e2017-07-10 18:03:19 -0700129}
130
131unsigned long get_wchan(struct task_struct *task)
132{
133 unsigned long pc = 0;
134
135 if (likely(task && task != current && task->state != TASK_RUNNING))
136 walk_stackframe(task, NULL, save_wchan, &pc);
137 return pc;
138}
139
Palmer Dabbelt5d8544e2017-07-10 18:03:19 -0700140#ifdef CONFIG_STACKTRACE
141
/*
 * Arch backend for the generic stacktrace API: feed every recovered
 * return address to @consume_entry(@cookie, pc) until it returns false.
 */
void arch_stack_walk(stack_trace_consume_fn consume_entry, void *cookie,
		     struct task_struct *task, struct pt_regs *regs)
{
	walk_stackframe(task, regs, consume_entry, cookie);
}
147
Palmer Dabbelt5d8544e2017-07-10 18:03:19 -0700148#endif /* CONFIG_STACKTRACE */