// SPDX-License-Identifier: GPL-2.0-only
/*
 * Copyright (C) 2008 ARM Limited
 * Copyright (C) 2014 Regents of the University of California
 */

#include <linux/export.h>
#include <linux/kallsyms.h>
#include <linux/sched.h>
#include <linux/sched/debug.h>
#include <linux/sched/task_stack.h>
#include <linux/stacktrace.h>
#include <linux/ftrace.h>

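/*
 * Global register variable bound to the stack pointer, so the live SP of
 * the current context can be read without inline assembly.
 */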
register unsigned long sp_in_global __asm__("sp");

#ifdef CONFIG_FRAME_POINTER

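/*
 * Frame record kept just below each frame pointer when the kernel is built
 * with frame pointers: the saved frame pointer and return address, in that
 * order (per the RISC-V calling convention used here).
 */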
struct stackframe {
	unsigned long fp;
	unsigned long ra;
};

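/*
 * Walk the stack of @task (or the context described by @regs), calling @fn
 * for each return address found.  The walk stops when @fn returns true or
 * when a PC outside kernel text is reached.
 */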
void notrace walk_stackframe(struct task_struct *task, struct pt_regs *regs,
			     bool (*fn)(unsigned long, void *), void *arg)
{
	unsigned long fp, sp, pc;

	if (regs) {
		fp = frame_pointer(regs);
		sp = user_stack_pointer(regs);
		pc = instruction_pointer(regs);
	} else if (task == NULL || task == current) {
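		/* Unwinding the current task: start from this very frame. */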
		const register unsigned long current_sp = sp_in_global;
		fp = (unsigned long)__builtin_frame_address(0);
		sp = current_sp;
		pc = (unsigned long)walk_stackframe;
	} else {
		/* task blocked in __switch_to */
		fp = task->thread.s[0];
		sp = task->thread.sp;
		pc = task->thread.ra;
	}

	for (;;) {
		unsigned long low, high;
		struct stackframe *frame;

		if (unlikely(!__kernel_text_address(pc) || fn(pc, arg)))
			break;

		/* Validate frame pointer */
		low = sp + sizeof(struct stackframe);
		high = ALIGN(sp, THREAD_SIZE);
		if (unlikely(fp < low || fp > high || fp & 0x7))
			break;
		/* Unwind stack frame */
		frame = (struct stackframe *)fp - 1;
		sp = fp;
		fp = frame->fp;
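		/*
		 * If the function graph tracer has hooked this frame, map the
		 * return trampoline back to the original return address.
		 */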
		pc = ftrace_graph_ret_addr(current, NULL, frame->ra,
					   (unsigned long *)(fp - 8));
	}
}

#else /* !CONFIG_FRAME_POINTER */

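/*
 * Without frame pointers the best we can do is scan the raw kernel stack
 * and report every word that looks like a kernel text address.  The result
 * is conservative and may include stale entries.
 */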
void notrace walk_stackframe(struct task_struct *task,
	struct pt_regs *regs, bool (*fn)(unsigned long, void *), void *arg)
{
	unsigned long sp, pc;
	unsigned long *ksp;

	if (regs) {
		sp = user_stack_pointer(regs);
		pc = instruction_pointer(regs);
	} else if (task == NULL || task == current) {
		sp = sp_in_global;
		pc = (unsigned long)walk_stackframe;
	} else {
		/* task blocked in __switch_to */
		sp = task->thread.sp;
		pc = task->thread.ra;
	}

	if (unlikely(sp & 0x7))
		return;

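	/*
	 * Treat each stack slot as a potential return address, backing up by
	 * 4 bytes so the reported PC points into the calling instruction.
	 */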
	ksp = (unsigned long *)sp;
	while (!kstack_end(ksp)) {
		if (__kernel_text_address(pc) && unlikely(fn(pc, arg)))
			break;
		pc = (*ksp++) - 0x4;
	}
}

#endif /* CONFIG_FRAME_POINTER */


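/* Callback for walk_stackframe(): print one address and keep walking. */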
static bool print_trace_address(unsigned long pc, void *arg)
{
	const char *loglvl = arg;

	print_ip_sym(loglvl, pc);
	return false;
}

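/*
 * Print a call trace for @task at log level @loglvl; the log level is
 * passed through to the callback via the opaque argument.
 */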
void show_stack_loglvl(struct task_struct *task, unsigned long *sp,
		       const char *loglvl)
{
	pr_cont("Call Trace:\n");
	walk_stackframe(task, NULL, print_trace_address, (void *)loglvl);
}

void show_stack(struct task_struct *task, unsigned long *sp)
{
	show_stack_loglvl(task, sp, KERN_DEFAULT);
}

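/*
 * get_wchan() reports where a sleeping task is blocked: the first return
 * address on its stack that is not part of the scheduler itself.
 */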
static bool save_wchan(unsigned long pc, void *arg)
{
	if (!in_sched_functions(pc)) {
		unsigned long *p = arg;
		*p = pc;
		return true;
	}
	return false;
}

unsigned long get_wchan(struct task_struct *task)
{
	unsigned long pc = 0;

	if (likely(task && task != current && task->state != TASK_RUNNING))
		walk_stackframe(task, NULL, save_wchan, &pc);
	return pc;
}


#ifdef CONFIG_STACKTRACE

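/*
 * Record one address into the stack_trace buffer, honouring the caller's
 * skip count.  Returning true stops the walk once the buffer is full.
 */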
static bool __save_trace(unsigned long pc, void *arg, bool nosched)
{
	struct stack_trace *trace = arg;

	if (unlikely(nosched && in_sched_functions(pc)))
		return false;
	if (unlikely(trace->skip > 0)) {
		trace->skip--;
		return false;
	}

	trace->entries[trace->nr_entries++] = pc;
	return (trace->nr_entries >= trace->max_entries);
}

static bool save_trace(unsigned long pc, void *arg)
{
	return __save_trace(pc, arg, false);
}

/*
 * Save stack-backtrace addresses into a stack_trace buffer.
 */
void save_stack_trace_tsk(struct task_struct *tsk, struct stack_trace *trace)
{
	walk_stackframe(tsk, NULL, save_trace, trace);
}
EXPORT_SYMBOL_GPL(save_stack_trace_tsk);

void save_stack_trace(struct stack_trace *trace)
{
	save_stack_trace_tsk(NULL, trace);
}
EXPORT_SYMBOL_GPL(save_stack_trace);

#endif /* CONFIG_STACKTRACE */