/*
 * linux/arch/x86_64/mcount_64.S
 *
 * Copyright (C) 2014 Steven Rostedt, Red Hat Inc
 */

#include <linux/linkage.h>
#include <asm/ptrace.h>
#include <asm/ftrace.h>


	.code64
	.section .entry.text, "ax"


#ifdef CONFIG_FUNCTION_TRACER

#ifdef CC_USING_FENTRY
# define function_hook	__fentry__
#else
# define function_hook	mcount
#endif

#ifdef CONFIG_DYNAMIC_FTRACE

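/*
 * With CONFIG_DYNAMIC_FTRACE the compiler-generated mcount/fentry call
 * sites are patched to nops at boot, so the default hook simply returns.
 * When tracing is enabled, the call sites are patched to call
 * ftrace_caller or ftrace_regs_caller below instead.
 */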
ENTRY(function_hook)
	retq
END(function_hook)

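/*
 * ftrace_caller_setup prepares the call into the tracer callback:
 * MCOUNT_SAVE_FRAME stores the scratch registers in a partial pt_regs
 * frame, then the callback's first three arguments are loaded:
 * the traced function's ip, its parent_ip, and the ftrace_ops.
 */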
/* skip is set if stack has been adjusted */
.macro ftrace_caller_setup trace_label skip=0
	MCOUNT_SAVE_FRAME \skip

	/* Save this location */
GLOBAL(\trace_label)
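	/*
	 * The \trace_label above marks the ftrace_ops load below, so the
	 * arch code can find and adjust it when this code is copied into
	 * a dynamically allocated per-ops trampoline.
	 */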
	/* Load the ftrace_ops into the 3rd parameter */
	movq function_trace_op(%rip), %rdx

	/* Load ip into the first parameter */
	movq RIP(%rsp), %rdi
	subq $MCOUNT_INSN_SIZE, %rdi
	/* Load the parent_ip into the second parameter */
#ifdef CC_USING_FENTRY
	movq SS+16(%rsp), %rsi
#else
	movq 8(%rbp), %rsi
#endif
.endm

#ifdef CONFIG_FRAME_POINTER
/*
 * Stack traces will stop at the ftrace trampoline if the frame pointer
 * is not set up properly. If fentry is used, we need to save a frame
 * pointer for the parent as well as the function traced, because
 * fentry is called before the stack frame is set up, whereas mcount
 * is called afterward.
 */
.macro create_frame parent rip
#ifdef CC_USING_FENTRY
	pushq \parent
	pushq %rbp
	movq %rsp, %rbp
#endif
	pushq \rip
	pushq %rbp
	movq %rsp, %rbp
.endm

.macro restore_frame
#ifdef CC_USING_FENTRY
	addq $16, %rsp
#endif
	popq %rbp
	addq $8, %rsp
.endm
#else
.macro create_frame parent rip
.endm
.macro restore_frame
.endm
#endif /* CONFIG_FRAME_POINTER */

ENTRY(ftrace_caller)
	ftrace_caller_setup ftrace_caller_op_ptr
	/* regs go into 4th parameter (but make it NULL) */
	movq $0, %rcx

	create_frame %rsi, %rdi

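	/*
	 * ftrace_call is the site that ftrace patches at runtime: the
	 * "call ftrace_stub" below is rewritten to call the registered
	 * tracer callback (or the list iterator when several are registered).
	 */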
GLOBAL(ftrace_call)
	call ftrace_stub

	restore_frame

	MCOUNT_RESTORE_FRAME

	/*
	 * The copied trampoline must call ftrace_return as it
	 * still may need to call the function graph tracer.
	 */
GLOBAL(ftrace_caller_end)

GLOBAL(ftrace_return)

#ifdef CONFIG_FUNCTION_GRAPH_TRACER
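/*
 * This jmp is patched to jump to ftrace_graph_caller when the
 * function graph tracer is enabled.
 */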
GLOBAL(ftrace_graph_call)
	jmp ftrace_stub
#endif

GLOBAL(ftrace_stub)
	retq
END(ftrace_caller)

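/*
 * ftrace_regs_caller is used by callbacks that set FTRACE_OPS_FL_SAVE_REGS
 * (kprobes, for instance). It fills in a full pt_regs so the callback can
 * inspect and modify the registers, including the saved RIP.
 */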
ENTRY(ftrace_regs_caller)
	/* Save the current flags before any operation that can change them (in SS location) */
	pushfq

	/* skip=8 to skip flags saved in SS */
	ftrace_caller_setup ftrace_regs_caller_op_ptr 8

	/* Save the rest of pt_regs */
	movq %r15, R15(%rsp)
	movq %r14, R14(%rsp)
	movq %r13, R13(%rsp)
	movq %r12, R12(%rsp)
	movq %r11, R11(%rsp)
	movq %r10, R10(%rsp)
	movq %rbp, RBP(%rsp)
	movq %rbx, RBX(%rsp)
	/* Copy saved flags */
	movq SS(%rsp), %rcx
	movq %rcx, EFLAGS(%rsp)
	/* Kernel segments */
	movq $__KERNEL_DS, %rcx
	movq %rcx, SS(%rsp)
	movq $__KERNEL_CS, %rcx
	movq %rcx, CS(%rsp)
	/* Stack - skipping return address */
	leaq SS+16(%rsp), %rcx
	movq %rcx, RSP(%rsp)

	/* regs go into 4th parameter */
	leaq (%rsp), %rcx

	create_frame %rsi, %rdi

GLOBAL(ftrace_regs_call)
	call ftrace_stub

	restore_frame

	/* Copy flags back to SS, to restore them */
	movq EFLAGS(%rsp), %rax
	movq %rax, SS(%rsp)

	/* Handlers can change the RIP */
	movq RIP(%rsp), %rax
	movq %rax, SS+8(%rsp)

	/* restore the rest of pt_regs */
	movq R15(%rsp), %r15
	movq R14(%rsp), %r14
	movq R13(%rsp), %r13
	movq R12(%rsp), %r12
	movq R10(%rsp), %r10
	movq RBP(%rsp), %rbp
	movq RBX(%rsp), %rbx

	/* skip=8 to skip flags saved in SS */
	MCOUNT_RESTORE_FRAME 8

	/* Restore flags */
	popfq

	/*
	 * As this jmp to ftrace_return can be a short jump
	 * it must not be copied into the trampoline.
	 * The trampoline will add the code to jump
	 * to the return.
	 */
GLOBAL(ftrace_regs_caller_end)

	jmp ftrace_return

	popfq
	jmp ftrace_stub

END(ftrace_regs_caller)


#else /* ! CONFIG_DYNAMIC_FTRACE */

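/*
 * Without CONFIG_DYNAMIC_FTRACE every mcount/fentry call site calls in
 * here unconditionally and the tracer pointers are checked at run time.
 */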
ENTRY(function_hook)
	cmpq $ftrace_stub, ftrace_trace_function
	jnz trace

fgraph_trace:
#ifdef CONFIG_FUNCTION_GRAPH_TRACER
	cmpq $ftrace_stub, ftrace_graph_return
	jnz ftrace_graph_caller

	cmpq $ftrace_graph_entry_stub, ftrace_graph_entry
	jnz ftrace_graph_caller
#endif

GLOBAL(ftrace_stub)
	retq

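/*
 * A tracer is registered: save the mcount frame, load (ip, parent_ip)
 * and call the current ftrace_trace_function.
 */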
trace:
	MCOUNT_SAVE_FRAME

	movq RIP(%rsp), %rdi
#ifdef CC_USING_FENTRY
	movq SS+16(%rsp), %rsi
#else
	movq 8(%rbp), %rsi
#endif
	subq $MCOUNT_INSN_SIZE, %rdi

	call *ftrace_trace_function

	MCOUNT_RESTORE_FRAME

	jmp fgraph_trace
END(function_hook)
#endif /* CONFIG_DYNAMIC_FTRACE */
#endif /* CONFIG_FUNCTION_TRACER */

#ifdef CONFIG_FUNCTION_GRAPH_TRACER
ENTRY(ftrace_graph_caller)
	MCOUNT_SAVE_FRAME

#ifdef CC_USING_FENTRY
	leaq SS+16(%rsp), %rdi
	movq $0, %rdx	/* No framepointers needed */
#else
	leaq 8(%rbp), %rdi
	movq (%rbp), %rdx
#endif
	movq RIP(%rsp), %rsi
	subq $MCOUNT_INSN_SIZE, %rsi

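	/*
	 * prepare_ftrace_return(&parent_ret_addr, self_ip, frame_pointer)
	 * replaces the traced function's return address with
	 * return_to_handler so that its exit can be traced as well.
	 */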
	call prepare_ftrace_return

	MCOUNT_RESTORE_FRAME

	retq
END(ftrace_graph_caller)

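/*
 * The traced function "returns" here instead of to its real caller.
 * %rax and %rdx may hold its return value, so they are preserved around
 * the call to ftrace_return_to_handler(), which hands back the original
 * return address to jump to.
 */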
GLOBAL(return_to_handler)
	subq $24, %rsp

	/* Save the return values */
	movq %rax, (%rsp)
	movq %rdx, 8(%rsp)
	movq %rbp, %rdi

	call ftrace_return_to_handler

	movq %rax, %rdi
	movq 8(%rsp), %rdx
	movq (%rsp), %rax
	addq $24, %rsp
	jmp *%rdi
#endif