#ifndef _ASM_X86_FTRACE_H
#define _ASM_X86_FTRACE_H

#ifdef __ASSEMBLY__

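	/*
	 * The save/restore area below mirrors struct pt_regs: RAX...R9, RIP
	 * and SS are the pt_regs field offsets, so SS+8 is the size of a
	 * full register frame and C code can read the saved registers as a
	 * (partial) pt_regs.
	 */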
	/* skip is set if the stack was already partially adjusted */
	.macro MCOUNT_SAVE_FRAME skip=0
	/*
	 * We add enough stack to save all regs.
	 */
	subq $(SS+8-\skip), %rsp
	movq %rax, RAX(%rsp)
	movq %rcx, RCX(%rsp)
	movq %rdx, RDX(%rsp)
	movq %rsi, RSI(%rsp)
	movq %rdi, RDI(%rsp)
	movq %r8, R8(%rsp)
	movq %r9, R9(%rsp)
	/* Move RIP to its proper location */
	movq SS+8(%rsp), %rdx
	movq %rdx, RIP(%rsp)
	.endm

	.macro MCOUNT_RESTORE_FRAME skip=0
	movq R9(%rsp), %r9
	movq R8(%rsp), %r8
	movq RDI(%rsp), %rdi
	movq RSI(%rsp), %rsi
	movq RDX(%rsp), %rdx
	movq RCX(%rsp), %rcx
	movq RAX(%rsp), %rax
	addq $(SS+8-\skip), %rsp
	.endm

#endif /* __ASSEMBLY__ */

#ifdef CONFIG_FUNCTION_TRACER
#define MCOUNT_ADDR		((long)(mcount))
#define MCOUNT_INSN_SIZE	5 /* sizeof mcount call */

#ifdef CONFIG_DYNAMIC_FTRACE
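/*
 * ARCH_SUPPORTS_FTRACE_OPS: the mcount trampolines pass the calling
 * ftrace_ops to the callback.  ARCH_SUPPORTS_FTRACE_SAVE_REGS (x86_64
 * only): callbacks may additionally request a full pt_regs snapshot.
 */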
#define ARCH_SUPPORTS_FTRACE_OPS 1
#ifdef CONFIG_X86_64
#define ARCH_SUPPORTS_FTRACE_SAVE_REGS
#endif
#endif

#ifndef __ASSEMBLY__
extern void mcount(void);
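/*
 * Non-zero while ftrace is patching mcount call sites; the int3 handler
 * uses it to tell ftrace's breakpoints apart from everyone else's.
 */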
extern atomic_t modifying_ftrace_code;

static inline unsigned long ftrace_call_adjust(unsigned long addr)
{
	/*
	 * addr is the address of the mcount call instruction.
	 * recordmcount does the necessary offset calculation.
	 */
	return addr;
}

#ifdef CONFIG_DYNAMIC_FTRACE

struct dyn_arch_ftrace {
	/* No extra data needed for x86 */
};

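/*
 * Called from the int3 handler to skip over breakpoints that ftrace
 * planted while patching call sites; returns non-zero when the trap
 * belonged to ftrace.
 */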
int ftrace_int3_handler(struct pt_regs *regs);

#endif /* CONFIG_DYNAMIC_FTRACE */
#endif /* __ASSEMBLY__ */
#endif /* CONFIG_FUNCTION_TRACER */

#endif /* _ASM_X86_FTRACE_H */