ftrace: Fix checking of trampoline ftrace_ops in finding trampoline
arch/x86/kernel/mcount_64.S
/*
 * linux/arch/x86_64/mcount_64.S
 *
 * Copyright (C) 2014 Steven Rostedt, Red Hat Inc
 */

#include <linux/linkage.h>
#include <asm/ptrace.h>
#include <asm/ftrace.h>


	.code64
	.section .entry.text, "ax"


#ifdef CONFIG_FUNCTION_TRACER

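/*
 * With -mfentry (CC_USING_FENTRY) the compiler calls __fentry__ at the
 * very start of each traced function, before its stack frame is set up;
 * older compilers call mcount after the frame setup, which is why the
 * parent ip is read from 8(%rbp) in that case instead of off the stack.
 */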
#ifdef CC_USING_FENTRY
# define function_hook	__fentry__
#else
# define function_hook	mcount
#endif

#ifdef CONFIG_DYNAMIC_FTRACE

ENTRY(function_hook)
	retq
END(function_hook)

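/*
 * ftrace_caller_setup: save the minimal register frame (MCOUNT_SAVE_FRAME
 * from <asm/ftrace.h>) and load the tracer callback arguments:
 * %rdi = ip of the traced function, %rsi = parent ip, %rdx = ftrace_ops.
 */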
/* skip is set if stack has been adjusted */
.macro ftrace_caller_setup skip=0
	MCOUNT_SAVE_FRAME \skip

	/* Load the ftrace_ops into the 3rd parameter */
	movq function_trace_op(%rip), %rdx

	/* Load ip into the first parameter */
	movq RIP(%rsp), %rdi
	subq $MCOUNT_INSN_SIZE, %rdi
	/* Load the parent_ip into the second parameter */
#ifdef CC_USING_FENTRY
	movq SS+16(%rsp), %rsi
#else
	movq 8(%rbp), %rsi
#endif
.endm

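/*
 * ftrace_caller is the default trampoline: when function tracing is
 * enabled, the nops at the patched fentry/mcount call sites are turned
 * into calls to this entry point.
 */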
ENTRY(ftrace_caller)
	ftrace_caller_setup
	/* regs go into 4th parameter (but make it NULL) */
	movq $0, %rcx

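/*
 * ftrace_call is a global label so that ftrace can find and patch this
 * call instruction at runtime to call the active tracer function.
 */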
GLOBAL(ftrace_call)
	call ftrace_stub

	MCOUNT_RESTORE_FRAME
ftrace_return:

#ifdef CONFIG_FUNCTION_GRAPH_TRACER
GLOBAL(ftrace_graph_call)
	jmp ftrace_stub
#endif

GLOBAL(ftrace_stub)
	retq
END(ftrace_caller)

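/*
 * ftrace_regs_caller is used when an ftrace_ops is registered with
 * FTRACE_OPS_FL_SAVE_REGS (e.g. kprobes on ftrace): it builds a full
 * struct pt_regs so the callback can inspect and modify the registers,
 * including the saved RIP.
 */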
ENTRY(ftrace_regs_caller)
	/* Save the current flags before they can be changed (in SS location) */
	pushfq

	/* skip=8 to skip flags saved in SS */
	ftrace_caller_setup 8

	/* Save the rest of pt_regs */
	movq %r15, R15(%rsp)
	movq %r14, R14(%rsp)
	movq %r13, R13(%rsp)
	movq %r12, R12(%rsp)
	movq %r11, R11(%rsp)
	movq %r10, R10(%rsp)
	movq %rbp, RBP(%rsp)
	movq %rbx, RBX(%rsp)
	/* Copy saved flags */
	movq SS(%rsp), %rcx
	movq %rcx, EFLAGS(%rsp)
	/* Kernel segments */
	movq $__KERNEL_DS, %rcx
	movq %rcx, SS(%rsp)
	movq $__KERNEL_CS, %rcx
	movq %rcx, CS(%rsp)
	/* Stack - skipping return address */
	leaq SS+16(%rsp), %rcx
	movq %rcx, RSP(%rsp)

	/* regs go into 4th parameter */
	leaq (%rsp), %rcx

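/*
 * Like ftrace_call above, ftrace_regs_call marks the call instruction
 * that ftrace patches at runtime.
 */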
GLOBAL(ftrace_regs_call)
	call ftrace_stub

	/* Copy flags back to SS, to restore them */
	movq EFLAGS(%rsp), %rax
	movq %rax, SS(%rsp)

	/* Handlers can change the RIP */
	movq RIP(%rsp), %rax
	movq %rax, SS+8(%rsp)

	/* restore the rest of pt_regs */
	movq R15(%rsp), %r15
	movq R14(%rsp), %r14
	movq R13(%rsp), %r13
	movq R12(%rsp), %r12
	movq R10(%rsp), %r10
	movq RBP(%rsp), %rbp
	movq RBX(%rsp), %rbx

	/* skip=8 to skip flags saved in SS */
	MCOUNT_RESTORE_FRAME 8

	/* Restore flags */
	popfq

	jmp ftrace_return

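	/* Not reached: the jmp above is unconditional and nothing jumps here */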
	popfq
	jmp ftrace_stub

END(ftrace_regs_caller)


#else /* ! CONFIG_DYNAMIC_FTRACE */

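/*
 * Without DYNAMIC_FTRACE every traced function calls this hook
 * unconditionally; return immediately unless a tracer function or the
 * graph tracer hooks have been registered.
 */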
ENTRY(function_hook)
	cmpq $ftrace_stub, ftrace_trace_function
	jnz trace

#ifdef CONFIG_FUNCTION_GRAPH_TRACER
	cmpq $ftrace_stub, ftrace_graph_return
	jnz ftrace_graph_caller

	cmpq $ftrace_graph_entry_stub, ftrace_graph_entry
	jnz ftrace_graph_caller
#endif

GLOBAL(ftrace_stub)
	retq

trace:
	MCOUNT_SAVE_FRAME

	movq RIP(%rsp), %rdi
#ifdef CC_USING_FENTRY
	movq SS+16(%rsp), %rsi
#else
	movq 8(%rbp), %rsi
#endif
	subq $MCOUNT_INSN_SIZE, %rdi

	call *ftrace_trace_function

	MCOUNT_RESTORE_FRAME

	jmp ftrace_stub
END(function_hook)
#endif /* CONFIG_DYNAMIC_FTRACE */
#endif /* CONFIG_FUNCTION_TRACER */

#ifdef CONFIG_FUNCTION_GRAPH_TRACER
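/*
 * prepare_ftrace_return() replaces the parent return address (whose
 * stack location is passed in %rdi) with return_to_handler, so that the
 * return of the traced function can be traced as well.
 */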
ENTRY(ftrace_graph_caller)
	MCOUNT_SAVE_FRAME

#ifdef CC_USING_FENTRY
	leaq SS+16(%rsp), %rdi
	movq $0, %rdx	/* No framepointers needed */
#else
	leaq 8(%rbp), %rdi
	movq (%rbp), %rdx
#endif
	movq RIP(%rsp), %rsi
	subq $MCOUNT_INSN_SIZE, %rsi

	call prepare_ftrace_return

	MCOUNT_RESTORE_FRAME

	retq
END(ftrace_graph_caller)

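/*
 * Traced functions "return" here instead of to their real caller.
 * ftrace_return_to_handler() records the return and hands back the
 * original return address, which we then jump to.  %rax and %rdx are
 * preserved because they may hold the function's return value.
 */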
GLOBAL(return_to_handler)
	subq $24, %rsp

	/* Save the return values */
	movq %rax, (%rsp)
	movq %rdx, 8(%rsp)
	movq %rbp, %rdi

	call ftrace_return_to_handler

	movq %rax, %rdi
	movq 8(%rsp), %rdx
	movq (%rsp), %rax
	addq $24, %rsp
	jmp *%rdi
#endif