ftrace/x86: Simplify save_mcount_regs on getting RIP
arch/x86/kernel/mcount_64.S
/*
 * linux/arch/x86_64/mcount_64.S
 *
 * Copyright (C) 2014 Steven Rostedt, Red Hat Inc
 */

#include <linux/linkage.h>
#include <asm/ptrace.h>
#include <asm/ftrace.h>


        .code64
        .section .entry.text, "ax"


#ifdef CONFIG_FUNCTION_TRACER

#ifdef CC_USING_FENTRY
# define function_hook __fentry__
#else
# define function_hook mcount
#endif

/*
 * The gcc -pg option adds a call to 'mcount' in most functions.
 * When -mfentry is used, the call is to 'fentry' and not 'mcount'
 * and is done before the function's stack frame is set up.
 * They both require a set of regs to be saved before calling
 * any C code and restored before returning back to the function.
 *
 * On boot up, all these calls are converted into nops. When tracing
 * is enabled, the call can jump to either ftrace_caller or
 * ftrace_regs_caller. Callbacks (tracing functions) that require
 * ftrace_regs_caller (like kprobes) need to have pt_regs passed to
 * them. For this reason, the size of the pt_regs structure will be
 * allocated on the stack and the required mcount registers will
 * be saved in the locations that pt_regs has them in.
 */

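/*
 * For illustration only (not part of this file): with -pg -mfentry,
 * gcc places the call before anything else in the function:
 *
 *      func:
 *              call __fentry__         # 5 bytes, patched to a nop at boot
 *              push %rbp
 *              ...
 *
 * while plain -pg emits "call mcount" after the prologue, so the
 * traced function's frame (and the parent's return address at
 * 8(%rbp)) is already set up when mcount runs.
 */
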
/* @added: the amount of stack added before calling this */
.macro save_mcount_regs added=0
        /*
         * We add enough stack to save all regs.
         */
        subq $(SS+8), %rsp
        movq %rax, RAX(%rsp)
        movq %rcx, RCX(%rsp)
        movq %rdx, RDX(%rsp)
        movq %rsi, RSI(%rsp)
        movq %rdi, RDI(%rsp)
        movq %r8, R8(%rsp)
        movq %r9, R9(%rsp)
        /* Move RIP to its proper location */
        movq SS+8+\added(%rsp), %rdi
        movq %rdi, RIP(%rsp)
        .endm
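
/*
 * Sketch of the stack right after save_mcount_regs (with added=0);
 * the RAX, RIP, SS, ... constants are the pt_regs offsets pulled in
 * through asm/ptrace.h:
 *
 *      SS+8(%rsp)       original return address (copied to RIP(%rsp))
 *      (%rsp)..SS(%rsp) pt_regs-sized area; only the regs above are
 *                       filled in, ftrace_regs_caller fills the rest
 */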

.macro restore_mcount_regs
        movq R9(%rsp), %r9
        movq R8(%rsp), %r8
        movq RDI(%rsp), %rdi
        movq RSI(%rsp), %rsi
        movq RDX(%rsp), %rdx
        movq RCX(%rsp), %rcx
        movq RAX(%rsp), %rax
        addq $(SS+8), %rsp
        .endm

/* @added: the amount of stack added before calling this */
.macro ftrace_caller_setup trace_label added=0
        save_mcount_regs \added

        /* Save this location */
GLOBAL(\trace_label)
        /* Load the ftrace_ops into the 3rd parameter */
        movq function_trace_op(%rip), %rdx

        /* %rdi already has %rip from the save_mcount_regs macro */
        subq $MCOUNT_INSN_SIZE, %rdi
        /* Load the parent_ip into the second parameter */
#ifdef CC_USING_FENTRY
        movq SS+16+\added(%rsp), %rsi
#else
        movq 8+\added(%rbp), %rsi
#endif
.endm

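/*
 * After ftrace_caller_setup the registers line up with the C
 * prototype of an ftrace callback (ftrace_func_t in linux/ftrace.h):
 *
 *      %rdi = ip         (address of the call site in the traced function)
 *      %rsi = parent_ip  (return address into the caller)
 *      %rdx = op         (struct ftrace_ops *)
 *      %rcx = regs       (struct pt_regs *, set up by each caller below)
 */
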
#ifdef CONFIG_DYNAMIC_FTRACE

ENTRY(function_hook)
        retq
END(function_hook)

#ifdef CONFIG_FRAME_POINTER
/*
 * Stack traces will stop at the ftrace trampoline if the frame pointer
 * is not set up properly. If fentry is used, we need to save a frame
 * pointer for the parent as well as the function traced, because the
 * fentry is called before the stack frame is set up, whereas mcount
 * is called afterward.
 */
.macro create_frame parent rip
#ifdef CC_USING_FENTRY
        pushq \parent
        pushq %rbp
        movq %rsp, %rbp
#endif
        pushq \rip
        pushq %rbp
        movq %rsp, %rbp
.endm

.macro restore_frame
#ifdef CC_USING_FENTRY
        addq $16, %rsp
#endif
        popq %rbp
        addq $8, %rsp
.endm
#else
.macro create_frame parent rip
.endm
.macro restore_frame
.endm
#endif /* CONFIG_FRAME_POINTER */

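/*
 * Sketch of the two fake frames create_frame builds in the fentry
 * case, so a frame-pointer stack walker sees the traced function
 * called from its parent and the trampoline "called" from the traced
 * function (higher addresses first):
 *
 *      \parent                 fake frame #1 return address
 *      saved %rbp              original frame pointer
 *      \rip                    fake frame #2 return address
 *      saved %rbp              <- new %rbp (and %rsp)
 */
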
ENTRY(ftrace_caller)
        ftrace_caller_setup ftrace_caller_op_ptr
        /* regs go into 4th parameter (but make it NULL) */
        movq $0, %rcx

        create_frame %rsi, %rdi

GLOBAL(ftrace_call)
        call ftrace_stub

        restore_frame

        restore_mcount_regs

        /*
         * The copied trampoline must call ftrace_return as it
         * still may need to call the function graph tracer.
         */
GLOBAL(ftrace_caller_end)

GLOBAL(ftrace_return)

#ifdef CONFIG_FUNCTION_GRAPH_TRACER
GLOBAL(ftrace_graph_call)
        jmp ftrace_stub
#endif

GLOBAL(ftrace_stub)
        retq
END(ftrace_caller)
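
/*
 * ftrace_call and ftrace_graph_call above are live-patch sites: the
 * ftrace core rewrites the "call ftrace_stub" / "jmp ftrace_stub"
 * instructions at runtime to point at the registered callback, which
 * is why each carries a GLOBAL label.
 */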

ENTRY(ftrace_regs_caller)
        /* Save the current flags before any operations that can change them */
        pushfq

        /* added 8 bytes to save flags */
        ftrace_caller_setup ftrace_regs_caller_op_ptr 8

        /* Save the rest of pt_regs */
        movq %r15, R15(%rsp)
        movq %r14, R14(%rsp)
        movq %r13, R13(%rsp)
        movq %r12, R12(%rsp)
        movq %r11, R11(%rsp)
        movq %r10, R10(%rsp)
        movq %rbp, RBP(%rsp)
        movq %rbx, RBX(%rsp)
        /* Copy saved flags */
        movq SS+8(%rsp), %rcx
        movq %rcx, EFLAGS(%rsp)
        /* Kernel segments */
        movq $__KERNEL_DS, %rcx
        movq %rcx, SS(%rsp)
        movq $__KERNEL_CS, %rcx
        movq %rcx, CS(%rsp)
        /* Stack - skipping return address and flags */
        leaq SS+8*3(%rsp), %rcx
        movq %rcx, RSP(%rsp)
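        /*
         * The math above: SS+8 bytes of pt_regs, 8 bytes of saved
         * flags and 8 bytes of return address lie between the current
         * %rsp and the stack pointer the traced function had, so
         * SS+8*3(%rsp) is the %rsp value to report in pt_regs.
         */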

        /* regs go into 4th parameter */
        leaq (%rsp), %rcx

        create_frame %rsi, %rdi

GLOBAL(ftrace_regs_call)
        call ftrace_stub

        restore_frame

        /* Copy flags back to SS, to restore them */
        movq EFLAGS(%rsp), %rax
        movq %rax, SS+8(%rsp)

        /* Handlers can change the RIP */
        movq RIP(%rsp), %rax
        movq %rax, SS+8*2(%rsp)

        /* restore the rest of pt_regs */
        movq R15(%rsp), %r15
        movq R14(%rsp), %r14
        movq R13(%rsp), %r13
        movq R12(%rsp), %r12
        movq R10(%rsp), %r10
        movq RBP(%rsp), %rbp
        movq RBX(%rsp), %rbx

        restore_mcount_regs

        /* Restore flags */
        popfq

        /*
         * As this jmp to ftrace_return can be a short jump
         * it must not be copied into the trampoline.
         * The trampoline will add the code to jump
         * to the return.
         */
GLOBAL(ftrace_regs_caller_end)

        jmp ftrace_return

        popfq
        jmp ftrace_stub

END(ftrace_regs_caller)
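
/*
 * In short: ftrace_regs_caller differs from ftrace_caller in saving
 * and restoring the complete pt_regs (plus flags), so a callback
 * registered with FTRACE_OPS_FL_SAVE_REGS, such as kprobes, may
 * inspect and even modify any register, including RIP.
 */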


#else /* ! CONFIG_DYNAMIC_FTRACE */

ENTRY(function_hook)
        cmpq $ftrace_stub, ftrace_trace_function
        jnz trace

fgraph_trace:
#ifdef CONFIG_FUNCTION_GRAPH_TRACER
        cmpq $ftrace_stub, ftrace_graph_return
        jnz ftrace_graph_caller

        cmpq $ftrace_graph_entry_stub, ftrace_graph_entry
        jnz ftrace_graph_caller
#endif

GLOBAL(ftrace_stub)
        retq

trace:
        ftrace_caller_setup ftrace_caller_op_ptr

        call *ftrace_trace_function

        restore_mcount_regs

        jmp fgraph_trace
END(function_hook)
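
/*
 * Note: without CONFIG_DYNAMIC_FTRACE every mcount/fentry call site
 * remains a real call, and tracing is switched on and off by swapping
 * the ftrace_trace_function pointer, which is why the hot path above
 * must test it on every function entry.
 */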
#endif /* CONFIG_DYNAMIC_FTRACE */
#endif /* CONFIG_FUNCTION_TRACER */

#ifdef CONFIG_FUNCTION_GRAPH_TRACER
ENTRY(ftrace_graph_caller)
        save_mcount_regs

#ifdef CC_USING_FENTRY
        leaq SS+16(%rsp), %rdi
        movq $0, %rdx   /* No framepointers needed */
#else
        leaq 8(%rbp), %rdi
        movq (%rbp), %rdx
#endif
        movq RIP(%rsp), %rsi
        subq $MCOUNT_INSN_SIZE, %rsi

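        /*
         * The registers now match the C prototype
         * prepare_ftrace_return(unsigned long *parent,
         *                       unsigned long self_addr,
         *                       unsigned long frame_pointer):
         * %rdi points at the parent return-address slot (so it can be
         * rewritten to return_to_handler), %rsi holds the traced
         * function's address and %rdx a frame pointer used for sanity
         * checking (zero with fentry, where no frame exists yet).
         */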
        call prepare_ftrace_return

        restore_mcount_regs

        retq
END(ftrace_graph_caller)

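/*
 * prepare_ftrace_return above rewrites the parent's return-address
 * slot to point here, so the traced function "returns" into
 * return_to_handler first. ftrace_return_to_handler() takes the frame
 * pointer (%rbp, passed in %rdi) and hands back the real return
 * address, which we then jump to.
 */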
GLOBAL(return_to_handler)
        subq $24, %rsp

        /* Save the return values */
        movq %rax, (%rsp)
        movq %rdx, 8(%rsp)
        movq %rbp, %rdi

        call ftrace_return_to_handler

        movq %rax, %rdi
        movq 8(%rsp), %rdx
        movq (%rsp), %rax
        addq $24, %rsp
        jmp *%rdi
#endif