objtool: Add asm version of STACK_FRAME_NON_STANDARD (commit title of the blame view)
[linux-block.git] / arch / x86 / kernel / ftrace_64.S (path of the file shown below)
CommitLineData (gitweb blame dump: code lines below are prefixed with commit hashes, original line numbers, and author-initial lines — these prefixes are not part of the assembly source)
b2441318 1/* SPDX-License-Identifier: GPL-2.0 */
e18eead3 2/*
e18eead3
SR
3 * Copyright (C) 2014 Steven Rostedt, Red Hat Inc
4 */
5
6#include <linux/linkage.h>
7#include <asm/ptrace.h>
8#include <asm/ftrace.h>
784d5699 9#include <asm/export.h>
9351803b 10#include <asm/nospec-branch.h>
e2ac83d7 11#include <asm/unwind_hints.h>
ea1ed38d 12#include <asm/frame.h>
e18eead3
SR
13
14 .code64
b9f6976b 15 .section .text, "ax"
e18eead3 16
0687c36e 17#ifdef CONFIG_FRAME_POINTER
0687c36e
SRRH
18/* Save parent and function stack frames (rip and rbp) */
19# define MCOUNT_FRAME_SIZE (8+16*2)
0687c36e
SRRH
20#else
21/* No need to save a stack frame */
e2ac83d7 22# define MCOUNT_FRAME_SIZE 0
0687c36e
SRRH
23#endif /* CONFIG_FRAME_POINTER */
24
85f6f029 25/* Size of stack used to save mcount regs in save_mcount_regs */
dc2745b6 26#define MCOUNT_REG_SIZE (FRAME_SIZE + MCOUNT_FRAME_SIZE)
85f6f029 27
05df710e
SRRH
28/*
29 * gcc -pg option adds a call to 'mcount' in most functions.
30 * When -mfentry is used, the call is to 'fentry' and not 'mcount'
31 * and is done before the function's stack frame is set up.
32 * They both require a set of regs to be saved before calling
33 * any C code and restored before returning back to the function.
34 *
35 * On boot up, all these calls are converted into nops. When tracing
36 * is enabled, the call can jump to either ftrace_caller or
37 * ftrace_regs_caller. Callbacks (tracing functions) that require
38 * ftrace_regs_caller (like kprobes) need to have pt_regs passed to
39 * it. For this reason, the size of the pt_regs structure will be
40 * allocated on the stack and the required mcount registers will
41 * be saved in the locations that pt_regs has them in.
42 */
43
f1ab00af
SRRH
44/*
45 * @added: the amount of stack added before calling this
46 *
47 * After this is called, the following registers contain:
48 *
49 * %rdi - holds the address that called the trampoline
50 * %rsi - holds the parent function (traced function's return address)
51 * %rdx - holds the original %rbp
52 */
/*
 * save_mcount_regs: build a pt_regs-shaped register save area on the stack.
 * @added: extra bytes the caller already pushed above our return address
 *         (ftrace_regs_caller passes 8 for its saved flags).
 * Per the header comment above: on exit %rdi = address of the traced call
 * (adjusted by MCOUNT_INSN_SIZE below), %rsi = parent return address,
 * %rdx = the original %rbp.
 * NOTE(review): blame prefixes (hash/initials/line numbers) are interleaved
 * with the code in this dump.
 */
527aa75b 53.macro save_mcount_regs added=0
0687c36e 54
e2ac83d7
JP
55#ifdef CONFIG_FRAME_POINTER
56	/* Save the original rbp */
0687c36e
SRRH
57	pushq %rbp
58
0687c36e
SRRH
59	/*
60	 * Stack traces will stop at the ftrace trampoline if the frame pointer
61	 * is not set up properly. If fentry is used, we need to save a frame
62	 * pointer for the parent as well as the function traced, because the
63	 * fentry is called before the stack frame is set up, where as mcount
64	 * is called afterward.
65	 */
562e14f7 66
0687c36e
SRRH
67	/* Save the parent pointer (skip orig rbp and our return address) */
68	pushq \added+8*2(%rsp)
69	pushq %rbp
70	movq %rsp, %rbp
71	/* Save the return address (now skip orig rbp, rbp and parent) */
72	pushq \added+8*3(%rsp)
0687c36e
SRRH
73	pushq %rbp
74	movq %rsp, %rbp
75#endif /* CONFIG_FRAME_POINTER */
76
	/*
	 * We add enough stack to save all regs.
	 * (FRAME_SIZE is the pt_regs frame size from asm/ptrace.h.)
	 */
dc2745b6 80	subq $(FRAME_SIZE), %rsp
4bcdf152
SRRH
81	movq %rax, RAX(%rsp)
82	movq %rcx, RCX(%rsp)
83	movq %rdx, RDX(%rsp)
84	movq %rsi, RSI(%rsp)
85	movq %rdi, RDI(%rsp)
86	movq %r8, R8(%rsp)
87	movq %r9, R9(%rsp)
/* ORIG_RAX cleared here; a non-zero value is set elsewhere for direct calls
 * (see the ORIG_RAX test in ftrace_regs_caller below). */
562955fe 88	movq $0, ORIG_RAX(%rsp)
0687c36e
SRRH
89	/*
90	 * Save the original RBP. Even though the mcount ABI does not
91	 * require this, it helps out callers.
92	 */
e2ac83d7 93#ifdef CONFIG_FRAME_POINTER
/* With frame pointers, the original rbp was pushed first and sits at the
 * top of the MCOUNT_REG_SIZE area. */
0687c36e 94	movq MCOUNT_REG_SIZE-8(%rsp), %rdx
e2ac83d7
JP
95#else
96	movq %rbp, %rdx
97#endif
0687c36e
SRRH
98	movq %rdx, RBP(%rsp)
99
f1ab00af 100	/* Copy the parent address into %rsi (second parameter) */
f1ab00af 101	movq MCOUNT_REG_SIZE+8+\added(%rsp), %rsi
f1ab00af 102
4bcdf152 103	/* Move RIP to its proper location */
85f6f029 104	movq MCOUNT_REG_SIZE+\added(%rsp), %rdi
094dfc54 105	movq %rdi, RIP(%rsp)
f1ab00af
SRRH
106
107	/*
108	 * Now %rdi (the first parameter) has the return address of
109	 * where ftrace_call returns. But the callbacks expect the
6a06bdbf 110	 * address of the call itself.
f1ab00af
SRRH
111	 */
112	subq $MCOUNT_INSN_SIZE, %rdi
4bcdf152
SRRH
113	.endm
114
562955fe
SRV
/*
 * restore_mcount_regs: undo save_mcount_regs and pop the save area.
 * @save: bytes to leave un-popped at the top of the area
 *        (ftrace_regs_caller passes 8 so its flags slot survives the pop).
 * Note: only the regs stored by save_mcount_regs are restored here; the
 * extra pt_regs fields saved by ftrace_regs_caller are restored separately.
 */
115.macro restore_mcount_regs save=0
116
117	/* ftrace_regs_caller or frame pointers require this */
118	movq RBP(%rsp), %rbp
119
4bcdf152
SRRH
120	movq R9(%rsp), %r9
121	movq R8(%rsp), %r8
122	movq RDI(%rsp), %rdi
123	movq RSI(%rsp), %rsi
124	movq RDX(%rsp), %rdx
125	movq RCX(%rsp), %rcx
126	movq RAX(%rsp), %rax
0687c36e 127
562955fe 128	addq $MCOUNT_REG_SIZE-\save, %rsp
0687c36e 129
4bcdf152
SRRH
130	.endm
131
76c2f13c
SRRH
132#ifdef CONFIG_DYNAMIC_FTRACE
133
/*
 * __fentry__ (CONFIG_DYNAMIC_FTRACE): the default entry just returns.
 * Per the file-top comment, the compiler-emitted fentry call sites are
 * converted to nops at boot and redirected to ftrace_caller /
 * ftrace_regs_caller when tracing is enabled.
 */
0f42c1ad 134SYM_FUNC_START(__fentry__)
76c2f13c 135	retq
0f42c1ad
BP
136SYM_FUNC_END(__fentry__)
137EXPORT_SYMBOL(__fentry__)
76c2f13c 138
/*
 * ftrace_caller: dynamic-ftrace trampoline for callbacks that do NOT need
 * full pt_regs. Fills the callback arguments: %rdi = traced ip,
 * %rsi = parent ip (both via save_mcount_regs), %rdx = ftrace_ops,
 * %rcx = partial pt_regs.
 * NOTE(review): the global inner labels (ftrace_caller_op_ptr, ftrace_call,
 * ftrace_caller_end) look like runtime patch/copy markers — confirm against
 * arch/x86/kernel/ftrace.c.
 */
6dcc5627 139SYM_FUNC_START(ftrace_caller)
f1ab00af
SRRH
140	/* save_mcount_regs fills in first two parameters */
141	save_mcount_regs
142
02a474ca
SRV
143	/* Stack - skipping return address of ftrace_caller */
144	leaq MCOUNT_REG_SIZE+8(%rsp), %rcx
145	movq %rcx, RSP(%rsp)
146
26ba4e57 147SYM_INNER_LABEL(ftrace_caller_op_ptr, SYM_L_GLOBAL)
f1ab00af
SRRH
148	/* Load the ftrace_ops into the 3rd parameter */
149	movq function_trace_op(%rip), %rdx
150
02a474ca
SRV
151	/* regs go into 4th parameter */
152	leaq (%rsp), %rcx
153
154	/* Only ops with REGS flag set should have CS register set */
155	movq $0, CS(%rsp)
e18eead3 156
26ba4e57 157SYM_INNER_LABEL(ftrace_call, SYM_L_GLOBAL)
e18eead3
SR
158	call ftrace_stub
159
2860cd8a
SRV
160	/* Handlers can change the RIP */
161	movq RIP(%rsp), %rax
162	movq %rax, MCOUNT_REG_SIZE(%rsp)
163
05df710e 164	restore_mcount_regs
f3bea491
SRRH
165
166	/*
f1b92bb6
BP
167	 * The code up to this label is copied into trampolines so
168	 * think twice before adding any new code or changing the
169	 * layout here.
170	 */
0298739b 171SYM_INNER_LABEL(ftrace_caller_end, SYM_L_GLOBAL)
e18eead3 172
0298739b
PZ
173	jmp ftrace_epilogue
174SYM_FUNC_END(ftrace_caller);
175
/*
 * ftrace_epilogue: common tail for both trampolines. Optionally diverts
 * into the graph tracer (ftrace_graph_call is patched like ftrace_call —
 * presumably; confirm in ftrace.c), then falls through to the shared
 * weak ftrace_stub retq.
 */
176SYM_FUNC_START(ftrace_epilogue)
e18eead3 177#ifdef CONFIG_FUNCTION_GRAPH_TRACER
26ba4e57 178SYM_INNER_LABEL(ftrace_graph_call, SYM_L_GLOBAL)
e18eead3
SR
179	jmp ftrace_stub
180#endif
181
d2a68c4e
SRV
182/*
183 * This is weak to keep gas from relaxing the jumps.
184 * It is also used to copy the retq for trampolines.
185 */
13fbe784 186SYM_INNER_LABEL_ALIGN(ftrace_stub, SYM_L_WEAK)
/* Entered via call from the patch sites, so tell objtool this is a
 * function boundary for unwinding. */
18660698 187	UNWIND_HINT_FUNC
e18eead3 188	retq
0298739b 189SYM_FUNC_END(ftrace_epilogue)
e18eead3 190
/*
 * ftrace_regs_caller: trampoline for callbacks that need a full pt_regs
 * (e.g. kprobes, per the file-top comment). Saves every GP register plus
 * flags and segment values, lets the handler modify pt_regs (RIP is
 * written back), and supports redirecting the return into a "direct"
 * caller via a non-zero ORIG_RAX (see arch_ftrace_set_direct_caller()).
 * Statement order here is load-bearing: flags and ORIG_RAX are swapped
 * through stack slots so both survive the register restore.
 */
6dcc5627 191SYM_FUNC_START(ftrace_regs_caller)
527aa75b 192	/* Save the current flags before any operations that can change them */
e18eead3
SR
193	pushfq
194
527aa75b 195	/* added 8 bytes to save flags */
f1ab00af
SRRH
196	save_mcount_regs 8
197	/* save_mcount_regs fills in first two parameters */
198
26ba4e57 199SYM_INNER_LABEL(ftrace_regs_caller_op_ptr, SYM_L_GLOBAL)
f1ab00af
SRRH
200	/* Load the ftrace_ops into the 3rd parameter */
201	movq function_trace_op(%rip), %rdx
e18eead3
SR
202
203	/* Save the rest of pt_regs */
204	movq %r15, R15(%rsp)
205	movq %r14, R14(%rsp)
206	movq %r13, R13(%rsp)
207	movq %r12, R12(%rsp)
208	movq %r11, R11(%rsp)
209	movq %r10, R10(%rsp)
e18eead3
SR
210	movq %rbx, RBX(%rsp)
211	/* Copy saved flags */
85f6f029 212	movq MCOUNT_REG_SIZE(%rsp), %rcx
e18eead3
SR
213	movq %rcx, EFLAGS(%rsp)
214	/* Kernel segments */
215	movq $__KERNEL_DS, %rcx
216	movq %rcx, SS(%rsp)
217	movq $__KERNEL_CS, %rcx
218	movq %rcx, CS(%rsp)
527aa75b 219	/* Stack - skipping return address and flags */
85f6f029 220	leaq MCOUNT_REG_SIZE+8*2(%rsp), %rcx
e18eead3
SR
221	movq %rcx, RSP(%rsp)
222
ea1ed38d
PZ
223	ENCODE_FRAME_POINTER
224
e18eead3
SR
225	/* regs go into 4th parameter */
226	leaq (%rsp), %rcx
227
26ba4e57 228SYM_INNER_LABEL(ftrace_regs_call, SYM_L_GLOBAL)
e18eead3
SR
229	call ftrace_stub
230
231	/* Copy flags back to SS, to restore them */
232	movq EFLAGS(%rsp), %rax
85f6f029 233	movq %rax, MCOUNT_REG_SIZE(%rsp)
e18eead3
SR
234
235	/* Handlers can change the RIP */
236	movq RIP(%rsp), %rax
85f6f029 237	movq %rax, MCOUNT_REG_SIZE+8(%rsp)
e18eead3
SR
238
239	/* restore the rest of pt_regs */
240	movq R15(%rsp), %r15
241	movq R14(%rsp), %r14
242	movq R13(%rsp), %r13
243	movq R12(%rsp), %r12
244	movq R10(%rsp), %r10
e18eead3
SR
245	movq RBX(%rsp), %rbx
246
562955fe
SRV
/* Stage ORIG_RAX just below the flags slot so it is still addressable
 * after restore_mcount_regs pops the save area. */
247	movq ORIG_RAX(%rsp), %rax
248	movq %rax, MCOUNT_REG_SIZE-8(%rsp)
249
0298739b
PZ
250	/*
251	 * If ORIG_RAX is anything but zero, make this a call to that.
252	 * See arch_ftrace_set_direct_caller().
253	 */
562955fe 254	movq ORIG_RAX(%rsp), %rax
9f2dfd61 255	testq %rax, %rax
fe58acef 256SYM_INNER_LABEL(ftrace_regs_caller_jmp, SYM_L_GLOBAL)
0b4f8ddc 257	jnz 1f
562955fe 258
/* Common case: no direct caller; plain restore and return. */
0b4f8ddc 259	restore_mcount_regs
e18eead3
SR
260	/* Restore flags */
261	popfq
262
f3bea491 263	/*
f1b92bb6 264	 * As this jmp to ftrace_epilogue can be a short jump
f3bea491
SRRH
265	 * it must not be copied into the trampoline.
266	 * The trampoline will add the code to jump
267	 * to the return.
268	 */
5da7cd11 269SYM_INNER_LABEL(ftrace_regs_caller_end, SYM_L_GLOBAL)
f1b92bb6 270	jmp ftrace_epilogue
fdc841b5 271
0b4f8ddc
SRV
/* Direct-caller path: swap so the saved flags end up below the direct
 * address; popfq then restores flags and the jmp lands on the direct
 * caller left at the (former) return-address slot. */
272	/* Swap the flags with orig_rax */
2731: movq MCOUNT_REG_SIZE(%rsp), %rdi
274	movq %rdi, MCOUNT_REG_SIZE-8(%rsp)
275	movq %rax, MCOUNT_REG_SIZE(%rsp)
276
277	restore_mcount_regs 8
278	/* Restore flags */
279	popfq
0b4f8ddc
SRV
280	UNWIND_HINT_RET_OFFSET
281	jmp ftrace_epilogue
282
6dcc5627 283SYM_FUNC_END(ftrace_regs_caller)
e18eead3
SR
284
285
286#else /* ! CONFIG_DYNAMIC_FTRACE */
287
/*
 * __fentry__ (!CONFIG_DYNAMIC_FTRACE): runtime-dispatched entry.
 * Checks the ftrace_trace_function pointer; if it is not the stub, saves
 * the mcount regs and calls it indirectly, then falls through to the
 * graph-tracer checks. Without DYNAMIC_FTRACE only ip and parent ip are
 * passed (see the comment below).
 */
0f42c1ad 288SYM_FUNC_START(__fentry__)
e18eead3
SR
289	cmpq $ftrace_stub, ftrace_trace_function
290	jnz trace
291
62a207d7 292fgraph_trace:
e18eead3
SR
293#ifdef CONFIG_FUNCTION_GRAPH_TRACER
294	cmpq $ftrace_stub, ftrace_graph_return
295	jnz ftrace_graph_caller
296
297	cmpq $ftrace_graph_entry_stub, ftrace_graph_entry
298	jnz ftrace_graph_caller
299#endif
300
26ba4e57 301SYM_INNER_LABEL(ftrace_stub, SYM_L_GLOBAL)
e18eead3
SR
302	retq
303
304trace:
f1ab00af
SRRH
305	/* save_mcount_regs fills in first two parameters */
306	save_mcount_regs
e18eead3 307
112677d6
NK
308	/*
309	 * When DYNAMIC_FTRACE is not defined, ARCH_SUPPORTS_FTRACE_OPS is not
310	 * set (see include/asm/ftrace.h and include/linux/ftrace.h). Only the
311	 * ip and parent ip are used and the list function is called when
312	 * function tracing is enabled.
313	 */
9351803b 314	movq ftrace_trace_function, %r8
/* CALL_NOSPEC: retpoline-safe indirect call (asm/nospec-branch.h). */
34fdce69 315	CALL_NOSPEC r8
05df710e 316	restore_mcount_regs
e18eead3 317
62a207d7 318	jmp fgraph_trace
0f42c1ad
BP
319SYM_FUNC_END(__fentry__)
320EXPORT_SYMBOL(__fentry__)
e18eead3 321#endif /* CONFIG_DYNAMIC_FTRACE */
e18eead3
SR
322
323#ifdef CONFIG_FUNCTION_GRAPH_TRACER
/*
 * ftrace_graph_caller: hook the traced function's return.
 * Calls prepare_ftrace_return(ip /*%rdi*/, &parent_ret /*%rsi*/, 0) —
 * %rsi points at the parent return-address slot on the stack so it can
 * presumably be rewritten to return_to_handler (confirm in ftrace.c).
 */
6dcc5627 324SYM_FUNC_START(ftrace_graph_caller)
6a06bdbf 325	/* Saves rbp into %rdx and fills first parameter */
05df710e 326	save_mcount_regs
e18eead3 327
6a06bdbf 328	leaq MCOUNT_REG_SIZE+8(%rsp), %rsi
e18eead3 329	movq $0, %rdx	/* No framepointers needed */
e18eead3
SR
330	call prepare_ftrace_return
331
05df710e 332	restore_mcount_regs
e18eead3
SR
333
334	retq
6dcc5627 335SYM_FUNC_END(ftrace_graph_caller)
e18eead3 336
/*
 * return_to_handler: landing pad for graph-traced function returns.
 * Preserves the function's return values (%rax, %rdx), asks
 * ftrace_return_to_handler() for the real return address, then jumps
 * there via a retpoline-safe indirect jump. SYM_CODE/UNWIND_HINT_EMPTY
 * because we are entered with no ordinary call frame.
 */
bc7b11c0 337SYM_CODE_START(return_to_handler)
e2ac83d7 338	UNWIND_HINT_EMPTY
e18eead3
SR
/* 24 bytes: two return-value slots + alignment padding. */
339	subq $24, %rsp
340
341	/* Save the return values */
342	movq %rax, (%rsp)
343	movq %rdx, 8(%rsp)
344	movq %rbp, %rdi
345
346	call ftrace_return_to_handler
347
/* %rax now holds the original return address; restore the values. */
348	movq %rax, %rdi
349	movq 8(%rsp), %rdx
350	movq (%rsp), %rax
351	addq $24, %rsp
34fdce69 352	JMP_NOSPEC rdi
bc7b11c0 353SYM_CODE_END(return_to_handler)
e18eead3 354#endif