x86/ftrace: Remove ftrace_epilogue()
[linux-block.git] arch/x86/kernel/ftrace_64.S
/* SPDX-License-Identifier: GPL-2.0 */
/*
 * Copyright (C) 2014 Steven Rostedt, Red Hat Inc
 */

#include <linux/linkage.h>
#include <asm/ptrace.h>
#include <asm/ftrace.h>
#include <asm/export.h>
#include <asm/nospec-branch.h>
#include <asm/unwind_hints.h>
#include <asm/frame.h>

	.code64
	.section .text, "ax"

#ifdef CONFIG_FRAME_POINTER
/*
 * Save parent and function stack frames (rip and rbp): 8 bytes for
 * the original %rbp plus two 16-byte (rip + rbp) frame records.
 */
# define MCOUNT_FRAME_SIZE	(8+16*2)
#else
/* No need to save a stack frame */
# define MCOUNT_FRAME_SIZE	0
#endif /* CONFIG_FRAME_POINTER */

/* Size of stack used to save mcount regs in save_mcount_regs */
#define MCOUNT_REG_SIZE		(FRAME_SIZE + MCOUNT_FRAME_SIZE)

/*
 * The gcc -pg option adds a call to 'mcount' to most functions.
 * When -mfentry is used, the call is to '__fentry__' instead of
 * 'mcount' and is made before the function's stack frame is set up.
 * Both require a set of regs to be saved before calling any C code,
 * and restored before returning to the traced function.
 *
 * On boot up, all these calls are converted into nops. When tracing
 * is enabled, the call can jump to either ftrace_caller or
 * ftrace_regs_caller. Callbacks (tracing functions) that require
 * ftrace_regs_caller (like kprobes) need to have pt_regs passed to
 * them. For this reason, room for a full pt_regs structure is
 * allocated on the stack, and the required mcount registers are
 * saved in the locations where pt_regs keeps them.
 */

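/*
 * Illustrative sketch (not part of this file): with -pg -mfentry the
 * compiler emits the call as the very first instruction of each traced
 * function, before any frame setup. The 5-byte call is turned into a
 * nop at boot and re-patched to call a trampoline when tracing is on:
 *
 *	<some_func>:
 *		call __fentry__		(nop'd out at boot)
 *		push %rbp
 *		mov  %rsp, %rbp
 *		...
 */
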
/*
 * @added: the amount of stack added before calling this
 *
 * After this is called, the following registers contain:
 *
 *  %rdi - holds the address that called the trampoline
 *  %rsi - holds the parent function (traced function's return address)
 *  %rdx - holds the original %rbp
 */
.macro save_mcount_regs added=0

#ifdef CONFIG_FRAME_POINTER
	/* Save the original rbp */
	pushq %rbp

	/*
	 * Stack traces will stop at the ftrace trampoline if the frame
	 * pointer is not set up properly. If fentry is used, we need to
	 * save a frame pointer for the parent as well as for the traced
	 * function, because fentry is called before the stack frame is
	 * set up, whereas mcount is called afterward.
	 */

	/* Save the parent pointer (skip orig rbp and our return address) */
	pushq \added+8*2(%rsp)
	pushq %rbp
	movq %rsp, %rbp
	/* Save the return address (now skip orig rbp, rbp and parent) */
	pushq \added+8*3(%rsp)
	pushq %rbp
	movq %rsp, %rbp
#endif /* CONFIG_FRAME_POINTER */

	/*
	 * We add enough stack to save all regs.
	 */
	subq $(FRAME_SIZE), %rsp
	movq %rax, RAX(%rsp)
	movq %rcx, RCX(%rsp)
	movq %rdx, RDX(%rsp)
	movq %rsi, RSI(%rsp)
	movq %rdi, RDI(%rsp)
	movq %r8, R8(%rsp)
	movq %r9, R9(%rsp)
	movq $0, ORIG_RAX(%rsp)
	/*
	 * Save the original RBP. Even though the mcount ABI does not
	 * require this, it helps out callers.
	 */
#ifdef CONFIG_FRAME_POINTER
	movq MCOUNT_REG_SIZE-8(%rsp), %rdx
#else
	movq %rbp, %rdx
#endif
	movq %rdx, RBP(%rsp)

	/* Copy the parent address into %rsi (second parameter) */
	movq MCOUNT_REG_SIZE+8+\added(%rsp), %rsi

	/* Move RIP to its proper location */
	movq MCOUNT_REG_SIZE+\added(%rsp), %rdi
	movq %rdi, RIP(%rsp)

	/*
	 * Now %rdi (the first parameter) has the return address of
	 * where ftrace_call returns. But the callbacks expect the
	 * address of the call itself.
	 */
	subq $MCOUNT_INSN_SIZE, %rdi
.endm
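/*
 * Illustrative stack layout right after save_mcount_regs (frame
 * pointers enabled, \added = 0); offsets are relative to the new %rsp:
 *
 *	MCOUNT_REG_SIZE+8(%rsp)   parent's return address
 *	MCOUNT_REG_SIZE(%rsp)     traced function's return address
 *	MCOUNT_REG_SIZE-8(%rsp)   original %rbp
 *	FRAME_SIZE+24(%rsp)       copy of the parent's return address
 *	FRAME_SIZE+16(%rsp)       %rbp for the fake parent frame
 *	FRAME_SIZE+8(%rsp)        copy of the function's return address
 *	FRAME_SIZE(%rsp)          %rbp for the fake function frame
 *	0(%rsp)                   pt_regs-shaped register save area
 */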

.macro restore_mcount_regs save=0

	/* ftrace_regs_caller or frame pointers require this */
	movq RBP(%rsp), %rbp

	movq R9(%rsp), %r9
	movq R8(%rsp), %r8
	movq RDI(%rsp), %rdi
	movq RSI(%rsp), %rsi
	movq RDX(%rsp), %rdx
	movq RCX(%rsp), %rcx
	movq RAX(%rsp), %rax

	addq $MCOUNT_REG_SIZE-\save, %rsp

.endm
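/*
 * Note: a non-zero \save leaves that many bytes of the register save
 * area in place. The direct-call path of ftrace_regs_caller below uses
 * "restore_mcount_regs 8" so that the slot holding the saved flags
 * (and, above it, the direct trampoline address) survives for the
 * popfq/RET sequence that follows.
 */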

#ifdef CONFIG_DYNAMIC_FTRACE

SYM_FUNC_START(__fentry__)
	RET
SYM_FUNC_END(__fentry__)
EXPORT_SYMBOL(__fentry__)

SYM_FUNC_START(ftrace_caller)
	/* save_mcount_regs fills in first two parameters */
	save_mcount_regs

	/* Stack - skipping return address of ftrace_caller */
	leaq MCOUNT_REG_SIZE+8(%rsp), %rcx
	movq %rcx, RSP(%rsp)

SYM_INNER_LABEL(ftrace_caller_op_ptr, SYM_L_GLOBAL)
	ANNOTATE_NOENDBR
	/* Load the ftrace_ops into the 3rd parameter */
	movq function_trace_op(%rip), %rdx

	/* regs go into 4th parameter */
	leaq (%rsp), %rcx

	/* Only ops with REGS flag set should have CS register set */
	movq $0, CS(%rsp)

SYM_INNER_LABEL(ftrace_call, SYM_L_GLOBAL)
	ANNOTATE_NOENDBR
	call ftrace_stub

	/* Handlers can change the RIP */
	movq RIP(%rsp), %rax
	movq %rax, MCOUNT_REG_SIZE(%rsp)

	restore_mcount_regs

	/*
	 * The code up to this label is copied into trampolines, so
	 * think twice before adding any new code or changing the
	 * layout here.
	 */
SYM_INNER_LABEL(ftrace_caller_end, SYM_L_GLOBAL)
	ANNOTATE_NOENDBR
	RET
SYM_FUNC_END(ftrace_caller);
STACK_FRAME_NON_STANDARD_FP(ftrace_caller)
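/*
 * Illustrative shape of a dynamically allocated trampoline (see
 * create_trampoline() in arch/x86/kernel/ftrace.c): the body between
 * ftrace_caller and ftrace_caller_end is copied, the ftrace_ops
 * pointer and the call target are patched in, and a return is
 * appended, roughly:
 *
 *	<trampoline>:
 *		copy of ftrace_caller .. ftrace_caller_end
 *		ret (or a jmp to the return thunk)
 */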

SYM_FUNC_START(ftrace_stub)
	UNWIND_HINT_FUNC
	RET
SYM_FUNC_END(ftrace_stub)

SYM_FUNC_START(ftrace_regs_caller)
	/* Save the current flags before any operations that can change them */
	pushfq

	/* added 8 bytes to save flags */
	save_mcount_regs 8
	/* save_mcount_regs fills in first two parameters */

SYM_INNER_LABEL(ftrace_regs_caller_op_ptr, SYM_L_GLOBAL)
	ANNOTATE_NOENDBR
	/* Load the ftrace_ops into the 3rd parameter */
	movq function_trace_op(%rip), %rdx

	/* Save the rest of pt_regs */
	movq %r15, R15(%rsp)
	movq %r14, R14(%rsp)
	movq %r13, R13(%rsp)
	movq %r12, R12(%rsp)
	movq %r11, R11(%rsp)
	movq %r10, R10(%rsp)
	movq %rbx, RBX(%rsp)
	/* Copy saved flags */
	movq MCOUNT_REG_SIZE(%rsp), %rcx
	movq %rcx, EFLAGS(%rsp)
	/* Kernel segments */
	movq $__KERNEL_DS, %rcx
	movq %rcx, SS(%rsp)
	movq $__KERNEL_CS, %rcx
	movq %rcx, CS(%rsp)
	/* Stack - skipping return address and flags */
	leaq MCOUNT_REG_SIZE+8*2(%rsp), %rcx
	movq %rcx, RSP(%rsp)

	ENCODE_FRAME_POINTER

	/* regs go into 4th parameter */
	leaq (%rsp), %rcx

SYM_INNER_LABEL(ftrace_regs_call, SYM_L_GLOBAL)
	ANNOTATE_NOENDBR
	call ftrace_stub

	/* Copy flags back to SS, to restore them */
	movq EFLAGS(%rsp), %rax
	movq %rax, MCOUNT_REG_SIZE(%rsp)

	/* Handlers can change the RIP */
	movq RIP(%rsp), %rax
	movq %rax, MCOUNT_REG_SIZE+8(%rsp)

	/* restore the rest of pt_regs */
	movq R15(%rsp), %r15
	movq R14(%rsp), %r14
	movq R13(%rsp), %r13
	movq R12(%rsp), %r12
	movq R10(%rsp), %r10
	movq RBX(%rsp), %rbx

	movq ORIG_RAX(%rsp), %rax
	movq %rax, MCOUNT_REG_SIZE-8(%rsp)

	/*
	 * If ORIG_RAX is anything but zero, make this a call to that
	 * address. See arch_ftrace_set_direct_caller().
	 */
	testq %rax, %rax
SYM_INNER_LABEL(ftrace_regs_caller_jmp, SYM_L_GLOBAL)
	ANNOTATE_NOENDBR
	jnz 1f

	restore_mcount_regs
	/* Restore flags */
	popfq

	/*
	 * The trampoline will add the return.
	 */
SYM_INNER_LABEL(ftrace_regs_caller_end, SYM_L_GLOBAL)
	ANNOTATE_NOENDBR
	RET

	/*
	 * Swap the saved flags with orig_rax: after restore_mcount_regs 8
	 * and the popfq below, the RET then "returns" into the direct
	 * trampoline whose address was left in ORIG_RAX.
	 */
1:	movq MCOUNT_REG_SIZE(%rsp), %rdi
	movq %rdi, MCOUNT_REG_SIZE-8(%rsp)
	movq %rax, MCOUNT_REG_SIZE(%rsp)

	restore_mcount_regs 8
	/* Restore flags */
	popfq
	UNWIND_HINT_FUNC
	RET

SYM_FUNC_END(ftrace_regs_caller)
STACK_FRAME_NON_STANDARD_FP(ftrace_regs_caller)
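/*
 * Calls are routed here rather than to ftrace_caller when the
 * ftrace_ops has FTRACE_OPS_FL_SAVE_REGS set (kprobes, for instance):
 * the full pt_regs is filled in above, so the callback may inspect and
 * even rewrite register state, including RIP.
 */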


#else /* ! CONFIG_DYNAMIC_FTRACE */

SYM_FUNC_START(__fentry__)
	cmpq $ftrace_stub, ftrace_trace_function
	jnz trace

SYM_INNER_LABEL(ftrace_stub, SYM_L_GLOBAL)
	ENDBR
	RET

trace:
	/* save_mcount_regs fills in first two parameters */
	save_mcount_regs

	/*
	 * When DYNAMIC_FTRACE is not defined, ARCH_SUPPORTS_FTRACE_OPS is not
	 * set (see include/asm/ftrace.h and include/linux/ftrace.h). Only the
	 * ip and parent ip are used, and the list function is called when
	 * function tracing is enabled.
	 */
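	/*
	 * CALL_NOSPEC (from asm/nospec-branch.h) performs the indirect
	 * call through a retpoline thunk when Spectre v2 mitigations
	 * are enabled, instead of a bare "call *%r8".
	 */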
	movq ftrace_trace_function, %r8
	CALL_NOSPEC r8
	restore_mcount_regs

	jmp ftrace_stub
SYM_FUNC_END(__fentry__)
EXPORT_SYMBOL(__fentry__)
STACK_FRAME_NON_STANDARD_FP(__fentry__)

#endif /* CONFIG_DYNAMIC_FTRACE */

#ifdef CONFIG_FUNCTION_GRAPH_TRACER
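/*
 * On function entry the graph tracer replaces the real return address
 * on the stack with return_to_handler, so the traced function
 * "returns" here. ftrace_return_to_handler() hands back the original
 * return address, which we then jump to.
 */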
SYM_CODE_START(return_to_handler)
	UNWIND_HINT_EMPTY
	ANNOTATE_NOENDBR
	subq $16, %rsp

	/* Save the return values */
	movq %rax, (%rsp)
	movq %rdx, 8(%rsp)
	movq %rbp, %rdi

	call ftrace_return_to_handler

	movq %rax, %rdi
	movq 8(%rsp), %rdx
	movq (%rsp), %rax

	addq $16, %rsp
	/*
	 * Jump back to the old return address. This cannot be JMP_NOSPEC rdi,
	 * since IBT would demand that the target contain ENDBR, which simply
	 * isn't so for return addresses. Use a retpoline here to keep the
	 * RSB balanced.
	 */
	ANNOTATE_INTRA_FUNCTION_CALL
	call .Ldo_rop
	int3
.Ldo_rop:
	mov %rdi, (%rsp)
	RET
SYM_CODE_END(return_to_handler)
#endif