Commit | Line | Data |
---|---|---|
b2441318 | 1 | /* SPDX-License-Identifier: GPL-2.0 */ |
e18eead3 | 2 | /* |
e18eead3 SR |
3 | * Copyright (C) 2014 Steven Rostedt, Red Hat Inc |
4 | */ | |
5 | ||
6 | #include <linux/linkage.h> | |
7 | #include <asm/ptrace.h> | |
8 | #include <asm/ftrace.h> | |
784d5699 | 9 | #include <asm/export.h> |
9351803b | 10 | #include <asm/nospec-branch.h> |
e2ac83d7 | 11 | #include <asm/unwind_hints.h> |
ea1ed38d | 12 | #include <asm/frame.h> |
e18eead3 SR |
13 | |
14 | .code64 | |
15 | .section .entry.text, "ax" | |
16 | ||
e18eead3 | 17 | # define function_hook __fentry__ |
5de0a8c0 | 18 | EXPORT_SYMBOL(__fentry__) |
e18eead3 | 19 | |
0687c36e | 20 | #ifdef CONFIG_FRAME_POINTER |
0687c36e SRRH |
21 | /* Save parent and function stack frames (rip and rbp) */ |
22 | # define MCOUNT_FRAME_SIZE (8+16*2) | |
0687c36e SRRH |
23 | #else |
24 | /* No need to save a stack frame */ | |
e2ac83d7 | 25 | # define MCOUNT_FRAME_SIZE 0 |
0687c36e SRRH |
26 | #endif /* CONFIG_FRAME_POINTER */ |
27 | ||
85f6f029 | 28 | /* Size of stack used to save mcount regs in save_mcount_regs */ |
0687c36e | 29 | #define MCOUNT_REG_SIZE (SS+8 + MCOUNT_FRAME_SIZE) |
85f6f029 | 30 | |
05df710e SRRH |
31 | /* |
32 | * gcc -pg option adds a call to 'mcount' in most functions. | |
33 | * When -mfentry is used, the call is to 'fentry' and not 'mcount' | |
34 | * and is done before the function's stack frame is set up. | |
35 | * They both require a set of regs to be saved before calling | |
36 | * any C code and restored before returning back to the function. | |
37 | * | |
38 | * On boot up, all these calls are converted into nops. When tracing | |
39 | * is enabled, the call can jump to either ftrace_caller or | |
40 | * ftrace_regs_caller. Callbacks (tracing functions) that require | |
41 | * ftrace_regs_caller (like kprobes) need to have pt_regs passed to | |
42 | * it. For this reason, the size of the pt_regs structure will be | |
43 | * allocated on the stack and the required mcount registers will | |
44 | * be saved in the locations that pt_regs has them in. | |
45 | */ | |
46 | ||
f1ab00af SRRH |
47 | /* |
48 | * @added: the amount of stack added before calling this | |
49 | * | |
50 | * After this is called, the following registers contain: | |
51 | * | |
52 | * %rdi - holds the address that called the trampoline | |
53 | * %rsi - holds the parent function (traced function's return address) | |
54 | * %rdx - holds the original %rbp | |
55 | */ | |
527aa75b | 56 | .macro save_mcount_regs added=0 |
0687c36e | 57 | |
e2ac83d7 JP |
58 | #ifdef CONFIG_FRAME_POINTER |
59 | /* Save the original rbp */ | |
0687c36e SRRH |
60 | pushq %rbp |
61 | ||
0687c36e SRRH |
62 | /* |
63 | * Stack traces will stop at the ftrace trampoline if the frame pointer | |
64 | * is not set up properly. If fentry is used, we need to save a frame | |
65 | * pointer for the parent as well as the function traced, because the | |
66 | * fentry is called before the stack frame is set up, whereas mcount |
67 | * is called afterward. | |
68 | */ | |
562e14f7 | 69 | |
0687c36e SRRH |
70 | /* Save the parent pointer (skip orig rbp and our return address) */ |
71 | pushq \added+8*2(%rsp) | |
72 | pushq %rbp | |
73 | movq %rsp, %rbp | |
74 | /* Save the return address (now skip orig rbp, rbp and parent) */ | |
75 | pushq \added+8*3(%rsp) | |
0687c36e SRRH |
76 | pushq %rbp |
77 | movq %rsp, %rbp | |
78 | #endif /* CONFIG_FRAME_POINTER */ | |
79 | ||
80 | /* | |
81 | * We add enough stack to save all regs. | |
82 | */ | |
83 | subq $(MCOUNT_REG_SIZE - MCOUNT_FRAME_SIZE), %rsp | |
4bcdf152 SRRH |
84 | movq %rax, RAX(%rsp) |
85 | movq %rcx, RCX(%rsp) | |
86 | movq %rdx, RDX(%rsp) | |
87 | movq %rsi, RSI(%rsp) | |
88 | movq %rdi, RDI(%rsp) | |
89 | movq %r8, R8(%rsp) | |
90 | movq %r9, R9(%rsp) | |
0687c36e SRRH |
91 | /* |
92 | * Save the original RBP. Even though the mcount ABI does not | |
93 | * require this, it helps out callers. | |
94 | */ | |
e2ac83d7 | 95 | #ifdef CONFIG_FRAME_POINTER |
0687c36e | 96 | movq MCOUNT_REG_SIZE-8(%rsp), %rdx |
e2ac83d7 JP |
97 | #else |
98 | movq %rbp, %rdx | |
99 | #endif | |
0687c36e SRRH |
100 | movq %rdx, RBP(%rsp) |
101 | ||
f1ab00af | 102 | /* Copy the parent address into %rsi (second parameter) */ |
f1ab00af | 103 | movq MCOUNT_REG_SIZE+8+\added(%rsp), %rsi |
f1ab00af | 104 | |
4bcdf152 | 105 | /* Move RIP to its proper location */ |
85f6f029 | 106 | movq MCOUNT_REG_SIZE+\added(%rsp), %rdi |
094dfc54 | 107 | movq %rdi, RIP(%rsp) |
f1ab00af SRRH |
108 | |
109 | /* | |
110 | * Now %rdi (the first parameter) has the return address of | |
111 | * where ftrace_call returns. But the callbacks expect the | |
6a06bdbf | 112 | * address of the call itself. |
f1ab00af SRRH |
113 | */ |
114 | subq $MCOUNT_INSN_SIZE, %rdi | |
4bcdf152 SRRH |
115 | .endm |
116 | ||
527aa75b | 117 | .macro restore_mcount_regs |
4bcdf152 SRRH |
118 | movq R9(%rsp), %r9 |
119 | movq R8(%rsp), %r8 | |
120 | movq RDI(%rsp), %rdi | |
121 | movq RSI(%rsp), %rsi | |
122 | movq RDX(%rsp), %rdx | |
123 | movq RCX(%rsp), %rcx | |
124 | movq RAX(%rsp), %rax | |
0687c36e SRRH |
125 | |
126 | /* ftrace_regs_caller can modify %rbp */ | |
127 | movq RBP(%rsp), %rbp | |
128 | ||
85f6f029 | 129 | addq $MCOUNT_REG_SIZE, %rsp |
0687c36e | 130 | |
4bcdf152 SRRH |
131 | .endm |
132 | ||
76c2f13c SRRH |
133 | #ifdef CONFIG_DYNAMIC_FTRACE |
134 | ||
135 | ENTRY(function_hook) | |
136 | retq | |
e2ac83d7 | 137 | ENDPROC(function_hook) |
76c2f13c | 138 | |
e18eead3 | 139 | ENTRY(ftrace_caller) |
f1ab00af SRRH |
140 | /* save_mcount_regs fills in first two parameters */ |
141 | save_mcount_regs | |
142 | ||
26ba4e57 | 143 | SYM_INNER_LABEL(ftrace_caller_op_ptr, SYM_L_GLOBAL) |
f1ab00af SRRH |
144 | /* Load the ftrace_ops into the 3rd parameter */ |
145 | movq function_trace_op(%rip), %rdx | |
146 | ||
e18eead3 SR |
147 | /* regs go into 4th parameter (but make it NULL) */ |
148 | movq $0, %rcx | |
149 | ||
26ba4e57 | 150 | SYM_INNER_LABEL(ftrace_call, SYM_L_GLOBAL) |
e18eead3 SR |
151 | call ftrace_stub |
152 | ||
05df710e | 153 | restore_mcount_regs |
f3bea491 SRRH |
154 | |
155 | /* | |
f1b92bb6 BP |
156 | * The code up to this label is copied into trampolines so |
157 | * think twice before adding any new code or changing the | |
158 | * layout here. | |
f3bea491 | 159 | */ |
26ba4e57 | 160 | SYM_INNER_LABEL(ftrace_epilogue, SYM_L_GLOBAL) |
e18eead3 SR |
161 | |
162 | #ifdef CONFIG_FUNCTION_GRAPH_TRACER | |
26ba4e57 | 163 | SYM_INNER_LABEL(ftrace_graph_call, SYM_L_GLOBAL) |
e18eead3 SR |
164 | jmp ftrace_stub |
165 | #endif | |
166 | ||
d2a68c4e SRV |
167 | /* |
168 | * This is weak to keep gas from relaxing the jumps. | |
169 | * It is also used to copy the retq for trampolines. | |
170 | */ | |
8329e818 | 171 | WEAK(ftrace_stub) |
e18eead3 | 172 | retq |
e2ac83d7 | 173 | ENDPROC(ftrace_caller) |
e18eead3 SR |
174 | |
175 | ENTRY(ftrace_regs_caller) | |
527aa75b | 176 | /* Save the current flags before any operations that can change them */ |
e18eead3 SR |
177 | pushfq |
178 | ||
527aa75b | 179 | /* added 8 bytes to save flags */ |
f1ab00af SRRH |
180 | save_mcount_regs 8 |
181 | /* save_mcount_regs fills in first two parameters */ | |
182 | ||
26ba4e57 | 183 | SYM_INNER_LABEL(ftrace_regs_caller_op_ptr, SYM_L_GLOBAL) |
f1ab00af SRRH |
184 | /* Load the ftrace_ops into the 3rd parameter */ |
185 | movq function_trace_op(%rip), %rdx | |
e18eead3 SR |
186 | |
187 | /* Save the rest of pt_regs */ | |
188 | movq %r15, R15(%rsp) | |
189 | movq %r14, R14(%rsp) | |
190 | movq %r13, R13(%rsp) | |
191 | movq %r12, R12(%rsp) | |
192 | movq %r11, R11(%rsp) | |
193 | movq %r10, R10(%rsp) | |
e18eead3 SR |
194 | movq %rbx, RBX(%rsp) |
195 | /* Copy saved flags */ | |
85f6f029 | 196 | movq MCOUNT_REG_SIZE(%rsp), %rcx |
e18eead3 SR |
197 | movq %rcx, EFLAGS(%rsp) |
198 | /* Kernel segments */ | |
199 | movq $__KERNEL_DS, %rcx | |
200 | movq %rcx, SS(%rsp) | |
201 | movq $__KERNEL_CS, %rcx | |
202 | movq %rcx, CS(%rsp) | |
527aa75b | 203 | /* Stack - skipping return address and flags */ |
85f6f029 | 204 | leaq MCOUNT_REG_SIZE+8*2(%rsp), %rcx |
e18eead3 SR |
205 | movq %rcx, RSP(%rsp) |
206 | ||
ea1ed38d PZ |
207 | ENCODE_FRAME_POINTER |
208 | ||
e18eead3 SR |
209 | /* regs go into 4th parameter */ |
210 | leaq (%rsp), %rcx | |
211 | ||
26ba4e57 | 212 | SYM_INNER_LABEL(ftrace_regs_call, SYM_L_GLOBAL) |
e18eead3 SR |
213 | call ftrace_stub |
214 | ||
215 | /* Copy flags back to SS, to restore them */ | |
216 | movq EFLAGS(%rsp), %rax | |
85f6f029 | 217 | movq %rax, MCOUNT_REG_SIZE(%rsp) |
e18eead3 SR |
218 | |
219 | /* Handlers can change the RIP */ | |
220 | movq RIP(%rsp), %rax | |
85f6f029 | 221 | movq %rax, MCOUNT_REG_SIZE+8(%rsp) |
e18eead3 SR |
222 | |
223 | /* restore the rest of pt_regs */ | |
224 | movq R15(%rsp), %r15 | |
225 | movq R14(%rsp), %r14 | |
226 | movq R13(%rsp), %r13 | |
227 | movq R12(%rsp), %r12 | |
228 | movq R10(%rsp), %r10 | |
e18eead3 SR |
229 | movq RBX(%rsp), %rbx |
230 | ||
527aa75b | 231 | restore_mcount_regs |
e18eead3 SR |
232 | |
233 | /* Restore flags */ | |
234 | popfq | |
235 | ||
f3bea491 | 236 | /* |
f1b92bb6 | 237 | * As this jmp to ftrace_epilogue can be a short jump |
f3bea491 SRRH |
238 | * it must not be copied into the trampoline. |
239 | * The trampoline will add the code to jump | |
240 | * to the return. | |
241 | */ | |
26ba4e57 | 242 | SYM_INNER_LABEL(ftrace_regs_caller_end, SYM_L_GLOBAL) |
f3bea491 | 243 | |
f1b92bb6 | 244 | jmp ftrace_epilogue |
fdc841b5 | 245 | |
e2ac83d7 | 246 | ENDPROC(ftrace_regs_caller) |
e18eead3 SR |
247 | |
248 | ||
249 | #else /* ! CONFIG_DYNAMIC_FTRACE */ | |
250 | ||
251 | ENTRY(function_hook) | |
e18eead3 SR |
252 | cmpq $ftrace_stub, ftrace_trace_function |
253 | jnz trace | |
254 | ||
62a207d7 | 255 | fgraph_trace: |
e18eead3 SR |
256 | #ifdef CONFIG_FUNCTION_GRAPH_TRACER |
257 | cmpq $ftrace_stub, ftrace_graph_return | |
258 | jnz ftrace_graph_caller | |
259 | ||
260 | cmpq $ftrace_graph_entry_stub, ftrace_graph_entry | |
261 | jnz ftrace_graph_caller | |
262 | #endif | |
263 | ||
26ba4e57 | 264 | SYM_INNER_LABEL(ftrace_stub, SYM_L_GLOBAL) |
e18eead3 SR |
265 | retq |
266 | ||
267 | trace: | |
f1ab00af SRRH |
268 | /* save_mcount_regs fills in first two parameters */ |
269 | save_mcount_regs | |
e18eead3 | 270 | |
112677d6 NK |
271 | /* |
272 | * When DYNAMIC_FTRACE is not defined, ARCH_SUPPORTS_FTRACE_OPS is not | |
273 | * set (see include/asm/ftrace.h and include/linux/ftrace.h). Only the | |
274 | * ip and parent ip are used and the list function is called when | |
275 | * function tracing is enabled. | |
276 | */ | |
9351803b DW |
277 | movq ftrace_trace_function, %r8 |
278 | CALL_NOSPEC %r8 | |
05df710e | 279 | restore_mcount_regs |
e18eead3 | 280 | |
62a207d7 | 281 | jmp fgraph_trace |
dd085168 | 282 | ENDPROC(function_hook) |
e18eead3 | 283 | #endif /* CONFIG_DYNAMIC_FTRACE */ |
e18eead3 SR |
284 | |
285 | #ifdef CONFIG_FUNCTION_GRAPH_TRACER | |
286 | ENTRY(ftrace_graph_caller) | |
6a06bdbf | 287 | /* Saves rbp into %rdx and fills first parameter */ |
05df710e | 288 | save_mcount_regs |
e18eead3 | 289 | |
6a06bdbf | 290 | leaq MCOUNT_REG_SIZE+8(%rsp), %rsi |
e18eead3 | 291 | movq $0, %rdx /* No framepointers needed */ |
e18eead3 SR |
292 | call prepare_ftrace_return |
293 | ||
05df710e | 294 | restore_mcount_regs |
e18eead3 SR |
295 | |
296 | retq | |
e2ac83d7 | 297 | ENDPROC(ftrace_graph_caller) |
e18eead3 | 298 | |
e2ac83d7 JP |
299 | ENTRY(return_to_handler) |
300 | UNWIND_HINT_EMPTY | |
e18eead3 SR |
301 | subq $24, %rsp |
302 | ||
303 | /* Save the return values */ | |
304 | movq %rax, (%rsp) | |
305 | movq %rdx, 8(%rsp) | |
306 | movq %rbp, %rdi | |
307 | ||
308 | call ftrace_return_to_handler | |
309 | ||
310 | movq %rax, %rdi | |
311 | movq 8(%rsp), %rdx | |
312 | movq (%rsp), %rax | |
313 | addq $24, %rsp | |
9351803b | 314 | JMP_NOSPEC %rdi |
e2ac83d7 | 315 | END(return_to_handler) |
e18eead3 | 316 | #endif |