/*
 * linux/arch/x86_64/mcount_64.S
 *
 * Copyright (C) 2014 Steven Rostedt, Red Hat Inc
 */

#include <linux/linkage.h>
#include <asm/ptrace.h>
#include <asm/ftrace.h>


	.code64
	.section .entry.text, "ax"


#ifdef CONFIG_FUNCTION_TRACER

#ifdef CC_USING_FENTRY
# define function_hook	__fentry__
#else
# define function_hook	mcount
#endif

#ifdef CONFIG_DYNAMIC_FTRACE

ENTRY(function_hook)
	retq
END(function_hook)

/* skip is set if stack has been adjusted */
.macro ftrace_caller_setup trace_label skip=0
	MCOUNT_SAVE_FRAME \skip

	/* Save this location */
GLOBAL(\trace_label)
	/* Load the ftrace_ops into the 3rd parameter */
	movq function_trace_op(%rip), %rdx

	/* Load ip into the first parameter */
	movq RIP(%rsp), %rdi
	subq $MCOUNT_INSN_SIZE, %rdi
	/* Load the parent_ip into the second parameter */
#ifdef CC_USING_FENTRY
	movq SS+16(%rsp), %rsi
#else
	movq 8(%rbp), %rsi
#endif
.endm
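/*
 * The loads above follow the x86-64 calling convention for the ftrace
 * callback type (ftrace_func_t in <linux/ftrace.h>):
 *
 *	void callback(unsigned long ip, unsigned long parent_ip,
 *		      struct ftrace_ops *op, struct pt_regs *regs);
 *
 * so %rdi = ip, %rsi = parent_ip and %rdx = op. The fourth argument,
 * %rcx = regs, is filled in by ftrace_caller and ftrace_regs_caller
 * below.
 */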

#ifdef CONFIG_FRAME_POINTER
/*
 * Stack traces will stop at the ftrace trampoline if the frame pointer
 * is not set up properly. If fentry is used, we need to save a frame
 * pointer for the parent as well as the function traced, because the
 * fentry is called before the stack frame is set up, whereas mcount
 * is called afterward.
 */
.macro create_frame parent rip
#ifdef CC_USING_FENTRY
	pushq \parent
	pushq %rbp
	movq %rsp, %rbp
#endif
	pushq \rip
	pushq %rbp
	movq %rsp, %rbp
.endm

.macro restore_frame
#ifdef CC_USING_FENTRY
	addq $16, %rsp
#endif
	popq %rbp
	addq $8, %rsp
.endm
#else
.macro create_frame parent rip
.endm
.macro restore_frame
.endm
#endif /* CONFIG_FRAME_POINTER */
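/*
 * A rough sketch of the frames create_frame builds when fentry is
 * used (the stack grows down):
 *
 *	\parent		return address for the traced function's frame
 *	saved %rbp	-> caller's frame
 *	\rip		return address for the trampoline's frame
 *	saved %rbp	-> traced function's frame   <- new %rbp
 *
 * Without fentry only the inner frame is needed, since mcount is
 * called after the traced function has already set up its own frame.
 */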

ENTRY(ftrace_caller)
	ftrace_caller_setup ftrace_caller_op_ptr
	/* regs go into 4th parameter (but make it NULL) */
	movq $0, %rcx

	create_frame %rsi, %rdi

GLOBAL(ftrace_call)
	call ftrace_stub

	restore_frame

	MCOUNT_RESTORE_FRAME

	/*
	 * The copied trampoline must call ftrace_return as it
	 * still may need to call the function graph tracer.
	 */
GLOBAL(ftrace_caller_end)

GLOBAL(ftrace_return)

#ifdef CONFIG_FUNCTION_GRAPH_TRACER
GLOBAL(ftrace_graph_call)
	jmp ftrace_stub
#endif

GLOBAL(ftrace_stub)
	retq
END(ftrace_caller)
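/*
 * ftrace_caller through ftrace_caller_end is the region that may be
 * copied into a dynamically allocated, per-ftrace_ops trampoline; the
 * copy then jumps back via ftrace_return, as the comment above notes
 * (the copying is done by the trampoline-creation code in
 * arch/x86/kernel/ftrace.c).
 */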

ENTRY(ftrace_regs_caller)
	/* Save the current flags before compare (in SS location) */
	pushfq

	/* skip=8 to skip flags saved in SS */
	ftrace_caller_setup ftrace_regs_caller_op_ptr 8

	/* Save the rest of pt_regs */
	movq %r15, R15(%rsp)
	movq %r14, R14(%rsp)
	movq %r13, R13(%rsp)
	movq %r12, R12(%rsp)
	movq %r11, R11(%rsp)
	movq %r10, R10(%rsp)
	movq %rbp, RBP(%rsp)
	movq %rbx, RBX(%rsp)
	/* Copy saved flags */
	movq SS(%rsp), %rcx
	movq %rcx, EFLAGS(%rsp)
	/* Kernel segments */
	movq $__KERNEL_DS, %rcx
	movq %rcx, SS(%rsp)
	movq $__KERNEL_CS, %rcx
	movq %rcx, CS(%rsp)
	/* Stack - skipping return address */
	leaq SS+16(%rsp), %rcx
	movq %rcx, RSP(%rsp)

	/* regs go into 4th parameter */
	leaq (%rsp), %rcx

	create_frame %rsi, %rdi

GLOBAL(ftrace_regs_call)
	call ftrace_stub

	restore_frame

	/* Copy flags back to SS, to restore them */
	movq EFLAGS(%rsp), %rax
	movq %rax, SS(%rsp)

	/* Handlers can change the RIP */
	movq RIP(%rsp), %rax
	movq %rax, SS+8(%rsp)
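/*
 * Writing RIP back into the return-address slot is what allows a
 * callback to redirect execution: an ftrace_ops with the
 * FTRACE_OPS_FL_IPMODIFY flag (such as kprobes) may update regs->ip,
 * and the trampoline will "return" to that new address.
 */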

	/* restore the rest of pt_regs */
	movq R15(%rsp), %r15
	movq R14(%rsp), %r14
	movq R13(%rsp), %r13
	movq R12(%rsp), %r12
	movq R10(%rsp), %r10
	movq RBP(%rsp), %rbp
	movq RBX(%rsp), %rbx

	/* skip=8 to skip flags saved in SS */
	MCOUNT_RESTORE_FRAME 8

	/* Restore flags */
	popfq

	/*
	 * As this jmp to ftrace_return can be a short jump
	 * it must not be copied into the trampoline.
	 * The trampoline will add the code to jump
	 * to the return.
	 */
GLOBAL(ftrace_regs_caller_end)

	jmp ftrace_return

	popfq
	jmp ftrace_stub

END(ftrace_regs_caller)


#else /* ! CONFIG_DYNAMIC_FTRACE */

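/*
 * Without dynamic ftrace every mcount/fentry call site branches here
 * unconditionally, so the tracing and graph hooks must be checked at
 * runtime instead of by patching the call sites.
 */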
ENTRY(function_hook)
	cmpq $ftrace_stub, ftrace_trace_function
	jnz trace

#ifdef CONFIG_FUNCTION_GRAPH_TRACER
	cmpq $ftrace_stub, ftrace_graph_return
	jnz ftrace_graph_caller

	cmpq $ftrace_graph_entry_stub, ftrace_graph_entry
	jnz ftrace_graph_caller
#endif

GLOBAL(ftrace_stub)
	retq

trace:
	MCOUNT_SAVE_FRAME

	movq RIP(%rsp), %rdi
#ifdef CC_USING_FENTRY
	movq SS+16(%rsp), %rsi
#else
	movq 8(%rbp), %rsi
#endif
	subq $MCOUNT_INSN_SIZE, %rdi

	call *ftrace_trace_function

	MCOUNT_RESTORE_FRAME

	jmp ftrace_stub
END(function_hook)
#endif /* CONFIG_DYNAMIC_FTRACE */
#endif /* CONFIG_FUNCTION_TRACER */

#ifdef CONFIG_FUNCTION_GRAPH_TRACER
ENTRY(ftrace_graph_caller)
	MCOUNT_SAVE_FRAME

#ifdef CC_USING_FENTRY
	leaq SS+16(%rsp), %rdi
	movq $0, %rdx	/* No framepointers needed */
#else
	leaq 8(%rbp), %rdi
	movq (%rbp), %rdx
#endif
	movq RIP(%rsp), %rsi
	subq $MCOUNT_INSN_SIZE, %rsi

	call prepare_ftrace_return

	MCOUNT_RESTORE_FRAME

	retq
END(ftrace_graph_caller)

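/*
 * prepare_ftrace_return() replaces the traced function's real return
 * address with the address of return_to_handler below. When the
 * function returns it therefore lands here; ftrace_return_to_handler()
 * runs the graph return hook and hands back the original return
 * address, which we jump to. %rax and %rdx are preserved around the
 * call because they may hold the function's return value.
 */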
GLOBAL(return_to_handler)
	subq $24, %rsp

	/* Save the return values */
	movq %rax, (%rsp)
	movq %rdx, 8(%rsp)
	movq %rbp, %rdi

	call ftrace_return_to_handler

	movq %rax, %rdi
	movq 8(%rsp), %rdx
	movq (%rsp), %rax
	addq $24, %rsp
	jmp *%rdi
#endif