Commit | Line | Data |
---|---|---|
b2441318 | 1 | /* SPDX-License-Identifier: GPL-2.0 */ |
1965aae3 PA |
2 | #ifndef _ASM_X86_PARAVIRT_H |
3 | #define _ASM_X86_PARAVIRT_H | |
d3561b7f RR |
4 | /* Various instructions on x86 need to be replaced for |
5 | * para-virtualization: those hooks are defined here. */ | |
b239fb25 | 6 | |
239f2e24 TG |
7 | #include <asm/paravirt_types.h> |
8 | ||
b239fb25 | 9 | #ifdef CONFIG_PARAVIRT |
54321d94 | 10 | #include <asm/pgtable_types.h> |
658be9d3 | 11 | #include <asm/asm.h> |
3010a066 | 12 | #include <asm/nospec-branch.h> |
d3561b7f | 13 | |
d3561b7f | 14 | #ifndef __ASSEMBLY__ |
187f1882 | 15 | #include <linux/bug.h> |
3dc494e8 | 16 | #include <linux/types.h> |
d4c10477 | 17 | #include <linux/cpumask.h> |
a0e2bf7c | 18 | #include <linux/static_call_types.h> |
87b240cb | 19 | #include <asm/frame.h> |
1a45b7aa | 20 | |
a0e2bf7c JG |
21 | u64 dummy_steal_clock(int cpu); |
22 | u64 dummy_sched_clock(void); | |
23 | ||
24 | DECLARE_STATIC_CALL(pv_steal_clock, dummy_steal_clock); | |
25 | DECLARE_STATIC_CALL(pv_sched_clock, dummy_sched_clock); | |
26 | ||
27 | void paravirt_set_sched_clock(u64 (*func)(void)); | |
28 | ||
29 | static inline u64 paravirt_sched_clock(void) | |
fdc0269e | 30 | { |
a0e2bf7c | 31 | return static_call(pv_sched_clock)(); |
fdc0269e JG |
32 | } |
33 | ||
34 | struct static_key; | |
35 | extern struct static_key paravirt_steal_enabled; | |
36 | extern struct static_key paravirt_steal_rq_enabled; | |
37 | ||
89f579ce YW |
38 | __visible void __native_queued_spin_unlock(struct qspinlock *lock); |
39 | bool pv_is_native_spin_unlock(void); | |
40 | __visible bool __native_vcpu_is_preempted(long cpu); | |
41 | bool pv_is_native_vcpu_is_preempted(void); | |
42 | ||
fdc0269e JG |
43 | static inline u64 paravirt_steal_clock(int cpu) |
44 | { | |
a0e2bf7c | 45 | return static_call(pv_steal_clock)(cpu); |
fdc0269e JG |
46 | } |
47 | ||
4e629211 JG |
48 | #ifdef CONFIG_PARAVIRT_SPINLOCKS |
49 | void __init paravirt_set_cap(void); | |
50 | #endif | |
51 | ||
fdc0269e JG |
52 | /* The paravirtualized I/O functions */ |
53 | static inline void slow_down_io(void) | |
54 | { | |
eac46b32 | 55 | PVOP_VCALL0(cpu.io_delay); |
fdc0269e | 56 | #ifdef REALLY_SLOW_IO |
eac46b32 PZ |
57 | PVOP_VCALL0(cpu.io_delay); |
58 | PVOP_VCALL0(cpu.io_delay); | |
59 | PVOP_VCALL0(cpu.io_delay); | |
fdc0269e JG |
60 | #endif |
61 | } | |
62 | ||
2faf153b | 63 | void native_flush_tlb_local(void); |
cd30d26c | 64 | void native_flush_tlb_global(void); |
127ac915 | 65 | void native_flush_tlb_one_user(unsigned long addr); |
4ce94eab | 66 | void native_flush_tlb_multi(const struct cpumask *cpumask, |
29def599 | 67 | const struct flush_tlb_info *info); |
2faf153b TG |
68 | |
69 | static inline void __flush_tlb_local(void) | |
fdc0269e JG |
70 | { |
71 | PVOP_VCALL0(mmu.flush_tlb_user); | |
72 | } | |
73 | ||
74 | static inline void __flush_tlb_global(void) | |
75 | { | |
76 | PVOP_VCALL0(mmu.flush_tlb_kernel); | |
77 | } | |
78 | ||
79 | static inline void __flush_tlb_one_user(unsigned long addr) | |
80 | { | |
81 | PVOP_VCALL1(mmu.flush_tlb_one_user, addr); | |
82 | } | |
83 | ||
4ce94eab | 84 | static inline void __flush_tlb_multi(const struct cpumask *cpumask, |
29def599 | 85 | const struct flush_tlb_info *info) |
fdc0269e | 86 | { |
4ce94eab | 87 | PVOP_VCALL2(mmu.flush_tlb_multi, cpumask, info); |
fdc0269e JG |
88 | } |
89 | ||
90 | static inline void paravirt_tlb_remove_table(struct mmu_gather *tlb, void *table) | |
91 | { | |
92 | PVOP_VCALL2(mmu.tlb_remove_table, tlb, table); | |
93 | } | |
94 | ||
95 | static inline void paravirt_arch_exit_mmap(struct mm_struct *mm) | |
96 | { | |
97 | PVOP_VCALL1(mmu.exit_mmap, mm); | |
98 | } | |
99 | ||
064ce6c5 BS |
100 | static inline void notify_page_enc_status_changed(unsigned long pfn, |
101 | int npages, bool enc) | |
102 | { | |
103 | PVOP_VCALL3(mmu.notify_page_enc_status_changed, pfn, npages, enc); | |
fdc0269e JG |
104 | } |
105 | ||
9bad5658 | 106 | #ifdef CONFIG_PARAVIRT_XXL |
da51da18 | 107 | static inline void load_sp0(unsigned long sp0) |
d3561b7f | 108 | { |
5c83511b | 109 | PVOP_VCALL1(cpu.load_sp0, sp0); |
d3561b7f RR |
110 | } |
111 | ||
d3561b7f RR |
112 | /* The paravirtualized CPUID instruction. */ |
113 | static inline void __cpuid(unsigned int *eax, unsigned int *ebx, | |
114 | unsigned int *ecx, unsigned int *edx) | |
115 | { | |
5c83511b | 116 | PVOP_VCALL4(cpu.cpuid, eax, ebx, ecx, edx); |
d3561b7f RR |
117 | } |
118 | ||
119 | /* | |
120 | * These special macros can be used to get or set a debugging register | |
121 | */ | |
f4afb713 | 122 | static __always_inline unsigned long paravirt_get_debugreg(int reg) |
f8822f42 | 123 | { |
5c83511b | 124 | return PVOP_CALL1(unsigned long, cpu.get_debugreg, reg); |
f8822f42 JF |
125 | } |
126 | #define get_debugreg(var, reg) var = paravirt_get_debugreg(reg) | |
7361fac0 | 127 | static __always_inline void set_debugreg(unsigned long val, int reg) |
f8822f42 | 128 | { |
5c83511b | 129 | PVOP_VCALL2(cpu.set_debugreg, reg, val); |
f8822f42 | 130 | } |
d3561b7f | 131 | |
f8822f42 JF |
132 | static inline unsigned long read_cr0(void) |
133 | { | |
5c83511b | 134 | return PVOP_CALL0(unsigned long, cpu.read_cr0); |
f8822f42 | 135 | } |
d3561b7f | 136 | |
f8822f42 JF |
137 | static inline void write_cr0(unsigned long x) |
138 | { | |
5c83511b | 139 | PVOP_VCALL1(cpu.write_cr0, x); |
f8822f42 JF |
140 | } |
141 | ||
0a53c9ac | 142 | static __always_inline unsigned long read_cr2(void) |
f8822f42 | 143 | { |
fafe5e74 JG |
144 | return PVOP_ALT_CALLEE0(unsigned long, mmu.read_cr2, |
145 | "mov %%cr2, %%rax;", | |
146 | ALT_NOT(X86_FEATURE_XENPV)); | |
f8822f42 JF |
147 | } |
148 | ||
209cfd0c | 149 | static __always_inline void write_cr2(unsigned long x) |
f8822f42 | 150 | { |
5c83511b | 151 | PVOP_VCALL1(mmu.write_cr2, x); |
f8822f42 JF |
152 | } |
153 | ||
6c690ee1 | 154 | static inline unsigned long __read_cr3(void) |
f8822f42 | 155 | { |
fafe5e74 JG |
156 | return PVOP_ALT_CALL0(unsigned long, mmu.read_cr3, |
157 | "mov %%cr3, %%rax;", ALT_NOT(X86_FEATURE_XENPV)); | |
f8822f42 | 158 | } |
d3561b7f | 159 | |
f8822f42 JF |
160 | static inline void write_cr3(unsigned long x) |
161 | { | |
fafe5e74 JG |
162 | PVOP_ALT_VCALL1(mmu.write_cr3, x, |
163 | "mov %%rdi, %%cr3", ALT_NOT(X86_FEATURE_XENPV)); | |
f8822f42 | 164 | } |
d3561b7f | 165 | |
1e02ce4c | 166 | static inline void __write_cr4(unsigned long x) |
f8822f42 | 167 | { |
5c83511b | 168 | PVOP_VCALL1(cpu.write_cr4, x); |
f8822f42 | 169 | } |
3dc494e8 | 170 | |
df9ee292 | 171 | static inline void arch_safe_halt(void) |
d3561b7f | 172 | { |
5c83511b | 173 | PVOP_VCALL0(irq.safe_halt); |
d3561b7f RR |
174 | } |
175 | ||
176 | static inline void halt(void) | |
177 | { | |
5c83511b | 178 | PVOP_VCALL0(irq.halt); |
f8822f42 JF |
179 | } |
180 | ||
181 | static inline void wbinvd(void) | |
182 | { | |
fafe5e74 | 183 | PVOP_ALT_VCALL0(cpu.wbinvd, "wbinvd", ALT_NOT(X86_FEATURE_XENPV)); |
d3561b7f | 184 | } |
d3561b7f | 185 | |
dd2f4a00 AL |
186 | static inline u64 paravirt_read_msr(unsigned msr) |
187 | { | |
5c83511b | 188 | return PVOP_CALL1(u64, cpu.read_msr, msr); |
dd2f4a00 AL |
189 | } |
190 | ||
191 | static inline void paravirt_write_msr(unsigned msr, | |
192 | unsigned low, unsigned high) | |
193 | { | |
5c83511b | 194 | PVOP_VCALL3(cpu.write_msr, msr, low, high); |
dd2f4a00 AL |
195 | } |
196 | ||
c2ee03b2 | 197 | static inline u64 paravirt_read_msr_safe(unsigned msr, int *err) |
f8822f42 | 198 | { |
5c83511b | 199 | return PVOP_CALL2(u64, cpu.read_msr_safe, msr, err); |
f8822f42 | 200 | } |
132ec92f | 201 | |
c2ee03b2 AL |
202 | static inline int paravirt_write_msr_safe(unsigned msr, |
203 | unsigned low, unsigned high) | |
f8822f42 | 204 | { |
5c83511b | 205 | return PVOP_CALL3(int, cpu.write_msr_safe, msr, low, high); |
f8822f42 JF |
206 | } |
207 | ||
49cd740b JP |
208 | #define rdmsr(msr, val1, val2) \ |
209 | do { \ | |
4985ce15 | 210 | u64 _l = paravirt_read_msr(msr); \ |
f8822f42 JF |
211 | val1 = (u32)_l; \ |
212 | val2 = _l >> 32; \ | |
49cd740b | 213 | } while (0) |
d3561b7f | 214 | |
49cd740b JP |
215 | #define wrmsr(msr, val1, val2) \ |
216 | do { \ | |
4985ce15 | 217 | paravirt_write_msr(msr, val1, val2); \ |
49cd740b | 218 | } while (0) |
d3561b7f | 219 | |
49cd740b JP |
220 | #define rdmsrl(msr, val) \ |
221 | do { \ | |
4985ce15 | 222 | val = paravirt_read_msr(msr); \ |
49cd740b | 223 | } while (0) |
d3561b7f | 224 | |
47edb651 AL |
225 | static inline void wrmsrl(unsigned msr, u64 val) |
226 | { | |
227 | wrmsr(msr, (u32)val, (u32)(val>>32)); | |
228 | } | |
229 | ||
c2ee03b2 | 230 | #define wrmsr_safe(msr, a, b) paravirt_write_msr_safe(msr, a, b) |
d3561b7f RR |
231 | |
232 | /* rdmsr with exception handling */ | |
c2ee03b2 AL |
233 | #define rdmsr_safe(msr, a, b) \ |
234 | ({ \ | |
235 | int _err; \ | |
236 | u64 _l = paravirt_read_msr_safe(msr, &_err); \ | |
237 | (*a) = (u32)_l; \ | |
238 | (*b) = _l >> 32; \ | |
239 | _err; \ | |
49cd740b | 240 | }) |
d3561b7f | 241 | |
1de87bd4 AK |
242 | static inline int rdmsrl_safe(unsigned msr, unsigned long long *p) |
243 | { | |
244 | int err; | |
245 | ||
c2ee03b2 | 246 | *p = paravirt_read_msr_safe(msr, &err); |
1de87bd4 AK |
247 | return err; |
248 | } | |
177fed1e | 249 | |
f8822f42 JF |
250 | static inline unsigned long long paravirt_read_pmc(int counter) |
251 | { | |
5c83511b | 252 | return PVOP_CALL1(u64, cpu.read_pmc, counter); |
f8822f42 | 253 | } |
d3561b7f | 254 | |
49cd740b JP |
255 | #define rdpmc(counter, low, high) \ |
256 | do { \ | |
f8822f42 JF |
257 | u64 _l = paravirt_read_pmc(counter); \ |
258 | low = (u32)_l; \ | |
259 | high = _l >> 32; \ | |
49cd740b | 260 | } while (0) |
3dc494e8 | 261 | |
1ff4d58a AK |
262 | #define rdpmcl(counter, val) ((val) = paravirt_read_pmc(counter)) |
263 | ||
38ffbe66 JF |
264 | static inline void paravirt_alloc_ldt(struct desc_struct *ldt, unsigned entries) |
265 | { | |
5c83511b | 266 | PVOP_VCALL2(cpu.alloc_ldt, ldt, entries); |
38ffbe66 JF |
267 | } |
268 | ||
269 | static inline void paravirt_free_ldt(struct desc_struct *ldt, unsigned entries) | |
270 | { | |
5c83511b | 271 | PVOP_VCALL2(cpu.free_ldt, ldt, entries); |
38ffbe66 JF |
272 | } |
273 | ||
f8822f42 JF |
274 | static inline void load_TR_desc(void) |
275 | { | |
5c83511b | 276 | PVOP_VCALL0(cpu.load_tr_desc); |
f8822f42 | 277 | } |
6b68f01b | 278 | static inline void load_gdt(const struct desc_ptr *dtr) |
f8822f42 | 279 | { |
5c83511b | 280 | PVOP_VCALL1(cpu.load_gdt, dtr); |
f8822f42 | 281 | } |
6b68f01b | 282 | static inline void load_idt(const struct desc_ptr *dtr) |
f8822f42 | 283 | { |
5c83511b | 284 | PVOP_VCALL1(cpu.load_idt, dtr); |
f8822f42 JF |
285 | } |
286 | static inline void set_ldt(const void *addr, unsigned entries) | |
287 | { | |
5c83511b | 288 | PVOP_VCALL2(cpu.set_ldt, addr, entries); |
f8822f42 | 289 | } |
f8822f42 JF |
290 | static inline unsigned long paravirt_store_tr(void) |
291 | { | |
5c83511b | 292 | return PVOP_CALL0(unsigned long, cpu.store_tr); |
f8822f42 | 293 | } |
9bad5658 | 294 | |
f8822f42 JF |
295 | #define store_tr(tr) ((tr) = paravirt_store_tr()) |
296 | static inline void load_TLS(struct thread_struct *t, unsigned cpu) | |
297 | { | |
5c83511b | 298 | PVOP_VCALL2(cpu.load_tls, t, cpu); |
f8822f42 | 299 | } |
75b8bb3e | 300 | |
9f9d489a JF |
301 | static inline void load_gs_index(unsigned int gs) |
302 | { | |
5c83511b | 303 | PVOP_VCALL1(cpu.load_gs_index, gs); |
9f9d489a | 304 | } |
9f9d489a | 305 | |
75b8bb3e GOC |
306 | static inline void write_ldt_entry(struct desc_struct *dt, int entry, |
307 | const void *desc) | |
f8822f42 | 308 | { |
5c83511b | 309 | PVOP_VCALL3(cpu.write_ldt_entry, dt, entry, desc); |
f8822f42 | 310 | } |
014b15be GOC |
311 | |
312 | static inline void write_gdt_entry(struct desc_struct *dt, int entry, | |
313 | void *desc, int type) | |
f8822f42 | 314 | { |
5c83511b | 315 | PVOP_VCALL4(cpu.write_gdt_entry, dt, entry, desc, type); |
f8822f42 | 316 | } |
014b15be | 317 | |
8d947344 | 318 | static inline void write_idt_entry(gate_desc *dt, int entry, const gate_desc *g) |
f8822f42 | 319 | { |
5c83511b | 320 | PVOP_VCALL3(cpu.write_idt_entry, dt, entry, g); |
f8822f42 | 321 | } |
d3561b7f | 322 | |
99bcd4a6 | 323 | #ifdef CONFIG_X86_IOPL_IOPERM |
cadfad87 AL |
324 | static inline void tss_invalidate_io_bitmap(void) |
325 | { | |
326 | PVOP_VCALL0(cpu.invalidate_io_bitmap); | |
327 | } | |
328 | ||
99bcd4a6 JG |
329 | static inline void tss_update_io_bitmap(void) |
330 | { | |
331 | PVOP_VCALL0(cpu.update_io_bitmap); | |
332 | } | |
333 | #endif | |
334 | ||
d6dd61c8 JF |
335 | static inline void paravirt_activate_mm(struct mm_struct *prev, |
336 | struct mm_struct *next) | |
337 | { | |
5c83511b | 338 | PVOP_VCALL2(mmu.activate_mm, prev, next); |
d6dd61c8 JF |
339 | } |
340 | ||
a1ea1c03 DH |
341 | static inline void paravirt_arch_dup_mmap(struct mm_struct *oldmm, |
342 | struct mm_struct *mm) | |
d6dd61c8 | 343 | { |
5c83511b | 344 | PVOP_VCALL2(mmu.dup_mmap, oldmm, mm); |
d6dd61c8 JF |
345 | } |
346 | ||
eba0045f JF |
347 | static inline int paravirt_pgd_alloc(struct mm_struct *mm) |
348 | { | |
5c83511b | 349 | return PVOP_CALL1(int, mmu.pgd_alloc, mm); |
eba0045f JF |
350 | } |
351 | ||
352 | static inline void paravirt_pgd_free(struct mm_struct *mm, pgd_t *pgd) | |
353 | { | |
5c83511b | 354 | PVOP_VCALL2(mmu.pgd_free, mm, pgd); |
eba0045f JF |
355 | } |
356 | ||
f8639939 | 357 | static inline void paravirt_alloc_pte(struct mm_struct *mm, unsigned long pfn) |
f8822f42 | 358 | { |
5c83511b | 359 | PVOP_VCALL2(mmu.alloc_pte, mm, pfn); |
f8822f42 | 360 | } |
f8639939 | 361 | static inline void paravirt_release_pte(unsigned long pfn) |
f8822f42 | 362 | { |
5c83511b | 363 | PVOP_VCALL1(mmu.release_pte, pfn); |
f8822f42 | 364 | } |
c119ecce | 365 | |
f8639939 | 366 | static inline void paravirt_alloc_pmd(struct mm_struct *mm, unsigned long pfn) |
f8822f42 | 367 | { |
5c83511b | 368 | PVOP_VCALL2(mmu.alloc_pmd, mm, pfn); |
f8822f42 | 369 | } |
c119ecce | 370 | |
f8639939 | 371 | static inline void paravirt_release_pmd(unsigned long pfn) |
da181a8b | 372 | { |
5c83511b | 373 | PVOP_VCALL1(mmu.release_pmd, pfn); |
da181a8b RR |
374 | } |
375 | ||
f8639939 | 376 | static inline void paravirt_alloc_pud(struct mm_struct *mm, unsigned long pfn) |
2761fa09 | 377 | { |
5c83511b | 378 | PVOP_VCALL2(mmu.alloc_pud, mm, pfn); |
2761fa09 | 379 | } |
f8639939 | 380 | static inline void paravirt_release_pud(unsigned long pfn) |
2761fa09 | 381 | { |
5c83511b | 382 | PVOP_VCALL1(mmu.release_pud, pfn); |
2761fa09 JF |
383 | } |
384 | ||
335437fb KS |
385 | static inline void paravirt_alloc_p4d(struct mm_struct *mm, unsigned long pfn) |
386 | { | |
5c83511b | 387 | PVOP_VCALL2(mmu.alloc_p4d, mm, pfn); |
335437fb KS |
388 | } |
389 | ||
390 | static inline void paravirt_release_p4d(unsigned long pfn) | |
391 | { | |
5c83511b | 392 | PVOP_VCALL1(mmu.release_p4d, pfn); |
335437fb KS |
393 | } |
394 | ||
773221f4 | 395 | static inline pte_t __pte(pteval_t val) |
da181a8b | 396 | { |
fafe5e74 JG |
397 | return (pte_t) { PVOP_ALT_CALLEE1(pteval_t, mmu.make_pte, val, |
398 | "mov %%rdi, %%rax", | |
399 | ALT_NOT(X86_FEATURE_XENPV)) }; | |
da181a8b RR |
400 | } |
401 | ||
773221f4 JF |
402 | static inline pteval_t pte_val(pte_t pte) |
403 | { | |
fafe5e74 JG |
404 | return PVOP_ALT_CALLEE1(pteval_t, mmu.pte_val, pte.pte, |
405 | "mov %%rdi, %%rax", ALT_NOT(X86_FEATURE_XENPV)); | |
773221f4 JF |
406 | } |
407 | ||
ef38503e | 408 | static inline pgd_t __pgd(pgdval_t val) |
da181a8b | 409 | { |
fafe5e74 JG |
410 | return (pgd_t) { PVOP_ALT_CALLEE1(pgdval_t, mmu.make_pgd, val, |
411 | "mov %%rdi, %%rax", | |
412 | ALT_NOT(X86_FEATURE_XENPV)) }; | |
ef38503e JF |
413 | } |
414 | ||
415 | static inline pgdval_t pgd_val(pgd_t pgd) | |
416 | { | |
fafe5e74 JG |
417 | return PVOP_ALT_CALLEE1(pgdval_t, mmu.pgd_val, pgd.pgd, |
418 | "mov %%rdi, %%rax", ALT_NOT(X86_FEATURE_XENPV)); | |
f8822f42 JF |
419 | } |
420 | ||
08b882c6 | 421 | #define __HAVE_ARCH_PTEP_MODIFY_PROT_TRANSACTION |
0cbe3e26 | 422 | static inline pte_t ptep_modify_prot_start(struct vm_area_struct *vma, unsigned long addr, |
08b882c6 JF |
423 | pte_t *ptep) |
424 | { | |
425 | pteval_t ret; | |
426 | ||
0cbe3e26 | 427 | ret = PVOP_CALL3(pteval_t, mmu.ptep_modify_prot_start, vma, addr, ptep); |
08b882c6 JF |
428 | |
429 | return (pte_t) { .pte = ret }; | |
430 | } | |
431 | ||
0cbe3e26 | 432 | static inline void ptep_modify_prot_commit(struct vm_area_struct *vma, unsigned long addr, |
04a86453 | 433 | pte_t *ptep, pte_t old_pte, pte_t pte) |
08b882c6 | 434 | { |
0cbe3e26 | 435 | |
0cabf991 | 436 | PVOP_VCALL4(mmu.ptep_modify_prot_commit, vma, addr, ptep, pte.pte); |
08b882c6 JF |
437 | } |
438 | ||
4eed80cd JF |
439 | static inline void set_pte(pte_t *ptep, pte_t pte) |
440 | { | |
0cabf991 | 441 | PVOP_VCALL2(mmu.set_pte, ptep, pte.pte); |
4eed80cd JF |
442 | } |
443 | ||
60b3f626 JF |
444 | static inline void set_pmd(pmd_t *pmdp, pmd_t pmd) |
445 | { | |
0cabf991 | 446 | PVOP_VCALL2(mmu.set_pmd, pmdp, native_pmd_val(pmd)); |
60b3f626 JF |
447 | } |
448 | ||
1fe91514 GOC |
449 | static inline pmd_t __pmd(pmdval_t val) |
450 | { | |
fafe5e74 JG |
451 | return (pmd_t) { PVOP_ALT_CALLEE1(pmdval_t, mmu.make_pmd, val, |
452 | "mov %%rdi, %%rax", | |
453 | ALT_NOT(X86_FEATURE_XENPV)) }; | |
1fe91514 GOC |
454 | } |
455 | ||
456 | static inline pmdval_t pmd_val(pmd_t pmd) | |
457 | { | |
fafe5e74 JG |
458 | return PVOP_ALT_CALLEE1(pmdval_t, mmu.pmd_val, pmd.pmd, |
459 | "mov %%rdi, %%rax", ALT_NOT(X86_FEATURE_XENPV)); | |
1fe91514 GOC |
460 | } |
461 | ||
462 | static inline void set_pud(pud_t *pudp, pud_t pud) | |
463 | { | |
0cabf991 | 464 | PVOP_VCALL2(mmu.set_pud, pudp, native_pud_val(pud)); |
1fe91514 | 465 | } |
0cabf991 | 466 | |
9042219c EH |
467 | static inline pud_t __pud(pudval_t val) |
468 | { | |
469 | pudval_t ret; | |
470 | ||
fafe5e74 JG |
471 | ret = PVOP_ALT_CALLEE1(pudval_t, mmu.make_pud, val, |
472 | "mov %%rdi, %%rax", ALT_NOT(X86_FEATURE_XENPV)); | |
9042219c EH |
473 | |
474 | return (pud_t) { ret }; | |
475 | } | |
476 | ||
477 | static inline pudval_t pud_val(pud_t pud) | |
478 | { | |
fafe5e74 JG |
479 | return PVOP_ALT_CALLEE1(pudval_t, mmu.pud_val, pud.pud, |
480 | "mov %%rdi, %%rax", ALT_NOT(X86_FEATURE_XENPV)); | |
9042219c EH |
481 | } |
482 | ||
f2a6a705 KS |
483 | static inline void pud_clear(pud_t *pudp) |
484 | { | |
7c9f80cb | 485 | set_pud(pudp, native_make_pud(0)); |
f2a6a705 KS |
486 | } |
487 | ||
488 | static inline void set_p4d(p4d_t *p4dp, p4d_t p4d) | |
489 | { | |
490 | p4dval_t val = native_p4d_val(p4d); | |
491 | ||
495310e4 | 492 | PVOP_VCALL2(mmu.set_p4d, p4dp, val); |
f2a6a705 KS |
493 | } |
494 | ||
335437fb KS |
495 | #if CONFIG_PGTABLE_LEVELS >= 5 |
496 | ||
497 | static inline p4d_t __p4d(p4dval_t val) | |
f2a6a705 | 498 | { |
fafe5e74 JG |
499 | p4dval_t ret = PVOP_ALT_CALLEE1(p4dval_t, mmu.make_p4d, val, |
500 | "mov %%rdi, %%rax", | |
501 | ALT_NOT(X86_FEATURE_XENPV)); | |
f2a6a705 | 502 | |
335437fb KS |
503 | return (p4d_t) { ret }; |
504 | } | |
f2a6a705 | 505 | |
335437fb KS |
506 | static inline p4dval_t p4d_val(p4d_t p4d) |
507 | { | |
fafe5e74 JG |
508 | return PVOP_ALT_CALLEE1(p4dval_t, mmu.p4d_val, p4d.p4d, |
509 | "mov %%rdi, %%rax", ALT_NOT(X86_FEATURE_XENPV)); | |
335437fb | 510 | } |
f2a6a705 | 511 | |
92e1c5b3 | 512 | static inline void __set_pgd(pgd_t *pgdp, pgd_t pgd) |
9042219c | 513 | { |
5c83511b | 514 | PVOP_VCALL2(mmu.set_pgd, pgdp, native_pgd_val(pgd)); |
9042219c EH |
515 | } |
516 | ||
92e1c5b3 | 517 | #define set_pgd(pgdp, pgdval) do { \ |
ed7588d5 | 518 | if (pgtable_l5_enabled()) \ |
92e1c5b3 KS |
519 | __set_pgd(pgdp, pgdval); \ |
520 | else \ | |
521 | set_p4d((p4d_t *)(pgdp), (p4d_t) { (pgdval).pgd }); \ | |
522 | } while (0) | |
523 | ||
524 | #define pgd_clear(pgdp) do { \ | |
7c9f80cb JG |
525 | if (pgtable_l5_enabled()) \ |
526 | set_pgd(pgdp, native_make_pgd(0)); \ | |
92e1c5b3 | 527 | } while (0) |
9042219c | 528 | |
f2a6a705 | 529 | #endif /* CONFIG_PGTABLE_LEVELS >= 5 */ |
9042219c | 530 | |
335437fb KS |
531 | static inline void p4d_clear(p4d_t *p4dp) |
532 | { | |
7c9f80cb | 533 | set_p4d(p4dp, native_make_p4d(0)); |
335437fb KS |
534 | } |
535 | ||
4eed80cd JF |
536 | static inline void set_pte_atomic(pte_t *ptep, pte_t pte) |
537 | { | |
538 | set_pte(ptep, pte); | |
539 | } | |
540 | ||
4eed80cd JF |
541 | static inline void pte_clear(struct mm_struct *mm, unsigned long addr, |
542 | pte_t *ptep) | |
543 | { | |
7c9f80cb | 544 | set_pte(ptep, native_make_pte(0)); |
4eed80cd | 545 | } |
60b3f626 JF |
546 | |
547 | static inline void pmd_clear(pmd_t *pmdp) | |
548 | { | |
7c9f80cb | 549 | set_pmd(pmdp, native_make_pmd(0)); |
60b3f626 | 550 | } |
4eed80cd | 551 | |
7fd7d83d | 552 | #define __HAVE_ARCH_START_CONTEXT_SWITCH |
224101ed | 553 | static inline void arch_start_context_switch(struct task_struct *prev) |
f8822f42 | 554 | { |
5c83511b | 555 | PVOP_VCALL1(cpu.start_context_switch, prev); |
f8822f42 JF |
556 | } |
557 | ||
224101ed | 558 | static inline void arch_end_context_switch(struct task_struct *next) |
f8822f42 | 559 | { |
5c83511b | 560 | PVOP_VCALL1(cpu.end_context_switch, next); |
f8822f42 JF |
561 | } |
562 | ||
9226d125 | 563 | #define __HAVE_ARCH_ENTER_LAZY_MMU_MODE |
f8822f42 JF |
564 | static inline void arch_enter_lazy_mmu_mode(void) |
565 | { | |
5c83511b | 566 | PVOP_VCALL0(mmu.lazy_mode.enter); |
f8822f42 JF |
567 | } |
568 | ||
569 | static inline void arch_leave_lazy_mmu_mode(void) | |
570 | { | |
5c83511b | 571 | PVOP_VCALL0(mmu.lazy_mode.leave); |
f8822f42 JF |
572 | } |
573 | ||
511ba86e BO |
574 | static inline void arch_flush_lazy_mmu_mode(void) |
575 | { | |
5c83511b | 576 | PVOP_VCALL0(mmu.lazy_mode.flush); |
511ba86e | 577 | } |
9226d125 | 578 | |
aeaaa59c | 579 | static inline void __set_fixmap(unsigned /* enum fixed_addresses */ idx, |
3b3809ac | 580 | phys_addr_t phys, pgprot_t flags) |
aeaaa59c | 581 | { |
5c83511b | 582 | pv_ops.mmu.set_fixmap(idx, phys, flags); |
aeaaa59c | 583 | } |
fdc0269e | 584 | #endif |
aeaaa59c | 585 | |
b4ecc126 | 586 | #if defined(CONFIG_SMP) && defined(CONFIG_PARAVIRT_SPINLOCKS) |
4bb689ee | 587 | |
f233f7f1 PZI |
588 | static __always_inline void pv_queued_spin_lock_slowpath(struct qspinlock *lock, |
589 | u32 val) | |
590 | { | |
5c83511b | 591 | PVOP_VCALL2(lock.queued_spin_lock_slowpath, lock, val); |
f233f7f1 PZI |
592 | } |
593 | ||
594 | static __always_inline void pv_queued_spin_unlock(struct qspinlock *lock) | |
595 | { | |
fafe5e74 JG |
596 | PVOP_ALT_VCALLEE1(lock.queued_spin_unlock, lock, |
597 | "movb $0, (%%" _ASM_ARG1 ");", | |
598 | ALT_NOT(X86_FEATURE_PVUNLOCK)); | |
f233f7f1 PZI |
599 | } |
600 | ||
601 | static __always_inline void pv_wait(u8 *ptr, u8 val) | |
602 | { | |
5c83511b | 603 | PVOP_VCALL2(lock.wait, ptr, val); |
f233f7f1 PZI |
604 | } |
605 | ||
606 | static __always_inline void pv_kick(int cpu) | |
607 | { | |
5c83511b | 608 | PVOP_VCALL1(lock.kick, cpu); |
f233f7f1 PZI |
609 | } |
610 | ||
6c62985d | 611 | static __always_inline bool pv_vcpu_is_preempted(long cpu) |
3cded417 | 612 | { |
fafe5e74 JG |
613 | return PVOP_ALT_CALLEE1(bool, lock.vcpu_is_preempted, cpu, |
614 | "xor %%" _ASM_AX ", %%" _ASM_AX ";", | |
615 | ALT_NOT(X86_FEATURE_VCPUPREEMPT)); | |
3cded417 PZ |
616 | } |
617 | ||
5c83511b JG |
618 | void __raw_callee_save___native_queued_spin_unlock(struct qspinlock *lock); |
619 | bool __raw_callee_save___native_vcpu_is_preempted(long cpu); | |
620 | ||
f233f7f1 | 621 | #endif /* SMP && PARAVIRT_SPINLOCKS */ |
4bb689ee | 622 | |
2e47d3e6 | 623 | #ifdef CONFIG_X86_32 |
ecb93d1c | 624 | /* save and restore all caller-save registers, except return value */ |
e584f559 JF |
625 | #define PV_SAVE_ALL_CALLER_REGS "pushl %ecx;" |
626 | #define PV_RESTORE_ALL_CALLER_REGS "popl %ecx;" | |
2e47d3e6 | 627 | #else |
ecb93d1c JF |
628 | /* save and restore all caller-save registers, except return value */ |
629 | #define PV_SAVE_ALL_CALLER_REGS \ | |
630 | "push %rcx;" \ | |
631 | "push %rdx;" \ | |
632 | "push %rsi;" \ | |
633 | "push %rdi;" \ | |
634 | "push %r8;" \ | |
635 | "push %r9;" \ | |
636 | "push %r10;" \ | |
637 | "push %r11;" | |
638 | #define PV_RESTORE_ALL_CALLER_REGS \ | |
639 | "pop %r11;" \ | |
640 | "pop %r10;" \ | |
641 | "pop %r9;" \ | |
642 | "pop %r8;" \ | |
643 | "pop %rdi;" \ | |
644 | "pop %rsi;" \ | |
645 | "pop %rdx;" \ | |
646 | "pop %rcx;" | |
2e47d3e6 GOC |
647 | #endif |
648 | ||
ecb93d1c JF |
649 | /* |
650 | * Generate a thunk around a function which saves all caller-save | |
651 | * registers except for the return value. This allows C functions to | |
652 | * be called from assembler code where fewer than normal registers are | |
653 | * available. It may also help code generation around calls from C | |
654 | * code if the common case doesn't use many registers. | |
655 | * | |
656 | * When a callee is wrapped in a thunk, the caller can assume that all | |
657 | * arg regs and all scratch registers are preserved across the | |
658 | * call. The return value in rax/eax will not be saved, even for void | |
659 | * functions. | |
660 | */ | |
87b240cb | 661 | #define PV_THUNK_NAME(func) "__raw_callee_save_" #func |
20125c87 | 662 | #define __PV_CALLEE_SAVE_REGS_THUNK(func, section) \ |
ecb93d1c | 663 | extern typeof(func) __raw_callee_save_##func; \ |
ecb93d1c | 664 | \ |
20125c87 | 665 | asm(".pushsection " section ", \"ax\";" \ |
87b240cb JP |
666 | ".globl " PV_THUNK_NAME(func) ";" \ |
667 | ".type " PV_THUNK_NAME(func) ", @function;" \ | |
1d293758 | 668 | ASM_FUNC_ALIGN \ |
87b240cb | 669 | PV_THUNK_NAME(func) ":" \ |
c3b03791 | 670 | ASM_ENDBR \ |
87b240cb | 671 | FRAME_BEGIN \ |
ecb93d1c JF |
672 | PV_SAVE_ALL_CALLER_REGS \ |
673 | "call " #func ";" \ | |
674 | PV_RESTORE_ALL_CALLER_REGS \ | |
87b240cb | 675 | FRAME_END \ |
b17c2baa | 676 | ASM_RET \ |
083db676 | 677 | ".size " PV_THUNK_NAME(func) ", .-" PV_THUNK_NAME(func) ";" \ |
ecb93d1c JF |
678 | ".popsection") |
679 | ||
20125c87 PZ |
680 | #define PV_CALLEE_SAVE_REGS_THUNK(func) \ |
681 | __PV_CALLEE_SAVE_REGS_THUNK(func, ".text") | |
682 | ||
ecb93d1c JF |
683 | /* Get a reference to a callee-save function */ |
684 | #define PV_CALLEE_SAVE(func) \ | |
685 | ((struct paravirt_callee_save) { __raw_callee_save_##func }) | |
686 | ||
687 | /* Promise that "func" already uses the right calling convention */ | |
688 | #define __PV_IS_CALLEE_SAVE(func) \ | |
689 | ((struct paravirt_callee_save) { func }) | |
690 | ||
6da63eb2 | 691 | #ifdef CONFIG_PARAVIRT_XXL |
e9382440 | 692 | static __always_inline unsigned long arch_local_save_flags(void) |
139ec7c4 | 693 | { |
fafe5e74 JG |
694 | return PVOP_ALT_CALLEE0(unsigned long, irq.save_fl, "pushf; pop %%rax;", |
695 | ALT_NOT(X86_FEATURE_XENPV)); | |
139ec7c4 RR |
696 | } |
697 | ||
e9382440 | 698 | static __always_inline void arch_local_irq_disable(void) |
139ec7c4 | 699 | { |
fafe5e74 | 700 | PVOP_ALT_VCALLEE0(irq.irq_disable, "cli;", ALT_NOT(X86_FEATURE_XENPV)); |
139ec7c4 RR |
701 | } |
702 | ||
e9382440 | 703 | static __always_inline void arch_local_irq_enable(void) |
139ec7c4 | 704 | { |
fafe5e74 | 705 | PVOP_ALT_VCALLEE0(irq.irq_enable, "sti;", ALT_NOT(X86_FEATURE_XENPV)); |
139ec7c4 RR |
706 | } |
707 | ||
e9382440 | 708 | static __always_inline unsigned long arch_local_irq_save(void) |
139ec7c4 RR |
709 | { |
710 | unsigned long f; | |
711 | ||
df9ee292 DH |
712 | f = arch_local_save_flags(); |
713 | arch_local_irq_disable(); | |
139ec7c4 RR |
714 | return f; |
715 | } | |
6da63eb2 | 716 | #endif |
139ec7c4 | 717 | |
74d4affd | 718 | |
294688c0 | 719 | /* Make sure as little as possible of this mess escapes. */ |
d5822035 | 720 | #undef PARAVIRT_CALL |
1a45b7aa JF |
721 | #undef __PVOP_CALL |
722 | #undef __PVOP_VCALL | |
f8822f42 JF |
723 | #undef PVOP_VCALL0 |
724 | #undef PVOP_CALL0 | |
725 | #undef PVOP_VCALL1 | |
726 | #undef PVOP_CALL1 | |
727 | #undef PVOP_VCALL2 | |
728 | #undef PVOP_CALL2 | |
729 | #undef PVOP_VCALL3 | |
730 | #undef PVOP_CALL3 | |
731 | #undef PVOP_VCALL4 | |
732 | #undef PVOP_CALL4 | |
139ec7c4 | 733 | |
f1a033cc JG |
734 | #define DEFINE_PARAVIRT_ASM(func, instr, sec) \ |
735 | asm (".pushsection " #sec ", \"ax\"\n" \ | |
736 | ".global " #func "\n\t" \ | |
737 | ".type " #func ", @function\n\t" \ | |
738 | ASM_FUNC_ALIGN "\n" \ | |
739 | #func ":\n\t" \ | |
740 | ASM_ENDBR \ | |
741 | instr "\n\t" \ | |
742 | ASM_RET \ | |
743 | ".size " #func ", . - " #func "\n\t" \ | |
744 | ".popsection") | |
745 | ||
6f30c1ac TG |
746 | extern void default_banner(void); |
747 | ||
d3561b7f RR |
748 | #else /* __ASSEMBLY__ */ |
749 | ||
27876f38 | 750 | #define _PVSITE(ptype, ops, word, algn) \ |
139ec7c4 RR |
751 | 771:; \ |
752 | ops; \ | |
753 | 772:; \ | |
754 | .pushsection .parainstructions,"a"; \ | |
658be9d3 GOC |
755 | .align algn; \ |
756 | word 771b; \ | |
139ec7c4 RR |
757 | .byte ptype; \ |
758 | .byte 772b-771b; \ | |
f92ff8f5 | 759 | _ASM_ALIGN; \ |
139ec7c4 RR |
760 | .popsection |
761 | ||
658be9d3 GOC |
762 | |
763 | #ifdef CONFIG_X86_64 | |
33634e42 | 764 | #ifdef CONFIG_PARAVIRT_XXL |
9104a18d | 765 | |
5c83511b | 766 | #define PARA_PATCH(off) ((off) / 8) |
27876f38 | 767 | #define PARA_SITE(ptype, ops) _PVSITE(ptype, ops, .quad, 8) |
491eccb7 | 768 | #define PARA_INDIRECT(addr) *addr(%rip) |
658be9d3 | 769 | |
e17f8234 | 770 | #ifdef CONFIG_DEBUG_ENTRY |
fafe5e74 JG |
771 | .macro PARA_IRQ_save_fl |
772 | PARA_SITE(PARA_PATCH(PV_IRQ_save_fl), | |
773 | ANNOTATE_RETPOLINE_SAFE; | |
774 | call PARA_INDIRECT(pv_ops+PV_IRQ_save_fl);) | |
775 | .endm | |
776 | ||
777 | #define SAVE_FLAGS ALTERNATIVE "PARA_IRQ_save_fl;", "pushf; pop %rax;", \ | |
778 | ALT_NOT(X86_FEATURE_XENPV) | |
e17f8234 | 779 | #endif |
55aedddb PZ |
780 | #endif /* CONFIG_PARAVIRT_XXL */ |
781 | #endif /* CONFIG_X86_64 */ | |
782 | ||
d3561b7f | 783 | #endif /* __ASSEMBLY__ */ |
6f30c1ac TG |
784 | #else /* CONFIG_PARAVIRT */ |
785 | # define default_banner x86_init_noop | |
fdc0269e JG |
786 | #endif /* !CONFIG_PARAVIRT */ |
787 | ||
a1ea1c03 | 788 | #ifndef __ASSEMBLY__ |
fdc0269e | 789 | #ifndef CONFIG_PARAVIRT_XXL |
a1ea1c03 DH |
790 | static inline void paravirt_arch_dup_mmap(struct mm_struct *oldmm, |
791 | struct mm_struct *mm) | |
792 | { | |
793 | } | |
fdc0269e | 794 | #endif |
a1ea1c03 | 795 | |
fdc0269e | 796 | #ifndef CONFIG_PARAVIRT |
a1ea1c03 DH |
797 | static inline void paravirt_arch_exit_mmap(struct mm_struct *mm) |
798 | { | |
799 | } | |
fdc0269e | 800 | #endif |
4e629211 JG |
801 | |
802 | #ifndef CONFIG_PARAVIRT_SPINLOCKS | |
803 | static inline void paravirt_set_cap(void) | |
804 | { | |
805 | } | |
806 | #endif | |
a1ea1c03 | 807 | #endif /* __ASSEMBLY__ */ |
1965aae3 | 808 | #endif /* _ASM_X86_PARAVIRT_H */ |