Commit | Line | Data |
---|---|---|
7a338472 | 1 | /* SPDX-License-Identifier: GPL-2.0-only */ |
783e9e51 | 2 | /* |
cc68765d | 3 | * tools/testing/selftests/kvm/include/x86_64/processor.h |
783e9e51 PB |
4 | * |
5 | * Copyright (C) 2018, Google LLC. | |
783e9e51 PB |
6 | */ |
7 | ||
cc68765d AJ |
8 | #ifndef SELFTEST_KVM_PROCESSOR_H |
9 | #define SELFTEST_KVM_PROCESSOR_H | |
783e9e51 PB |
10 | |
11 | #include <assert.h> | |
12 | #include <stdint.h> | |
13 | ||
14 | #define X86_EFLAGS_FIXED (1u << 1) | |
15 | ||
16 | #define X86_CR4_VME (1ul << 0) | |
17 | #define X86_CR4_PVI (1ul << 1) | |
18 | #define X86_CR4_TSD (1ul << 2) | |
19 | #define X86_CR4_DE (1ul << 3) | |
20 | #define X86_CR4_PSE (1ul << 4) | |
21 | #define X86_CR4_PAE (1ul << 5) | |
22 | #define X86_CR4_MCE (1ul << 6) | |
23 | #define X86_CR4_PGE (1ul << 7) | |
24 | #define X86_CR4_PCE (1ul << 8) | |
25 | #define X86_CR4_OSFXSR (1ul << 9) | |
26 | #define X86_CR4_OSXMMEXCPT (1ul << 10) | |
27 | #define X86_CR4_UMIP (1ul << 11) | |
28 | #define X86_CR4_VMXE (1ul << 13) | |
29 | #define X86_CR4_SMXE (1ul << 14) | |
30 | #define X86_CR4_FSGSBASE (1ul << 16) | |
31 | #define X86_CR4_PCIDE (1ul << 17) | |
32 | #define X86_CR4_OSXSAVE (1ul << 18) | |
33 | #define X86_CR4_SMEP (1ul << 20) | |
34 | #define X86_CR4_SMAP (1ul << 21) | |
35 | #define X86_CR4_PKE (1ul << 22) | |
36 | ||
/* The enum values match the instruction encoding of each register */
enum x86_register {
	RAX = 0,
	RCX = 1,
	RDX = 2,
	RBX = 3,
	RSP = 4,
	RBP = 5,
	RSI = 6,
	RDI = 7,
	R8  = 8,
	R9  = 9,
	R10 = 10,
	R11 = 11,
	R12 = 12,
	R13 = 13,
	R14 = 14,
	R15 = 15,
};
56 | ||
/*
 * 64-bit segment descriptor, matching the 16-byte in-memory layout of a
 * GDT/LDT entry (expanded system-descriptor format with a 64-bit base).
 *
 * Fix: the access byte is Type (bits 40-43) followed by S (bit 44) per the
 * Intel SDM; the previous "s:1, type:4" order swapped them, so descriptors
 * built from this struct had a scrambled access byte.
 */
struct desc64 {
	uint16_t limit0;	/* limit bits 15:0 */
	uint16_t base0;		/* base bits 15:0 */
	unsigned base1:8, type:4, s:1, dpl:2, p:1; /* base 23:16, Type, S, DPL, Present */
	unsigned limit1:4, avl:1, l:1, db:1, g:1, base2:8; /* limit 19:16, AVL, L, D/B, G, base 31:24 */
	uint32_t base3;		/* base bits 63:32 (system descriptors) */
	uint32_t zero1;		/* reserved, must be zero */
} __attribute__((packed));
65 | ||
/*
 * Pseudo-descriptor operand for SGDT/SIDT/LGDT/LIDT: 16-bit table limit
 * followed by the 64-bit linear base address (10 bytes, hence packed).
 */
struct desc_ptr {
	uint16_t size;		/* table limit (bytes - 1) */
	uint64_t address;	/* table linear base address */
} __attribute__((packed));
70 | ||
71 | static inline uint64_t get_desc64_base(const struct desc64 *desc) | |
72 | { | |
73 | return ((uint64_t)desc->base3 << 32) | | |
74 | (desc->base0 | ((desc->base1) << 16) | ((desc->base2) << 24)); | |
75 | } | |
76 | ||
/*
 * Read the time-stamp counter.  The leading lfence waits (on Intel CPUs)
 * until all previous instructions have executed, so the reading is not
 * hoisted by out-of-order execution.
 */
static inline uint64_t rdtsc(void)
{
	uint32_t lo, hi;

	__asm__ __volatile__("lfence; rdtsc" : "=a"(lo), "=d"(hi));
	return ((uint64_t)hi << 32) | lo;
}
88 | ||
/*
 * Read the TSC with RDTSCP; the IA32_TSC_AUX value (typically the CPU
 * number on Linux) is stored through @aux.
 */
static inline uint64_t rdtscp(uint32_t *aux)
{
	uint32_t lo, hi;

	__asm__ __volatile__("rdtscp" : "=a"(lo), "=d"(hi), "=c"(*aux));
	return ((uint64_t)hi << 32) | lo;
}
96 | ||
/*
 * Read MSR @msr via the RDMSR instruction (privileged; guest/ring-0
 * context only) and return the 64-bit EDX:EAX result.
 */
static inline uint64_t rdmsr(uint32_t msr)
{
	uint32_t lo, hi;

	__asm__ __volatile__("rdmsr" : "=a"(lo), "=d"(hi) : "c"(msr) : "memory");

	return ((uint64_t)hi << 32) | lo;
}
105 | ||
/*
 * Write @value to MSR @msr via the WRMSR instruction (privileged;
 * guest/ring-0 context only).  EDX:EAX carries the 64-bit value.
 */
static inline void wrmsr(uint32_t msr, uint64_t value)
{
	__asm__ __volatile__("wrmsr"
			     : /* no outputs */
			     : "a"((uint32_t)value),
			       "d"((uint32_t)(value >> 32)),
			       "c"(msr)
			     : "memory");
}
113 | ||
114 | ||
/* Read a 16-bit value from I/O port @port (IN AX, DX). */
static inline uint16_t inw(uint16_t port)
{
	uint16_t data;

	__asm__ __volatile__("in %%dx, %%ax"
			     : /* output */ "=a"(data)
			     : /* input */ "d"(port));

	return data;
}
125 | ||
/* Return the current ES segment selector. */
static inline uint16_t get_es(void)
{
	uint16_t seg;

	__asm__ __volatile__("mov %%es, %[seg]" : [seg]"=rm"(seg));
	return seg;
}
134 | ||
/* Return the current CS segment selector. */
static inline uint16_t get_cs(void)
{
	uint16_t seg;

	__asm__ __volatile__("mov %%cs, %[seg]" : [seg]"=rm"(seg));
	return seg;
}
143 | ||
/* Return the current SS segment selector. */
static inline uint16_t get_ss(void)
{
	uint16_t seg;

	__asm__ __volatile__("mov %%ss, %[seg]" : [seg]"=rm"(seg));
	return seg;
}
152 | ||
/* Return the current DS segment selector. */
static inline uint16_t get_ds(void)
{
	uint16_t seg;

	__asm__ __volatile__("mov %%ds, %[seg]" : [seg]"=rm"(seg));
	return seg;
}
161 | ||
/* Return the current FS segment selector. */
static inline uint16_t get_fs(void)
{
	uint16_t seg;

	__asm__ __volatile__("mov %%fs, %[seg]" : [seg]"=rm"(seg));
	return seg;
}
170 | ||
/* Return the current GS segment selector. */
static inline uint16_t get_gs(void)
{
	uint16_t seg;

	__asm__ __volatile__("mov %%gs, %[seg]" : [seg]"=rm"(seg));
	return seg;
}
179 | ||
/*
 * Return the task register (TSS selector) via STR.  Note: STR may #GP
 * in user mode when CR4.UMIP is set, so this is intended for guest code.
 */
static inline uint16_t get_tr(void)
{
	uint16_t sel;

	__asm__ __volatile__("str %[sel]" : [sel]"=rm"(sel));
	return sel;
}
188 | ||
/* Return the current CR0 value (privileged; guest/ring-0 only). */
static inline uint64_t get_cr0(void)
{
	uint64_t val;

	__asm__ __volatile__("mov %%cr0, %[val]" : [val]"=r"(val));
	return val;
}
197 | ||
/* Return the current CR3 (page-table base) value (privileged). */
static inline uint64_t get_cr3(void)
{
	uint64_t val;

	__asm__ __volatile__("mov %%cr3, %[val]" : [val]"=r"(val));
	return val;
}
206 | ||
/* Return the current CR4 value (privileged; guest/ring-0 only). */
static inline uint64_t get_cr4(void)
{
	uint64_t val;

	__asm__ __volatile__("mov %%cr4, %[val]" : [val]"=r"(val));
	return val;
}
215 | ||
/* Write @val to CR4 (privileged; guest/ring-0 only). */
static inline void set_cr4(uint64_t val)
{
	__asm__ __volatile__("mov %[val], %%cr4" : : [val]"r"(val) : "memory");
}
220 | ||
221 | static inline uint64_t get_gdt_base(void) | |
222 | { | |
223 | struct desc_ptr gdt; | |
224 | __asm__ __volatile__("sgdt %[gdt]" | |
225 | : /* output */ [gdt]"=m"(gdt)); | |
226 | return gdt.address; | |
227 | } | |
228 | ||
229 | static inline uint64_t get_idt_base(void) | |
230 | { | |
231 | struct desc_ptr idt; | |
232 | __asm__ __volatile__("sidt %[idt]" | |
233 | : /* output */ [idt]"=m"(idt)); | |
234 | return idt.address; | |
235 | } | |
236 | ||
/* Move the 64-bit value __var into register __xmm (a bare token, e.g. xmm0). */
#define SET_XMM(__var, __xmm) \
	asm volatile("movq %0, %%"#__xmm : : "r"(__var) : #__xmm)

/*
 * Set the low 64 bits of xmm register n to val.  n outside 0-7 is
 * silently ignored (no assert here, unlike get_xmm()).
 */
static inline void set_xmm(int n, unsigned long val)
{
	switch (n) {
	case 0:
		SET_XMM(val, xmm0);
		break;
	case 1:
		SET_XMM(val, xmm1);
		break;
	case 2:
		SET_XMM(val, xmm2);
		break;
	case 3:
		SET_XMM(val, xmm3);
		break;
	case 4:
		SET_XMM(val, xmm4);
		break;
	case 5:
		SET_XMM(val, xmm5);
		break;
	case 6:
		SET_XMM(val, xmm6);
		break;
	case 7:
		SET_XMM(val, xmm7);
		break;
	}
}
269 | ||
/* 8-byte one-element vector type, so GCC will keep it in an xmm register. */
typedef unsigned long v1di __attribute__ ((vector_size (8)));

/*
 * Return the low 64 bits of xmm register n (0-7); asserts on other n.
 *
 * NOTE(review): this binds local register variables to %xmm0-%xmm7 and
 * reads them without ever writing them in C — it relies on the compiler
 * leaving the hardware register contents (e.g. from a prior set_xmm())
 * intact, which GCC does not guarantee; confirm behavior at the
 * optimization level the selftests are built with.
 */
static inline unsigned long get_xmm(int n)
{
	assert(n >= 0 && n <= 7);

	register v1di xmm0 __asm__("%xmm0");
	register v1di xmm1 __asm__("%xmm1");
	register v1di xmm2 __asm__("%xmm2");
	register v1di xmm3 __asm__("%xmm3");
	register v1di xmm4 __asm__("%xmm4");
	register v1di xmm5 __asm__("%xmm5");
	register v1di xmm6 __asm__("%xmm6");
	register v1di xmm7 __asm__("%xmm7");
	switch (n) {
	case 0:
		return (unsigned long)xmm0;
	case 1:
		return (unsigned long)xmm1;
	case 2:
		return (unsigned long)xmm2;
	case 3:
		return (unsigned long)xmm3;
	case 4:
		return (unsigned long)xmm4;
	case 5:
		return (unsigned long)xmm5;
	case 6:
		return (unsigned long)xmm6;
	case 7:
		return (unsigned long)xmm7;
	}
	/* Unreachable given the assert above. */
	return 0;
}
303 | ||
9dba988e AL |
/* True iff the host CPU vendor is Intel — defined in the selftest library. */
bool is_intel_cpu(void);

/* Opaque container for a vCPU's saved architectural state. */
struct kvm_x86_state;
/* Snapshot vcpuid's state; ownership of the returned buffer passes to the caller. */
struct kvm_x86_state *vcpu_save_state(struct kvm_vm *vm, uint32_t vcpuid);
/* Restore a previously saved state snapshot into vcpuid. */
void vcpu_load_state(struct kvm_vm *vm, uint32_t vcpuid,
		     struct kvm_x86_state *state);

/* Fetch the CPUID feature set KVM reports as supported. */
struct kvm_cpuid2 *kvm_get_supported_cpuid(void);
/* Program vcpuid's CPUID tables with the given entries. */
void vcpu_set_cpuid(struct kvm_vm *vm, uint32_t vcpuid,
		    struct kvm_cpuid2 *cpuid);

/* Look up the supported-CPUID entry for (function, index); see library for
 * behavior when the entry is absent.
 */
struct kvm_cpuid_entry2 *
kvm_get_supported_cpuid_index(uint32_t function, uint32_t index);

/* Convenience wrapper: look up CPUID function with sub-leaf index 0. */
static inline struct kvm_cpuid_entry2 *
kvm_get_supported_cpuid_entry(uint32_t function)
{
	return kvm_get_supported_cpuid_index(function, 0);
}

/* Read/write an MSR of vcpuid through the KVM_{GET,SET}_MSRS interface. */
uint64_t vcpu_get_msr(struct kvm_vm *vm, uint32_t vcpuid, uint64_t msr_index);
void vcpu_set_msr(struct kvm_vm *vm, uint32_t vcpuid, uint64_t msr_index,
		  uint64_t msr_value);

/* Maximum basic CPUID leaf supported (presumably via CPUID.0.EAX — verify in lib). */
uint32_t kvm_get_cpuid_max(void);
/* Report the physical/virtual address widths of the host CPU. */
void kvm_get_cpu_address_width(unsigned int *pa_bits, unsigned int *va_bits);
330 | ||
783e9e51 PB |
331 | /* |
332 | * Basic CPU control in CR0 | |
333 | */ | |
334 | #define X86_CR0_PE (1UL<<0) /* Protection Enable */ | |
335 | #define X86_CR0_MP (1UL<<1) /* Monitor Coprocessor */ | |
336 | #define X86_CR0_EM (1UL<<2) /* Emulation */ | |
337 | #define X86_CR0_TS (1UL<<3) /* Task Switched */ | |
338 | #define X86_CR0_ET (1UL<<4) /* Extension Type */ | |
339 | #define X86_CR0_NE (1UL<<5) /* Numeric Error */ | |
340 | #define X86_CR0_WP (1UL<<16) /* Write Protect */ | |
341 | #define X86_CR0_AM (1UL<<18) /* Alignment Mask */ | |
342 | #define X86_CR0_NW (1UL<<29) /* Not Write-through */ | |
343 | #define X86_CR0_CD (1UL<<30) /* Cache Disable */ | |
344 | #define X86_CR0_PG (1UL<<31) /* Paging */ | |
345 | ||
346 | /* | |
347 | * CPU model specific register (MSR) numbers. | |
348 | */ | |
349 | ||
350 | /* x86-64 specific MSRs */ | |
351 | #define MSR_EFER 0xc0000080 /* extended feature register */ | |
352 | #define MSR_STAR 0xc0000081 /* legacy mode SYSCALL target */ | |
353 | #define MSR_LSTAR 0xc0000082 /* long mode SYSCALL target */ | |
354 | #define MSR_CSTAR 0xc0000083 /* compat mode SYSCALL target */ | |
355 | #define MSR_SYSCALL_MASK 0xc0000084 /* EFLAGS mask for syscall */ | |
356 | #define MSR_FS_BASE 0xc0000100 /* 64bit FS base */ | |
357 | #define MSR_GS_BASE 0xc0000101 /* 64bit GS base */ | |
358 | #define MSR_KERNEL_GS_BASE 0xc0000102 /* SwapGS GS shadow */ | |
359 | #define MSR_TSC_AUX 0xc0000103 /* Auxiliary TSC */ | |
360 | ||
361 | /* EFER bits: */ | |
362 | #define EFER_SCE (1<<0) /* SYSCALL/SYSRET */ | |
363 | #define EFER_LME (1<<8) /* Long mode enable */ | |
364 | #define EFER_LMA (1<<10) /* Long mode active (read-only) */ | |
365 | #define EFER_NX (1<<11) /* No execute enable */ | |
366 | #define EFER_SVME (1<<12) /* Enable virtualization */ | |
367 | #define EFER_LMSLE (1<<13) /* Long Mode Segment Limit Enable */ | |
368 | #define EFER_FFXSR (1<<14) /* Enable Fast FXSAVE/FXRSTOR */ | |
369 | ||
370 | /* Intel MSRs. Some also available on other CPUs */ | |
371 | ||
372 | #define MSR_PPIN_CTL 0x0000004e | |
373 | #define MSR_PPIN 0x0000004f | |
374 | ||
375 | #define MSR_IA32_PERFCTR0 0x000000c1 | |
376 | #define MSR_IA32_PERFCTR1 0x000000c2 | |
377 | #define MSR_FSB_FREQ 0x000000cd | |
378 | #define MSR_PLATFORM_INFO 0x000000ce | |
379 | #define MSR_PLATFORM_INFO_CPUID_FAULT_BIT 31 | |
380 | #define MSR_PLATFORM_INFO_CPUID_FAULT BIT_ULL(MSR_PLATFORM_INFO_CPUID_FAULT_BIT) | |
381 | ||
382 | #define MSR_PKG_CST_CONFIG_CONTROL 0x000000e2 | |
383 | #define NHM_C3_AUTO_DEMOTE (1UL << 25) | |
384 | #define NHM_C1_AUTO_DEMOTE (1UL << 26) | |
385 | #define ATM_LNC_C6_AUTO_DEMOTE (1UL << 25) | |
386 | #define SNB_C1_AUTO_UNDEMOTE (1UL << 27) | |
387 | #define SNB_C3_AUTO_UNDEMOTE (1UL << 28) | |
388 | ||
389 | #define MSR_MTRRcap 0x000000fe | |
390 | #define MSR_IA32_BBL_CR_CTL 0x00000119 | |
391 | #define MSR_IA32_BBL_CR_CTL3 0x0000011e | |
392 | ||
393 | #define MSR_IA32_SYSENTER_CS 0x00000174 | |
394 | #define MSR_IA32_SYSENTER_ESP 0x00000175 | |
395 | #define MSR_IA32_SYSENTER_EIP 0x00000176 | |
396 | ||
397 | #define MSR_IA32_MCG_CAP 0x00000179 | |
398 | #define MSR_IA32_MCG_STATUS 0x0000017a | |
399 | #define MSR_IA32_MCG_CTL 0x0000017b | |
400 | #define MSR_IA32_MCG_EXT_CTL 0x000004d0 | |
401 | ||
402 | #define MSR_OFFCORE_RSP_0 0x000001a6 | |
403 | #define MSR_OFFCORE_RSP_1 0x000001a7 | |
404 | #define MSR_TURBO_RATIO_LIMIT 0x000001ad | |
405 | #define MSR_TURBO_RATIO_LIMIT1 0x000001ae | |
406 | #define MSR_TURBO_RATIO_LIMIT2 0x000001af | |
407 | ||
408 | #define MSR_LBR_SELECT 0x000001c8 | |
409 | #define MSR_LBR_TOS 0x000001c9 | |
410 | #define MSR_LBR_NHM_FROM 0x00000680 | |
411 | #define MSR_LBR_NHM_TO 0x000006c0 | |
412 | #define MSR_LBR_CORE_FROM 0x00000040 | |
413 | #define MSR_LBR_CORE_TO 0x00000060 | |
414 | ||
415 | #define MSR_LBR_INFO_0 0x00000dc0 /* ... 0xddf for _31 */ | |
416 | #define LBR_INFO_MISPRED BIT_ULL(63) | |
417 | #define LBR_INFO_IN_TX BIT_ULL(62) | |
418 | #define LBR_INFO_ABORT BIT_ULL(61) | |
419 | #define LBR_INFO_CYCLES 0xffff | |
420 | ||
421 | #define MSR_IA32_PEBS_ENABLE 0x000003f1 | |
422 | #define MSR_IA32_DS_AREA 0x00000600 | |
423 | #define MSR_IA32_PERF_CAPABILITIES 0x00000345 | |
424 | #define MSR_PEBS_LD_LAT_THRESHOLD 0x000003f6 | |
425 | ||
426 | #define MSR_IA32_RTIT_CTL 0x00000570 | |
427 | #define MSR_IA32_RTIT_STATUS 0x00000571 | |
428 | #define MSR_IA32_RTIT_ADDR0_A 0x00000580 | |
429 | #define MSR_IA32_RTIT_ADDR0_B 0x00000581 | |
430 | #define MSR_IA32_RTIT_ADDR1_A 0x00000582 | |
431 | #define MSR_IA32_RTIT_ADDR1_B 0x00000583 | |
432 | #define MSR_IA32_RTIT_ADDR2_A 0x00000584 | |
433 | #define MSR_IA32_RTIT_ADDR2_B 0x00000585 | |
434 | #define MSR_IA32_RTIT_ADDR3_A 0x00000586 | |
435 | #define MSR_IA32_RTIT_ADDR3_B 0x00000587 | |
436 | #define MSR_IA32_RTIT_CR3_MATCH 0x00000572 | |
437 | #define MSR_IA32_RTIT_OUTPUT_BASE 0x00000560 | |
438 | #define MSR_IA32_RTIT_OUTPUT_MASK 0x00000561 | |
439 | ||
440 | #define MSR_MTRRfix64K_00000 0x00000250 | |
441 | #define MSR_MTRRfix16K_80000 0x00000258 | |
442 | #define MSR_MTRRfix16K_A0000 0x00000259 | |
443 | #define MSR_MTRRfix4K_C0000 0x00000268 | |
444 | #define MSR_MTRRfix4K_C8000 0x00000269 | |
445 | #define MSR_MTRRfix4K_D0000 0x0000026a | |
446 | #define MSR_MTRRfix4K_D8000 0x0000026b | |
447 | #define MSR_MTRRfix4K_E0000 0x0000026c | |
448 | #define MSR_MTRRfix4K_E8000 0x0000026d | |
449 | #define MSR_MTRRfix4K_F0000 0x0000026e | |
450 | #define MSR_MTRRfix4K_F8000 0x0000026f | |
451 | #define MSR_MTRRdefType 0x000002ff | |
452 | ||
453 | #define MSR_IA32_CR_PAT 0x00000277 | |
454 | ||
455 | #define MSR_IA32_DEBUGCTLMSR 0x000001d9 | |
456 | #define MSR_IA32_LASTBRANCHFROMIP 0x000001db | |
457 | #define MSR_IA32_LASTBRANCHTOIP 0x000001dc | |
458 | #define MSR_IA32_LASTINTFROMIP 0x000001dd | |
459 | #define MSR_IA32_LASTINTTOIP 0x000001de | |
460 | ||
461 | /* DEBUGCTLMSR bits (others vary by model): */ | |
462 | #define DEBUGCTLMSR_LBR (1UL << 0) /* last branch recording */ | |
463 | #define DEBUGCTLMSR_BTF_SHIFT 1 | |
464 | #define DEBUGCTLMSR_BTF (1UL << 1) /* single-step on branches */ | |
465 | #define DEBUGCTLMSR_TR (1UL << 6) | |
466 | #define DEBUGCTLMSR_BTS (1UL << 7) | |
467 | #define DEBUGCTLMSR_BTINT (1UL << 8) | |
468 | #define DEBUGCTLMSR_BTS_OFF_OS (1UL << 9) | |
469 | #define DEBUGCTLMSR_BTS_OFF_USR (1UL << 10) | |
470 | #define DEBUGCTLMSR_FREEZE_LBRS_ON_PMI (1UL << 11) | |
471 | #define DEBUGCTLMSR_FREEZE_IN_SMM_BIT 14 | |
472 | #define DEBUGCTLMSR_FREEZE_IN_SMM (1UL << DEBUGCTLMSR_FREEZE_IN_SMM_BIT) | |
473 | ||
474 | #define MSR_PEBS_FRONTEND 0x000003f7 | |
475 | ||
476 | #define MSR_IA32_POWER_CTL 0x000001fc | |
477 | ||
478 | #define MSR_IA32_MC0_CTL 0x00000400 | |
479 | #define MSR_IA32_MC0_STATUS 0x00000401 | |
480 | #define MSR_IA32_MC0_ADDR 0x00000402 | |
481 | #define MSR_IA32_MC0_MISC 0x00000403 | |
482 | ||
483 | /* C-state Residency Counters */ | |
484 | #define MSR_PKG_C3_RESIDENCY 0x000003f8 | |
485 | #define MSR_PKG_C6_RESIDENCY 0x000003f9 | |
486 | #define MSR_ATOM_PKG_C6_RESIDENCY 0x000003fa | |
487 | #define MSR_PKG_C7_RESIDENCY 0x000003fa | |
488 | #define MSR_CORE_C3_RESIDENCY 0x000003fc | |
489 | #define MSR_CORE_C6_RESIDENCY 0x000003fd | |
490 | #define MSR_CORE_C7_RESIDENCY 0x000003fe | |
491 | #define MSR_KNL_CORE_C6_RESIDENCY 0x000003ff | |
492 | #define MSR_PKG_C2_RESIDENCY 0x0000060d | |
493 | #define MSR_PKG_C8_RESIDENCY 0x00000630 | |
494 | #define MSR_PKG_C9_RESIDENCY 0x00000631 | |
495 | #define MSR_PKG_C10_RESIDENCY 0x00000632 | |
496 | ||
497 | /* Interrupt Response Limit */ | |
498 | #define MSR_PKGC3_IRTL 0x0000060a | |
499 | #define MSR_PKGC6_IRTL 0x0000060b | |
500 | #define MSR_PKGC7_IRTL 0x0000060c | |
501 | #define MSR_PKGC8_IRTL 0x00000633 | |
502 | #define MSR_PKGC9_IRTL 0x00000634 | |
503 | #define MSR_PKGC10_IRTL 0x00000635 | |
504 | ||
505 | /* Run Time Average Power Limiting (RAPL) Interface */ | |
506 | ||
507 | #define MSR_RAPL_POWER_UNIT 0x00000606 | |
508 | ||
509 | #define MSR_PKG_POWER_LIMIT 0x00000610 | |
510 | #define MSR_PKG_ENERGY_STATUS 0x00000611 | |
511 | #define MSR_PKG_PERF_STATUS 0x00000613 | |
512 | #define MSR_PKG_POWER_INFO 0x00000614 | |
513 | ||
514 | #define MSR_DRAM_POWER_LIMIT 0x00000618 | |
515 | #define MSR_DRAM_ENERGY_STATUS 0x00000619 | |
516 | #define MSR_DRAM_PERF_STATUS 0x0000061b | |
517 | #define MSR_DRAM_POWER_INFO 0x0000061c | |
518 | ||
519 | #define MSR_PP0_POWER_LIMIT 0x00000638 | |
520 | #define MSR_PP0_ENERGY_STATUS 0x00000639 | |
521 | #define MSR_PP0_POLICY 0x0000063a | |
522 | #define MSR_PP0_PERF_STATUS 0x0000063b | |
523 | ||
524 | #define MSR_PP1_POWER_LIMIT 0x00000640 | |
525 | #define MSR_PP1_ENERGY_STATUS 0x00000641 | |
526 | #define MSR_PP1_POLICY 0x00000642 | |
527 | ||
528 | /* Config TDP MSRs */ | |
529 | #define MSR_CONFIG_TDP_NOMINAL 0x00000648 | |
530 | #define MSR_CONFIG_TDP_LEVEL_1 0x00000649 | |
531 | #define MSR_CONFIG_TDP_LEVEL_2 0x0000064A | |
532 | #define MSR_CONFIG_TDP_CONTROL 0x0000064B | |
533 | #define MSR_TURBO_ACTIVATION_RATIO 0x0000064C | |
534 | ||
535 | #define MSR_PLATFORM_ENERGY_STATUS 0x0000064D | |
536 | ||
537 | #define MSR_PKG_WEIGHTED_CORE_C0_RES 0x00000658 | |
538 | #define MSR_PKG_ANY_CORE_C0_RES 0x00000659 | |
539 | #define MSR_PKG_ANY_GFXE_C0_RES 0x0000065A | |
540 | #define MSR_PKG_BOTH_CORE_GFXE_C0_RES 0x0000065B | |
541 | ||
542 | #define MSR_CORE_C1_RES 0x00000660 | |
543 | #define MSR_MODULE_C6_RES_MS 0x00000664 | |
544 | ||
545 | #define MSR_CC6_DEMOTION_POLICY_CONFIG 0x00000668 | |
546 | #define MSR_MC6_DEMOTION_POLICY_CONFIG 0x00000669 | |
547 | ||
548 | #define MSR_ATOM_CORE_RATIOS 0x0000066a | |
549 | #define MSR_ATOM_CORE_VIDS 0x0000066b | |
550 | #define MSR_ATOM_CORE_TURBO_RATIOS 0x0000066c | |
551 | #define MSR_ATOM_CORE_TURBO_VIDS 0x0000066d | |
552 | ||
553 | ||
554 | #define MSR_CORE_PERF_LIMIT_REASONS 0x00000690 | |
555 | #define MSR_GFX_PERF_LIMIT_REASONS 0x000006B0 | |
556 | #define MSR_RING_PERF_LIMIT_REASONS 0x000006B1 | |
557 | ||
558 | /* Hardware P state interface */ | |
559 | #define MSR_PPERF 0x0000064e | |
560 | #define MSR_PERF_LIMIT_REASONS 0x0000064f | |
561 | #define MSR_PM_ENABLE 0x00000770 | |
562 | #define MSR_HWP_CAPABILITIES 0x00000771 | |
563 | #define MSR_HWP_REQUEST_PKG 0x00000772 | |
564 | #define MSR_HWP_INTERRUPT 0x00000773 | |
565 | #define MSR_HWP_REQUEST 0x00000774 | |
566 | #define MSR_HWP_STATUS 0x00000777 | |
567 | ||
568 | /* CPUID.6.EAX */ | |
569 | #define HWP_BASE_BIT (1<<7) | |
570 | #define HWP_NOTIFICATIONS_BIT (1<<8) | |
571 | #define HWP_ACTIVITY_WINDOW_BIT (1<<9) | |
572 | #define HWP_ENERGY_PERF_PREFERENCE_BIT (1<<10) | |
573 | #define HWP_PACKAGE_LEVEL_REQUEST_BIT (1<<11) | |
574 | ||
575 | /* IA32_HWP_CAPABILITIES */ | |
576 | #define HWP_HIGHEST_PERF(x) (((x) >> 0) & 0xff) | |
577 | #define HWP_GUARANTEED_PERF(x) (((x) >> 8) & 0xff) | |
578 | #define HWP_MOSTEFFICIENT_PERF(x) (((x) >> 16) & 0xff) | |
579 | #define HWP_LOWEST_PERF(x) (((x) >> 24) & 0xff) | |
580 | ||
581 | /* IA32_HWP_REQUEST */ | |
582 | #define HWP_MIN_PERF(x) (x & 0xff) | |
583 | #define HWP_MAX_PERF(x) ((x & 0xff) << 8) | |
584 | #define HWP_DESIRED_PERF(x) ((x & 0xff) << 16) | |
585 | #define HWP_ENERGY_PERF_PREFERENCE(x) (((unsigned long long) x & 0xff) << 24) | |
586 | #define HWP_EPP_PERFORMANCE 0x00 | |
587 | #define HWP_EPP_BALANCE_PERFORMANCE 0x80 | |
588 | #define HWP_EPP_BALANCE_POWERSAVE 0xC0 | |
589 | #define HWP_EPP_POWERSAVE 0xFF | |
590 | #define HWP_ACTIVITY_WINDOW(x) ((unsigned long long)(x & 0xff3) << 32) | |
591 | #define HWP_PACKAGE_CONTROL(x) ((unsigned long long)(x & 0x1) << 42) | |
592 | ||
593 | /* IA32_HWP_STATUS */ | |
594 | #define HWP_GUARANTEED_CHANGE(x) (x & 0x1) | |
595 | #define HWP_EXCURSION_TO_MINIMUM(x) (x & 0x4) | |
596 | ||
597 | /* IA32_HWP_INTERRUPT */ | |
598 | #define HWP_CHANGE_TO_GUARANTEED_INT(x) (x & 0x1) | |
599 | #define HWP_EXCURSION_TO_MINIMUM_INT(x) (x & 0x2) | |
600 | ||
601 | #define MSR_AMD64_MC0_MASK 0xc0010044 | |
602 | ||
603 | #define MSR_IA32_MCx_CTL(x) (MSR_IA32_MC0_CTL + 4*(x)) | |
604 | #define MSR_IA32_MCx_STATUS(x) (MSR_IA32_MC0_STATUS + 4*(x)) | |
605 | #define MSR_IA32_MCx_ADDR(x) (MSR_IA32_MC0_ADDR + 4*(x)) | |
606 | #define MSR_IA32_MCx_MISC(x) (MSR_IA32_MC0_MISC + 4*(x)) | |
607 | ||
608 | #define MSR_AMD64_MCx_MASK(x) (MSR_AMD64_MC0_MASK + (x)) | |
609 | ||
610 | /* These are consecutive and not in the normal 4er MCE bank block */ | |
611 | #define MSR_IA32_MC0_CTL2 0x00000280 | |
612 | #define MSR_IA32_MCx_CTL2(x) (MSR_IA32_MC0_CTL2 + (x)) | |
613 | ||
614 | #define MSR_P6_PERFCTR0 0x000000c1 | |
615 | #define MSR_P6_PERFCTR1 0x000000c2 | |
616 | #define MSR_P6_EVNTSEL0 0x00000186 | |
617 | #define MSR_P6_EVNTSEL1 0x00000187 | |
618 | ||
619 | #define MSR_KNC_PERFCTR0 0x00000020 | |
620 | #define MSR_KNC_PERFCTR1 0x00000021 | |
621 | #define MSR_KNC_EVNTSEL0 0x00000028 | |
622 | #define MSR_KNC_EVNTSEL1 0x00000029 | |
623 | ||
624 | /* Alternative perfctr range with full access. */ | |
625 | #define MSR_IA32_PMC0 0x000004c1 | |
626 | ||
627 | /* AMD64 MSRs. Not complete. See the architecture manual for a more | |
628 | complete list. */ | |
629 | ||
630 | #define MSR_AMD64_PATCH_LEVEL 0x0000008b | |
631 | #define MSR_AMD64_TSC_RATIO 0xc0000104 | |
632 | #define MSR_AMD64_NB_CFG 0xc001001f | |
633 | #define MSR_AMD64_PATCH_LOADER 0xc0010020 | |
634 | #define MSR_AMD64_OSVW_ID_LENGTH 0xc0010140 | |
635 | #define MSR_AMD64_OSVW_STATUS 0xc0010141 | |
636 | #define MSR_AMD64_LS_CFG 0xc0011020 | |
637 | #define MSR_AMD64_DC_CFG 0xc0011022 | |
638 | #define MSR_AMD64_BU_CFG2 0xc001102a | |
639 | #define MSR_AMD64_IBSFETCHCTL 0xc0011030 | |
640 | #define MSR_AMD64_IBSFETCHLINAD 0xc0011031 | |
641 | #define MSR_AMD64_IBSFETCHPHYSAD 0xc0011032 | |
642 | #define MSR_AMD64_IBSFETCH_REG_COUNT 3 | |
643 | #define MSR_AMD64_IBSFETCH_REG_MASK ((1UL<<MSR_AMD64_IBSFETCH_REG_COUNT)-1) | |
644 | #define MSR_AMD64_IBSOPCTL 0xc0011033 | |
645 | #define MSR_AMD64_IBSOPRIP 0xc0011034 | |
646 | #define MSR_AMD64_IBSOPDATA 0xc0011035 | |
647 | #define MSR_AMD64_IBSOPDATA2 0xc0011036 | |
648 | #define MSR_AMD64_IBSOPDATA3 0xc0011037 | |
649 | #define MSR_AMD64_IBSDCLINAD 0xc0011038 | |
650 | #define MSR_AMD64_IBSDCPHYSAD 0xc0011039 | |
651 | #define MSR_AMD64_IBSOP_REG_COUNT 7 | |
652 | #define MSR_AMD64_IBSOP_REG_MASK ((1UL<<MSR_AMD64_IBSOP_REG_COUNT)-1) | |
653 | #define MSR_AMD64_IBSCTL 0xc001103a | |
654 | #define MSR_AMD64_IBSBRTARGET 0xc001103b | |
655 | #define MSR_AMD64_IBSOPDATA4 0xc001103d | |
656 | #define MSR_AMD64_IBS_REG_COUNT_MAX 8 /* includes MSR_AMD64_IBSBRTARGET */ | |
657 | #define MSR_AMD64_SEV 0xc0010131 | |
658 | #define MSR_AMD64_SEV_ENABLED_BIT 0 | |
659 | #define MSR_AMD64_SEV_ENABLED BIT_ULL(MSR_AMD64_SEV_ENABLED_BIT) | |
660 | ||
661 | /* Fam 17h MSRs */ | |
662 | #define MSR_F17H_IRPERF 0xc00000e9 | |
663 | ||
664 | /* Fam 16h MSRs */ | |
665 | #define MSR_F16H_L2I_PERF_CTL 0xc0010230 | |
666 | #define MSR_F16H_L2I_PERF_CTR 0xc0010231 | |
667 | #define MSR_F16H_DR1_ADDR_MASK 0xc0011019 | |
668 | #define MSR_F16H_DR2_ADDR_MASK 0xc001101a | |
669 | #define MSR_F16H_DR3_ADDR_MASK 0xc001101b | |
670 | #define MSR_F16H_DR0_ADDR_MASK 0xc0011027 | |
671 | ||
672 | /* Fam 15h MSRs */ | |
673 | #define MSR_F15H_PERF_CTL 0xc0010200 | |
674 | #define MSR_F15H_PERF_CTR 0xc0010201 | |
675 | #define MSR_F15H_NB_PERF_CTL 0xc0010240 | |
676 | #define MSR_F15H_NB_PERF_CTR 0xc0010241 | |
677 | #define MSR_F15H_PTSC 0xc0010280 | |
678 | #define MSR_F15H_IC_CFG 0xc0011021 | |
679 | ||
680 | /* Fam 10h MSRs */ | |
681 | #define MSR_FAM10H_MMIO_CONF_BASE 0xc0010058 | |
682 | #define FAM10H_MMIO_CONF_ENABLE (1<<0) | |
683 | #define FAM10H_MMIO_CONF_BUSRANGE_MASK 0xf | |
684 | #define FAM10H_MMIO_CONF_BUSRANGE_SHIFT 2 | |
685 | #define FAM10H_MMIO_CONF_BASE_MASK 0xfffffffULL | |
686 | #define FAM10H_MMIO_CONF_BASE_SHIFT 20 | |
687 | #define MSR_FAM10H_NODE_ID 0xc001100c | |
688 | #define MSR_F10H_DECFG 0xc0011029 | |
689 | #define MSR_F10H_DECFG_LFENCE_SERIALIZE_BIT 1 | |
690 | #define MSR_F10H_DECFG_LFENCE_SERIALIZE BIT_ULL(MSR_F10H_DECFG_LFENCE_SERIALIZE_BIT) | |
691 | ||
692 | /* K8 MSRs */ | |
693 | #define MSR_K8_TOP_MEM1 0xc001001a | |
694 | #define MSR_K8_TOP_MEM2 0xc001001d | |
695 | #define MSR_K8_SYSCFG 0xc0010010 | |
696 | #define MSR_K8_SYSCFG_MEM_ENCRYPT_BIT 23 | |
697 | #define MSR_K8_SYSCFG_MEM_ENCRYPT BIT_ULL(MSR_K8_SYSCFG_MEM_ENCRYPT_BIT) | |
698 | #define MSR_K8_INT_PENDING_MSG 0xc0010055 | |
699 | /* C1E active bits in int pending message */ | |
700 | #define K8_INTP_C1E_ACTIVE_MASK 0x18000000 | |
701 | #define MSR_K8_TSEG_ADDR 0xc0010112 | |
702 | #define MSR_K8_TSEG_MASK 0xc0010113 | |
703 | #define K8_MTRRFIXRANGE_DRAM_ENABLE 0x00040000 /* MtrrFixDramEn bit */ | |
704 | #define K8_MTRRFIXRANGE_DRAM_MODIFY 0x00080000 /* MtrrFixDramModEn bit */ | |
705 | #define K8_MTRR_RDMEM_WRMEM_MASK 0x18181818 /* Mask: RdMem|WrMem */ | |
706 | ||
707 | /* K7 MSRs */ | |
708 | #define MSR_K7_EVNTSEL0 0xc0010000 | |
709 | #define MSR_K7_PERFCTR0 0xc0010004 | |
710 | #define MSR_K7_EVNTSEL1 0xc0010001 | |
711 | #define MSR_K7_PERFCTR1 0xc0010005 | |
712 | #define MSR_K7_EVNTSEL2 0xc0010002 | |
713 | #define MSR_K7_PERFCTR2 0xc0010006 | |
714 | #define MSR_K7_EVNTSEL3 0xc0010003 | |
715 | #define MSR_K7_PERFCTR3 0xc0010007 | |
716 | #define MSR_K7_CLK_CTL 0xc001001b | |
717 | #define MSR_K7_HWCR 0xc0010015 | |
718 | #define MSR_K7_HWCR_SMMLOCK_BIT 0 | |
719 | #define MSR_K7_HWCR_SMMLOCK BIT_ULL(MSR_K7_HWCR_SMMLOCK_BIT) | |
720 | #define MSR_K7_FID_VID_CTL 0xc0010041 | |
721 | #define MSR_K7_FID_VID_STATUS 0xc0010042 | |
722 | ||
723 | /* K6 MSRs */ | |
724 | #define MSR_K6_WHCR 0xc0000082 | |
725 | #define MSR_K6_UWCCR 0xc0000085 | |
726 | #define MSR_K6_EPMR 0xc0000086 | |
727 | #define MSR_K6_PSOR 0xc0000087 | |
728 | #define MSR_K6_PFIR 0xc0000088 | |
729 | ||
730 | /* Centaur-Hauls/IDT defined MSRs. */ | |
731 | #define MSR_IDT_FCR1 0x00000107 | |
732 | #define MSR_IDT_FCR2 0x00000108 | |
733 | #define MSR_IDT_FCR3 0x00000109 | |
734 | #define MSR_IDT_FCR4 0x0000010a | |
735 | ||
736 | #define MSR_IDT_MCR0 0x00000110 | |
737 | #define MSR_IDT_MCR1 0x00000111 | |
738 | #define MSR_IDT_MCR2 0x00000112 | |
739 | #define MSR_IDT_MCR3 0x00000113 | |
740 | #define MSR_IDT_MCR4 0x00000114 | |
741 | #define MSR_IDT_MCR5 0x00000115 | |
742 | #define MSR_IDT_MCR6 0x00000116 | |
743 | #define MSR_IDT_MCR7 0x00000117 | |
744 | #define MSR_IDT_MCR_CTRL 0x00000120 | |
745 | ||
746 | /* VIA Cyrix defined MSRs*/ | |
747 | #define MSR_VIA_FCR 0x00001107 | |
748 | #define MSR_VIA_LONGHAUL 0x0000110a | |
749 | #define MSR_VIA_RNG 0x0000110b | |
750 | #define MSR_VIA_BCR2 0x00001147 | |
751 | ||
752 | /* Transmeta defined MSRs */ | |
753 | #define MSR_TMTA_LONGRUN_CTRL 0x80868010 | |
754 | #define MSR_TMTA_LONGRUN_FLAGS 0x80868011 | |
755 | #define MSR_TMTA_LRTI_READOUT 0x80868018 | |
756 | #define MSR_TMTA_LRTI_VOLT_MHZ 0x8086801a | |
757 | ||
758 | /* Intel defined MSRs. */ | |
759 | #define MSR_IA32_P5_MC_ADDR 0x00000000 | |
760 | #define MSR_IA32_P5_MC_TYPE 0x00000001 | |
761 | #define MSR_IA32_TSC 0x00000010 | |
762 | #define MSR_IA32_PLATFORM_ID 0x00000017 | |
763 | #define MSR_IA32_EBL_CR_POWERON 0x0000002a | |
764 | #define MSR_EBC_FREQUENCY_ID 0x0000002c | |
765 | #define MSR_SMI_COUNT 0x00000034 | |
766 | #define MSR_IA32_FEATURE_CONTROL 0x0000003a | |
767 | #define MSR_IA32_TSC_ADJUST 0x0000003b | |
768 | #define MSR_IA32_BNDCFGS 0x00000d90 | |
769 | ||
770 | #define MSR_IA32_BNDCFGS_RSVD 0x00000ffc | |
771 | ||
772 | #define MSR_IA32_XSS 0x00000da0 | |
773 | ||
774 | #define FEATURE_CONTROL_LOCKED (1<<0) | |
775 | #define FEATURE_CONTROL_VMXON_ENABLED_INSIDE_SMX (1<<1) | |
776 | #define FEATURE_CONTROL_VMXON_ENABLED_OUTSIDE_SMX (1<<2) | |
777 | #define FEATURE_CONTROL_LMCE (1<<20) | |
778 | ||
779 | #define MSR_IA32_APICBASE 0x0000001b | |
780 | #define MSR_IA32_APICBASE_BSP (1<<8) | |
781 | #define MSR_IA32_APICBASE_ENABLE (1<<11) | |
782 | #define MSR_IA32_APICBASE_BASE (0xfffff<<12) | |
783 | ||
79904c9d VK |
784 | #define APIC_BASE_MSR 0x800 |
785 | #define X2APIC_ENABLE (1UL << 10) | |
786 | #define APIC_ICR 0x300 | |
787 | #define APIC_DEST_SELF 0x40000 | |
788 | #define APIC_DEST_ALLINC 0x80000 | |
789 | #define APIC_DEST_ALLBUT 0xC0000 | |
790 | #define APIC_ICR_RR_MASK 0x30000 | |
791 | #define APIC_ICR_RR_INVALID 0x00000 | |
792 | #define APIC_ICR_RR_INPROG 0x10000 | |
793 | #define APIC_ICR_RR_VALID 0x20000 | |
794 | #define APIC_INT_LEVELTRIG 0x08000 | |
795 | #define APIC_INT_ASSERT 0x04000 | |
796 | #define APIC_ICR_BUSY 0x01000 | |
797 | #define APIC_DEST_LOGICAL 0x00800 | |
798 | #define APIC_DEST_PHYSICAL 0x00000 | |
799 | #define APIC_DM_FIXED 0x00000 | |
800 | #define APIC_DM_FIXED_MASK 0x00700 | |
801 | #define APIC_DM_LOWEST 0x00100 | |
802 | #define APIC_DM_SMI 0x00200 | |
803 | #define APIC_DM_REMRD 0x00300 | |
804 | #define APIC_DM_NMI 0x00400 | |
805 | #define APIC_DM_INIT 0x00500 | |
806 | #define APIC_DM_STARTUP 0x00600 | |
807 | #define APIC_DM_EXTINT 0x00700 | |
808 | #define APIC_VECTOR_MASK 0x000FF | |
809 | #define APIC_ICR2 0x310 | |
810 | ||
783e9e51 PB |
/* Assorted architectural and vendor MSR indices. */
#define MSR_IA32_TSCDEADLINE		0x000006e0	/* TSC-deadline timer target */

#define MSR_IA32_UCODE_WRITE		0x00000079	/* microcode update trigger */
#define MSR_IA32_UCODE_REV		0x0000008b	/* microcode revision (read) */

#define MSR_IA32_SMM_MONITOR_CTL	0x0000009b
#define MSR_IA32_SMBASE			0x0000009e

#define MSR_IA32_PERF_STATUS		0x00000198
#define MSR_IA32_PERF_CTL		0x00000199
#define INTEL_PERF_CTL_MASK		0xffff
/* AMD P-state MSRs live in the 0xc001xxxx vendor range. */
#define MSR_AMD_PSTATE_DEF_BASE		0xc0010064
#define MSR_AMD_PERF_STATUS		0xc0010063
#define MSR_AMD_PERF_CTL		0xc0010062

#define MSR_IA32_MPERF			0x000000e7
#define MSR_IA32_APERF			0x000000e8

#define MSR_IA32_THERM_CONTROL		0x0000019a
#define MSR_IA32_THERM_INTERRUPT	0x0000019b

/* MSR_IA32_THERM_INTERRUPT enable bits */
#define THERM_INT_HIGH_ENABLE		(1 << 0)
#define THERM_INT_LOW_ENABLE		(1 << 1)
#define THERM_INT_PLN_ENABLE		(1 << 24)

#define MSR_IA32_THERM_STATUS		0x0000019c

/* MSR_IA32_THERM_STATUS bits */
#define THERM_STATUS_PROCHOT		(1 << 0)
#define THERM_STATUS_POWER_LIMIT	(1 << 10)

#define MSR_THERM2_CTL			0x0000019d

#define MSR_THERM2_CTL_TM_SELECT	(1ULL << 16)

#define MSR_IA32_MISC_ENABLE		0x000001a0	/* bit definitions below */

#define MSR_IA32_TEMPERATURE_TARGET	0x000001a2

#define MSR_MISC_FEATURE_CONTROL	0x000001a4
#define MSR_MISC_PWR_MGMT		0x000001aa

#define MSR_IA32_ENERGY_PERF_BIAS	0x000001b0
/* Common values for the energy/performance bias knob (0=perf .. 15=powersave) */
#define ENERGY_PERF_BIAS_PERFORMANCE		0
#define ENERGY_PERF_BIAS_BALANCE_PERFORMANCE	4
#define ENERGY_PERF_BIAS_NORMAL			6
#define ENERGY_PERF_BIAS_BALANCE_POWERSAVE	8
#define ENERGY_PERF_BIAS_POWERSAVE		15
858 | ||
/* Package-level thermal status/interrupt MSRs and their bit layouts. */
#define MSR_IA32_PACKAGE_THERM_STATUS		0x000001b1

#define PACKAGE_THERM_STATUS_PROCHOT		(1 << 0)
#define PACKAGE_THERM_STATUS_POWER_LIMIT	(1 << 10)

#define MSR_IA32_PACKAGE_THERM_INTERRUPT	0x000001b2

#define PACKAGE_THERM_INT_HIGH_ENABLE		(1 << 0)
#define PACKAGE_THERM_INT_LOW_ENABLE		(1 << 1)
#define PACKAGE_THERM_INT_PLN_ENABLE		(1 << 24)

/* Thermal Thresholds Support */
#define THERM_INT_THRESHOLD0_ENABLE	(1 << 15)
#define THERM_SHIFT_THRESHOLD0		8
#define THERM_MASK_THRESHOLD0		(0x7f << THERM_SHIFT_THRESHOLD0)
#define THERM_INT_THRESHOLD1_ENABLE	(1 << 23)
#define THERM_SHIFT_THRESHOLD1		16
#define THERM_MASK_THRESHOLD1		(0x7f << THERM_SHIFT_THRESHOLD1)
#define THERM_STATUS_THRESHOLD0		(1 << 6)
#define THERM_LOG_THRESHOLD0		(1 << 7)
#define THERM_STATUS_THRESHOLD1		(1 << 8)
#define THERM_LOG_THRESHOLD1		(1 << 9)
881 | ||
/*
 * MISC_ENABLE bits: architectural.
 * Each feature gets a *_BIT position plus a 1ULL<<bit mask; bit numbers
 * above 31 (e.g. XD_DISABLE at 34) are why the masks are 64-bit.
 */
#define MSR_IA32_MISC_ENABLE_FAST_STRING_BIT		0
#define MSR_IA32_MISC_ENABLE_FAST_STRING		(1ULL << MSR_IA32_MISC_ENABLE_FAST_STRING_BIT)
#define MSR_IA32_MISC_ENABLE_TCC_BIT			1
#define MSR_IA32_MISC_ENABLE_TCC			(1ULL << MSR_IA32_MISC_ENABLE_TCC_BIT)
#define MSR_IA32_MISC_ENABLE_EMON_BIT			7
#define MSR_IA32_MISC_ENABLE_EMON			(1ULL << MSR_IA32_MISC_ENABLE_EMON_BIT)
#define MSR_IA32_MISC_ENABLE_BTS_UNAVAIL_BIT		11
#define MSR_IA32_MISC_ENABLE_BTS_UNAVAIL		(1ULL << MSR_IA32_MISC_ENABLE_BTS_UNAVAIL_BIT)
#define MSR_IA32_MISC_ENABLE_PEBS_UNAVAIL_BIT		12
#define MSR_IA32_MISC_ENABLE_PEBS_UNAVAIL		(1ULL << MSR_IA32_MISC_ENABLE_PEBS_UNAVAIL_BIT)
#define MSR_IA32_MISC_ENABLE_ENHANCED_SPEEDSTEP_BIT	16
#define MSR_IA32_MISC_ENABLE_ENHANCED_SPEEDSTEP		(1ULL << MSR_IA32_MISC_ENABLE_ENHANCED_SPEEDSTEP_BIT)
#define MSR_IA32_MISC_ENABLE_MWAIT_BIT			18
#define MSR_IA32_MISC_ENABLE_MWAIT			(1ULL << MSR_IA32_MISC_ENABLE_MWAIT_BIT)
#define MSR_IA32_MISC_ENABLE_LIMIT_CPUID_BIT		22
#define MSR_IA32_MISC_ENABLE_LIMIT_CPUID		(1ULL << MSR_IA32_MISC_ENABLE_LIMIT_CPUID_BIT)
#define MSR_IA32_MISC_ENABLE_XTPR_DISABLE_BIT		23
#define MSR_IA32_MISC_ENABLE_XTPR_DISABLE		(1ULL << MSR_IA32_MISC_ENABLE_XTPR_DISABLE_BIT)
#define MSR_IA32_MISC_ENABLE_XD_DISABLE_BIT		34
#define MSR_IA32_MISC_ENABLE_XD_DISABLE			(1ULL << MSR_IA32_MISC_ENABLE_XD_DISABLE_BIT)
903 | ||
/* MISC_ENABLE bits: model-specific, meaning may vary from core to core */
#define MSR_IA32_MISC_ENABLE_X87_COMPAT_BIT		2
#define MSR_IA32_MISC_ENABLE_X87_COMPAT			(1ULL << MSR_IA32_MISC_ENABLE_X87_COMPAT_BIT)
#define MSR_IA32_MISC_ENABLE_TM1_BIT			3
#define MSR_IA32_MISC_ENABLE_TM1			(1ULL << MSR_IA32_MISC_ENABLE_TM1_BIT)
#define MSR_IA32_MISC_ENABLE_SPLIT_LOCK_DISABLE_BIT	4
#define MSR_IA32_MISC_ENABLE_SPLIT_LOCK_DISABLE		(1ULL << MSR_IA32_MISC_ENABLE_SPLIT_LOCK_DISABLE_BIT)
#define MSR_IA32_MISC_ENABLE_L3CACHE_DISABLE_BIT	6
#define MSR_IA32_MISC_ENABLE_L3CACHE_DISABLE		(1ULL << MSR_IA32_MISC_ENABLE_L3CACHE_DISABLE_BIT)
#define MSR_IA32_MISC_ENABLE_SUPPRESS_LOCK_BIT		8
#define MSR_IA32_MISC_ENABLE_SUPPRESS_LOCK		(1ULL << MSR_IA32_MISC_ENABLE_SUPPRESS_LOCK_BIT)
#define MSR_IA32_MISC_ENABLE_PREFETCH_DISABLE_BIT	9
#define MSR_IA32_MISC_ENABLE_PREFETCH_DISABLE		(1ULL << MSR_IA32_MISC_ENABLE_PREFETCH_DISABLE_BIT)
/* FERR and FERR_MULTIPLEX deliberately alias bit 10: its meaning is model-specific. */
#define MSR_IA32_MISC_ENABLE_FERR_BIT			10
#define MSR_IA32_MISC_ENABLE_FERR			(1ULL << MSR_IA32_MISC_ENABLE_FERR_BIT)
#define MSR_IA32_MISC_ENABLE_FERR_MULTIPLEX_BIT		10
#define MSR_IA32_MISC_ENABLE_FERR_MULTIPLEX		(1ULL << MSR_IA32_MISC_ENABLE_FERR_MULTIPLEX_BIT)
#define MSR_IA32_MISC_ENABLE_TM2_BIT			13
#define MSR_IA32_MISC_ENABLE_TM2			(1ULL << MSR_IA32_MISC_ENABLE_TM2_BIT)
#define MSR_IA32_MISC_ENABLE_ADJ_PREF_DISABLE_BIT	19
#define MSR_IA32_MISC_ENABLE_ADJ_PREF_DISABLE		(1ULL << MSR_IA32_MISC_ENABLE_ADJ_PREF_DISABLE_BIT)
#define MSR_IA32_MISC_ENABLE_SPEEDSTEP_LOCK_BIT		20
#define MSR_IA32_MISC_ENABLE_SPEEDSTEP_LOCK		(1ULL << MSR_IA32_MISC_ENABLE_SPEEDSTEP_LOCK_BIT)
#define MSR_IA32_MISC_ENABLE_L1D_CONTEXT_BIT		24
#define MSR_IA32_MISC_ENABLE_L1D_CONTEXT		(1ULL << MSR_IA32_MISC_ENABLE_L1D_CONTEXT_BIT)
#define MSR_IA32_MISC_ENABLE_DCU_PREF_DISABLE_BIT	37
#define MSR_IA32_MISC_ENABLE_DCU_PREF_DISABLE		(1ULL << MSR_IA32_MISC_ENABLE_DCU_PREF_DISABLE_BIT)
#define MSR_IA32_MISC_ENABLE_TURBO_DISABLE_BIT		38
#define MSR_IA32_MISC_ENABLE_TURBO_DISABLE		(1ULL << MSR_IA32_MISC_ENABLE_TURBO_DISABLE_BIT)
#define MSR_IA32_MISC_ENABLE_IP_PREF_DISABLE_BIT	39
#define MSR_IA32_MISC_ENABLE_IP_PREF_DISABLE		(1ULL << MSR_IA32_MISC_ENABLE_IP_PREF_DISABLE_BIT)
935 | ||
/* MISC_FEATURES_ENABLES non-architectural features */
#define MSR_MISC_FEATURES_ENABLES		0x00000140

#define MSR_MISC_FEATURES_ENABLES_CPUID_FAULT_BIT	0
/* NOTE: BIT_ULL() is expected to come from linux/bits.h via another include. */
#define MSR_MISC_FEATURES_ENABLES_CPUID_FAULT		BIT_ULL(MSR_MISC_FEATURES_ENABLES_CPUID_FAULT_BIT)
#define MSR_MISC_FEATURES_ENABLES_RING3MWAIT_BIT	1

/*
 * NOTE(review): same MSR index (0x6e0) as MSR_IA32_TSCDEADLINE above;
 * both names are kept for source compatibility with existing tests.
 */
#define MSR_IA32_TSC_DEADLINE		0x000006E0
944 | ||
/* P4/Xeon+ specific: machine-check state-save register images */
#define MSR_IA32_MCG_EAX		0x00000180
#define MSR_IA32_MCG_EBX		0x00000181
#define MSR_IA32_MCG_ECX		0x00000182
#define MSR_IA32_MCG_EDX		0x00000183
#define MSR_IA32_MCG_ESI		0x00000184
#define MSR_IA32_MCG_EDI		0x00000185
#define MSR_IA32_MCG_EBP		0x00000186
#define MSR_IA32_MCG_ESP		0x00000187
#define MSR_IA32_MCG_EFLAGS		0x00000188
#define MSR_IA32_MCG_EIP		0x00000189
#define MSR_IA32_MCG_RESERVED		0x0000018a
957 | ||
/*
 * Pentium IV performance counter MSRs: raw counters (PERFCTR),
 * counter configuration (CCCR), and event selection (ESCR) registers,
 * grouped by functional unit (BPU, MS, FLAME, IQ, ...).
 */
#define MSR_P4_BPU_PERFCTR0		0x00000300
#define MSR_P4_BPU_PERFCTR1		0x00000301
#define MSR_P4_BPU_PERFCTR2		0x00000302
#define MSR_P4_BPU_PERFCTR3		0x00000303
#define MSR_P4_MS_PERFCTR0		0x00000304
#define MSR_P4_MS_PERFCTR1		0x00000305
#define MSR_P4_MS_PERFCTR2		0x00000306
#define MSR_P4_MS_PERFCTR3		0x00000307
#define MSR_P4_FLAME_PERFCTR0		0x00000308
#define MSR_P4_FLAME_PERFCTR1		0x00000309
#define MSR_P4_FLAME_PERFCTR2		0x0000030a
#define MSR_P4_FLAME_PERFCTR3		0x0000030b
#define MSR_P4_IQ_PERFCTR0		0x0000030c
#define MSR_P4_IQ_PERFCTR1		0x0000030d
#define MSR_P4_IQ_PERFCTR2		0x0000030e
#define MSR_P4_IQ_PERFCTR3		0x0000030f
#define MSR_P4_IQ_PERFCTR4		0x00000310
#define MSR_P4_IQ_PERFCTR5		0x00000311
#define MSR_P4_BPU_CCCR0		0x00000360
#define MSR_P4_BPU_CCCR1		0x00000361
#define MSR_P4_BPU_CCCR2		0x00000362
#define MSR_P4_BPU_CCCR3		0x00000363
#define MSR_P4_MS_CCCR0			0x00000364
#define MSR_P4_MS_CCCR1			0x00000365
#define MSR_P4_MS_CCCR2			0x00000366
#define MSR_P4_MS_CCCR3			0x00000367
#define MSR_P4_FLAME_CCCR0		0x00000368
#define MSR_P4_FLAME_CCCR1		0x00000369
#define MSR_P4_FLAME_CCCR2		0x0000036a
#define MSR_P4_FLAME_CCCR3		0x0000036b
#define MSR_P4_IQ_CCCR0			0x0000036c
#define MSR_P4_IQ_CCCR1			0x0000036d
#define MSR_P4_IQ_CCCR2			0x0000036e
#define MSR_P4_IQ_CCCR3			0x0000036f
#define MSR_P4_IQ_CCCR4			0x00000370
#define MSR_P4_IQ_CCCR5			0x00000371
/* ESCRs are not index-ordered; they follow the unit naming instead. */
#define MSR_P4_ALF_ESCR0		0x000003ca
#define MSR_P4_ALF_ESCR1		0x000003cb
#define MSR_P4_BPU_ESCR0		0x000003b2
#define MSR_P4_BPU_ESCR1		0x000003b3
#define MSR_P4_BSU_ESCR0		0x000003a0
#define MSR_P4_BSU_ESCR1		0x000003a1
#define MSR_P4_CRU_ESCR0		0x000003b8
#define MSR_P4_CRU_ESCR1		0x000003b9
#define MSR_P4_CRU_ESCR2		0x000003cc
#define MSR_P4_CRU_ESCR3		0x000003cd
#define MSR_P4_CRU_ESCR4		0x000003e0
#define MSR_P4_CRU_ESCR5		0x000003e1
#define MSR_P4_DAC_ESCR0		0x000003a8
#define MSR_P4_DAC_ESCR1		0x000003a9
#define MSR_P4_FIRM_ESCR0		0x000003a4
#define MSR_P4_FIRM_ESCR1		0x000003a5
#define MSR_P4_FLAME_ESCR0		0x000003a6
#define MSR_P4_FLAME_ESCR1		0x000003a7
#define MSR_P4_FSB_ESCR0		0x000003a2
#define MSR_P4_FSB_ESCR1		0x000003a3
#define MSR_P4_IQ_ESCR0			0x000003ba
#define MSR_P4_IQ_ESCR1			0x000003bb
#define MSR_P4_IS_ESCR0			0x000003b4
#define MSR_P4_IS_ESCR1			0x000003b5
#define MSR_P4_ITLB_ESCR0		0x000003b6
#define MSR_P4_ITLB_ESCR1		0x000003b7
#define MSR_P4_IX_ESCR0			0x000003c8
#define MSR_P4_IX_ESCR1			0x000003c9
#define MSR_P4_MOB_ESCR0		0x000003aa
#define MSR_P4_MOB_ESCR1		0x000003ab
#define MSR_P4_MS_ESCR0			0x000003c0
#define MSR_P4_MS_ESCR1			0x000003c1
#define MSR_P4_PMH_ESCR0		0x000003ac
#define MSR_P4_PMH_ESCR1		0x000003ad
#define MSR_P4_RAT_ESCR0		0x000003bc
#define MSR_P4_RAT_ESCR1		0x000003bd
#define MSR_P4_SAAT_ESCR0		0x000003ae
#define MSR_P4_SAAT_ESCR1		0x000003af
#define MSR_P4_SSU_ESCR0		0x000003be
#define MSR_P4_SSU_ESCR1		0x000003bf /* guess: not in manual */

#define MSR_P4_TBPU_ESCR0		0x000003c2
#define MSR_P4_TBPU_ESCR1		0x000003c3
#define MSR_P4_TC_ESCR0			0x000003c4
#define MSR_P4_TC_ESCR1			0x000003c5
#define MSR_P4_U2L_ESCR0		0x000003b0
#define MSR_P4_U2L_ESCR1		0x000003b1

#define MSR_P4_PEBS_MATRIX_VERT		0x000003f2
/*
 * Intel Core-based CPU performance counters (architectural perfmon):
 * fixed-function counters and the global control/status registers.
 */
#define MSR_CORE_PERF_FIXED_CTR0	0x00000309
#define MSR_CORE_PERF_FIXED_CTR1	0x0000030a
#define MSR_CORE_PERF_FIXED_CTR2	0x0000030b
#define MSR_CORE_PERF_FIXED_CTR_CTRL	0x0000038d
#define MSR_CORE_PERF_GLOBAL_STATUS	0x0000038e
#define MSR_CORE_PERF_GLOBAL_CTRL	0x0000038f
#define MSR_CORE_PERF_GLOBAL_OVF_CTRL	0x00000390

/* Geode defined MSRs */
#define MSR_GEODE_BUSCONT_CONF0		0x00001900
1056 | ||
/* Intel VT MSRs: VMX capability reporting registers (read-only). */
#define MSR_IA32_VMX_BASIC              0x00000480
#define MSR_IA32_VMX_PINBASED_CTLS      0x00000481
#define MSR_IA32_VMX_PROCBASED_CTLS     0x00000482
#define MSR_IA32_VMX_EXIT_CTLS          0x00000483
#define MSR_IA32_VMX_ENTRY_CTLS         0x00000484
#define MSR_IA32_VMX_MISC               0x00000485
#define MSR_IA32_VMX_CR0_FIXED0         0x00000486
#define MSR_IA32_VMX_CR0_FIXED1         0x00000487
#define MSR_IA32_VMX_CR4_FIXED0         0x00000488
#define MSR_IA32_VMX_CR4_FIXED1         0x00000489
#define MSR_IA32_VMX_VMCS_ENUM          0x0000048a
#define MSR_IA32_VMX_PROCBASED_CTLS2    0x0000048b
#define MSR_IA32_VMX_EPT_VPID_CAP       0x0000048c
/* TRUE_* variants report controls with default1 bits that may be cleared. */
#define MSR_IA32_VMX_TRUE_PINBASED_CTLS  0x0000048d
#define MSR_IA32_VMX_TRUE_PROCBASED_CTLS 0x0000048e
#define MSR_IA32_VMX_TRUE_EXIT_CTLS      0x0000048f
#define MSR_IA32_VMX_TRUE_ENTRY_CTLS     0x00000490
#define MSR_IA32_VMX_VMFUNC             0x00000491

/* VMX_BASIC bits and bitmasks */
#define VMX_BASIC_VMCS_SIZE_SHIFT	32
#define VMX_BASIC_TRUE_CTLS		(1ULL << 55)
#define VMX_BASIC_64		0x0001000000000000LLU
#define VMX_BASIC_MEM_TYPE_SHIFT	50
#define VMX_BASIC_MEM_TYPE_MASK	0x003c000000000000LLU
#define VMX_BASIC_MEM_TYPE_WB	6LLU
#define VMX_BASIC_INOUT		0x0040000000000000LLU

/* VMX_EPT_VPID_CAP bits */
#define VMX_EPT_VPID_CAP_AD_BITS	(1ULL << 21)

/* MSR_IA32_VMX_MISC bits */
#define MSR_IA32_VMX_MISC_VMWRITE_SHADOW_RO_FIELDS (1ULL << 29)
#define MSR_IA32_VMX_MISC_PREEMPTION_TIMER_SCALE   0x1F
/* AMD-V MSRs */

#define MSR_VM_CR                       0xc0010114
#define MSR_VM_IGNNE                    0xc0010115
#define MSR_VM_HSAVE_PA                 0xc0010117	/* host state-save area PA */
1097 | ||
cc68765d | 1098 | #endif /* SELFTEST_KVM_PROCESSOR_H */ |