| 1 | /* SPDX-License-Identifier: GPL-2.0 */ |
| 2 | #ifndef _ASM_X86_MSR_INDEX_H |
| 3 | #define _ASM_X86_MSR_INDEX_H |
| 4 | |
| 5 | #include <linux/bits.h> |
| 6 | |
| 7 | /* CPU model specific register (MSR) numbers. */ |
| 8 | |
| 9 | /* x86-64 specific MSRs */ |
| 10 | #define MSR_EFER 0xc0000080 /* extended feature enable register */ |
| 11 | #define MSR_STAR 0xc0000081 /* legacy mode SYSCALL target */ |
| 12 | #define MSR_LSTAR 0xc0000082 /* long mode SYSCALL target */ |
| 13 | #define MSR_CSTAR 0xc0000083 /* compat mode SYSCALL target */ |
| 14 | #define MSR_SYSCALL_MASK 0xc0000084 /* EFLAGS mask for syscall */ |
| 15 | #define MSR_FS_BASE 0xc0000100 /* 64bit FS base */ |
| 16 | #define MSR_GS_BASE 0xc0000101 /* 64bit GS base */ |
| 17 | #define MSR_KERNEL_GS_BASE 0xc0000102 /* SwapGS GS shadow */ |
| 18 | #define MSR_TSC_AUX 0xc0000103 /* Auxiliary TSC */ |
| 19 | |
| 20 | /* EFER bits: */ |
| 21 | #define _EFER_SCE 0 /* SYSCALL/SYSRET */ |
| 22 | #define _EFER_LME 8 /* Long mode enable */ |
| 23 | #define _EFER_LMA 10 /* Long mode active (read-only) */ |
| 24 | #define _EFER_NX 11 /* No execute enable */ |
| 25 | #define _EFER_SVME 12 /* Enable virtualization */ |
| 26 | #define _EFER_LMSLE 13 /* Long Mode Segment Limit Enable */ |
| 27 | #define _EFER_FFXSR 14 /* Enable Fast FXSAVE/FXRSTOR */ |
| 28 | #define _EFER_TCE 15 /* Enable Translation Cache Extensions */ |
| 29 | #define _EFER_AUTOIBRS 21 /* Enable Automatic IBRS */ |
| 30 | |
| 31 | #define EFER_SCE (1<<_EFER_SCE) |
| 32 | #define EFER_LME (1<<_EFER_LME) |
| 33 | #define EFER_LMA (1<<_EFER_LMA) |
| 34 | #define EFER_NX (1<<_EFER_NX) |
| 35 | #define EFER_SVME (1<<_EFER_SVME) |
| 36 | #define EFER_LMSLE (1<<_EFER_LMSLE) |
| 37 | #define EFER_FFXSR (1<<_EFER_FFXSR) |
| 38 | #define EFER_TCE (1<<_EFER_TCE) |
| 39 | #define EFER_AUTOIBRS (1<<_EFER_AUTOIBRS) |
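|  | |
|  | /* |
|  |  * Illustrative sketch, not part of this header: the _EFER_x values are bit |
|  |  * numbers and the EFER_x values are the corresponding masks for the 64-bit |
|  |  * MSR image. Assuming a hypothetical local "efer" holding a value read from |
|  |  * MSR_EFER: |
|  |  * |
|  |  *	if (efer & EFER_LMA)			// long mode is active |
|  |  *		efer |= EFER_SCE | EFER_NX;	// enable SYSCALL and NX |
|  |  */ |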
| 40 | |
| 41 | /* |
| 42 | * Architectural memory types that are common to MTRRs, PAT, VMX MSRs, etc. |
| 43 | * Most MSRs support/allow only a subset of memory types, but the values |
| 44 | * themselves are common across all relevant MSRs. |
| 45 | */ |
| 46 | #define X86_MEMTYPE_UC 0ull /* Uncacheable, a.k.a. Strong Uncacheable */ |
| 47 | #define X86_MEMTYPE_WC 1ull /* Write Combining */ |
| 48 | /* RESERVED 2 */ |
| 49 | /* RESERVED 3 */ |
| 50 | #define X86_MEMTYPE_WT 4ull /* Write Through */ |
| 51 | #define X86_MEMTYPE_WP 5ull /* Write Protected */ |
| 52 | #define X86_MEMTYPE_WB 6ull /* Write Back */ |
| 53 | #define X86_MEMTYPE_UC_MINUS 7ull /* Weak Uncacheable (PAT only) */ |
| 54 | |
| 55 | /* FRED MSRs */ |
| 56 | #define MSR_IA32_FRED_RSP0 0x1cc /* Level 0 stack pointer */ |
| 57 | #define MSR_IA32_FRED_RSP1 0x1cd /* Level 1 stack pointer */ |
| 58 | #define MSR_IA32_FRED_RSP2 0x1ce /* Level 2 stack pointer */ |
| 59 | #define MSR_IA32_FRED_RSP3 0x1cf /* Level 3 stack pointer */ |
| 60 | #define MSR_IA32_FRED_STKLVLS 0x1d0 /* Exception stack levels */ |
| 61 | #define MSR_IA32_FRED_SSP0 MSR_IA32_PL0_SSP /* Level 0 shadow stack pointer */ |
| 62 | #define MSR_IA32_FRED_SSP1 0x1d1 /* Level 1 shadow stack pointer */ |
| 63 | #define MSR_IA32_FRED_SSP2 0x1d2 /* Level 2 shadow stack pointer */ |
| 64 | #define MSR_IA32_FRED_SSP3 0x1d3 /* Level 3 shadow stack pointer */ |
| 65 | #define MSR_IA32_FRED_CONFIG 0x1d4 /* Entrypoint and interrupt stack level */ |
| 66 | |
| 67 | /* Intel MSRs. Some also available on other CPUs */ |
| 68 | #define MSR_TEST_CTRL 0x00000033 |
| 69 | #define MSR_TEST_CTRL_SPLIT_LOCK_DETECT_BIT 29 |
| 70 | #define MSR_TEST_CTRL_SPLIT_LOCK_DETECT BIT(MSR_TEST_CTRL_SPLIT_LOCK_DETECT_BIT) |
| 71 | |
| 72 | #define MSR_IA32_SPEC_CTRL 0x00000048 /* Speculation Control */ |
| 73 | #define SPEC_CTRL_IBRS BIT(0) /* Indirect Branch Restricted Speculation */ |
| 74 | #define SPEC_CTRL_STIBP_SHIFT 1 /* Single Thread Indirect Branch Predictor (STIBP) bit */ |
| 75 | #define SPEC_CTRL_STIBP BIT(SPEC_CTRL_STIBP_SHIFT) /* STIBP mask */ |
| 76 | #define SPEC_CTRL_SSBD_SHIFT 2 /* Speculative Store Bypass Disable bit */ |
| 77 | #define SPEC_CTRL_SSBD BIT(SPEC_CTRL_SSBD_SHIFT) /* Speculative Store Bypass Disable */ |
| 78 | #define SPEC_CTRL_RRSBA_DIS_S_SHIFT 6 /* Disable RRSBA behavior */ |
| 79 | #define SPEC_CTRL_RRSBA_DIS_S BIT(SPEC_CTRL_RRSBA_DIS_S_SHIFT) |
| 80 | #define SPEC_CTRL_BHI_DIS_S_SHIFT 10 /* Disable Branch History Injection behavior */ |
| 81 | #define SPEC_CTRL_BHI_DIS_S BIT(SPEC_CTRL_BHI_DIS_S_SHIFT) |
| 82 | |
| 83 | /* A mask for bits which the kernel toggles when controlling mitigations */ |
| 84 | #define SPEC_CTRL_MITIGATIONS_MASK (SPEC_CTRL_IBRS | SPEC_CTRL_STIBP | SPEC_CTRL_SSBD \ |
| 85 | | SPEC_CTRL_RRSBA_DIS_S \ |
| 86 | | SPEC_CTRL_BHI_DIS_S) |
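|  | |
|  | /* |
|  |  * Illustrative sketch, not part of this header: the mask lets an update of |
|  |  * the mitigation bits preserve any other SPEC_CTRL bits already set. |
|  |  * Assuming hypothetical variables "cur" (current MSR value) and "mit" |
|  |  * (desired mitigation bits): |
|  |  * |
|  |  *	u64 next = (cur & ~SPEC_CTRL_MITIGATIONS_MASK) | mit; |
|  |  */ |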
| 87 | |
| 88 | #define MSR_IA32_PRED_CMD 0x00000049 /* Prediction Command */ |
| 89 | #define PRED_CMD_IBPB BIT(0) /* Indirect Branch Prediction Barrier */ |
| 90 | #define PRED_CMD_SBPB BIT(7) /* Selective Branch Prediction Barrier */ |
| 91 | |
| 92 | #define MSR_PPIN_CTL 0x0000004e |
| 93 | #define MSR_PPIN 0x0000004f |
| 94 | |
| 95 | #define MSR_IA32_PERFCTR0 0x000000c1 |
| 96 | #define MSR_IA32_PERFCTR1 0x000000c2 |
| 97 | #define MSR_FSB_FREQ 0x000000cd |
| 98 | #define MSR_PLATFORM_INFO 0x000000ce |
| 99 | #define MSR_PLATFORM_INFO_CPUID_FAULT_BIT 31 |
| 100 | #define MSR_PLATFORM_INFO_CPUID_FAULT BIT_ULL(MSR_PLATFORM_INFO_CPUID_FAULT_BIT) |
| 101 | |
| 102 | #define MSR_IA32_UMWAIT_CONTROL 0xe1 |
| 103 | #define MSR_IA32_UMWAIT_CONTROL_C02_DISABLE BIT(0) |
| 104 | #define MSR_IA32_UMWAIT_CONTROL_RESERVED BIT(1) |
| 105 | /* |
| 106 |  * The time field occupies bits [31:2], representing a 32-bit value |
| 107 |  * whose bits [1:0] are always zero. |
| 108 | */ |
| 109 | #define MSR_IA32_UMWAIT_CONTROL_TIME_MASK (~0x03U) |
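|  | |
|  | /* |
|  |  * Illustrative sketch, not part of this header: a control value is the |
|  |  * TSC-quanta limit with bits [1:0] cleared, optionally OR-ed with the C0.2 |
|  |  * disable bit. Assuming a hypothetical "max_time" in TSC units: |
|  |  * |
|  |  *	u32 ctrl = (max_time & MSR_IA32_UMWAIT_CONTROL_TIME_MASK) | |
|  |  *		   MSR_IA32_UMWAIT_CONTROL_C02_DISABLE; |
|  |  */ |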
| 110 | |
| 111 | /* Abbreviated from Intel SDM name IA32_CORE_CAPABILITIES */ |
| 112 | #define MSR_IA32_CORE_CAPS 0x000000cf |
| 113 | #define MSR_IA32_CORE_CAPS_INTEGRITY_CAPS_BIT 2 |
| 114 | #define MSR_IA32_CORE_CAPS_INTEGRITY_CAPS BIT(MSR_IA32_CORE_CAPS_INTEGRITY_CAPS_BIT) |
| 115 | #define MSR_IA32_CORE_CAPS_SPLIT_LOCK_DETECT_BIT 5 |
| 116 | #define MSR_IA32_CORE_CAPS_SPLIT_LOCK_DETECT BIT(MSR_IA32_CORE_CAPS_SPLIT_LOCK_DETECT_BIT) |
| 117 | |
| 118 | #define MSR_PKG_CST_CONFIG_CONTROL 0x000000e2 |
| 119 | #define NHM_C3_AUTO_DEMOTE (1UL << 25) |
| 120 | #define NHM_C1_AUTO_DEMOTE (1UL << 26) |
| 121 | #define ATM_LNC_C6_AUTO_DEMOTE (1UL << 25) |
| 122 | #define SNB_C3_AUTO_UNDEMOTE (1UL << 27) |
| 123 | #define SNB_C1_AUTO_UNDEMOTE (1UL << 28) |
| 124 | |
| 125 | #define MSR_MTRRcap 0x000000fe |
| 126 | |
| 127 | #define MSR_IA32_ARCH_CAPABILITIES 0x0000010a |
| 128 | #define ARCH_CAP_RDCL_NO BIT(0) /* Not susceptible to Meltdown */ |
| 129 | #define ARCH_CAP_IBRS_ALL BIT(1) /* Enhanced IBRS support */ |
| 130 | #define ARCH_CAP_RSBA BIT(2) /* RET may use alternative branch predictors */ |
| 131 | #define ARCH_CAP_SKIP_VMENTRY_L1DFLUSH BIT(3) /* Skip L1D flush on vmentry */ |
| 132 | #define ARCH_CAP_SSB_NO BIT(4) /* |
| 133 | * Not susceptible to Speculative Store Bypass |
| 134 | * attack, so no Speculative Store Bypass |
| 135 | * control required. |
| 136 | */ |
| 137 | #define ARCH_CAP_MDS_NO BIT(5) /* |
| 138 | * Not susceptible to |
| 139 | * Microarchitectural Data |
| 140 | * Sampling (MDS) vulnerabilities. |
| 141 | */ |
| 142 | #define ARCH_CAP_PSCHANGE_MC_NO BIT(6) /* |
| 143 | * The processor is not susceptible to a |
| 144 | * machine check error due to modifying the |
| 145 | * code page size along with either the |
| 146 | * physical address or cache type |
| 147 | * without TLB invalidation. |
| 148 | */ |
| 149 | #define ARCH_CAP_TSX_CTRL_MSR BIT(7) /* MSR for TSX control is available. */ |
| 150 | #define ARCH_CAP_TAA_NO BIT(8) /* |
| 151 | * Not susceptible to |
| 152 | * TSX Async Abort (TAA) vulnerabilities. |
| 153 | */ |
| 154 | #define ARCH_CAP_SBDR_SSDP_NO BIT(13) /* |
| 155 | * Not susceptible to SBDR and SSDP |
| 156 | * variants of Processor MMIO stale data |
| 157 | * vulnerabilities. |
| 158 | */ |
| 159 | #define ARCH_CAP_FBSDP_NO BIT(14) /* |
| 160 | * Not susceptible to FBSDP variant of |
| 161 | * Processor MMIO stale data |
| 162 | * vulnerabilities. |
| 163 | */ |
| 164 | #define ARCH_CAP_PSDP_NO BIT(15) /* |
| 165 | * Not susceptible to PSDP variant of |
| 166 | * Processor MMIO stale data |
| 167 | * vulnerabilities. |
| 168 | */ |
| 169 | #define ARCH_CAP_FB_CLEAR BIT(17) /* |
| 170 | * VERW clears CPU fill buffer |
| 171 | * even on MDS_NO CPUs. |
| 172 | */ |
| 173 | #define ARCH_CAP_FB_CLEAR_CTRL BIT(18) /* |
| 174 | * MSR_IA32_MCU_OPT_CTRL[FB_CLEAR_DIS] |
| 175 | * bit available to control VERW |
| 176 | * behavior. |
| 177 | */ |
| 178 | #define ARCH_CAP_RRSBA BIT(19) /* |
| 179 | * Indicates RET may use predictors |
| 180 |  * other than the RSB. With eIBRS |
| 181 |  * enabled, predictions in kernel mode |
| 182 |  * are restricted to targets in the |
| 183 |  * kernel. |
| 184 | */ |
| 185 | #define ARCH_CAP_BHI_NO BIT(20) /* |
| 186 | * CPU is not affected by Branch |
| 187 | * History Injection. |
| 188 | */ |
| 189 | #define ARCH_CAP_XAPIC_DISABLE BIT(21) /* |
| 190 | * IA32_XAPIC_DISABLE_STATUS MSR |
| 191 | * supported |
| 192 | */ |
| 193 | #define ARCH_CAP_PBRSB_NO BIT(24) /* |
| 194 | * Not susceptible to Post-Barrier |
| 195 | * Return Stack Buffer Predictions. |
| 196 | */ |
| 197 | #define ARCH_CAP_GDS_CTRL BIT(25) /* |
| 198 | * CPU is vulnerable to Gather |
| 199 | * Data Sampling (GDS) and |
| 200 | * has controls for mitigation. |
| 201 | */ |
| 202 | #define ARCH_CAP_GDS_NO BIT(26) /* |
| 203 | * CPU is not vulnerable to Gather |
| 204 | * Data Sampling (GDS). |
| 205 | */ |
| 206 | #define ARCH_CAP_RFDS_NO BIT(27) /* |
| 207 | * Not susceptible to Register |
| 208 | * File Data Sampling. |
| 209 | */ |
| 210 | #define ARCH_CAP_RFDS_CLEAR BIT(28) /* |
| 211 | * VERW clears CPU Register |
| 212 | * File. |
| 213 | */ |
| 214 | #define ARCH_CAP_ITS_NO BIT_ULL(62) /* |
| 215 | * Not susceptible to |
| 216 | * Indirect Target Selection. |
| 217 | * This bit is not set by |
| 218 | * HW, but is synthesized by |
| 219 | * VMMs for guests to know |
| 220 | * their affected status. |
| 221 | */ |
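|  | |
|  | /* |
|  |  * Illustrative sketch, not part of this header: vulnerability enumeration |
|  |  * is simply a matter of testing these bits in the value read from |
|  |  * MSR_IA32_ARCH_CAPABILITIES. Assuming a hypothetical "ia32_cap" variable: |
|  |  * |
|  |  *	if (ia32_cap & ARCH_CAP_RDCL_NO) |
|  |  *		... CPU enumerates itself as not affected by Meltdown ... |
|  |  */ |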
| 222 | |
| 223 | #define MSR_IA32_FLUSH_CMD 0x0000010b |
| 224 | #define L1D_FLUSH BIT(0) /* |
| 225 | * Writeback and invalidate the |
| 226 | * L1 data cache. |
| 227 | */ |
| 228 | |
| 229 | #define MSR_IA32_BBL_CR_CTL 0x00000119 |
| 230 | #define MSR_IA32_BBL_CR_CTL3 0x0000011e |
| 231 | |
| 232 | #define MSR_IA32_TSX_CTRL 0x00000122 |
| 233 | #define TSX_CTRL_RTM_DISABLE BIT(0) /* Disable RTM feature */ |
| 234 | #define TSX_CTRL_CPUID_CLEAR BIT(1) /* Disable TSX enumeration */ |
| 235 | |
| 236 | #define MSR_IA32_MCU_OPT_CTRL 0x00000123 |
| 237 | #define RNGDS_MITG_DIS BIT(0) /* Disable SRBDS mitigation */ |
| 238 | #define RTM_ALLOW BIT(1) /* TSX development mode */ |
| 239 | #define FB_CLEAR_DIS BIT(3) /* CPU Fill buffer clear disable */ |
| 240 | #define GDS_MITG_DIS BIT(4) /* Disable GDS mitigation */ |
| 241 | #define GDS_MITG_LOCKED BIT(5) /* GDS mitigation locked */ |
| 242 | |
| 243 | #define MSR_IA32_SYSENTER_CS 0x00000174 |
| 244 | #define MSR_IA32_SYSENTER_ESP 0x00000175 |
| 245 | #define MSR_IA32_SYSENTER_EIP 0x00000176 |
| 246 | |
| 247 | #define MSR_IA32_MCG_CAP 0x00000179 |
| 248 | #define MSR_IA32_MCG_STATUS 0x0000017a |
| 249 | #define MSR_IA32_MCG_CTL 0x0000017b |
| 250 | #define MSR_ERROR_CONTROL 0x0000017f |
| 251 | #define MSR_IA32_MCG_EXT_CTL 0x000004d0 |
| 252 | |
| 253 | #define MSR_OFFCORE_RSP_0 0x000001a6 |
| 254 | #define MSR_OFFCORE_RSP_1 0x000001a7 |
| 255 | #define MSR_TURBO_RATIO_LIMIT 0x000001ad |
| 256 | #define MSR_TURBO_RATIO_LIMIT1 0x000001ae |
| 257 | #define MSR_TURBO_RATIO_LIMIT2 0x000001af |
| 258 | |
| 259 | #define MSR_SNOOP_RSP_0 0x00001328 |
| 260 | #define MSR_SNOOP_RSP_1 0x00001329 |
| 261 | |
| 262 | #define MSR_LBR_SELECT 0x000001c8 |
| 263 | #define MSR_LBR_TOS 0x000001c9 |
| 264 | |
| 265 | #define MSR_IA32_POWER_CTL 0x000001fc |
| 266 | #define MSR_IA32_POWER_CTL_BIT_EE 19 |
| 267 | |
| 268 | /* Abbreviated from Intel SDM name IA32_INTEGRITY_CAPABILITIES */ |
| 269 | #define MSR_INTEGRITY_CAPS 0x000002d9 |
| 270 | #define MSR_INTEGRITY_CAPS_ARRAY_BIST_BIT 2 |
| 271 | #define MSR_INTEGRITY_CAPS_ARRAY_BIST BIT(MSR_INTEGRITY_CAPS_ARRAY_BIST_BIT) |
| 272 | #define MSR_INTEGRITY_CAPS_PERIODIC_BIST_BIT 4 |
| 273 | #define MSR_INTEGRITY_CAPS_PERIODIC_BIST BIT(MSR_INTEGRITY_CAPS_PERIODIC_BIST_BIT) |
| 274 | #define MSR_INTEGRITY_CAPS_SBAF_BIT 8 |
| 275 | #define MSR_INTEGRITY_CAPS_SBAF BIT(MSR_INTEGRITY_CAPS_SBAF_BIT) |
| 276 | #define MSR_INTEGRITY_CAPS_SAF_GEN_MASK GENMASK_ULL(10, 9) |
| 277 | |
| 278 | #define MSR_LBR_NHM_FROM 0x00000680 |
| 279 | #define MSR_LBR_NHM_TO 0x000006c0 |
| 280 | #define MSR_LBR_CORE_FROM 0x00000040 |
| 281 | #define MSR_LBR_CORE_TO 0x00000060 |
| 282 | |
| 283 | #define MSR_LBR_INFO_0 0x00000dc0 /* ... 0xddf for _31 */ |
| 284 | #define LBR_INFO_MISPRED BIT_ULL(63) |
| 285 | #define LBR_INFO_IN_TX BIT_ULL(62) |
| 286 | #define LBR_INFO_ABORT BIT_ULL(61) |
| 287 | #define LBR_INFO_CYC_CNT_VALID BIT_ULL(60) |
| 288 | #define LBR_INFO_CYCLES 0xffff |
| 289 | #define LBR_INFO_BR_TYPE_OFFSET 56 |
| 290 | #define LBR_INFO_BR_TYPE (0xfull << LBR_INFO_BR_TYPE_OFFSET) |
| 291 | #define LBR_INFO_BR_CNTR_OFFSET 32 |
| 292 | #define LBR_INFO_BR_CNTR_NUM 4 |
| 293 | #define LBR_INFO_BR_CNTR_BITS 2 |
| 294 | #define LBR_INFO_BR_CNTR_MASK GENMASK_ULL(LBR_INFO_BR_CNTR_BITS - 1, 0) |
| 295 | #define LBR_INFO_BR_CNTR_FULL_MASK GENMASK_ULL(LBR_INFO_BR_CNTR_NUM * LBR_INFO_BR_CNTR_BITS - 1, 0) |
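|  | |
|  | /* |
|  |  * Illustrative sketch, not part of this header: decoding one LBR_INFO |
|  |  * record is plain mask-and-shift on the 64-bit value. Assuming a |
|  |  * hypothetical "info" variable read from MSR_LBR_INFO_0 + i: |
|  |  * |
|  |  *	bool mispred = info & LBR_INFO_MISPRED; |
|  |  *	u16 cycles = info & LBR_INFO_CYCLES; |
|  |  *	unsigned int type = (info & LBR_INFO_BR_TYPE) >> LBR_INFO_BR_TYPE_OFFSET; |
|  |  */ |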
| 296 | |
| 297 | #define MSR_ARCH_LBR_CTL 0x000014ce |
| 298 | #define ARCH_LBR_CTL_LBREN BIT(0) |
| 299 | #define ARCH_LBR_CTL_CPL_OFFSET 1 |
| 300 | #define ARCH_LBR_CTL_CPL (0x3ull << ARCH_LBR_CTL_CPL_OFFSET) |
| 301 | #define ARCH_LBR_CTL_STACK_OFFSET 3 |
| 302 | #define ARCH_LBR_CTL_STACK (0x1ull << ARCH_LBR_CTL_STACK_OFFSET) |
| 303 | #define ARCH_LBR_CTL_FILTER_OFFSET 16 |
| 304 | #define ARCH_LBR_CTL_FILTER (0x7full << ARCH_LBR_CTL_FILTER_OFFSET) |
| 305 | #define MSR_ARCH_LBR_DEPTH 0x000014cf |
| 306 | #define MSR_ARCH_LBR_FROM_0 0x00001500 |
| 307 | #define MSR_ARCH_LBR_TO_0 0x00001600 |
| 308 | #define MSR_ARCH_LBR_INFO_0 0x00001200 |
| 309 | |
| 310 | #define MSR_IA32_PEBS_ENABLE 0x000003f1 |
| 311 | #define MSR_PEBS_DATA_CFG 0x000003f2 |
| 312 | #define MSR_IA32_DS_AREA 0x00000600 |
| 313 | #define MSR_IA32_PERF_CAPABILITIES 0x00000345 |
| 314 | #define PERF_CAP_METRICS_IDX 15 |
| 315 | #define PERF_CAP_PT_IDX 16 |
| 316 | |
| 317 | #define MSR_PEBS_LD_LAT_THRESHOLD 0x000003f6 |
| 318 | #define PERF_CAP_PEBS_TRAP BIT_ULL(6) |
| 319 | #define PERF_CAP_ARCH_REG BIT_ULL(7) |
| 320 | #define PERF_CAP_PEBS_FORMAT 0xf00 |
| 321 | #define PERF_CAP_PEBS_BASELINE BIT_ULL(14) |
| 322 | #define PERF_CAP_PEBS_MASK (PERF_CAP_PEBS_TRAP | PERF_CAP_ARCH_REG | \ |
| 323 | PERF_CAP_PEBS_FORMAT | PERF_CAP_PEBS_BASELINE) |
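|  | |
|  | /* |
|  |  * Illustrative sketch, not part of this header: PERF_CAP_PEBS_FORMAT is a |
|  |  * multi-bit field (bits [11:8]), so it is extracted rather than tested. |
|  |  * Assuming a hypothetical "caps" value read from MSR_IA32_PERF_CAPABILITIES: |
|  |  * |
|  |  *	unsigned int pebs_fmt = (caps & PERF_CAP_PEBS_FORMAT) >> 8; |
|  |  *	bool baseline = caps & PERF_CAP_PEBS_BASELINE; |
|  |  */ |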
| 324 | |
| 325 | #define MSR_IA32_RTIT_CTL 0x00000570 |
| 326 | #define RTIT_CTL_TRACEEN BIT(0) |
| 327 | #define RTIT_CTL_CYCLEACC BIT(1) |
| 328 | #define RTIT_CTL_OS BIT(2) |
| 329 | #define RTIT_CTL_USR BIT(3) |
| 330 | #define RTIT_CTL_PWR_EVT_EN BIT(4) |
| 331 | #define RTIT_CTL_FUP_ON_PTW BIT(5) |
| 332 | #define RTIT_CTL_FABRIC_EN BIT(6) |
| 333 | #define RTIT_CTL_CR3EN BIT(7) |
| 334 | #define RTIT_CTL_TOPA BIT(8) |
| 335 | #define RTIT_CTL_MTC_EN BIT(9) |
| 336 | #define RTIT_CTL_TSC_EN BIT(10) |
| 337 | #define RTIT_CTL_DISRETC BIT(11) |
| 338 | #define RTIT_CTL_PTW_EN BIT(12) |
| 339 | #define RTIT_CTL_BRANCH_EN BIT(13) |
| 340 | #define RTIT_CTL_EVENT_EN BIT(31) |
| 341 | #define RTIT_CTL_NOTNT BIT_ULL(55) |
| 342 | #define RTIT_CTL_MTC_RANGE_OFFSET 14 |
| 343 | #define RTIT_CTL_MTC_RANGE (0x0full << RTIT_CTL_MTC_RANGE_OFFSET) |
| 344 | #define RTIT_CTL_CYC_THRESH_OFFSET 19 |
| 345 | #define RTIT_CTL_CYC_THRESH (0x0full << RTIT_CTL_CYC_THRESH_OFFSET) |
| 346 | #define RTIT_CTL_PSB_FREQ_OFFSET 24 |
| 347 | #define RTIT_CTL_PSB_FREQ (0x0full << RTIT_CTL_PSB_FREQ_OFFSET) |
| 348 | #define RTIT_CTL_ADDR0_OFFSET 32 |
| 349 | #define RTIT_CTL_ADDR0 (0x0full << RTIT_CTL_ADDR0_OFFSET) |
| 350 | #define RTIT_CTL_ADDR1_OFFSET 36 |
| 351 | #define RTIT_CTL_ADDR1 (0x0full << RTIT_CTL_ADDR1_OFFSET) |
| 352 | #define RTIT_CTL_ADDR2_OFFSET 40 |
| 353 | #define RTIT_CTL_ADDR2 (0x0full << RTIT_CTL_ADDR2_OFFSET) |
| 354 | #define RTIT_CTL_ADDR3_OFFSET 44 |
| 355 | #define RTIT_CTL_ADDR3 (0x0full << RTIT_CTL_ADDR3_OFFSET) |
| 356 | #define MSR_IA32_RTIT_STATUS 0x00000571 |
| 357 | #define RTIT_STATUS_FILTEREN BIT(0) |
| 358 | #define RTIT_STATUS_CONTEXTEN BIT(1) |
| 359 | #define RTIT_STATUS_TRIGGEREN BIT(2) |
| 360 | #define RTIT_STATUS_BUFFOVF BIT(3) |
| 361 | #define RTIT_STATUS_ERROR BIT(4) |
| 362 | #define RTIT_STATUS_STOPPED BIT(5) |
| 363 | #define RTIT_STATUS_BYTECNT_OFFSET 32 |
| 364 | #define RTIT_STATUS_BYTECNT (0x1ffffull << RTIT_STATUS_BYTECNT_OFFSET) |
| 365 | #define MSR_IA32_RTIT_ADDR0_A 0x00000580 |
| 366 | #define MSR_IA32_RTIT_ADDR0_B 0x00000581 |
| 367 | #define MSR_IA32_RTIT_ADDR1_A 0x00000582 |
| 368 | #define MSR_IA32_RTIT_ADDR1_B 0x00000583 |
| 369 | #define MSR_IA32_RTIT_ADDR2_A 0x00000584 |
| 370 | #define MSR_IA32_RTIT_ADDR2_B 0x00000585 |
| 371 | #define MSR_IA32_RTIT_ADDR3_A 0x00000586 |
| 372 | #define MSR_IA32_RTIT_ADDR3_B 0x00000587 |
| 373 | #define MSR_IA32_RTIT_CR3_MATCH 0x00000572 |
| 374 | #define MSR_IA32_RTIT_OUTPUT_BASE 0x00000560 |
| 375 | #define MSR_IA32_RTIT_OUTPUT_MASK 0x00000561 |
| 376 | |
| 377 | #define MSR_MTRRfix64K_00000 0x00000250 |
| 378 | #define MSR_MTRRfix16K_80000 0x00000258 |
| 379 | #define MSR_MTRRfix16K_A0000 0x00000259 |
| 380 | #define MSR_MTRRfix4K_C0000 0x00000268 |
| 381 | #define MSR_MTRRfix4K_C8000 0x00000269 |
| 382 | #define MSR_MTRRfix4K_D0000 0x0000026a |
| 383 | #define MSR_MTRRfix4K_D8000 0x0000026b |
| 384 | #define MSR_MTRRfix4K_E0000 0x0000026c |
| 385 | #define MSR_MTRRfix4K_E8000 0x0000026d |
| 386 | #define MSR_MTRRfix4K_F0000 0x0000026e |
| 387 | #define MSR_MTRRfix4K_F8000 0x0000026f |
| 388 | #define MSR_MTRRdefType 0x000002ff |
| 389 | |
| 390 | #define MSR_IA32_CR_PAT 0x00000277 |
| 391 | |
| 392 | #define PAT_VALUE(p0, p1, p2, p3, p4, p5, p6, p7) \ |
| 393 | ((X86_MEMTYPE_ ## p0) | (X86_MEMTYPE_ ## p1 << 8) | \ |
| 394 | (X86_MEMTYPE_ ## p2 << 16) | (X86_MEMTYPE_ ## p3 << 24) | \ |
| 395 | (X86_MEMTYPE_ ## p4 << 32) | (X86_MEMTYPE_ ## p5 << 40) | \ |
| 396 | (X86_MEMTYPE_ ## p6 << 48) | (X86_MEMTYPE_ ## p7 << 56)) |
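|  | |
|  | /* |
|  |  * Illustrative sketch, not part of this header: PAT_VALUE() packs eight |
|  |  * X86_MEMTYPE_* values into one MSR_IA32_CR_PAT image, entry 0 in the low |
|  |  * byte. For example, a PAT with WB/WC/UC-/UC in the first four entries |
|  |  * could be written as: |
|  |  * |
|  |  *	u64 pat = PAT_VALUE(WB, WC, UC_MINUS, UC, WB, WP, UC_MINUS, WT); |
|  |  */ |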
| 397 | |
| 398 | #define MSR_IA32_DEBUGCTLMSR 0x000001d9 |
| 399 | #define MSR_IA32_LASTBRANCHFROMIP 0x000001db |
| 400 | #define MSR_IA32_LASTBRANCHTOIP 0x000001dc |
| 401 | #define MSR_IA32_LASTINTFROMIP 0x000001dd |
| 402 | #define MSR_IA32_LASTINTTOIP 0x000001de |
| 403 | |
| 404 | #define MSR_IA32_PASID 0x00000d93 |
| 405 | #define MSR_IA32_PASID_VALID BIT_ULL(31) |
| 406 | |
| 407 | /* DEBUGCTLMSR bits (others vary by model): */ |
| 408 | #define DEBUGCTLMSR_LBR_BIT 0 /* last branch recording */ |
| 409 | #define DEBUGCTLMSR_LBR (1UL << DEBUGCTLMSR_LBR_BIT) |
| 410 | #define DEBUGCTLMSR_BTF_SHIFT 1 |
| 411 | #define DEBUGCTLMSR_BTF (1UL << DEBUGCTLMSR_BTF_SHIFT) /* single-step on branches */ |
| 412 | #define DEBUGCTLMSR_BUS_LOCK_DETECT (1UL << 2) |
| 413 | #define DEBUGCTLMSR_TR (1UL << 6) |
| 414 | #define DEBUGCTLMSR_BTS (1UL << 7) |
| 415 | #define DEBUGCTLMSR_BTINT (1UL << 8) |
| 416 | #define DEBUGCTLMSR_BTS_OFF_OS (1UL << 9) |
| 417 | #define DEBUGCTLMSR_BTS_OFF_USR (1UL << 10) |
| 418 | #define DEBUGCTLMSR_FREEZE_LBRS_ON_PMI (1UL << 11) |
| 419 | #define DEBUGCTLMSR_FREEZE_PERFMON_ON_PMI (1UL << 12) |
| 420 | #define DEBUGCTLMSR_FREEZE_IN_SMM_BIT 14 |
| 421 | #define DEBUGCTLMSR_FREEZE_IN_SMM (1UL << DEBUGCTLMSR_FREEZE_IN_SMM_BIT) |
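|  | |
|  | /* |
|  |  * Illustrative sketch, not part of this header: last-branch recording with |
|  |  * the records frozen on a PMI would be enabled by OR-ing the relevant bits |
|  |  * into the current MSR_IA32_DEBUGCTLMSR value, e.g.: |
|  |  * |
|  |  *	debugctl |= DEBUGCTLMSR_LBR | DEBUGCTLMSR_FREEZE_LBRS_ON_PMI; |
|  |  */ |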
| 422 | |
| 423 | #define MSR_PEBS_FRONTEND 0x000003f7 |
| 424 | |
| 425 | #define MSR_IA32_MC0_CTL 0x00000400 |
| 426 | #define MSR_IA32_MC0_STATUS 0x00000401 |
| 427 | #define MSR_IA32_MC0_ADDR 0x00000402 |
| 428 | #define MSR_IA32_MC0_MISC 0x00000403 |
| 429 | |
| 430 | /* C-state Residency Counters */ |
| 431 | #define MSR_PKG_C3_RESIDENCY 0x000003f8 |
| 432 | #define MSR_PKG_C6_RESIDENCY 0x000003f9 |
| 433 | #define MSR_ATOM_PKG_C6_RESIDENCY 0x000003fa |
| 434 | #define MSR_PKG_C7_RESIDENCY 0x000003fa |
| 435 | #define MSR_CORE_C3_RESIDENCY 0x000003fc |
| 436 | #define MSR_CORE_C6_RESIDENCY 0x000003fd |
| 437 | #define MSR_CORE_C7_RESIDENCY 0x000003fe |
| 438 | #define MSR_KNL_CORE_C6_RESIDENCY 0x000003ff |
| 439 | #define MSR_PKG_C2_RESIDENCY 0x0000060d |
| 440 | #define MSR_PKG_C8_RESIDENCY 0x00000630 |
| 441 | #define MSR_PKG_C9_RESIDENCY 0x00000631 |
| 442 | #define MSR_PKG_C10_RESIDENCY 0x00000632 |
| 443 | |
| 444 | /* Interrupt Response Limit */ |
| 445 | #define MSR_PKGC3_IRTL 0x0000060a |
| 446 | #define MSR_PKGC6_IRTL 0x0000060b |
| 447 | #define MSR_PKGC7_IRTL 0x0000060c |
| 448 | #define MSR_PKGC8_IRTL 0x00000633 |
| 449 | #define MSR_PKGC9_IRTL 0x00000634 |
| 450 | #define MSR_PKGC10_IRTL 0x00000635 |
| 451 | |
| 452 | /* Run Time Average Power Limiting (RAPL) Interface */ |
| 453 | |
| 454 | #define MSR_VR_CURRENT_CONFIG 0x00000601 |
| 455 | #define MSR_RAPL_POWER_UNIT 0x00000606 |
| 456 | |
| 457 | #define MSR_PKG_POWER_LIMIT 0x00000610 |
| 458 | #define MSR_PKG_ENERGY_STATUS 0x00000611 |
| 459 | #define MSR_PKG_PERF_STATUS 0x00000613 |
| 460 | #define MSR_PKG_POWER_INFO 0x00000614 |
| 461 | |
| 462 | #define MSR_DRAM_POWER_LIMIT 0x00000618 |
| 463 | #define MSR_DRAM_ENERGY_STATUS 0x00000619 |
| 464 | #define MSR_DRAM_PERF_STATUS 0x0000061b |
| 465 | #define MSR_DRAM_POWER_INFO 0x0000061c |
| 466 | |
| 467 | #define MSR_PP0_POWER_LIMIT 0x00000638 |
| 468 | #define MSR_PP0_ENERGY_STATUS 0x00000639 |
| 469 | #define MSR_PP0_POLICY 0x0000063a |
| 470 | #define MSR_PP0_PERF_STATUS 0x0000063b |
| 471 | |
| 472 | #define MSR_PP1_POWER_LIMIT 0x00000640 |
| 473 | #define MSR_PP1_ENERGY_STATUS 0x00000641 |
| 474 | #define MSR_PP1_POLICY 0x00000642 |
| 475 | |
| 476 | #define MSR_AMD_RAPL_POWER_UNIT 0xc0010299 |
| 477 | #define MSR_AMD_CORE_ENERGY_STATUS 0xc001029a |
| 478 | #define MSR_AMD_PKG_ENERGY_STATUS 0xc001029b |
| 479 | |
| 480 | /* Config TDP MSRs */ |
| 481 | #define MSR_CONFIG_TDP_NOMINAL 0x00000648 |
| 482 | #define MSR_CONFIG_TDP_LEVEL_1 0x00000649 |
| 483 | #define MSR_CONFIG_TDP_LEVEL_2 0x0000064A |
| 484 | #define MSR_CONFIG_TDP_CONTROL 0x0000064B |
| 485 | #define MSR_TURBO_ACTIVATION_RATIO 0x0000064C |
| 486 | |
| 487 | #define MSR_PLATFORM_ENERGY_STATUS 0x0000064D |
| 488 | #define MSR_SECONDARY_TURBO_RATIO_LIMIT 0x00000650 |
| 489 | |
| 490 | #define MSR_PKG_WEIGHTED_CORE_C0_RES 0x00000658 |
| 491 | #define MSR_PKG_ANY_CORE_C0_RES 0x00000659 |
| 492 | #define MSR_PKG_ANY_GFXE_C0_RES 0x0000065A |
| 493 | #define MSR_PKG_BOTH_CORE_GFXE_C0_RES 0x0000065B |
| 494 | |
| 495 | #define MSR_CORE_C1_RES 0x00000660 |
| 496 | #define MSR_MODULE_C6_RES_MS 0x00000664 |
| 497 | |
| 498 | #define MSR_CC6_DEMOTION_POLICY_CONFIG 0x00000668 |
| 499 | #define MSR_MC6_DEMOTION_POLICY_CONFIG 0x00000669 |
| 500 | |
| 501 | #define MSR_ATOM_CORE_RATIOS 0x0000066a |
| 502 | #define MSR_ATOM_CORE_VIDS 0x0000066b |
| 503 | #define MSR_ATOM_CORE_TURBO_RATIOS 0x0000066c |
| 504 | #define MSR_ATOM_CORE_TURBO_VIDS 0x0000066d |
| 505 | |
| 506 | #define MSR_CORE_PERF_LIMIT_REASONS 0x00000690 |
| 507 | #define MSR_GFX_PERF_LIMIT_REASONS 0x000006B0 |
| 508 | #define MSR_RING_PERF_LIMIT_REASONS 0x000006B1 |
| 509 | |
| 510 | /* Control-flow Enforcement Technology MSRs */ |
| 511 | #define MSR_IA32_U_CET 0x000006a0 /* user mode CET */ |
| 512 | #define MSR_IA32_S_CET 0x000006a2 /* kernel mode CET */ |
| 513 | #define CET_SHSTK_EN BIT_ULL(0) |
| 514 | #define CET_WRSS_EN BIT_ULL(1) |
| 515 | #define CET_ENDBR_EN BIT_ULL(2) |
| 516 | #define CET_LEG_IW_EN BIT_ULL(3) |
| 517 | #define CET_NO_TRACK_EN BIT_ULL(4) |
| 518 | #define CET_SUPPRESS_DISABLE BIT_ULL(5) |
| 519 | #define CET_RESERVED (BIT_ULL(6) | BIT_ULL(7) | BIT_ULL(8) | BIT_ULL(9)) |
| 520 | #define CET_SUPPRESS BIT_ULL(10) |
| 521 | #define CET_WAIT_ENDBR BIT_ULL(11) |
| 522 | |
| 523 | #define MSR_IA32_PL0_SSP 0x000006a4 /* ring-0 shadow stack pointer */ |
| 524 | #define MSR_IA32_PL1_SSP 0x000006a5 /* ring-1 shadow stack pointer */ |
| 525 | #define MSR_IA32_PL2_SSP 0x000006a6 /* ring-2 shadow stack pointer */ |
| 526 | #define MSR_IA32_PL3_SSP 0x000006a7 /* ring-3 shadow stack pointer */ |
| 527 | #define MSR_IA32_INT_SSP_TAB 0x000006a8 /* exception shadow stack table */ |
| 528 | |
| 529 | /* Hardware P state interface */ |
| 530 | #define MSR_PPERF 0x0000064e |
| 531 | #define MSR_PERF_LIMIT_REASONS 0x0000064f |
| 532 | #define MSR_PM_ENABLE 0x00000770 |
| 533 | #define MSR_HWP_CAPABILITIES 0x00000771 |
| 534 | #define MSR_HWP_REQUEST_PKG 0x00000772 |
| 535 | #define MSR_HWP_INTERRUPT 0x00000773 |
| 536 | #define MSR_HWP_REQUEST 0x00000774 |
| 537 | #define MSR_HWP_STATUS 0x00000777 |
| 538 | |
| 539 | /* CPUID.6.EAX */ |
| 540 | #define HWP_BASE_BIT (1<<7) |
| 541 | #define HWP_NOTIFICATIONS_BIT (1<<8) |
| 542 | #define HWP_ACTIVITY_WINDOW_BIT (1<<9) |
| 543 | #define HWP_ENERGY_PERF_PREFERENCE_BIT (1<<10) |
| 544 | #define HWP_PACKAGE_LEVEL_REQUEST_BIT (1<<11) |
| 545 | |
| 546 | /* IA32_HWP_CAPABILITIES */ |
| 547 | #define HWP_HIGHEST_PERF(x) (((x) >> 0) & 0xff) |
| 548 | #define HWP_GUARANTEED_PERF(x) (((x) >> 8) & 0xff) |
| 549 | #define HWP_MOSTEFFICIENT_PERF(x) (((x) >> 16) & 0xff) |
| 550 | #define HWP_LOWEST_PERF(x) (((x) >> 24) & 0xff) |
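|  | |
|  | /* |
|  |  * Illustrative sketch, not part of this header: the accessors above pull |
|  |  * the four 8-bit performance levels out of one MSR_HWP_CAPABILITIES read. |
|  |  * Assuming a hypothetical "cap" variable: |
|  |  * |
|  |  *	u8 highest = HWP_HIGHEST_PERF(cap); |
|  |  *	u8 lowest = HWP_LOWEST_PERF(cap); |
|  |  */ |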
| 551 | |
| 552 | /* IA32_HWP_REQUEST */ |
| 553 | #define HWP_MIN_PERF(x) (x & 0xff) |
| 554 | #define HWP_MAX_PERF(x) ((x & 0xff) << 8) |
| 555 | #define HWP_DESIRED_PERF(x) ((x & 0xff) << 16) |
| 556 | #define HWP_ENERGY_PERF_PREFERENCE(x) (((u64)x & 0xff) << 24) |
| 557 | #define HWP_EPP_PERFORMANCE 0x00 |
| 558 | #define HWP_EPP_BALANCE_PERFORMANCE 0x80 |
| 559 | #define HWP_EPP_BALANCE_POWERSAVE 0xC0 |
| 560 | #define HWP_EPP_POWERSAVE 0xFF |
| 561 | #define HWP_ACTIVITY_WINDOW(x) ((u64)(x & 0xff3) << 32) |
| 562 | #define HWP_PACKAGE_CONTROL(x) ((u64)(x & 0x1) << 42) |
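|  | |
|  | /* |
|  |  * Illustrative sketch, not part of this header: an IA32_HWP_REQUEST value |
|  |  * is built by OR-ing the field builders above. Assuming hypothetical |
|  |  * "min"/"max" performance levels: |
|  |  * |
|  |  *	u64 req = HWP_MIN_PERF(min) | HWP_MAX_PERF(max) | |
|  |  *		  HWP_ENERGY_PERF_PREFERENCE(HWP_EPP_BALANCE_PERFORMANCE); |
|  |  */ |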
| 563 | |
| 564 | /* IA32_HWP_STATUS */ |
| 565 | #define HWP_GUARANTEED_CHANGE(x) (x & 0x1) |
| 566 | #define HWP_EXCURSION_TO_MINIMUM(x) (x & 0x4) |
| 567 | |
| 568 | /* IA32_HWP_INTERRUPT */ |
| 569 | #define HWP_CHANGE_TO_GUARANTEED_INT(x) (x & 0x1) |
| 570 | #define HWP_EXCURSION_TO_MINIMUM_INT(x) (x & 0x2) |
| 571 | |
| 572 | #define MSR_AMD64_MC0_MASK 0xc0010044 |
| 573 | |
| 574 | #define MSR_IA32_MCx_CTL(x) (MSR_IA32_MC0_CTL + 4*(x)) |
| 575 | #define MSR_IA32_MCx_STATUS(x) (MSR_IA32_MC0_STATUS + 4*(x)) |
| 576 | #define MSR_IA32_MCx_ADDR(x) (MSR_IA32_MC0_ADDR + 4*(x)) |
| 577 | #define MSR_IA32_MCx_MISC(x) (MSR_IA32_MC0_MISC + 4*(x)) |
| 578 | |
| 579 | #define MSR_AMD64_MCx_MASK(x) (MSR_AMD64_MC0_MASK + (x)) |
| 580 | |
| 581 | /* These are consecutive and not in the normal block of four MCE bank MSRs */ |
| 582 | #define MSR_IA32_MC0_CTL2 0x00000280 |
| 583 | #define MSR_IA32_MCx_CTL2(x) (MSR_IA32_MC0_CTL2 + (x)) |
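|  | |
|  | /* |
|  |  * Illustrative sketch, not part of this header: the per-bank helpers above |
|  |  * yield the MSR address for a given bank number, e.g. for a hypothetical |
|  |  * bank "i": |
|  |  * |
|  |  *	u32 status_msr = MSR_IA32_MCx_STATUS(i);	// bank i status |
|  |  *	u32 ctl2_msr = MSR_IA32_MCx_CTL2(i);		// bank i CMCI control |
|  |  */ |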
| 584 | |
| 585 | #define MSR_P6_PERFCTR0 0x000000c1 |
| 586 | #define MSR_P6_PERFCTR1 0x000000c2 |
| 587 | #define MSR_P6_EVNTSEL0 0x00000186 |
| 588 | #define MSR_P6_EVNTSEL1 0x00000187 |
| 589 | |
| 590 | #define MSR_KNC_PERFCTR0 0x00000020 |
| 591 | #define MSR_KNC_PERFCTR1 0x00000021 |
| 592 | #define MSR_KNC_EVNTSEL0 0x00000028 |
| 593 | #define MSR_KNC_EVNTSEL1 0x00000029 |
| 594 | |
| 595 | /* Alternative perfctr range with full access. */ |
| 596 | #define MSR_IA32_PMC0 0x000004c1 |
| 597 | |
| 598 | /* Auto-reload via MSR instead of DS area */ |
| 599 | #define MSR_RELOAD_PMC0 0x000014c1 |
| 600 | #define MSR_RELOAD_FIXED_CTR0 0x00001309 |
| 601 | |
| 602 | /* V6 PMON MSR range */ |
| 603 | #define MSR_IA32_PMC_V6_GP0_CTR 0x1900 |
| 604 | #define MSR_IA32_PMC_V6_GP0_CFG_A 0x1901 |
| 605 | #define MSR_IA32_PMC_V6_GP0_CFG_B 0x1902 |
| 606 | #define MSR_IA32_PMC_V6_GP0_CFG_C 0x1903 |
| 607 | #define MSR_IA32_PMC_V6_FX0_CTR 0x1980 |
| 608 | #define MSR_IA32_PMC_V6_FX0_CFG_B 0x1982 |
| 609 | #define MSR_IA32_PMC_V6_FX0_CFG_C 0x1983 |
| 610 | #define MSR_IA32_PMC_V6_STEP 4 |
| 611 | |
| 612 | /* KeyID partitioning between MKTME and TDX */ |
| 613 | #define MSR_IA32_MKTME_KEYID_PARTITIONING 0x00000087 |
| 614 | |
| 615 | /* |
| 616 | * AMD64 MSRs. Not complete. See the architecture manual for a more |
| 617 | * complete list. |
| 618 | */ |
| 619 | #define MSR_AMD64_PATCH_LEVEL 0x0000008b |
| 620 | #define MSR_AMD64_TSC_RATIO 0xc0000104 |
| 621 | #define MSR_AMD64_NB_CFG 0xc001001f |
| 622 | #define MSR_AMD64_PATCH_LOADER 0xc0010020 |
| 623 | #define MSR_AMD_PERF_CTL 0xc0010062 |
| 624 | #define MSR_AMD_PERF_STATUS 0xc0010063 |
| 625 | #define MSR_AMD_PSTATE_DEF_BASE 0xc0010064 |
| 626 | #define MSR_AMD64_GUEST_TSC_FREQ 0xc0010134 |
| 627 | #define MSR_AMD64_OSVW_ID_LENGTH 0xc0010140 |
| 628 | #define MSR_AMD64_OSVW_STATUS 0xc0010141 |
| 629 | #define MSR_AMD_PPIN_CTL 0xc00102f0 |
| 630 | #define MSR_AMD_PPIN 0xc00102f1 |
| 631 | #define MSR_AMD64_CPUID_FN_1 0xc0011004 |
| 632 | #define MSR_AMD64_LS_CFG 0xc0011020 |
| 633 | #define MSR_AMD64_DC_CFG 0xc0011022 |
| 634 | #define MSR_AMD64_TW_CFG 0xc0011023 |
| 635 | |
| 636 | #define MSR_AMD64_DE_CFG 0xc0011029 |
| 637 | #define MSR_AMD64_DE_CFG_LFENCE_SERIALIZE_BIT 1 |
| 638 | #define MSR_AMD64_DE_CFG_LFENCE_SERIALIZE BIT_ULL(MSR_AMD64_DE_CFG_LFENCE_SERIALIZE_BIT) |
| 639 | #define MSR_AMD64_DE_CFG_ZEN2_FP_BACKUP_FIX_BIT 9 |
| 640 | |
| 641 | #define MSR_AMD64_BU_CFG2 0xc001102a |
| 642 | #define MSR_AMD64_IBSFETCHCTL 0xc0011030 |
| 643 | #define MSR_AMD64_IBSFETCHLINAD 0xc0011031 |
| 644 | #define MSR_AMD64_IBSFETCHPHYSAD 0xc0011032 |
| 645 | #define MSR_AMD64_IBSFETCH_REG_COUNT 3 |
| 646 | #define MSR_AMD64_IBSFETCH_REG_MASK ((1UL<<MSR_AMD64_IBSFETCH_REG_COUNT)-1) |
| 647 | #define MSR_AMD64_IBSOPCTL 0xc0011033 |
| 648 | #define MSR_AMD64_IBSOPRIP 0xc0011034 |
| 649 | #define MSR_AMD64_IBSOPDATA 0xc0011035 |
| 650 | #define MSR_AMD64_IBSOPDATA2 0xc0011036 |
| 651 | #define MSR_AMD64_IBSOPDATA3 0xc0011037 |
| 652 | #define MSR_AMD64_IBSDCLINAD 0xc0011038 |
| 653 | #define MSR_AMD64_IBSDCPHYSAD 0xc0011039 |
| 654 | #define MSR_AMD64_IBSOP_REG_COUNT 7 |
| 655 | #define MSR_AMD64_IBSOP_REG_MASK ((1UL<<MSR_AMD64_IBSOP_REG_COUNT)-1) |
| 656 | #define MSR_AMD64_IBSCTL 0xc001103a |
| 657 | #define MSR_AMD64_IBSBRTARGET 0xc001103b |
| 658 | #define MSR_AMD64_ICIBSEXTDCTL 0xc001103c |
| 659 | #define MSR_AMD64_IBSOPDATA4 0xc001103d |
| 660 | #define MSR_AMD64_IBS_REG_COUNT_MAX 8 /* includes MSR_AMD64_IBSBRTARGET */ |
| 661 | #define MSR_AMD64_SVM_AVIC_DOORBELL 0xc001011b |
| 662 | #define MSR_AMD64_VM_PAGE_FLUSH 0xc001011e |
| 663 | #define MSR_AMD64_VIRT_SPEC_CTRL 0xc001011f |
| 664 | #define MSR_AMD64_SEV_ES_GHCB 0xc0010130 |
| 665 | #define MSR_AMD64_SEV 0xc0010131 |
| 666 | #define MSR_AMD64_SEV_ENABLED_BIT 0 |
| 667 | #define MSR_AMD64_SEV_ENABLED BIT_ULL(MSR_AMD64_SEV_ENABLED_BIT) |
| 668 | #define MSR_AMD64_SEV_ES_ENABLED_BIT 1 |
| 669 | #define MSR_AMD64_SEV_ES_ENABLED BIT_ULL(MSR_AMD64_SEV_ES_ENABLED_BIT) |
| 670 | #define MSR_AMD64_SEV_SNP_ENABLED_BIT 2 |
| 671 | #define MSR_AMD64_SEV_SNP_ENABLED BIT_ULL(MSR_AMD64_SEV_SNP_ENABLED_BIT) |
| 672 | #define MSR_AMD64_SNP_VTOM_BIT 3 |
| 673 | #define MSR_AMD64_SNP_VTOM BIT_ULL(MSR_AMD64_SNP_VTOM_BIT) |
| 674 | #define MSR_AMD64_SNP_REFLECT_VC_BIT 4 |
| 675 | #define MSR_AMD64_SNP_REFLECT_VC BIT_ULL(MSR_AMD64_SNP_REFLECT_VC_BIT) |
| 676 | #define MSR_AMD64_SNP_RESTRICTED_INJ_BIT 5 |
| 677 | #define MSR_AMD64_SNP_RESTRICTED_INJ BIT_ULL(MSR_AMD64_SNP_RESTRICTED_INJ_BIT) |
| 678 | #define MSR_AMD64_SNP_ALT_INJ_BIT 6 |
| 679 | #define MSR_AMD64_SNP_ALT_INJ BIT_ULL(MSR_AMD64_SNP_ALT_INJ_BIT) |
| 680 | #define MSR_AMD64_SNP_DEBUG_SWAP_BIT 7 |
| 681 | #define MSR_AMD64_SNP_DEBUG_SWAP BIT_ULL(MSR_AMD64_SNP_DEBUG_SWAP_BIT) |
| 682 | #define MSR_AMD64_SNP_PREVENT_HOST_IBS_BIT 8 |
| 683 | #define MSR_AMD64_SNP_PREVENT_HOST_IBS BIT_ULL(MSR_AMD64_SNP_PREVENT_HOST_IBS_BIT) |
| 684 | #define MSR_AMD64_SNP_BTB_ISOLATION_BIT 9 |
| 685 | #define MSR_AMD64_SNP_BTB_ISOLATION BIT_ULL(MSR_AMD64_SNP_BTB_ISOLATION_BIT) |
| 686 | #define MSR_AMD64_SNP_VMPL_SSS_BIT 10 |
| 687 | #define MSR_AMD64_SNP_VMPL_SSS BIT_ULL(MSR_AMD64_SNP_VMPL_SSS_BIT) |
| 688 | #define MSR_AMD64_SNP_SECURE_TSC_BIT 11 |
| 689 | #define MSR_AMD64_SNP_SECURE_TSC BIT_ULL(MSR_AMD64_SNP_SECURE_TSC_BIT) |
| 690 | #define MSR_AMD64_SNP_VMGEXIT_PARAM_BIT 12 |
| 691 | #define MSR_AMD64_SNP_VMGEXIT_PARAM BIT_ULL(MSR_AMD64_SNP_VMGEXIT_PARAM_BIT) |
| 692 | #define MSR_AMD64_SNP_RESERVED_BIT13 BIT_ULL(13) |
| 693 | #define MSR_AMD64_SNP_IBS_VIRT_BIT 14 |
| 694 | #define MSR_AMD64_SNP_IBS_VIRT BIT_ULL(MSR_AMD64_SNP_IBS_VIRT_BIT) |
| 695 | #define MSR_AMD64_SNP_RESERVED_BIT15 BIT_ULL(15) |
| 696 | #define MSR_AMD64_SNP_VMSA_REG_PROT_BIT 16 |
| 697 | #define MSR_AMD64_SNP_VMSA_REG_PROT BIT_ULL(MSR_AMD64_SNP_VMSA_REG_PROT_BIT) |
| 698 | #define MSR_AMD64_SNP_SMT_PROT_BIT 17 |
| 699 | #define MSR_AMD64_SNP_SMT_PROT BIT_ULL(MSR_AMD64_SNP_SMT_PROT_BIT) |
| 700 | #define MSR_AMD64_SNP_RESV_BIT 18 |
| 701 | #define MSR_AMD64_SNP_RESERVED_MASK GENMASK_ULL(63, MSR_AMD64_SNP_RESV_BIT) |
| 702 | #define MSR_AMD64_RMP_BASE 0xc0010132 |
| 703 | #define MSR_AMD64_RMP_END 0xc0010133 |
| 704 | #define MSR_AMD64_RMP_CFG 0xc0010136 |
| 705 | #define MSR_AMD64_SEG_RMP_ENABLED_BIT 0 |
| 706 | #define MSR_AMD64_SEG_RMP_ENABLED BIT_ULL(MSR_AMD64_SEG_RMP_ENABLED_BIT) |
| 707 | #define MSR_AMD64_RMP_SEGMENT_SHIFT(x) (((x) & GENMASK_ULL(13, 8)) >> 8) |
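|  | |
|  | /* |
|  |  * Illustrative sketch, not part of this header: a guest discovers which SEV |
|  |  * features are active by testing these bits in the value read from |
|  |  * MSR_AMD64_SEV. Assuming a hypothetical "sev_status" variable: |
|  |  * |
|  |  *	if (sev_status & MSR_AMD64_SEV_SNP_ENABLED) |
|  |  *		... running as an SEV-SNP guest ... |
|  |  */ |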
| 708 | |
| 709 | #define MSR_SVSM_CAA 0xc001f000 |
| 710 | |
| 711 | /* AMD Collaborative Processor Performance Control MSRs */ |
| 712 | #define MSR_AMD_CPPC_CAP1 0xc00102b0 |
| 713 | #define MSR_AMD_CPPC_ENABLE 0xc00102b1 |
| 714 | #define MSR_AMD_CPPC_CAP2 0xc00102b2 |
| 715 | #define MSR_AMD_CPPC_REQ 0xc00102b3 |
| 716 | #define MSR_AMD_CPPC_STATUS 0xc00102b4 |
| 717 | |
| 718 | /* Masks for use with MSR_AMD_CPPC_CAP1 */ |
| 719 | #define AMD_CPPC_LOWEST_PERF_MASK GENMASK(7, 0) |
| 720 | #define AMD_CPPC_LOWNONLIN_PERF_MASK GENMASK(15, 8) |
| 721 | #define AMD_CPPC_NOMINAL_PERF_MASK GENMASK(23, 16) |
| 722 | #define AMD_CPPC_HIGHEST_PERF_MASK GENMASK(31, 24) |
| 723 | |
| 724 | /* Masks for use with MSR_AMD_CPPC_REQ */ |
| 725 | #define AMD_CPPC_MAX_PERF_MASK GENMASK(7, 0) |
| 726 | #define AMD_CPPC_MIN_PERF_MASK GENMASK(15, 8) |
| 727 | #define AMD_CPPC_DES_PERF_MASK GENMASK(23, 16) |
| 728 | #define AMD_CPPC_EPP_PERF_MASK GENMASK(31, 24) |
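|  | |
|  | /* |
|  |  * Illustrative sketch, not part of this header: with the GENMASK()-style |
|  |  * masks above, a CPPC request value can be composed with FIELD_PREP() from |
|  |  * <linux/bitfield.h>. Assuming hypothetical "min"/"max"/"epp" inputs: |
|  |  * |
|  |  *	u64 req = FIELD_PREP(AMD_CPPC_MIN_PERF_MASK, min) | |
|  |  *		  FIELD_PREP(AMD_CPPC_MAX_PERF_MASK, max) | |
|  |  *		  FIELD_PREP(AMD_CPPC_EPP_PERF_MASK, epp); |
|  |  */ |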
| 729 | |
| 730 | /* AMD Performance Counter Global Status and Control MSRs */ |
| 731 | #define MSR_AMD64_PERF_CNTR_GLOBAL_STATUS 0xc0000300 |
| 732 | #define MSR_AMD64_PERF_CNTR_GLOBAL_CTL 0xc0000301 |
| 733 | #define MSR_AMD64_PERF_CNTR_GLOBAL_STATUS_CLR 0xc0000302 |
| 734 | |
| 735 | /* AMD Last Branch Record MSRs */ |
| 736 | #define MSR_AMD64_LBR_SELECT 0xc000010e |
| 737 | |
| 738 | /* Zen4 */ |
| 739 | #define MSR_ZEN4_BP_CFG 0xc001102e |
| 740 | #define MSR_ZEN4_BP_CFG_BP_SPEC_REDUCE_BIT 4 |
| 741 | #define MSR_ZEN4_BP_CFG_SHARED_BTB_FIX_BIT 5 |
| 742 | |
| 743 | /* Fam 19h MSRs */ |
| 744 | #define MSR_F19H_UMC_PERF_CTL 0xc0010800 |
| 745 | #define MSR_F19H_UMC_PERF_CTR 0xc0010801 |
| 746 | |
| 747 | /* Zen 2 */ |
| 748 | #define MSR_ZEN2_SPECTRAL_CHICKEN 0xc00110e3 |
| 749 | #define MSR_ZEN2_SPECTRAL_CHICKEN_BIT BIT_ULL(1) |
| 750 | |
| 751 | /* Fam 17h MSRs */ |
| 752 | #define MSR_F17H_IRPERF 0xc00000e9 |
| 753 | |
| 754 | /* Fam 16h MSRs */ |
| 755 | #define MSR_F16H_L2I_PERF_CTL 0xc0010230 |
| 756 | #define MSR_F16H_L2I_PERF_CTR 0xc0010231 |
| 757 | #define MSR_F16H_DR1_ADDR_MASK 0xc0011019 |
| 758 | #define MSR_F16H_DR2_ADDR_MASK 0xc001101a |
| 759 | #define MSR_F16H_DR3_ADDR_MASK 0xc001101b |
| 760 | #define MSR_F16H_DR0_ADDR_MASK 0xc0011027 |
| 761 | |
| 762 | /* Fam 15h MSRs */ |
| 763 | #define MSR_F15H_CU_PWR_ACCUMULATOR 0xc001007a |
| 764 | #define MSR_F15H_CU_MAX_PWR_ACCUMULATOR 0xc001007b |
| 765 | #define MSR_F15H_PERF_CTL 0xc0010200 |
| 766 | #define MSR_F15H_PERF_CTL0 MSR_F15H_PERF_CTL |
| 767 | #define MSR_F15H_PERF_CTL1 (MSR_F15H_PERF_CTL + 2) |
| 768 | #define MSR_F15H_PERF_CTL2 (MSR_F15H_PERF_CTL + 4) |
| 769 | #define MSR_F15H_PERF_CTL3 (MSR_F15H_PERF_CTL + 6) |
| 770 | #define MSR_F15H_PERF_CTL4 (MSR_F15H_PERF_CTL + 8) |
| 771 | #define MSR_F15H_PERF_CTL5 (MSR_F15H_PERF_CTL + 10) |
| 772 | |
| 773 | #define MSR_F15H_PERF_CTR 0xc0010201 |
| 774 | #define MSR_F15H_PERF_CTR0 MSR_F15H_PERF_CTR |
| 775 | #define MSR_F15H_PERF_CTR1 (MSR_F15H_PERF_CTR + 2) |
| 776 | #define MSR_F15H_PERF_CTR2 (MSR_F15H_PERF_CTR + 4) |
| 777 | #define MSR_F15H_PERF_CTR3 (MSR_F15H_PERF_CTR + 6) |
| 778 | #define MSR_F15H_PERF_CTR4 (MSR_F15H_PERF_CTR + 8) |
| 779 | #define MSR_F15H_PERF_CTR5 (MSR_F15H_PERF_CTR + 10) |
| 780 | |
| 781 | #define MSR_F15H_NB_PERF_CTL 0xc0010240 |
| 782 | #define MSR_F15H_NB_PERF_CTR 0xc0010241 |
| 783 | #define MSR_F15H_PTSC 0xc0010280 |
| 784 | #define MSR_F15H_IC_CFG 0xc0011021 |
| 785 | #define MSR_F15H_EX_CFG 0xc001102c |
| 786 | |
| 787 | /* Fam 10h MSRs */ |
| 788 | #define MSR_FAM10H_MMIO_CONF_BASE 0xc0010058 |
| 789 | #define FAM10H_MMIO_CONF_ENABLE (1<<0) |
| 790 | #define FAM10H_MMIO_CONF_BUSRANGE_MASK 0xf |
| 791 | #define FAM10H_MMIO_CONF_BUSRANGE_SHIFT 2 |
| 792 | #define FAM10H_MMIO_CONF_BASE_MASK 0xfffffffULL |
| 793 | #define FAM10H_MMIO_CONF_BASE_SHIFT 20 |
| 794 | #define MSR_FAM10H_NODE_ID 0xc001100c |
| 795 | |
| 796 | /* K8 MSRs */ |
| 797 | #define MSR_K8_TOP_MEM1 0xc001001a |
| 798 | #define MSR_K8_TOP_MEM2 0xc001001d |
| 799 | #define MSR_AMD64_SYSCFG 0xc0010010 |
| 800 | #define MSR_AMD64_SYSCFG_MEM_ENCRYPT_BIT 23 |
| 801 | #define MSR_AMD64_SYSCFG_MEM_ENCRYPT BIT_ULL(MSR_AMD64_SYSCFG_MEM_ENCRYPT_BIT) |
| 802 | #define MSR_AMD64_SYSCFG_SNP_EN_BIT 24 |
| 803 | #define MSR_AMD64_SYSCFG_SNP_EN BIT_ULL(MSR_AMD64_SYSCFG_SNP_EN_BIT) |
| 804 | #define MSR_AMD64_SYSCFG_SNP_VMPL_EN_BIT 25 |
| 805 | #define MSR_AMD64_SYSCFG_SNP_VMPL_EN BIT_ULL(MSR_AMD64_SYSCFG_SNP_VMPL_EN_BIT) |
| 806 | #define MSR_AMD64_SYSCFG_MFDM_BIT 19 |
| 807 | #define MSR_AMD64_SYSCFG_MFDM BIT_ULL(MSR_AMD64_SYSCFG_MFDM_BIT) |
| 808 | |
| 809 | #define MSR_K8_INT_PENDING_MSG 0xc0010055 |
| 810 | /* C1E active bits in int pending message */ |
| 811 | #define K8_INTP_C1E_ACTIVE_MASK 0x18000000 |
| 812 | #define MSR_K8_TSEG_ADDR 0xc0010112 |
| 813 | #define MSR_K8_TSEG_MASK 0xc0010113 |
| 814 | #define K8_MTRRFIXRANGE_DRAM_ENABLE 0x00040000 /* MtrrFixDramEn bit */ |
| 815 | #define K8_MTRRFIXRANGE_DRAM_MODIFY 0x00080000 /* MtrrFixDramModEn bit */ |
| 816 | #define K8_MTRR_RDMEM_WRMEM_MASK 0x18181818 /* Mask: RdMem|WrMem */ |
| 817 | |
| 818 | /* K7 MSRs */ |
| 819 | #define MSR_K7_EVNTSEL0 0xc0010000 |
| 820 | #define MSR_K7_PERFCTR0 0xc0010004 |
| 821 | #define MSR_K7_EVNTSEL1 0xc0010001 |
| 822 | #define MSR_K7_PERFCTR1 0xc0010005 |
| 823 | #define MSR_K7_EVNTSEL2 0xc0010002 |
| 824 | #define MSR_K7_PERFCTR2 0xc0010006 |
| 825 | #define MSR_K7_EVNTSEL3 0xc0010003 |
| 826 | #define MSR_K7_PERFCTR3 0xc0010007 |
| 827 | #define MSR_K7_CLK_CTL 0xc001001b |
| 828 | #define MSR_K7_HWCR 0xc0010015 |
| 829 | #define MSR_K7_HWCR_SMMLOCK_BIT 0 |
| 830 | #define MSR_K7_HWCR_SMMLOCK BIT_ULL(MSR_K7_HWCR_SMMLOCK_BIT) |
| 831 | #define MSR_K7_HWCR_IRPERF_EN_BIT 30 |
| 832 | #define MSR_K7_HWCR_IRPERF_EN BIT_ULL(MSR_K7_HWCR_IRPERF_EN_BIT) |
| 833 | #define MSR_K7_FID_VID_CTL 0xc0010041 |
| 834 | #define MSR_K7_FID_VID_STATUS 0xc0010042 |
| 835 | #define MSR_K7_HWCR_CPB_DIS_BIT 25 |
| 836 | #define MSR_K7_HWCR_CPB_DIS BIT_ULL(MSR_K7_HWCR_CPB_DIS_BIT) |
| 837 | |
| 838 | /* K6 MSRs */ |
| 839 | #define MSR_K6_WHCR 0xc0000082 |
| 840 | #define MSR_K6_UWCCR 0xc0000085 |
| 841 | #define MSR_K6_EPMR 0xc0000086 |
| 842 | #define MSR_K6_PSOR 0xc0000087 |
| 843 | #define MSR_K6_PFIR 0xc0000088 |
| 844 | |
| 845 | /* Centaur-Hauls/IDT defined MSRs. */ |
| 846 | #define MSR_IDT_FCR1 0x00000107 |
| 847 | #define MSR_IDT_FCR2 0x00000108 |
| 848 | #define MSR_IDT_FCR3 0x00000109 |
| 849 | #define MSR_IDT_FCR4 0x0000010a |
| 850 | |
| 851 | #define MSR_IDT_MCR0 0x00000110 |
| 852 | #define MSR_IDT_MCR1 0x00000111 |
| 853 | #define MSR_IDT_MCR2 0x00000112 |
| 854 | #define MSR_IDT_MCR3 0x00000113 |
| 855 | #define MSR_IDT_MCR4 0x00000114 |
| 856 | #define MSR_IDT_MCR5 0x00000115 |
| 857 | #define MSR_IDT_MCR6 0x00000116 |
| 858 | #define MSR_IDT_MCR7 0x00000117 |
| 859 | #define MSR_IDT_MCR_CTRL 0x00000120 |
| 860 | |
| 861 | /* VIA Cyrix defined MSRs */ |
| 862 | #define MSR_VIA_FCR 0x00001107 |
| 863 | #define MSR_VIA_LONGHAUL 0x0000110a |
| 864 | #define MSR_VIA_RNG 0x0000110b |
| 865 | #define MSR_VIA_BCR2 0x00001147 |
| 866 | |
| 867 | /* Transmeta defined MSRs */ |
| 868 | #define MSR_TMTA_LONGRUN_CTRL 0x80868010 |
| 869 | #define MSR_TMTA_LONGRUN_FLAGS 0x80868011 |
| 870 | #define MSR_TMTA_LRTI_READOUT 0x80868018 |
| 871 | #define MSR_TMTA_LRTI_VOLT_MHZ 0x8086801a |
| 872 | |
| 873 | /* Intel defined MSRs. */ |
| 874 | #define MSR_IA32_P5_MC_ADDR 0x00000000 |
| 875 | #define MSR_IA32_P5_MC_TYPE 0x00000001 |
| 876 | #define MSR_IA32_TSC 0x00000010 |
| 877 | #define MSR_IA32_PLATFORM_ID 0x00000017 |
| 878 | #define MSR_IA32_EBL_CR_POWERON 0x0000002a |
| 879 | #define MSR_EBC_FREQUENCY_ID 0x0000002c |
| 880 | #define MSR_SMI_COUNT 0x00000034 |
| 881 | |
| 882 | /* Referred to as IA32_FEATURE_CONTROL in Intel's SDM. */ |
| 883 | #define MSR_IA32_FEAT_CTL 0x0000003a |
| 884 | #define FEAT_CTL_LOCKED BIT(0) |
| 885 | #define FEAT_CTL_VMX_ENABLED_INSIDE_SMX BIT(1) |
| 886 | #define FEAT_CTL_VMX_ENABLED_OUTSIDE_SMX BIT(2) |
| 887 | #define FEAT_CTL_SGX_LC_ENABLED BIT(17) |
| 888 | #define FEAT_CTL_SGX_ENABLED BIT(18) |
| 889 | #define FEAT_CTL_LMCE_ENABLED BIT(20) |
| 890 | |
| 891 | #define MSR_IA32_TSC_ADJUST 0x0000003b |
| 892 | #define MSR_IA32_BNDCFGS 0x00000d90 |
| 893 | |
| 894 | #define MSR_IA32_BNDCFGS_RSVD 0x00000ffc |
| 895 | |
| 896 | #define MSR_IA32_XFD 0x000001c4 |
| 897 | #define MSR_IA32_XFD_ERR 0x000001c5 |
| 898 | #define MSR_IA32_XSS 0x00000da0 |
| 899 | |
| 900 | #define MSR_IA32_APICBASE 0x0000001b |
| 901 | #define MSR_IA32_APICBASE_BSP (1<<8) |
| 902 | #define MSR_IA32_APICBASE_ENABLE (1<<11) |
| 903 | #define MSR_IA32_APICBASE_BASE (0xfffff<<12) |
| 904 | |
| 905 | #define MSR_IA32_UCODE_WRITE 0x00000079 |
| 906 | #define MSR_IA32_UCODE_REV 0x0000008b |
| 907 | |
| 908 | /* Intel SGX Launch Enclave Public Key Hash MSRs */ |
| 909 | #define MSR_IA32_SGXLEPUBKEYHASH0 0x0000008C |
| 910 | #define MSR_IA32_SGXLEPUBKEYHASH1 0x0000008D |
| 911 | #define MSR_IA32_SGXLEPUBKEYHASH2 0x0000008E |
| 912 | #define MSR_IA32_SGXLEPUBKEYHASH3 0x0000008F |
| 913 | |
| 914 | #define MSR_IA32_SMM_MONITOR_CTL 0x0000009b |
| 915 | #define MSR_IA32_SMBASE 0x0000009e |
| 916 | |
| 917 | #define MSR_IA32_PERF_STATUS 0x00000198 |
| 918 | #define MSR_IA32_PERF_CTL 0x00000199 |
| 919 | #define INTEL_PERF_CTL_MASK 0xffff |
| 920 | |
| 921 | /* AMD Branch Sampling configuration */ |
| 922 | #define MSR_AMD_DBG_EXTN_CFG 0xc000010f |
| 923 | #define MSR_AMD_SAMP_BR_FROM 0xc0010300 |
| 924 | |
| 925 | #define DBG_EXTN_CFG_LBRV2EN BIT_ULL(6) |
| 926 | |
| 927 | #define MSR_IA32_MPERF 0x000000e7 |
| 928 | #define MSR_IA32_APERF 0x000000e8 |
| 929 | |
| 930 | #define MSR_IA32_THERM_CONTROL 0x0000019a |
| 931 | #define MSR_IA32_THERM_INTERRUPT 0x0000019b |
| 932 | |
| 933 | #define THERM_INT_HIGH_ENABLE (1 << 0) |
| 934 | #define THERM_INT_LOW_ENABLE (1 << 1) |
| 935 | #define THERM_INT_PLN_ENABLE (1 << 24) |
| 936 | |
| 937 | #define MSR_IA32_THERM_STATUS 0x0000019c |
| 938 | |
| 939 | #define THERM_STATUS_PROCHOT (1 << 0) |
| 940 | #define THERM_STATUS_POWER_LIMIT (1 << 10) |
| 941 | |
| 942 | #define MSR_THERM2_CTL 0x0000019d |
| 943 | |
| 944 | #define MSR_THERM2_CTL_TM_SELECT (1ULL << 16) |
| 945 | |
| 946 | #define MSR_IA32_MISC_ENABLE 0x000001a0 |
| 947 | |
| 948 | #define MSR_IA32_TEMPERATURE_TARGET 0x000001a2 |
| 949 | |
| 950 | #define MSR_MISC_FEATURE_CONTROL 0x000001a4 |
| 951 | #define MSR_MISC_PWR_MGMT 0x000001aa |
| 952 | |
| 953 | #define MSR_IA32_ENERGY_PERF_BIAS 0x000001b0 |
| 954 | #define ENERGY_PERF_BIAS_PERFORMANCE 0 |
| 955 | #define ENERGY_PERF_BIAS_BALANCE_PERFORMANCE 4 |
| 956 | #define ENERGY_PERF_BIAS_NORMAL 6 |
| 957 | #define ENERGY_PERF_BIAS_NORMAL_POWERSAVE 7 |
| 958 | #define ENERGY_PERF_BIAS_BALANCE_POWERSAVE 8 |
| 959 | #define ENERGY_PERF_BIAS_POWERSAVE 15 |
| 960 | |
| 961 | #define MSR_IA32_PACKAGE_THERM_STATUS 0x000001b1 |
| 962 | |
| 963 | #define PACKAGE_THERM_STATUS_PROCHOT (1 << 0) |
| 964 | #define PACKAGE_THERM_STATUS_POWER_LIMIT (1 << 10) |
| 965 | #define PACKAGE_THERM_STATUS_HFI_UPDATED (1 << 26) |
| 966 | |
| 967 | #define MSR_IA32_PACKAGE_THERM_INTERRUPT 0x000001b2 |
| 968 | |
| 969 | #define PACKAGE_THERM_INT_HIGH_ENABLE (1 << 0) |
| 970 | #define PACKAGE_THERM_INT_LOW_ENABLE (1 << 1) |
| 971 | #define PACKAGE_THERM_INT_PLN_ENABLE (1 << 24) |
| 972 | #define PACKAGE_THERM_INT_HFI_ENABLE (1 << 25) |
| 973 | |
| 974 | /* Thermal Thresholds Support */ |
| 975 | #define THERM_INT_THRESHOLD0_ENABLE (1 << 15) |
| 976 | #define THERM_SHIFT_THRESHOLD0 8 |
| 977 | #define THERM_MASK_THRESHOLD0 (0x7f << THERM_SHIFT_THRESHOLD0) |
| 978 | #define THERM_INT_THRESHOLD1_ENABLE (1 << 23) |
| 979 | #define THERM_SHIFT_THRESHOLD1 16 |
| 980 | #define THERM_MASK_THRESHOLD1 (0x7f << THERM_SHIFT_THRESHOLD1) |
| 981 | #define THERM_STATUS_THRESHOLD0 (1 << 6) |
| 982 | #define THERM_LOG_THRESHOLD0 (1 << 7) |
| 983 | #define THERM_STATUS_THRESHOLD1 (1 << 8) |
| 984 | #define THERM_LOG_THRESHOLD1 (1 << 9) |
| 985 | |
| 986 | /* MISC_ENABLE bits: architectural */ |
| 987 | #define MSR_IA32_MISC_ENABLE_FAST_STRING_BIT 0 |
| 988 | #define MSR_IA32_MISC_ENABLE_FAST_STRING (1ULL << MSR_IA32_MISC_ENABLE_FAST_STRING_BIT) |
| 989 | #define MSR_IA32_MISC_ENABLE_TCC_BIT 1 |
| 990 | #define MSR_IA32_MISC_ENABLE_TCC (1ULL << MSR_IA32_MISC_ENABLE_TCC_BIT) |
| 991 | #define MSR_IA32_MISC_ENABLE_EMON_BIT 7 |
| 992 | #define MSR_IA32_MISC_ENABLE_EMON (1ULL << MSR_IA32_MISC_ENABLE_EMON_BIT) |
| 993 | #define MSR_IA32_MISC_ENABLE_BTS_UNAVAIL_BIT 11 |
| 994 | #define MSR_IA32_MISC_ENABLE_BTS_UNAVAIL (1ULL << MSR_IA32_MISC_ENABLE_BTS_UNAVAIL_BIT) |
| 995 | #define MSR_IA32_MISC_ENABLE_PEBS_UNAVAIL_BIT 12 |
| 996 | #define MSR_IA32_MISC_ENABLE_PEBS_UNAVAIL (1ULL << MSR_IA32_MISC_ENABLE_PEBS_UNAVAIL_BIT) |
| 997 | #define MSR_IA32_MISC_ENABLE_ENHANCED_SPEEDSTEP_BIT 16 |
| 998 | #define MSR_IA32_MISC_ENABLE_ENHANCED_SPEEDSTEP (1ULL << MSR_IA32_MISC_ENABLE_ENHANCED_SPEEDSTEP_BIT) |
| 999 | #define MSR_IA32_MISC_ENABLE_MWAIT_BIT 18 |
| 1000 | #define MSR_IA32_MISC_ENABLE_MWAIT (1ULL << MSR_IA32_MISC_ENABLE_MWAIT_BIT) |
| 1001 | #define MSR_IA32_MISC_ENABLE_LIMIT_CPUID_BIT 22 |
| 1002 | #define MSR_IA32_MISC_ENABLE_LIMIT_CPUID (1ULL << MSR_IA32_MISC_ENABLE_LIMIT_CPUID_BIT) |
| 1003 | #define MSR_IA32_MISC_ENABLE_XTPR_DISABLE_BIT 23 |
| 1004 | #define MSR_IA32_MISC_ENABLE_XTPR_DISABLE (1ULL << MSR_IA32_MISC_ENABLE_XTPR_DISABLE_BIT) |
| 1005 | #define MSR_IA32_MISC_ENABLE_XD_DISABLE_BIT 34 |
| 1006 | #define MSR_IA32_MISC_ENABLE_XD_DISABLE (1ULL << MSR_IA32_MISC_ENABLE_XD_DISABLE_BIT) |
| 1007 | |
| 1008 | /* MISC_ENABLE bits: model-specific, meaning may vary from core to core */ |
| 1009 | #define MSR_IA32_MISC_ENABLE_X87_COMPAT_BIT 2 |
| 1010 | #define MSR_IA32_MISC_ENABLE_X87_COMPAT (1ULL << MSR_IA32_MISC_ENABLE_X87_COMPAT_BIT) |
| 1011 | #define MSR_IA32_MISC_ENABLE_TM1_BIT 3 |
| 1012 | #define MSR_IA32_MISC_ENABLE_TM1 (1ULL << MSR_IA32_MISC_ENABLE_TM1_BIT) |
| 1013 | #define MSR_IA32_MISC_ENABLE_SPLIT_LOCK_DISABLE_BIT 4 |
| 1014 | #define MSR_IA32_MISC_ENABLE_SPLIT_LOCK_DISABLE (1ULL << MSR_IA32_MISC_ENABLE_SPLIT_LOCK_DISABLE_BIT) |
| 1015 | #define MSR_IA32_MISC_ENABLE_L3CACHE_DISABLE_BIT 6 |
| 1016 | #define MSR_IA32_MISC_ENABLE_L3CACHE_DISABLE (1ULL << MSR_IA32_MISC_ENABLE_L3CACHE_DISABLE_BIT) |
| 1017 | #define MSR_IA32_MISC_ENABLE_SUPPRESS_LOCK_BIT 8 |
| 1018 | #define MSR_IA32_MISC_ENABLE_SUPPRESS_LOCK (1ULL << MSR_IA32_MISC_ENABLE_SUPPRESS_LOCK_BIT) |
| 1019 | #define MSR_IA32_MISC_ENABLE_PREFETCH_DISABLE_BIT 9 |
| 1020 | #define MSR_IA32_MISC_ENABLE_PREFETCH_DISABLE (1ULL << MSR_IA32_MISC_ENABLE_PREFETCH_DISABLE_BIT) |
| 1021 | #define MSR_IA32_MISC_ENABLE_FERR_BIT 10 |
| 1022 | #define MSR_IA32_MISC_ENABLE_FERR (1ULL << MSR_IA32_MISC_ENABLE_FERR_BIT) |
| 1023 | #define MSR_IA32_MISC_ENABLE_FERR_MULTIPLEX_BIT 10 |
| 1024 | #define MSR_IA32_MISC_ENABLE_FERR_MULTIPLEX (1ULL << MSR_IA32_MISC_ENABLE_FERR_MULTIPLEX_BIT) |
| 1025 | #define MSR_IA32_MISC_ENABLE_TM2_BIT 13 |
| 1026 | #define MSR_IA32_MISC_ENABLE_TM2 (1ULL << MSR_IA32_MISC_ENABLE_TM2_BIT) |
| 1027 | #define MSR_IA32_MISC_ENABLE_ADJ_PREF_DISABLE_BIT 19 |
| 1028 | #define MSR_IA32_MISC_ENABLE_ADJ_PREF_DISABLE (1ULL << MSR_IA32_MISC_ENABLE_ADJ_PREF_DISABLE_BIT) |
| 1029 | #define MSR_IA32_MISC_ENABLE_SPEEDSTEP_LOCK_BIT 20 |
| 1030 | #define MSR_IA32_MISC_ENABLE_SPEEDSTEP_LOCK (1ULL << MSR_IA32_MISC_ENABLE_SPEEDSTEP_LOCK_BIT) |
| 1031 | #define MSR_IA32_MISC_ENABLE_L1D_CONTEXT_BIT 24 |
| 1032 | #define MSR_IA32_MISC_ENABLE_L1D_CONTEXT (1ULL << MSR_IA32_MISC_ENABLE_L1D_CONTEXT_BIT) |
| 1033 | #define MSR_IA32_MISC_ENABLE_DCU_PREF_DISABLE_BIT 37 |
| 1034 | #define MSR_IA32_MISC_ENABLE_DCU_PREF_DISABLE (1ULL << MSR_IA32_MISC_ENABLE_DCU_PREF_DISABLE_BIT) |
| 1035 | #define MSR_IA32_MISC_ENABLE_TURBO_DISABLE_BIT 38 |
| 1036 | #define MSR_IA32_MISC_ENABLE_TURBO_DISABLE (1ULL << MSR_IA32_MISC_ENABLE_TURBO_DISABLE_BIT) |
| 1037 | #define MSR_IA32_MISC_ENABLE_IP_PREF_DISABLE_BIT 39 |
| 1038 | #define MSR_IA32_MISC_ENABLE_IP_PREF_DISABLE (1ULL << MSR_IA32_MISC_ENABLE_IP_PREF_DISABLE_BIT) |
| 1039 | |
| 1040 | /* MISC_FEATURES_ENABLES non-architectural features */ |
| 1041 | #define MSR_MISC_FEATURES_ENABLES 0x00000140 |
| 1042 | |
| 1043 | #define MSR_MISC_FEATURES_ENABLES_CPUID_FAULT_BIT 0 |
| 1044 | #define MSR_MISC_FEATURES_ENABLES_CPUID_FAULT BIT_ULL(MSR_MISC_FEATURES_ENABLES_CPUID_FAULT_BIT) |
| 1045 | #define MSR_MISC_FEATURES_ENABLES_RING3MWAIT_BIT 1 |
| 1046 | |
| 1047 | #define MSR_IA32_TSC_DEADLINE 0x000006E0 |
| 1048 | |
| 1049 | |
| 1050 | #define MSR_TSX_FORCE_ABORT 0x0000010F |
| 1051 | |
| 1052 | #define MSR_TFA_RTM_FORCE_ABORT_BIT 0 |
| 1053 | #define MSR_TFA_RTM_FORCE_ABORT BIT_ULL(MSR_TFA_RTM_FORCE_ABORT_BIT) |
| 1054 | #define MSR_TFA_TSX_CPUID_CLEAR_BIT 1 |
| 1055 | #define MSR_TFA_TSX_CPUID_CLEAR BIT_ULL(MSR_TFA_TSX_CPUID_CLEAR_BIT) |
| 1056 | #define MSR_TFA_SDV_ENABLE_RTM_BIT 2 |
| 1057 | #define MSR_TFA_SDV_ENABLE_RTM BIT_ULL(MSR_TFA_SDV_ENABLE_RTM_BIT) |
| 1058 | |
| 1059 | /* P4/Xeon+ specific */ |
| 1060 | #define MSR_IA32_MCG_EAX 0x00000180 |
| 1061 | #define MSR_IA32_MCG_EBX 0x00000181 |
| 1062 | #define MSR_IA32_MCG_ECX 0x00000182 |
| 1063 | #define MSR_IA32_MCG_EDX 0x00000183 |
| 1064 | #define MSR_IA32_MCG_ESI 0x00000184 |
| 1065 | #define MSR_IA32_MCG_EDI 0x00000185 |
| 1066 | #define MSR_IA32_MCG_EBP 0x00000186 |
| 1067 | #define MSR_IA32_MCG_ESP 0x00000187 |
| 1068 | #define MSR_IA32_MCG_EFLAGS 0x00000188 |
| 1069 | #define MSR_IA32_MCG_EIP 0x00000189 |
| 1070 | #define MSR_IA32_MCG_RESERVED 0x0000018a |
| 1071 | |
| 1072 | /* Pentium IV performance counter MSRs */ |
| 1073 | #define MSR_P4_BPU_PERFCTR0 0x00000300 |
| 1074 | #define MSR_P4_BPU_PERFCTR1 0x00000301 |
| 1075 | #define MSR_P4_BPU_PERFCTR2 0x00000302 |
| 1076 | #define MSR_P4_BPU_PERFCTR3 0x00000303 |
| 1077 | #define MSR_P4_MS_PERFCTR0 0x00000304 |
| 1078 | #define MSR_P4_MS_PERFCTR1 0x00000305 |
| 1079 | #define MSR_P4_MS_PERFCTR2 0x00000306 |
| 1080 | #define MSR_P4_MS_PERFCTR3 0x00000307 |
| 1081 | #define MSR_P4_FLAME_PERFCTR0 0x00000308 |
| 1082 | #define MSR_P4_FLAME_PERFCTR1 0x00000309 |
| 1083 | #define MSR_P4_FLAME_PERFCTR2 0x0000030a |
| 1084 | #define MSR_P4_FLAME_PERFCTR3 0x0000030b |
| 1085 | #define MSR_P4_IQ_PERFCTR0 0x0000030c |
| 1086 | #define MSR_P4_IQ_PERFCTR1 0x0000030d |
| 1087 | #define MSR_P4_IQ_PERFCTR2 0x0000030e |
| 1088 | #define MSR_P4_IQ_PERFCTR3 0x0000030f |
| 1089 | #define MSR_P4_IQ_PERFCTR4 0x00000310 |
| 1090 | #define MSR_P4_IQ_PERFCTR5 0x00000311 |
| 1091 | #define MSR_P4_BPU_CCCR0 0x00000360 |
| 1092 | #define MSR_P4_BPU_CCCR1 0x00000361 |
| 1093 | #define MSR_P4_BPU_CCCR2 0x00000362 |
| 1094 | #define MSR_P4_BPU_CCCR3 0x00000363 |
| 1095 | #define MSR_P4_MS_CCCR0 0x00000364 |
| 1096 | #define MSR_P4_MS_CCCR1 0x00000365 |
| 1097 | #define MSR_P4_MS_CCCR2 0x00000366 |
| 1098 | #define MSR_P4_MS_CCCR3 0x00000367 |
| 1099 | #define MSR_P4_FLAME_CCCR0 0x00000368 |
| 1100 | #define MSR_P4_FLAME_CCCR1 0x00000369 |
| 1101 | #define MSR_P4_FLAME_CCCR2 0x0000036a |
| 1102 | #define MSR_P4_FLAME_CCCR3 0x0000036b |
| 1103 | #define MSR_P4_IQ_CCCR0 0x0000036c |
| 1104 | #define MSR_P4_IQ_CCCR1 0x0000036d |
| 1105 | #define MSR_P4_IQ_CCCR2 0x0000036e |
| 1106 | #define MSR_P4_IQ_CCCR3 0x0000036f |
| 1107 | #define MSR_P4_IQ_CCCR4 0x00000370 |
| 1108 | #define MSR_P4_IQ_CCCR5 0x00000371 |
| 1109 | #define MSR_P4_ALF_ESCR0 0x000003ca |
| 1110 | #define MSR_P4_ALF_ESCR1 0x000003cb |
| 1111 | #define MSR_P4_BPU_ESCR0 0x000003b2 |
| 1112 | #define MSR_P4_BPU_ESCR1 0x000003b3 |
| 1113 | #define MSR_P4_BSU_ESCR0 0x000003a0 |
| 1114 | #define MSR_P4_BSU_ESCR1 0x000003a1 |
| 1115 | #define MSR_P4_CRU_ESCR0 0x000003b8 |
| 1116 | #define MSR_P4_CRU_ESCR1 0x000003b9 |
| 1117 | #define MSR_P4_CRU_ESCR2 0x000003cc |
| 1118 | #define MSR_P4_CRU_ESCR3 0x000003cd |
| 1119 | #define MSR_P4_CRU_ESCR4 0x000003e0 |
| 1120 | #define MSR_P4_CRU_ESCR5 0x000003e1 |
| 1121 | #define MSR_P4_DAC_ESCR0 0x000003a8 |
| 1122 | #define MSR_P4_DAC_ESCR1 0x000003a9 |
| 1123 | #define MSR_P4_FIRM_ESCR0 0x000003a4 |
| 1124 | #define MSR_P4_FIRM_ESCR1 0x000003a5 |
| 1125 | #define MSR_P4_FLAME_ESCR0 0x000003a6 |
| 1126 | #define MSR_P4_FLAME_ESCR1 0x000003a7 |
| 1127 | #define MSR_P4_FSB_ESCR0 0x000003a2 |
| 1128 | #define MSR_P4_FSB_ESCR1 0x000003a3 |
| 1129 | #define MSR_P4_IQ_ESCR0 0x000003ba |
| 1130 | #define MSR_P4_IQ_ESCR1 0x000003bb |
| 1131 | #define MSR_P4_IS_ESCR0 0x000003b4 |
| 1132 | #define MSR_P4_IS_ESCR1 0x000003b5 |
| 1133 | #define MSR_P4_ITLB_ESCR0 0x000003b6 |
| 1134 | #define MSR_P4_ITLB_ESCR1 0x000003b7 |
| 1135 | #define MSR_P4_IX_ESCR0 0x000003c8 |
| 1136 | #define MSR_P4_IX_ESCR1 0x000003c9 |
| 1137 | #define MSR_P4_MOB_ESCR0 0x000003aa |
| 1138 | #define MSR_P4_MOB_ESCR1 0x000003ab |
| 1139 | #define MSR_P4_MS_ESCR0 0x000003c0 |
| 1140 | #define MSR_P4_MS_ESCR1 0x000003c1 |
| 1141 | #define MSR_P4_PMH_ESCR0 0x000003ac |
| 1142 | #define MSR_P4_PMH_ESCR1 0x000003ad |
| 1143 | #define MSR_P4_RAT_ESCR0 0x000003bc |
| 1144 | #define MSR_P4_RAT_ESCR1 0x000003bd |
| 1145 | #define MSR_P4_SAAT_ESCR0 0x000003ae |
| 1146 | #define MSR_P4_SAAT_ESCR1 0x000003af |
| 1147 | #define MSR_P4_SSU_ESCR0 0x000003be |
| 1148 | #define MSR_P4_SSU_ESCR1 0x000003bf /* guess: not in manual */ |
| 1149 | |
| 1150 | #define MSR_P4_TBPU_ESCR0 0x000003c2 |
| 1151 | #define MSR_P4_TBPU_ESCR1 0x000003c3 |
| 1152 | #define MSR_P4_TC_ESCR0 0x000003c4 |
| 1153 | #define MSR_P4_TC_ESCR1 0x000003c5 |
| 1154 | #define MSR_P4_U2L_ESCR0 0x000003b0 |
| 1155 | #define MSR_P4_U2L_ESCR1 0x000003b1 |
| 1156 | |
| 1157 | #define MSR_P4_PEBS_MATRIX_VERT 0x000003f2 |
| 1158 | |
| 1159 | /* Intel Core-based CPU performance counters */ |
| 1160 | #define MSR_CORE_PERF_FIXED_CTR0 0x00000309 |
| 1161 | #define MSR_CORE_PERF_FIXED_CTR1 0x0000030a |
| 1162 | #define MSR_CORE_PERF_FIXED_CTR2 0x0000030b |
| 1163 | #define MSR_CORE_PERF_FIXED_CTR3 0x0000030c |
| 1164 | #define MSR_CORE_PERF_FIXED_CTR_CTRL 0x0000038d |
| 1165 | #define MSR_CORE_PERF_GLOBAL_STATUS 0x0000038e |
| 1166 | #define MSR_CORE_PERF_GLOBAL_CTRL 0x0000038f |
| 1167 | #define MSR_CORE_PERF_GLOBAL_OVF_CTRL 0x00000390 |
| 1168 | |
| 1169 | #define MSR_PERF_METRICS 0x00000329 |
| 1170 | |
| 1171 | /* PERF_GLOBAL_OVF_CTRL bits */ |
| 1172 | #define MSR_CORE_PERF_GLOBAL_OVF_CTRL_TRACE_TOPA_PMI_BIT 55 |
| 1173 | #define MSR_CORE_PERF_GLOBAL_OVF_CTRL_TRACE_TOPA_PMI (1ULL << MSR_CORE_PERF_GLOBAL_OVF_CTRL_TRACE_TOPA_PMI_BIT) |
| 1174 | #define MSR_CORE_PERF_GLOBAL_OVF_CTRL_OVF_BUF_BIT 62 |
| 1175 | #define MSR_CORE_PERF_GLOBAL_OVF_CTRL_OVF_BUF (1ULL << MSR_CORE_PERF_GLOBAL_OVF_CTRL_OVF_BUF_BIT) |
| 1176 | #define MSR_CORE_PERF_GLOBAL_OVF_CTRL_COND_CHGD_BIT 63 |
| 1177 | #define MSR_CORE_PERF_GLOBAL_OVF_CTRL_COND_CHGD (1ULL << MSR_CORE_PERF_GLOBAL_OVF_CTRL_COND_CHGD_BIT) |
| 1178 | |
| 1179 | /* Geode defined MSRs */ |
| 1180 | #define MSR_GEODE_BUSCONT_CONF0 0x00001900 |
| 1181 | |
| 1182 | /* Intel VT MSRs */ |
| 1183 | #define MSR_IA32_VMX_BASIC 0x00000480 |
| 1184 | #define MSR_IA32_VMX_PINBASED_CTLS 0x00000481 |
| 1185 | #define MSR_IA32_VMX_PROCBASED_CTLS 0x00000482 |
| 1186 | #define MSR_IA32_VMX_EXIT_CTLS 0x00000483 |
| 1187 | #define MSR_IA32_VMX_ENTRY_CTLS 0x00000484 |
| 1188 | #define MSR_IA32_VMX_MISC 0x00000485 |
| 1189 | #define MSR_IA32_VMX_CR0_FIXED0 0x00000486 |
| 1190 | #define MSR_IA32_VMX_CR0_FIXED1 0x00000487 |
| 1191 | #define MSR_IA32_VMX_CR4_FIXED0 0x00000488 |
| 1192 | #define MSR_IA32_VMX_CR4_FIXED1 0x00000489 |
| 1193 | #define MSR_IA32_VMX_VMCS_ENUM 0x0000048a |
| 1194 | #define MSR_IA32_VMX_PROCBASED_CTLS2 0x0000048b |
| 1195 | #define MSR_IA32_VMX_EPT_VPID_CAP 0x0000048c |
| 1196 | #define MSR_IA32_VMX_TRUE_PINBASED_CTLS 0x0000048d |
| 1197 | #define MSR_IA32_VMX_TRUE_PROCBASED_CTLS 0x0000048e |
| 1198 | #define MSR_IA32_VMX_TRUE_EXIT_CTLS 0x0000048f |
| 1199 | #define MSR_IA32_VMX_TRUE_ENTRY_CTLS 0x00000490 |
| 1200 | #define MSR_IA32_VMX_VMFUNC 0x00000491 |
| 1201 | #define MSR_IA32_VMX_PROCBASED_CTLS3 0x00000492 |
| 1202 | |
| 1203 | /* Resctrl MSRs: */ |
| 1204 | /* - Intel: */ |
| 1205 | #define MSR_IA32_L3_QOS_CFG 0xc81 |
| 1206 | #define MSR_IA32_L2_QOS_CFG 0xc82 |
| 1207 | #define MSR_IA32_QM_EVTSEL 0xc8d |
| 1208 | #define MSR_IA32_QM_CTR 0xc8e |
| 1209 | #define MSR_IA32_PQR_ASSOC 0xc8f |
| 1210 | #define MSR_IA32_L3_CBM_BASE 0xc90 |
| 1211 | #define MSR_RMID_SNC_CONFIG 0xca0 |
| 1212 | #define MSR_IA32_L2_CBM_BASE 0xd10 |
| 1213 | #define MSR_IA32_MBA_THRTL_BASE 0xd50 |
| 1214 | |
| 1215 | /* - AMD: */ |
| 1216 | #define MSR_IA32_MBA_BW_BASE 0xc0000200 |
| 1217 | #define MSR_IA32_SMBA_BW_BASE 0xc0000280 |
| 1218 | #define MSR_IA32_EVT_CFG_BASE 0xc0000400 |
| 1219 | |
| 1220 | /* AMD-V MSRs */ |
| 1221 | #define MSR_VM_CR 0xc0010114 |
| 1222 | #define MSR_VM_IGNNE 0xc0010115 |
| 1223 | #define MSR_VM_HSAVE_PA 0xc0010117 |
| 1224 | |
| 1225 | #define SVM_VM_CR_VALID_MASK 0x001fULL |
| 1226 | #define SVM_VM_CR_SVM_LOCK_MASK 0x0008ULL |
| 1227 | #define SVM_VM_CR_SVM_DIS_MASK 0x0010ULL |
| 1228 | |
| 1229 | /* Hardware Feedback Interface */ |
| 1230 | #define MSR_IA32_HW_FEEDBACK_PTR 0x17d0 |
| 1231 | #define MSR_IA32_HW_FEEDBACK_CONFIG 0x17d1 |
| 1232 | |
| 1233 | /* x2APIC locked status */ |
| 1234 | #define MSR_IA32_XAPIC_DISABLE_STATUS 0xBD |
| 1235 | #define LEGACY_XAPIC_DISABLED BIT(0) /* |
| 1236 | * x2APIC mode is locked and |
| 1237 | * disabling x2APIC will cause |
| 1238 | * a #GP |
| 1239 | */ |
| 1240 | |
| 1241 | #endif /* _ASM_X86_MSR_INDEX_H */ |