Commit | Line | Data |
---|---|---|
1965aae3 PA |
1 | #ifndef _ASM_X86_MSR_H |
2 | #define _ASM_X86_MSR_H | |
be7baf80 TG |
3 | |
4 | #include <asm/msr-index.h> | |
5 | ||
d43a3312 MF |
6 | #ifndef __ASSEMBLY__ |
7 | # include <linux/types.h> | |
8 | #endif | |
9 | ||
8f12dea6 GOC |
10 | #ifdef __KERNEL__ |
11 | #ifndef __ASSEMBLY__ | |
c210d249 GOC |
12 | |
13 | #include <asm/asm.h> | |
14 | #include <asm/errno.h> | |
15 | ||
/*
 * Read the 64-bit TSC together with the TSC_AUX MSR.
 * The ".byte 0x0f,0x01,0xf9" sequence is the RDTSCP opcode, emitted
 * raw so assemblers that predate RDTSCP can still build this file.
 * Returns the TSC value; *aux receives TSC_AUX (from %ecx).
 */
static inline unsigned long long native_read_tscp(unsigned int *aux)
{
	unsigned long low, high;
	asm volatile(".byte 0x0f,0x01,0xf9"
		     : "=a" (low), "=d" (high), "=c" (*aux));
	return low | ((u64)high << 32);
}
23 | ||
/*
 * i386 calling convention returns 64-bit value in edx:eax, while
 * x86_64 returns at rax. Also, the "A" constraint does not really
 * mean rdx:rax in x86_64, so we need specialized behaviour for each
 * architecture
 */
#ifdef CONFIG_X86_64
/* 64-bit: keep the two 32-bit halves in separate variables and
 * combine them explicitly after the asm. */
#define DECLARE_ARGS(val, low, high)	unsigned low, high
#define EAX_EDX_VAL(val, low, high)	((low) | ((u64)(high) << 32))
#define EAX_EDX_ARGS(val, low, high)	"a" (low), "d" (high)
#define EAX_EDX_RET(val, low, high)	"=a" (low), "=d" (high)
#else
/* 32-bit: the "A" constraint denotes the edx:eax register pair, so a
 * single 64-bit variable can be bound directly. */
#define DECLARE_ARGS(val, low, high)	unsigned long long val
#define EAX_EDX_VAL(val, low, high)	(val)
#define EAX_EDX_ARGS(val, low, high)	"A" (val)
#define EAX_EDX_RET(val, low, high)	"=A" (val)
#endif
41 | ||
/*
 * Read MSR "msr" (selected via %ecx).  No exception handling: a
 * non-existent MSR raises #GP and is not caught here — use
 * native_read_msr_safe() for that.
 */
static inline unsigned long long native_read_msr(unsigned int msr)
{
	DECLARE_ARGS(val, low, high);

	asm volatile("rdmsr" : EAX_EDX_RET(val, low, high) : "c" (msr));
	return EAX_EDX_VAL(val, low, high);
}
49 | ||
/*
 * Read MSR "msr" with exception handling.  On success the xor clears
 * *err; if the rdmsr at label 2 faults, the exception table sends
 * control to the fixup at label 3, which stores -EFAULT in *err and
 * jumps back.  The returned MSR value is only meaningful when
 * *err == 0.
 */
static inline unsigned long long native_read_msr_safe(unsigned int msr,
						      int *err)
{
	DECLARE_ARGS(val, low, high);

	asm volatile("2: rdmsr ; xor %[err],%[err]\n"
		     "1:\n\t"
		     ".section .fixup,\"ax\"\n\t"
		     "3: mov %[fault],%[err] ; jmp 1b\n\t"
		     ".previous\n\t"
		     _ASM_EXTABLE(2b, 3b)	/* a fault at 2 resumes at 3 */
		     : [err] "=r" (*err), EAX_EDX_RET(val, low, high)
		     : "c" (msr), [fault] "i" (-EFAULT));
	return EAX_EDX_VAL(val, low, high);
}
65 | ||
b05f78f5 YL |
66 | static inline unsigned long long native_read_msr_amd_safe(unsigned int msr, |
67 | int *err) | |
68 | { | |
69 | DECLARE_ARGS(val, low, high); | |
70 | ||
56ec1ddc | 71 | asm volatile("2: rdmsr ; xor %0,%0\n" |
be7baf80 TG |
72 | "1:\n\t" |
73 | ".section .fixup,\"ax\"\n\t" | |
56ec1ddc | 74 | "3: mov %3,%0 ; jmp 1b\n\t" |
be7baf80 | 75 | ".previous\n\t" |
abb0ade0 | 76 | _ASM_EXTABLE(2b, 3b) |
c210d249 | 77 | : "=r" (*err), EAX_EDX_RET(val, low, high) |
b05f78f5 | 78 | : "c" (msr), "D" (0x9c5a203a), "i" (-EFAULT)); |
c210d249 | 79 | return EAX_EDX_VAL(val, low, high); |
be7baf80 TG |
80 | } |
81 | ||
/*
 * Write low:high into MSR "msr".  No exception handling — a bad MSR
 * faults.  The "memory" clobber keeps the wrmsr ordered against
 * surrounding memory accesses.
 */
static inline void native_write_msr(unsigned int msr,
				    unsigned low, unsigned high)
{
	asm volatile("wrmsr" : : "c" (msr), "a"(low), "d" (high) : "memory");
}
87 | ||
/*
 * wrmsr with exception handling: returns 0 on success, or -EFAULT if
 * the wrmsr at label 2 faulted (the fixup at label 3 stores -EFAULT
 * into err via the exception table).  "0" ties the low half to the
 * same register as the err output (%eax).
 */
static inline int native_write_msr_safe(unsigned int msr,
					unsigned low, unsigned high)
{
	int err;
	asm volatile("2: wrmsr ; xor %[err],%[err]\n"
		     "1:\n\t"
		     ".section .fixup,\"ax\"\n\t"
		     "3: mov %[fault],%[err] ; jmp 1b\n\t"
		     ".previous\n\t"
		     _ASM_EXTABLE(2b, 3b)
		     : [err] "=a" (err)
		     : "c" (msr), "0" (low), "d" (high),
		       [fault] "i" (-EFAULT)
		     : "memory");
	return err;
}
104 | ||
cdc7957d | 105 | extern unsigned long long native_read_tsc(void); |
be7baf80 | 106 | |
/*
 * Read the time-stamp counter.  rdtsc_barrier() is issued on both
 * sides of the rdtsc to keep the read from being reordered relative
 * to surrounding code.  __always_inline: this is used on hot timing
 * paths where a call would distort the measurement.
 */
static __always_inline unsigned long long __native_read_tsc(void)
{
	DECLARE_ARGS(val, low, high);

	rdtsc_barrier();
	asm volatile("rdtsc" : EAX_EDX_RET(val, low, high));
	rdtsc_barrier();

	return EAX_EDX_VAL(val, low, high);
}
117 | ||
/* Read performance-monitoring counter "counter" (rdpmc, %ecx selects). */
static inline unsigned long long native_read_pmc(int counter)
{
	DECLARE_ARGS(val, low, high);

	asm volatile("rdpmc" : EAX_EDX_RET(val, low, high) : "c" (counter));
	return EAX_EDX_VAL(val, low, high);
}
125 | ||
#ifdef CONFIG_PARAVIRT
#include <asm/paravirt.h>
#else
#include <linux/errno.h>
/*
 * Access to machine-specific registers (available on 586 and better only)
 * Note: the rd* operations modify the parameters directly (without using
 * pointer indirection), this allows gcc to optimize better
 */

/* rdmsr: read MSR "msr" into the 32-bit lvalues val1 (low) and val2 (high). */
#define rdmsr(msr, val1, val2)					\
do {								\
	u64 __val = native_read_msr((msr));			\
	(val1) = (u32)__val;					\
	(val2) = (u32)(__val >> 32);				\
} while (0)
be7baf80 | 142 | |
/* Write low:high to MSR "msr" (non-checking; a bad MSR faults). */
static inline void wrmsr(unsigned msr, unsigned low, unsigned high)
{
	native_write_msr(msr, low, high);
}
147 | ||
/* rdmsrl: read MSR "msr" into the 64-bit lvalue "val". */
#define rdmsrl(msr, val)			\
	((val) = native_read_msr((msr)))

/* wrmsrl: write the 64-bit value "val" to MSR "msr", split into halves. */
#define wrmsrl(msr, val)						\
	native_write_msr((msr), (u32)((u64)(val)), (u32)((u64)(val) >> 32))
153 | |
/* wrmsr with exception handling */
/* Returns 0 on success, or -EFAULT if the wrmsr faulted. */
static inline int wrmsr_safe(unsigned msr, unsigned low, unsigned high)
{
	return native_write_msr_safe(msr, low, high);
}
159 | ||
/* rdmsr with exception handling */
/*
 * Read MSR "msr" into the u32s pointed to by p1 (low) and p2 (high);
 * evaluates to 0 on success or -EFAULT if the rdmsr faulted.
 *
 * Fix: p1/p2 are now parenthesized before dereferencing — the old
 * (*p1) form mis-binds when the caller passes a non-primary
 * expression (e.g. "cond ? &a : &b") as the pointer argument.
 */
#define rdmsr_safe(msr, p1, p2)					\
({								\
	int __err;						\
	u64 __val = native_read_msr_safe((msr), &__err);	\
	(*(p1)) = (u32)__val;					\
	(*(p2)) = (u32)(__val >> 32);				\
	__err;							\
})
be7baf80 | 169 | |
/*
 * Read MSR "msr" into *p with exception handling.
 * Returns 0 on success, -EFAULT if the rdmsr faulted (in which case
 * the value stored through p is not meaningful).
 */
static inline int rdmsrl_safe(unsigned msr, unsigned long long *p)
{
	int ret;
	unsigned long long v = native_read_msr_safe(msr, &ret);

	*p = v;
	return ret;
}
/*
 * AMD-specific counterpart of rdmsrl_safe(): reads MSR "msr" into *p
 * via native_read_msr_amd_safe().  Returns 0 on success, -EFAULT on
 * fault.
 */
static inline int rdmsrl_amd_safe(unsigned msr, unsigned long long *p)
{
	int ret;
	unsigned long long v = native_read_msr_amd_safe(msr, &ret);

	*p = v;
	return ret;
}
1de87bd4 | 184 | |
/* Read the low 32 bits of the TSC into "low". */
#define rdtscl(low)						\
	((low) = (u32)native_read_tsc())

/* Read the whole 64-bit TSC into "val". */
#define rdtscll(val)						\
	((val) = native_read_tsc())

/* Read performance counter "counter" into the 32-bit halves low:high. */
#define rdpmc(counter, low, high)			\
do {							\
	u64 _l = native_read_pmc((counter));		\
	(low) = (u32)_l;				\
	(high) = (u32)(_l >> 32);			\
} while (0)

/* RDTSCP: TSC into low:high, TSC_AUX into "aux". */
#define rdtscp(low, high, aux)					\
do {								\
	unsigned long long _val = native_read_tscp(&(aux));	\
	(low) = (u32)_val;					\
	(high) = (u32)(_val >> 32);				\
} while (0)

/*
 * RDTSCP returning the TSC as one 64-bit value, TSC_AUX into "aux".
 * Fix: the expansion is now parenthesized so it is a single primary
 * expression — consistent with rdtscl()/rdtscll() above — instead of
 * a bare assignment whose precedence leaks into the call site.
 */
#define rdtscpll(val, aux) ((val) = native_read_tscp(&(aux)))
be7baf80 | 206 | |
c210d249 | 207 | #endif /* !CONFIG_PARAVIRT */ |
be7baf80 | 208 | |
be7baf80 | 209 | |
/* wrmsrl() with exception handling: 0 on success, -EFAULT on fault. */
#define checking_wrmsrl(msr, val) wrmsr_safe((msr), (u32)(val), \
					     (u32)((val) >> 32))

/* MSR 0x10 is the time-stamp counter (IA32_TIME_STAMP_COUNTER). */
#define write_tsc(val1, val2) wrmsr(0x10, (val1), (val2))

/* MSR 0xc0000103 is TSC_AUX, the value RDTSCP returns in %ecx. */
#define write_rdtscp_aux(val) wrmsr(0xc0000103, (val), 0)
be7baf80 | 216 | |
#ifdef CONFIG_SMP
/* Cross-CPU MSR access: execute the rd/wrmsr on the given CPU
 * (implemented out of line; the *_safe variants return an error code
 * instead of faulting). */
int rdmsr_on_cpu(unsigned int cpu, u32 msr_no, u32 *l, u32 *h);
int wrmsr_on_cpu(unsigned int cpu, u32 msr_no, u32 l, u32 h);
int rdmsr_safe_on_cpu(unsigned int cpu, u32 msr_no, u32 *l, u32 *h);
int wrmsr_safe_on_cpu(unsigned int cpu, u32 msr_no, u32 l, u32 h);
#else  /* CONFIG_SMP */
/* UP build: only one CPU exists, so access it directly; the "cpu"
 * argument is accepted but ignored.  Return values mirror the SMP
 * prototypes (0 for the non-checking variants). */
static inline int rdmsr_on_cpu(unsigned int cpu, u32 msr_no, u32 *l, u32 *h)
{
	rdmsr(msr_no, *l, *h);
	return 0;
}
static inline int wrmsr_on_cpu(unsigned int cpu, u32 msr_no, u32 l, u32 h)
{
	wrmsr(msr_no, l, h);
	return 0;
}
static inline int rdmsr_safe_on_cpu(unsigned int cpu, u32 msr_no,
				    u32 *l, u32 *h)
{
	return rdmsr_safe(msr_no, l, h);
}
static inline int wrmsr_safe_on_cpu(unsigned int cpu, u32 msr_no, u32 l, u32 h)
{
	return wrmsr_safe(msr_no, l, h);
}
#endif  /* CONFIG_SMP */
751de83c | 243 | #endif /* __ASSEMBLY__ */ |
c210d249 GOC |
244 | #endif /* __KERNEL__ */ |
245 | ||
be7baf80 | 246 | |
1965aae3 | 247 | #endif /* _ASM_X86_MSR_H */ |