Commit | Line | Data |
---|---|---|
be7baf80 TG |
1 | #ifndef __ASM_X86_MSR_H_ |
2 | #define __ASM_X86_MSR_H_ | |
3 | ||
4 | #include <asm/msr-index.h> | |
5 | ||
d43a3312 MF |
6 | #ifndef __ASSEMBLY__ |
7 | # include <linux/types.h> | |
8 | #endif | |
9 | ||
8f12dea6 GOC |
10 | #ifdef __KERNEL__ |
11 | #ifndef __ASSEMBLY__ | |
c210d249 GOC |
12 | |
13 | #include <asm/asm.h> | |
14 | #include <asm/errno.h> | |
15 | ||
1e160cc3 | 16 | static inline unsigned long long native_read_tscp(unsigned int *aux) |
8f12dea6 GOC |
17 | { |
18 | unsigned long low, high; | |
19 | asm volatile (".byte 0x0f,0x01,0xf9" | |
20 | : "=a" (low), "=d" (high), "=c" (*aux)); | |
21 | return low | ((u64)high >> 32); | |
22 | } | |
23 | ||
c210d249 GOC |
/*
 * i386 calling convention returns 64-bit value in edx:eax, while
 * x86_64 returns at rax. Also, the "A" constraint does not really
 * mean rdx:rax in x86_64, so we need specialized behaviour for each
 * architecture
 */
#ifdef CONFIG_X86_64
/* Two separate 32-bit operands; EAX_EDX_VAL stitches them together. */
#define DECLARE_ARGS(val, low, high)	unsigned low, high
#define EAX_EDX_VAL(val, low, high)	(low | ((u64)(high) << 32))
#define EAX_EDX_ARGS(val, low, high)	"a" (low), "d" (high)
#define EAX_EDX_RET(val, low, high)	"=a" (low), "=d" (high)
#else
/* On 32-bit the "A" constraint genuinely names the edx:eax pair. */
#define DECLARE_ARGS(val, low, high)	unsigned long long val
#define EAX_EDX_VAL(val, low, high)	(val)
#define EAX_EDX_ARGS(val, low, high)	"A" (val)
#define EAX_EDX_RET(val, low, high)	"=A" (val)
#endif
41 | ||
be7baf80 TG |
/*
 * Read MSR @msr with the RDMSR instruction.  The result arrives in
 * edx:eax; the DECLARE_ARGS/EAX_EDX_* helpers hide the 32- vs 64-bit
 * return-register difference (see the comment above their definition).
 * Faults on a non-existent MSR (#GP) — use native_read_msr_safe() when
 * the MSR may be absent.
 */
static inline unsigned long long native_read_msr(unsigned int msr)
{
	DECLARE_ARGS(val, low, high);

	asm volatile("rdmsr" : EAX_EDX_RET(val, low, high) : "c" (msr));
	return EAX_EDX_VAL(val, low, high);
}
49 | ||
50 | static inline unsigned long long native_read_msr_safe(unsigned int msr, | |
51 | int *err) | |
52 | { | |
c210d249 | 53 | DECLARE_ARGS(val, low, high); |
be7baf80 | 54 | |
56ec1ddc | 55 | asm volatile("2: rdmsr ; xor %0,%0\n" |
be7baf80 TG |
56 | "1:\n\t" |
57 | ".section .fixup,\"ax\"\n\t" | |
56ec1ddc | 58 | "3: mov %3,%0 ; jmp 1b\n\t" |
be7baf80 | 59 | ".previous\n\t" |
7d24a827 | 60 | _ASM_EXTABLE(2b,3b) |
c210d249 | 61 | : "=r" (*err), EAX_EDX_RET(val, low, high) |
be7baf80 | 62 | : "c" (msr), "i" (-EFAULT)); |
c210d249 | 63 | return EAX_EDX_VAL(val, low, high); |
be7baf80 TG |
64 | } |
65 | ||
c9dcda5c GOC |
/*
 * Write the 64-bit value @high:@low to MSR @msr with WRMSR.
 * Faults (#GP) on a non-existent MSR — use native_write_msr_safe()
 * when the MSR may be absent.
 */
static inline void native_write_msr(unsigned int msr,
				    unsigned low, unsigned high)
{
	asm volatile("wrmsr" : : "c" (msr), "a"(low), "d" (high));
}
71 | ||
/*
 * Write @high:@low to MSR @msr, catching a possible #GP fault.
 * Returns 0 on success or -EFAULT when WRMSR faulted: the exception
 * table sends the fault to label 3, which loads -EFAULT and jumps past
 * the xor that would have cleared the error code.
 *
 * Operand map (same on both arches, since there is one output):
 * %0 = err ("=a"), %1 = msr ("c"), %2 = low (tied to %0's register),
 * %3 = high ("d"), %4 = -EFAULT immediate.
 */
static inline int native_write_msr_safe(unsigned int msr,
					unsigned low, unsigned high)
{
	int err;
	asm volatile("2: wrmsr ; xor %0,%0\n"
		     "1:\n\t"
		     ".section .fixup,\"ax\"\n\t"
		     "3: mov %4,%0 ; jmp 1b\n\t"
		     ".previous\n\t"
		     _ASM_EXTABLE(2b,3b)
		     : "=a" (err)
		     : "c" (msr), "0" (low), "d" (high),
		       "i" (-EFAULT));
	return err;
}
87 | ||
cdc7957d | 88 | extern unsigned long long native_read_tsc(void); |
be7baf80 | 89 | |
92767af0 IM |
/*
 * Read the time-stamp counter with RDTSC.  The rdtsc_barrier() calls
 * on both sides keep the read from being speculatively hoisted or sunk
 * past surrounding code (NOTE(review): barrier implementation is
 * defined elsewhere — presumably an lfence/mfence alternative; the
 * ordering guarantee depends on it).  __always_inline because callers
 * use this for fine-grained timing.
 */
static __always_inline unsigned long long __native_read_tsc(void)
{
	DECLARE_ARGS(val, low, high);

	rdtsc_barrier();
	asm volatile("rdtsc" : EAX_EDX_RET(val, low, high));
	rdtsc_barrier();

	return EAX_EDX_VAL(val, low, high);
}
100 | ||
/*
 * Read performance-monitoring counter @counter with RDPMC.
 * Faults (#GP) if @counter is out of range or CR4.PCE disallows
 * user-mode access.
 */
static inline unsigned long long native_read_pmc(int counter)
{
	DECLARE_ARGS(val, low, high);

	asm volatile("rdpmc" : EAX_EDX_RET(val, low, high) : "c" (counter));
	return EAX_EDX_VAL(val, low, high);
}
108 | ||
109 | #ifdef CONFIG_PARAVIRT | |
110 | #include <asm/paravirt.h> | |
96a388de | 111 | #else |
be7baf80 TG |
112 | #include <linux/errno.h> |
113 | /* | |
114 | * Access to machine-specific registers (available on 586 and better only) | |
115 | * Note: the rd* operations modify the parameters directly (without using | |
116 | * pointer indirection), this allows gcc to optimize better | |
117 | */ | |
118 | ||
/*
 * rdmsr() — read MSR @msr, splitting the 64-bit result into
 * @val1 (low 32 bits) and @val2 (high 32 bits).  Writes the
 * parameters directly (no pointers) so gcc can optimize better.
 */
#define rdmsr(msr,val1,val2)					\
	do {							\
		u64 __val = native_read_msr(msr);		\
		(val1) = (u32)__val;				\
		(val2) = (u32)(__val >> 32);			\
	} while(0)
125 | ||
/* wrmsr() — write @high:@low to MSR @msr (non-paravirt binding). */
static inline void wrmsr(unsigned msr, unsigned low, unsigned high)
{
	native_write_msr(msr, low, high);
}
130 | ||
/* rdmsrl() — read MSR @msr into the single 64-bit lvalue @val. */
#define rdmsrl(msr,val)						\
	((val) = native_read_msr(msr))

/* wrmsrl() — write 64-bit @val to MSR @msr, splitting into u32 halves. */
#define wrmsrl(msr, val)					\
	native_write_msr(msr, (u32)((u64)(val)), (u32)((u64)(val) >> 32))
be7baf80 TG |
136 | |
/* wrmsr with exception handling: returns 0 or -EFAULT on #GP. */
static inline int wrmsr_safe(unsigned msr, unsigned low, unsigned high)
{
	return native_write_msr_safe(msr, low, high);
}
142 | ||
/*
 * rdmsr with exception handling: stores the low/high halves through
 * pointers @p1/@p2 and evaluates to 0 on success or -EFAULT on fault.
 */
#define rdmsr_safe(msr,p1,p2)					\
	({							\
		int __err;					\
		u64 __val = native_read_msr_safe(msr, &__err);	\
		(*p1) = (u32)__val;				\
		(*p2) = (u32)(__val >> 32);			\
		__err;						\
	})
152 | ||
/* rdtscl() — read only the low 32 bits of the TSC into @low. */
#define rdtscl(low)						\
	((low) = (u32)native_read_tsc())

/* rdtscll() — read the full 64-bit TSC into @val. */
#define rdtscll(val)						\
	((val) = native_read_tsc())
158 | ||
be7baf80 TG |
/*
 * rdpmc() — read performance counter @counter, splitting the result
 * into @low and @high 32-bit halves.
 */
#define rdpmc(counter,low,high)					\
	do {							\
		u64 _l = native_read_pmc(counter);		\
		(low)  = (u32)_l;				\
		(high) = (u32)(_l >> 32);			\
	} while(0)
be7baf80 | 165 | |
c210d249 GOC |
/*
 * rdtscp() — read the TSC into @low/@high and the TSC_AUX signature
 * into @aux (passed by name; its address is taken exactly once).
 */
#define rdtscp(low, high, aux)					\
	do {							\
		unsigned long long _val = native_read_tscp(&(aux));	\
		(low) = (u32)_val;				\
		(high) = (u32)(_val >> 32);			\
	} while (0)

/* rdtscpll() — 64-bit variant: TSC into @val, signature into @aux. */
#define rdtscpll(val, aux) (val) = native_read_tscp(&(aux))
be7baf80 | 174 | |
c210d249 | 175 | #endif /* !CONFIG_PARAVIRT */ |
be7baf80 | 176 | |
be7baf80 | 177 | |
/* Fault-checked 64-bit MSR write; evaluates to 0 or -EFAULT. */
#define checking_wrmsrl(msr,val) wrmsr_safe(msr,(u32)(val),(u32)((val)>>32))

/* 0x10 is the TSC MSR — NOTE(review): consider the symbolic name from
 * msr-index.h; confirm before replacing the literal. */
#define write_tsc(val1,val2) wrmsr(0x10, val1, val2)

/* 0xc0000103 is TSC_AUX, the value RDTSCP returns in ecx — same
 * NOTE(review) about using the msr-index.h constant applies. */
#define write_rdtscp_aux(val) wrmsr(0xc0000103, val, 0)
183 | ||
be7baf80 TG |
184 | #ifdef CONFIG_SMP |
185 | void rdmsr_on_cpu(unsigned int cpu, u32 msr_no, u32 *l, u32 *h); | |
186 | void wrmsr_on_cpu(unsigned int cpu, u32 msr_no, u32 l, u32 h); | |
187 | int rdmsr_safe_on_cpu(unsigned int cpu, u32 msr_no, u32 *l, u32 *h); | |
188 | int wrmsr_safe_on_cpu(unsigned int cpu, u32 msr_no, u32 l, u32 h); | |
189 | #else /* CONFIG_SMP */ | |
190 | static inline void rdmsr_on_cpu(unsigned int cpu, u32 msr_no, u32 *l, u32 *h) | |
191 | { | |
192 | rdmsr(msr_no, *l, *h); | |
193 | } | |
194 | static inline void wrmsr_on_cpu(unsigned int cpu, u32 msr_no, u32 l, u32 h) | |
195 | { | |
196 | wrmsr(msr_no, l, h); | |
197 | } | |
198 | static inline int rdmsr_safe_on_cpu(unsigned int cpu, u32 msr_no, u32 *l, u32 *h) | |
199 | { | |
200 | return rdmsr_safe(msr_no, l, h); | |
201 | } | |
202 | static inline int wrmsr_safe_on_cpu(unsigned int cpu, u32 msr_no, u32 l, u32 h) | |
203 | { | |
204 | return wrmsr_safe(msr_no, l, h); | |
205 | } | |
206 | #endif /* CONFIG_SMP */ | |
751de83c | 207 | #endif /* __ASSEMBLY__ */ |
c210d249 GOC |
208 | #endif /* __KERNEL__ */ |
209 | ||
be7baf80 | 210 | |
96a388de | 211 | #endif |