ia64/xen-unstable

view xen/include/asm-x86/msr.h @ 15812:86a154e1ef5d

[HVM] Shadow: don't shadow the p2m table.
For HVM vcpus with paging disabled, we used to shadow the p2m table,
and skip the p2m lookup to go from gfn to mfn. Instead, we now
provide a simple pagetable that gives a one-to-one mapping of 4GB, and
shadow that, making the translations from gfn to mfn via the p2m.
This removes the paging-disabled special-case code from the shadow
fault handler, and allows us to expand the p2m interface, since all HVM
translations now go through the same p2m lookups.
Signed-off-by: Tim Deegan <Tim.Deegan@xensource.com>
author Tim Deegan <Tim.Deegan@xensource.com>
date Fri Aug 31 11:06:22 2007 +0100 (2007-08-31)
parents 9554ec3e27cd
children 2477e94450aa
line source
1 #ifndef __ASM_MSR_H
2 #define __ASM_MSR_H
4 #ifndef __ASSEMBLY__
6 #include <xen/smp.h>
7 #include <xen/percpu.h>
/*
 * Read MSR 'msr': low 32 bits into 'val1' (EAX), high 32 bits into
 * 'val2' (EDX).  Faults if 'msr' is not implemented; use rdmsr_safe()
 * when the MSR may be absent.
 */
#define rdmsr(msr,val1,val2) \
     __asm__ __volatile__("rdmsr" \
                          : "=a" (val1), "=d" (val2) \
                          : "c" (msr))
/*
 * Read MSR 'msr' as a single 64-bit value into 'val'.
 * 'val' is assigned, not declared, so any 64-bit-capable lvalue works.
 * Fix: dropped the stray ';' after while(0) -- it made
 * "if (x) rdmsrl(...); else ..." a syntax error.
 */
#define rdmsrl(msr,val) do { unsigned long a__,b__; \
       __asm__ __volatile__("rdmsr" \
                            : "=a" (a__), "=d" (b__) \
                            : "c" (msr)); \
       val = a__ | ((u64)b__<<32); \
} while(0)
/*
 * Write MSR 'msr' from the 32-bit halves 'val1' (EAX) and 'val2' (EDX).
 * Faults on unimplemented MSRs; use wrmsr_safe() when that is possible.
 */
#define wrmsr(msr,val1,val2) \
     __asm__ __volatile__("wrmsr" \
                          : /* no outputs */ \
                          : "c" (msr), "a" (val1), "d" (val2))
26 static inline void wrmsrl(unsigned int msr, __u64 val)
27 {
28 __u32 lo, hi;
29 lo = (__u32)val;
30 hi = (__u32)(val >> 32);
31 wrmsr(msr, lo, hi);
32 }
/*
 * rdmsr with exception handling: like rdmsr(), but a faulting access is
 * caught via the __ex_table fixup mechanism instead of crashing.
 * Evaluates to 0 on success, -EFAULT if the rdmsr faulted (in which
 * case val1/val2 are whatever the fault handler left in EAX/EDX).
 */
#define rdmsr_safe(msr,val1,val2) ({\
    int _rc; \
    __asm__ __volatile__( \
        "1: rdmsr\n2:\n" \
        ".section .fixup,\"ax\"\n" \
        "3: movl %5,%2\n; jmp 2b\n" \
        ".previous\n" \
        ".section __ex_table,\"a\"\n" \
        "   "__FIXUP_ALIGN"\n" \
        "   "__FIXUP_WORD" 1b,3b\n" \
        ".previous\n" \
        : "=a" (val1), "=d" (val2), "=&r" (_rc) \
        : "c" (msr), "2" (0), "i" (-EFAULT)); \
    _rc; })
/*
 * wrmsr with exception handling: like wrmsr(), but a faulting write is
 * caught via the __ex_table fixup mechanism.
 * Evaluates to 0 on success, -EFAULT if the wrmsr faulted.
 */
#define wrmsr_safe(msr,val1,val2) ({\
    int _rc; \
    __asm__ __volatile__( \
        "1: wrmsr\n2:\n" \
        ".section .fixup,\"ax\"\n" \
        "3: movl %5,%0\n; jmp 2b\n" \
        ".previous\n" \
        ".section __ex_table,\"a\"\n" \
        "   "__FIXUP_ALIGN"\n" \
        "   "__FIXUP_WORD" 1b,3b\n" \
        ".previous\n" \
        : "=&r" (_rc) \
        : "c" (msr), "a" (val1), "d" (val2), "0" (0), "i" (-EFAULT)); \
    _rc; })
/* Read the TSC: low 32 bits into 'low' (EAX), high 32 bits into 'high' (EDX). */
#define rdtsc(low,high) \
     __asm__ __volatile__("rdtsc" : "=a" (low), "=d" (high))

/* Read only the low 32 bits of the TSC; EDX is clobbered. */
#define rdtscl(low) \
     __asm__ __volatile__("rdtsc" : "=a" (low) : : "edx")

/* Read the full 64-bit TSC into 'val'. */
#if defined(__i386__)
/* "=A" ties the EDX:EAX pair to a 64-bit operand on 32-bit x86. */
#define rdtscll(val) \
     __asm__ __volatile__("rdtsc" : "=A" (val))
#elif defined(__x86_64__)
/* "=A" does not span EDX:EAX on x86-64, so combine the halves by hand. */
#define rdtscll(val) do { \
     unsigned int a,d; \
     asm volatile("rdtsc" : "=a" (a), "=d" (d)); \
     (val) = ((unsigned long)a) | (((unsigned long)d)<<32); \
} while(0)
#endif
83 #define write_tsc(val1,val2) wrmsr(0x10, val1, val2)
85 #define rdpmc(counter,low,high) \
86 __asm__ __volatile__("rdpmc" \
87 : "=a" (low), "=d" (high) \
88 : "c" (counter))
90 #endif /* !__ASSEMBLY__ */
/* symbolic names for some interesting MSRs */
/* Intel defined MSRs. */
#define MSR_IA32_P5_MC_ADDR             0
#define MSR_IA32_P5_MC_TYPE             1
#define MSR_IA32_TIME_STAMP_COUNTER     0x10
#define MSR_IA32_PLATFORM_ID            0x17
#define MSR_IA32_EBL_CR_POWERON         0x2a
#define MSR_IA32_EBC_FREQUENCY_ID       0x2c

#define MSR_IA32_APICBASE               0x1b
#define MSR_IA32_APICBASE_BSP           (1<<8)
#define MSR_IA32_APICBASE_ENABLE        (1<<11)
#define MSR_IA32_APICBASE_BASE          (0xfffff<<12)

#define MSR_IA32_UCODE_WRITE            0x79
#define MSR_IA32_UCODE_REV              0x8b

#define MSR_P6_PERFCTR0                 0xc1
#define MSR_P6_PERFCTR1                 0xc2

/* MSRs & bits used for VMX enabling */
#define MSR_IA32_VMX_BASIC              0x480
#define MSR_IA32_VMX_PINBASED_CTLS      0x481
#define MSR_IA32_VMX_PROCBASED_CTLS     0x482
#define MSR_IA32_VMX_EXIT_CTLS          0x483
#define MSR_IA32_VMX_ENTRY_CTLS         0x484
#define MSR_IA32_VMX_MISC               0x485
#define MSR_IA32_VMX_CR0_FIXED0         0x486
#define MSR_IA32_VMX_CR0_FIXED1         0x487
#define MSR_IA32_VMX_CR4_FIXED0         0x488
#define MSR_IA32_VMX_CR4_FIXED1         0x489
#define MSR_IA32_VMX_PROCBASED_CTLS2    0x48b
#define IA32_FEATURE_CONTROL_MSR        0x3a
#define IA32_FEATURE_CONTROL_MSR_LOCK                      0x0001
#define IA32_FEATURE_CONTROL_MSR_ENABLE_VMXON_INSIDE_SMX   0x0002
#define IA32_FEATURE_CONTROL_MSR_ENABLE_VMXON_OUTSIDE_SMX  0x0004
#define IA32_FEATURE_CONTROL_MSR_SENTER_PARAM_CTL          0x7f00
#define IA32_FEATURE_CONTROL_MSR_ENABLE_SENTER             0x8000

/* AMD/K8 specific MSRs */
#define MSR_EFER                0xc0000080 /* extended feature register */
#define MSR_STAR                0xc0000081 /* legacy mode SYSCALL target */
#define MSR_LSTAR               0xc0000082 /* long mode SYSCALL target */
#define MSR_CSTAR               0xc0000083 /* compatibility mode SYSCALL target */
#define MSR_SYSCALL_MASK        0xc0000084 /* EFLAGS mask for syscall */
#define MSR_FS_BASE             0xc0000100 /* 64bit FS base */
#define MSR_GS_BASE             0xc0000101 /* 64bit GS base */
#define MSR_SHADOW_GS_BASE      0xc0000102 /* SwapGS GS shadow */
/* EFER bits: */
#define _EFER_SCE   0  /* SYSCALL/SYSRET */
#define _EFER_LME   8  /* Long mode enable */
#define _EFER_LMA   10 /* Long mode active (read-only) */
#define _EFER_NX    11 /* No execute enable */
#define _EFER_SVME  12

#define EFER_SCE    (1<<_EFER_SCE)
#define EFER_LME    (1<<_EFER_LME)
#define EFER_LMA    (1<<_EFER_LMA)
#define EFER_NX     (1<<_EFER_NX)
#define EFER_SVME   (1<<_EFER_SVME)
153 #ifndef __ASSEMBLY__
155 DECLARE_PER_CPU(__u64, efer);
157 static inline __u64 read_efer(void)
158 {
159 if (!this_cpu(efer))
160 rdmsrl(MSR_EFER, this_cpu(efer));
161 return this_cpu(efer);
162 }
164 static inline void write_efer(__u64 val)
165 {
166 this_cpu(efer) = val;
167 wrmsrl(MSR_EFER, val);
168 }
170 #endif
/* Intel MSRs. Some also available on other CPUs */
/* NOTE(review): duplicate of the MSR_IA32_PLATFORM_ID definition above
 * (same value, so the redefinition is benign). */
#define MSR_IA32_PLATFORM_ID    0x17

#define MSR_MTRRcap             0x0fe
#define MSR_IA32_BBL_CR_CTL     0x119

#define MSR_IA32_SYSENTER_CS    0x174
#define MSR_IA32_SYSENTER_ESP   0x175
#define MSR_IA32_SYSENTER_EIP   0x176

#define MSR_IA32_MCG_CAP        0x179
#define MSR_IA32_MCG_STATUS     0x17a
#define MSR_IA32_MCG_CTL        0x17b

/* P4/Xeon+ specific */
#define MSR_IA32_MCG_EAX        0x180
#define MSR_IA32_MCG_EBX        0x181
#define MSR_IA32_MCG_ECX        0x182
#define MSR_IA32_MCG_EDX        0x183
#define MSR_IA32_MCG_ESI        0x184
#define MSR_IA32_MCG_EDI        0x185
#define MSR_IA32_MCG_EBP        0x186
#define MSR_IA32_MCG_ESP        0x187
#define MSR_IA32_MCG_EFLAGS     0x188
#define MSR_IA32_MCG_EIP        0x189
#define MSR_IA32_MCG_RESERVED   0x18A

#define MSR_P6_EVNTSEL0         0x186
#define MSR_P6_EVNTSEL1         0x187

#define MSR_IA32_PERF_STATUS    0x198
#define MSR_IA32_PERF_CTL       0x199

#define MSR_IA32_THERM_CONTROL   0x19a
#define MSR_IA32_THERM_INTERRUPT 0x19b
#define MSR_IA32_THERM_STATUS    0x19c
#define MSR_IA32_MISC_ENABLE     0x1a0

#define MSR_IA32_MISC_ENABLE_PERF_AVAIL   (1<<7)
#define MSR_IA32_MISC_ENABLE_BTS_UNAVAIL  (1<<11)
#define MSR_IA32_MISC_ENABLE_PEBS_UNAVAIL (1<<12)

#define MSR_IA32_DEBUGCTLMSR       0x1d9
#define MSR_IA32_LASTBRANCHFROMIP  0x1db
#define MSR_IA32_LASTBRANCHTOIP    0x1dc
#define MSR_IA32_LASTINTFROMIP     0x1dd
#define MSR_IA32_LASTINTTOIP       0x1de

#define MSR_IA32_MC0_CTL        0x400
#define MSR_IA32_MC0_STATUS     0x401
#define MSR_IA32_MC0_ADDR       0x402
#define MSR_IA32_MC0_MISC       0x403
/* K8 Machine Check MSRs */
#define MSR_K8_MC1_CTL          0x404
#define MSR_K8_MC1_STATUS       0x405
#define MSR_K8_MC1_ADDR         0x406
#define MSR_K8_MC1_MISC         0x407

#define MSR_K8_MC2_CTL          0x408
#define MSR_K8_MC2_STATUS       0x409
#define MSR_K8_MC2_ADDR         0x40A
#define MSR_K8_MC2_MISC         0x40B

#define MSR_K8_MC3_CTL          0x40C
#define MSR_K8_MC3_STATUS       0x40D
#define MSR_K8_MC3_ADDR         0x40E
#define MSR_K8_MC3_MISC         0x40F

#define MSR_K8_MC4_CTL          0x410
#define MSR_K8_MC4_STATUS       0x411
#define MSR_K8_MC4_ADDR         0x412
#define MSR_K8_MC4_MISC         0x413

#define MSR_K8_MC5_CTL          0x414
#define MSR_K8_MC5_STATUS       0x415
#define MSR_K8_MC5_ADDR         0x416
#define MSR_K8_MC5_MISC         0x417
/* Pentium IV performance counter MSRs */
#define MSR_P4_BPU_PERFCTR0     0x300
#define MSR_P4_BPU_PERFCTR1     0x301
#define MSR_P4_BPU_PERFCTR2     0x302
#define MSR_P4_BPU_PERFCTR3     0x303
#define MSR_P4_MS_PERFCTR0      0x304
#define MSR_P4_MS_PERFCTR1      0x305
#define MSR_P4_MS_PERFCTR2      0x306
#define MSR_P4_MS_PERFCTR3      0x307
#define MSR_P4_FLAME_PERFCTR0   0x308
#define MSR_P4_FLAME_PERFCTR1   0x309
#define MSR_P4_FLAME_PERFCTR2   0x30a
#define MSR_P4_FLAME_PERFCTR3   0x30b
#define MSR_P4_IQ_PERFCTR0      0x30c
#define MSR_P4_IQ_PERFCTR1      0x30d
#define MSR_P4_IQ_PERFCTR2      0x30e
#define MSR_P4_IQ_PERFCTR3      0x30f
#define MSR_P4_IQ_PERFCTR4      0x310
#define MSR_P4_IQ_PERFCTR5      0x311
#define MSR_P4_BPU_CCCR0        0x360
#define MSR_P4_BPU_CCCR1        0x361
#define MSR_P4_BPU_CCCR2        0x362
#define MSR_P4_BPU_CCCR3        0x363
#define MSR_P4_MS_CCCR0         0x364
#define MSR_P4_MS_CCCR1         0x365
#define MSR_P4_MS_CCCR2         0x366
#define MSR_P4_MS_CCCR3         0x367
#define MSR_P4_FLAME_CCCR0      0x368
#define MSR_P4_FLAME_CCCR1      0x369
#define MSR_P4_FLAME_CCCR2      0x36a
#define MSR_P4_FLAME_CCCR3      0x36b
#define MSR_P4_IQ_CCCR0         0x36c
#define MSR_P4_IQ_CCCR1         0x36d
#define MSR_P4_IQ_CCCR2         0x36e
#define MSR_P4_IQ_CCCR3         0x36f
#define MSR_P4_IQ_CCCR4         0x370
#define MSR_P4_IQ_CCCR5         0x371
#define MSR_P4_ALF_ESCR0        0x3ca
#define MSR_P4_ALF_ESCR1        0x3cb
#define MSR_P4_BPU_ESCR0        0x3b2
#define MSR_P4_BPU_ESCR1        0x3b3
#define MSR_P4_BSU_ESCR0        0x3a0
#define MSR_P4_BSU_ESCR1        0x3a1
#define MSR_P4_CRU_ESCR0        0x3b8
#define MSR_P4_CRU_ESCR1        0x3b9
#define MSR_P4_CRU_ESCR2        0x3cc
#define MSR_P4_CRU_ESCR3        0x3cd
#define MSR_P4_CRU_ESCR4        0x3e0
#define MSR_P4_CRU_ESCR5        0x3e1
#define MSR_P4_DAC_ESCR0        0x3a8
#define MSR_P4_DAC_ESCR1        0x3a9
#define MSR_P4_FIRM_ESCR0       0x3a4
#define MSR_P4_FIRM_ESCR1       0x3a5
#define MSR_P4_FLAME_ESCR0      0x3a6
#define MSR_P4_FLAME_ESCR1      0x3a7
#define MSR_P4_FSB_ESCR0        0x3a2
#define MSR_P4_FSB_ESCR1        0x3a3
#define MSR_P4_IQ_ESCR0         0x3ba
#define MSR_P4_IQ_ESCR1         0x3bb
#define MSR_P4_IS_ESCR0         0x3b4
#define MSR_P4_IS_ESCR1         0x3b5
#define MSR_P4_ITLB_ESCR0       0x3b6
#define MSR_P4_ITLB_ESCR1       0x3b7
#define MSR_P4_IX_ESCR0         0x3c8
#define MSR_P4_IX_ESCR1         0x3c9
#define MSR_P4_MOB_ESCR0        0x3aa
#define MSR_P4_MOB_ESCR1        0x3ab
#define MSR_P4_MS_ESCR0         0x3c0
#define MSR_P4_MS_ESCR1         0x3c1
#define MSR_P4_PMH_ESCR0        0x3ac
#define MSR_P4_PMH_ESCR1        0x3ad
#define MSR_P4_RAT_ESCR0        0x3bc
#define MSR_P4_RAT_ESCR1        0x3bd
#define MSR_P4_SAAT_ESCR0       0x3ae
#define MSR_P4_SAAT_ESCR1       0x3af
#define MSR_P4_SSU_ESCR0        0x3be
#define MSR_P4_SSU_ESCR1        0x3bf /* guess: not defined in manual */
#define MSR_P4_TBPU_ESCR0       0x3c2
#define MSR_P4_TBPU_ESCR1       0x3c3
#define MSR_P4_TC_ESCR0         0x3c4
#define MSR_P4_TC_ESCR1         0x3c5
#define MSR_P4_U2L_ESCR0        0x3b0
#define MSR_P4_U2L_ESCR1        0x3b1
#define MSR_K6_EFER             0xC0000080
#define MSR_K6_STAR             0xC0000081
#define MSR_K6_WHCR             0xC0000082
#define MSR_K6_UWCCR            0xC0000085
#define MSR_K6_EPMR             0xC0000086
#define MSR_K6_PSOR             0xC0000087
#define MSR_K6_PFIR             0xC0000088

#define MSR_K7_EVNTSEL0         0xC0010000
#define MSR_K7_EVNTSEL1         0xC0010001
#define MSR_K7_EVNTSEL2         0xC0010002
#define MSR_K7_EVNTSEL3         0xC0010003
#define MSR_K7_PERFCTR0         0xC0010004
#define MSR_K7_PERFCTR1         0xC0010005
#define MSR_K7_PERFCTR2         0xC0010006
#define MSR_K7_PERFCTR3         0xC0010007
#define MSR_K7_HWCR             0xC0010015
#define MSR_K7_CLK_CTL          0xC001001b
#define MSR_K7_FID_VID_CTL      0xC0010041
#define MSR_K7_FID_VID_STATUS   0xC0010042

#define MSR_K8_TOP_MEM1         0xC001001A
#define MSR_K8_TOP_MEM2         0xC001001D
#define MSR_K8_SYSCFG           0xC0010010
#define MSR_K8_HWCR             0xC0010015
#define MSR_K8_VM_CR            0xC0010114
#define MSR_K8_VM_HSAVE_PA      0xC0010117

/* MSR_K8_VM_CR bits: */
#define _K8_VMCR_SVME_DISABLE   4
#define K8_VMCR_SVME_DISABLE    (1 << _K8_VMCR_SVME_DISABLE)
367 /* Centaur-Hauls/IDT defined MSRs. */
368 #define MSR_IDT_FCR1 0x107
369 #define MSR_IDT_FCR2 0x108
370 #define MSR_IDT_FCR3 0x109
371 #define MSR_IDT_FCR4 0x10a
373 #define MSR_IDT_MCR0 0x110
374 #define MSR_IDT_MCR1 0x111
375 #define MSR_IDT_MCR2 0x112
376 #define MSR_IDT_MCR3 0x113
377 #define MSR_IDT_MCR4 0x114
378 #define MSR_IDT_MCR5 0x115
379 #define MSR_IDT_MCR6 0x116
380 #define MSR_IDT_MCR7 0x117
381 #define MSR_IDT_MCR_CTRL 0x120
383 /* VIA Cyrix defined MSRs*/
384 #define MSR_VIA_FCR 0x1107
385 #define MSR_VIA_LONGHAUL 0x110a
386 #define MSR_VIA_RNG 0x110b
387 #define MSR_VIA_BCR2 0x1147
389 /* Transmeta defined MSRs */
390 #define MSR_TMTA_LONGRUN_CTRL 0x80868010
391 #define MSR_TMTA_LONGRUN_FLAGS 0x80868011
392 #define MSR_TMTA_LRTI_READOUT 0x80868018
393 #define MSR_TMTA_LRTI_VOLT_MHZ 0x8086801a
395 #endif /* __ASM_MSR_H */