ia64/xen-unstable

view xen/include/asm-x86/x86_64/elf.h @ 15812:86a154e1ef5d

[HVM] Shadow: don't shadow the p2m table.
For HVM vcpus with paging disabled, we used to shadow the p2m table,
and skip the p2m lookup to go from gfn to mfn. Instead, we now
provide a simple pagetable that gives a one-to-one mapping of 4GB, and
shadow that, making the translations from gfn to mfn via the p2m.
This removes the paging-disabled special-case code from the shadow
fault handler, and allows us to expand the p2m interface, since all HVM
translations now go through the same p2m lookups.
Signed-off-by: Tim Deegan <Tim.Deegan@xensource.com>
author Tim Deegan <Tim.Deegan@xensource.com>
date Fri Aug 31 11:06:22 2007 +0100 (2007-08-31)
parents 995841d99780
children 4034317507de
line source
#ifndef __X86_64_ELF_H__
#define __X86_64_ELF_H__

#include <asm/processor.h>

/*
 * General-purpose register file as recorded in an x86-64 ELF core-dump
 * note (PRSTATUS).  The field order is part of the dump format consumed
 * by crash-analysis tools — presumably it mirrors the Linux x86-64
 * user_regs_struct layout; do NOT reorder or insert members.
 */
typedef struct {
    unsigned long r15;
    unsigned long r14;
    unsigned long r13;
    unsigned long r12;
    unsigned long rbp;
    unsigned long rbx;
    unsigned long r11;
    unsigned long r10;
    unsigned long r9;
    unsigned long r8;
    unsigned long rax;
    unsigned long rcx;
    unsigned long rdx;
    unsigned long rsi;
    unsigned long rdi;
    unsigned long orig_rax;   /* not populated by elf_core_save_regs() */
    unsigned long rip;
    unsigned long cs;
    unsigned long eflags;
    unsigned long rsp;
    unsigned long ss;
    unsigned long thread_fs;  /* not populated by elf_core_save_regs() */
    unsigned long thread_gs;  /* not populated by elf_core_save_regs() */
    unsigned long ds;
    unsigned long es;
    unsigned long fs;
    unsigned long gs;
} ELF_Gregset;
/*
 * Snapshot the calling CPU's register state for a crash (kexec) ELF note.
 *
 * core_regs:     receives the general-purpose and segment registers in
 *                ELF core-dump layout.
 * xen_core_regs: receives the control registers cr0/cr2/cr3/cr4.
 *
 * The values describe this function's own execution context — rsp/rbp
 * are sampled inside the call and rip is approximated by
 * current_text_addr() — not some earlier exception frame.  orig_rax,
 * thread_fs and thread_gs are deliberately left unset.
 */
static inline void elf_core_save_regs(ELF_Gregset *core_regs,
                                      crash_xen_core_t *xen_core_regs)
{
    unsigned long tmp;

    /* GPRs: each statement stores one register straight to memory. */
    asm volatile("movq %%r15,%0" : "=m"(core_regs->r15));
    asm volatile("movq %%r14,%0" : "=m"(core_regs->r14));
    asm volatile("movq %%r13,%0" : "=m"(core_regs->r13));
    asm volatile("movq %%r12,%0" : "=m"(core_regs->r12));
    asm volatile("movq %%rbp,%0" : "=m"(core_regs->rbp));
    asm volatile("movq %%rbx,%0" : "=m"(core_regs->rbx));
    asm volatile("movq %%r11,%0" : "=m"(core_regs->r11));
    asm volatile("movq %%r10,%0" : "=m"(core_regs->r10));
    asm volatile("movq %%r9,%0" : "=m"(core_regs->r9));
    asm volatile("movq %%r8,%0" : "=m"(core_regs->r8));
    asm volatile("movq %%rax,%0" : "=m"(core_regs->rax));
    asm volatile("movq %%rcx,%0" : "=m"(core_regs->rcx));
    asm volatile("movq %%rdx,%0" : "=m"(core_regs->rdx));
    asm volatile("movq %%rsi,%0" : "=m"(core_regs->rsi));
    asm volatile("movq %%rdx,%0" : "=m"(core_regs->rdx));
    asm volatile("movq %%rsi,%0" : "=m"(core_regs->rsi));
    asm volatile("movq %%rdi,%0" : "=m"(core_regs->rdi));
    /* orig_rax not filled in for now */
    core_regs->rip = (unsigned long)current_text_addr();
    /*
     * Segment registers: read via eax (the "=a" constraint), which is
     * safe to clobber because rax was already saved above; movl
     * zero-extends into the 64-bit destination field.
     */
    asm volatile("movl %%cs, %%eax;" :"=a"(core_regs->cs));
    asm volatile("pushfq; popq %0" :"=m"(core_regs->eflags));
    asm volatile("movq %%rsp,%0" : "=m"(core_regs->rsp));
    asm volatile("movl %%ss, %%eax;" :"=a"(core_regs->ss));
    /* thread_fs not filled in for now */
    /* thread_gs not filled in for now */
    asm volatile("movl %%ds, %%eax;" :"=a"(core_regs->ds));
    asm volatile("movl %%es, %%eax;" :"=a"(core_regs->es));
    asm volatile("movl %%fs, %%eax;" :"=a"(core_regs->fs));
    asm volatile("movl %%gs, %%eax;" :"=a"(core_regs->gs));

    /* Control registers: must pass through a GPR, hence tmp. */
    asm volatile("mov %%cr0, %0" : "=r" (tmp) : );
    xen_core_regs->cr0 = tmp;

    asm volatile("mov %%cr2, %0" : "=r" (tmp) : );
    xen_core_regs->cr2 = tmp;

    asm volatile("mov %%cr3, %0" : "=r" (tmp) : );
    xen_core_regs->cr3 = tmp;

    asm volatile("mov %%cr4, %0" : "=r" (tmp) : );
    xen_core_regs->cr4 = tmp;
}
82 #endif /* __X86_64_ELF_H__ */
84 /*
85 * Local variables:
86 * mode: C
87 * c-set-style: "BSD"
88 * c-basic-offset: 4
89 * tab-width: 4
90 * indent-tabs-mode: nil
91 * End:
92 */