ia64/xen-unstable

view xen/arch/x86/x86_64/compat/entry.S @ 13792:56377f5ce588

[XEN] Fix dom/sysctl structure alignment.

Versions of gcc up to and including 3.3 appear to ignore
__attribute__((aligned(x))) when applied to typedefs or to members of
anonymous unions. Work around this by explicitly aligning each
definition of a 64-bit guest handle.

Tidy up x86_64 compat entry.S a bit.

Signed-off-by: Ian Campbell <ian.campbell@xensource.com>
author Ian Campbell <ian.campbell@xensource.com>
date Wed Jan 31 16:54:21 2007 +0000
parents 2f8a7e5fd8ba
children 3050c8339da6
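To illustrate the workaround described in the change description, here is a minimal C sketch, for illustration only: the handle and structure names below are hypothetical, not the actual Xen compat headers. It contrasts relying on an aligned typedef (which older gcc may silently ignore) with repeating the alignment attribute at each place a 64-bit guest handle is embedded.

#include <stddef.h>
#include <stdint.h>

/* Hypothetical 64-bit guest handle type, for illustration only. */
typedef struct { uint64_t p; } guest_handle_64_t;

/*
 * Fragile form: gcc <= 3.3 reportedly drops an alignment attribute
 * attached to a typedef (or to a member of an anonymous union), so a
 * field declared via such a typedef may end up only 4-byte aligned
 * in a 32-bit layout.
 */
typedef guest_handle_64_t aligned_handle_t __attribute__((aligned(8)));

/*
 * Workaround in the spirit of this change: spell the alignment out at
 * every definition that embeds a 64-bit handle.
 */
struct example_sysctl_op {
    uint32_t cmd;
    guest_handle_64_t buffer __attribute__((aligned(8)));
};

/* Compile-time check that the member really sits on an 8-byte boundary. */
typedef char buffer_is_8_byte_aligned[
    (offsetof(struct example_sysctl_op, buffer) % 8 == 0) ? 1 : -1];

With the attribute repeated at the point of use, the offset of the handle is the same in the 32-bit (compat) and 64-bit views of the structure even when the compiler does not honour an attribute attached to the typedef.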
line source
/*
 * Compatibility hypercall routines.
 */

#include <asm/desc.h>

        .text

ENTRY(compat_hypercall)
        pushq $0
        movl $TRAP_syscall,4(%rsp)
        SAVE_ALL
        GET_CURRENT(%rbx)

        cmpl $NR_hypercalls,%eax
        jae compat_bad_hypercall
#ifndef NDEBUG
        /* Deliberately corrupt parameter regs not used by this hypercall. */
        pushq UREGS_rbx(%rsp); pushq %rcx; pushq %rdx; pushq %rsi; pushq %rdi
        pushq UREGS_rbp+5*8(%rsp)
        leaq compat_hypercall_args_table(%rip),%r10
        movq $6,%rcx
        subb (%r10,%rax,1),%cl
        movq %rsp,%rdi
        movl $0xDEADBEEF,%eax
        rep stosq
        popq %r8 ; popq %r9 ; xchgl %r8d,%r9d /* Args 5&6: zero extend */
        popq %rdx; popq %rcx; xchgl %edx,%ecx /* Args 3&4: zero extend */
        popq %rdi; popq %rsi; xchgl %edi,%esi /* Args 1&2: zero extend */
        movl UREGS_rax(%rsp),%eax
        pushq %rax
        pushq UREGS_rip+8(%rsp)
#else
        /* Relocate argument registers and zero-extend to 64 bits. */
        movl %eax,%eax /* Hypercall # */
        xchgl %ecx,%esi /* Arg 2, Arg 4 */
        movl %edx,%edx /* Arg 3 */
        movl %edi,%r8d /* Arg 5 */
        movl %ebp,%r9d /* Arg 6 */
        movl UREGS_rbx(%rsp),%edi /* Arg 1 */
#endif
        leaq compat_hypercall_table(%rip),%r10
        PERFC_INCR(PERFC_hypercalls, %rax)
        callq *(%r10,%rax,8)
#ifndef NDEBUG
        /* Deliberately corrupt parameter regs used by this hypercall. */
        popq %r10 # Shadow RIP
        cmpq %r10,UREGS_rip+8(%rsp)
        popq %rcx # Shadow hypercall index
        jne compat_skip_clobber /* If RIP has changed then don't clobber. */
        leaq compat_hypercall_args_table(%rip),%r10
        movb (%r10,%rcx,1),%cl
        movl $0xDEADBEEF,%r10d
        testb %cl,%cl; jz compat_skip_clobber; movl %r10d,UREGS_rbx(%rsp)
        cmpb $2, %cl; jb compat_skip_clobber; movl %r10d,UREGS_rcx(%rsp)
        cmpb $3, %cl; jb compat_skip_clobber; movl %r10d,UREGS_rdx(%rsp)
        cmpb $4, %cl; jb compat_skip_clobber; movl %r10d,UREGS_rsi(%rsp)
        cmpb $5, %cl; jb compat_skip_clobber; movl %r10d,UREGS_rdi(%rsp)
        cmpb $6, %cl; jb compat_skip_clobber; movl %r10d,UREGS_rbp(%rsp)
compat_skip_clobber:
#endif
        movl %eax,UREGS_rax(%rsp) # save the return value

/* %rbx: struct vcpu */
compat_test_all_events:
        cli # tests must not race interrupts
/*compat_test_softirqs:*/
        movl VCPU_processor(%rbx),%eax
        shlq $IRQSTAT_shift,%rax
        leaq irq_stat(%rip),%rcx
        testl $~0,(%rcx,%rax,1)
        jnz compat_process_softirqs
        btrq $_VCPUF_nmi_pending,VCPU_flags(%rbx)
        jc compat_process_nmi
compat_test_guest_events:
        movq VCPU_vcpu_info(%rbx),%rax
        testb $0xFF,COMPAT_VCPUINFO_upcall_mask(%rax)
        jnz compat_restore_all_guest
        testb $0xFF,COMPAT_VCPUINFO_upcall_pending(%rax)
        jz compat_restore_all_guest
/*compat_process_guest_events:*/
        sti
        leaq VCPU_trap_bounce(%rbx),%rdx
        movl VCPU_event_addr(%rbx),%eax
        movl %eax,TRAPBOUNCE_eip(%rdx)
        movl VCPU_event_sel(%rbx),%eax
        movl %eax,TRAPBOUNCE_cs(%rdx)
        movw $TBF_INTERRUPT,TRAPBOUNCE_flags(%rdx)
        call compat_create_bounce_frame
        jmp compat_test_all_events

        ALIGN
/* %rbx: struct vcpu */
compat_process_softirqs:
        sti
        call do_softirq
        jmp compat_test_all_events

        ALIGN
/* %rbx: struct vcpu */
compat_process_nmi:
        movl VCPU_nmi_addr(%rbx),%eax
        testl %eax,%eax
        jz compat_test_all_events
        btsq $_VCPUF_nmi_masked,VCPU_flags(%rbx)
        jc 1f
        sti
        leaq VCPU_trap_bounce(%rbx),%rdx
        movl %eax,TRAPBOUNCE_eip(%rdx)
        movl $FLAT_COMPAT_KERNEL_CS,TRAPBOUNCE_cs(%rdx)
        movw $TBF_INTERRUPT,TRAPBOUNCE_flags(%rdx)
        call compat_create_bounce_frame
        jmp compat_test_all_events
1:
        btsq $_VCPUF_nmi_pending,VCPU_flags(%rbx)
        jmp compat_test_guest_events

compat_bad_hypercall:
        movl $-ENOSYS,UREGS_rax(%rsp)
        jmp compat_test_all_events

/* %rbx: struct vcpu, interrupts disabled */
compat_restore_all_guest:
        RESTORE_ALL
        addq $8,%rsp
CFLT0: iretq

        .section .fixup,"ax"
CFIX0: popq -15*8-8(%rsp) # error_code/entry_vector
        SAVE_ALL # 15*8 bytes pushed
        movq -8(%rsp),%rsi # error_code/entry_vector
        sti # after stack abuse (-1024(%rsp))
        pushq $__HYPERVISOR_DS # SS
        leaq 8(%rsp),%rax
        pushq %rax # RSP
        pushfq # RFLAGS
        pushq $__HYPERVISOR_CS # CS
        leaq CDBLFLT0(%rip),%rax
        pushq %rax # RIP
        pushq %rsi # error_code/entry_vector
        jmp handle_exception
CDBLFLT0:GET_CURRENT(%rbx)
        jmp compat_test_all_events
compat_failsafe_callback:
        GET_CURRENT(%rbx)
        leaq VCPU_trap_bounce(%rbx),%rdx
        movl VCPU_failsafe_addr(%rbx),%eax
        movl %eax,TRAPBOUNCE_eip(%rdx)
        movl VCPU_failsafe_sel(%rbx),%eax
        movl %eax,TRAPBOUNCE_cs(%rdx)
        movw $TBF_FAILSAFE,TRAPBOUNCE_flags(%rdx)
        btq $_VGCF_failsafe_disables_events,VCPU_guest_context_flags(%rbx)
        jnc 1f
        orw $TBF_INTERRUPT,TRAPBOUNCE_flags(%rdx)
1:
        call compat_create_bounce_frame
        jmp compat_test_all_events
        .previous
        .section __pre_ex_table,"a"
        .quad CFLT0,CFIX0
        .previous
        .section __ex_table,"a"
        .quad CDBLFLT0,compat_failsafe_callback
        .previous

/* %rdx: trap_bounce, %rbx: struct vcpu */
compat_post_handle_exception:
        testb $TBF_EXCEPTION,TRAPBOUNCE_flags(%rdx)
        jz compat_test_all_events
        call compat_create_bounce_frame
        jmp compat_test_all_events

/* CREATE A BASIC EXCEPTION FRAME ON GUEST OS (RING-1) STACK: */
/*   {[ERRCODE,] EIP, CS, EFLAGS, [ESP, SS]} */
/* %rdx: trap_bounce, %rbx: struct vcpu */
/* On return only %rbx is guaranteed non-clobbered. */
compat_create_bounce_frame:
        mov %fs,%edi
        testb $2,UREGS_cs+8(%rsp)
        jz 1f
        /* Push new frame at registered guest-OS stack base. */
        movl VCPU_kernel_sp(%rbx),%esi
CFLT1: mov VCPU_kernel_ss(%rbx),%fs
        subl $2*4,%esi
        movl UREGS_rsp+8(%rsp),%eax
CFLT2: movl %eax,%fs:(%rsi)
        movl UREGS_ss+8(%rsp),%eax
CFLT3: movl %eax,%fs:4(%rsi)
        jmp 2f
1:      /* In kernel context already: push new frame at existing %rsp. */
        movl UREGS_rsp+8(%rsp),%esi
CFLT4: mov UREGS_ss+8(%rsp),%fs
2:
        movb TRAPBOUNCE_flags(%rdx),%cl
        subl $3*4,%esi
        movq VCPU_vcpu_info(%rbx),%rax
        pushq COMPAT_VCPUINFO_upcall_mask(%rax)
        testb $TBF_INTERRUPT,%cl
        setnz %ch # TBF_INTERRUPT -> set upcall mask
        orb %ch,COMPAT_VCPUINFO_upcall_mask(%rax)
        popq %rax
        shll $16,%eax # Bits 16-23: saved_upcall_mask
        movw UREGS_cs+8(%rsp),%ax # Bits 0-15: CS
CFLT5: movl %eax,%fs:4(%rsi) # CS / saved_upcall_mask
        shrl $16,%eax
        testb %al,%al # Bits 0-7: saved_upcall_mask
        setz %ch # %ch == !saved_upcall_mask
        movl UREGS_eflags+8(%rsp),%eax
        andl $~X86_EFLAGS_IF,%eax
        shlb $1,%ch # Bit 9 (EFLAGS.IF)
        orb %ch,%ah # Fold EFLAGS.IF into %eax
CFLT6: movl %eax,%fs:2*4(%rsi) # EFLAGS
        movl UREGS_rip+8(%rsp),%eax
CFLT7: movl %eax,%fs:(%rsi) # EIP
        testb $TBF_EXCEPTION_ERRCODE,%cl
        jz 1f
        subl $4,%esi
        movl TRAPBOUNCE_error_code(%rdx),%eax
CFLT8: movl %eax,%fs:(%rsi) # ERROR CODE
1:
        testb $TBF_FAILSAFE,%cl
        jz 2f
        subl $4*4,%esi
        movl %gs,%eax
CFLT9: movl %eax,%fs:3*4(%rsi) # GS
CFLT10: movl %edi,%fs:2*4(%rsi) # FS
        movl %es,%eax
CFLT11: movl %eax,%fs:1*4(%rsi) # ES
        movl %ds,%eax
CFLT12: movl %eax,%fs:0*4(%rsi) # DS
2:
        /* Rewrite our stack frame and return to guest-OS mode. */
        /* IA32 Ref. Vol. 3: TF, VM, RF and NT flags are cleared on trap. */
        movl $TRAP_syscall,UREGS_entry_vector+8(%rsp)
        andl $~(X86_EFLAGS_VM|X86_EFLAGS_RF|\
                X86_EFLAGS_NT|X86_EFLAGS_TF),UREGS_eflags+8(%rsp)
        mov %fs,UREGS_ss+8(%rsp)
        movl %esi,UREGS_rsp+8(%rsp)
CFLT13: mov %edi,%fs
        movzwl TRAPBOUNCE_cs(%rdx),%eax
        /* Null selectors (0-3) are not allowed. */
        testl $~3,%eax
        jz domain_crash_synchronous
        movl %eax,UREGS_cs+8(%rsp)
        movl TRAPBOUNCE_eip(%rdx),%eax
        movl %eax,UREGS_rip+8(%rsp)
        movb $0,TRAPBOUNCE_flags(%rdx)
        ret
        .section .fixup,"ax"
CFIX13:
        xorl %edi,%edi
        jmp CFLT13
        .previous
        .section __ex_table,"a"
        .quad CFLT1,domain_crash_synchronous  , CFLT2,compat_crash_page_fault
        .quad CFLT3,compat_crash_page_fault_4 , CFLT4,domain_crash_synchronous
        .quad CFLT5,compat_crash_page_fault_4 , CFLT6,compat_crash_page_fault_8
        .quad CFLT7,compat_crash_page_fault   , CFLT8,compat_crash_page_fault
        .quad CFLT9,compat_crash_page_fault_12, CFLT10,compat_crash_page_fault_8
        .quad CFLT11,compat_crash_page_fault_4, CFLT12,compat_crash_page_fault
        .quad CFLT13,CFIX13
        .previous

compat_crash_page_fault_12:
        addl $4,%esi
compat_crash_page_fault_8:
        addl $4,%esi
compat_crash_page_fault_4:
        addl $4,%esi
compat_crash_page_fault:
CFLT14: mov %edi,%fs
        movl %esi,%edi
        call show_page_walk
        jmp domain_crash_synchronous
        .section .fixup,"ax"
CFIX14:
        xorl %edi,%edi
        jmp CFLT14
        .previous
        .section __ex_table,"a"
        .quad CFLT14,CFIX14
        .previous

        .section .rodata, "a", @progbits

ENTRY(compat_hypercall_table)
        .quad compat_set_trap_table /* 0 */
        .quad do_mmu_update
        .quad compat_set_gdt
        .quad do_stack_switch
        .quad compat_set_callbacks
        .quad do_fpu_taskswitch /* 5 */
        .quad do_sched_op_compat
        .quad compat_platform_op
        .quad do_set_debugreg
        .quad do_get_debugreg
        .quad compat_update_descriptor /* 10 */
        .quad compat_ni_hypercall
        .quad compat_memory_op
        .quad compat_multicall
        .quad compat_update_va_mapping
        .quad compat_set_timer_op /* 15 */
        .quad do_event_channel_op_compat
        .quad compat_xen_version
        .quad do_console_io
        .quad compat_physdev_op_compat
        .quad compat_grant_table_op /* 20 */
        .quad compat_vm_assist
        .quad compat_update_va_mapping_otherdomain
        .quad compat_iret
        .quad compat_vcpu_op
        .quad compat_ni_hypercall /* 25 */
        .quad compat_mmuext_op
        .quad compat_acm_op
        .quad compat_nmi_op
        .quad compat_sched_op
        .quad compat_callback_op /* 30 */
        .quad compat_xenoprof_op
        .quad do_event_channel_op
        .quad compat_physdev_op
        .quad do_hvm_op
        .quad do_sysctl /* 35 */
        .quad do_domctl
        .quad compat_kexec_op
        .rept NR_hypercalls-((.-compat_hypercall_table)/8)
        .quad compat_ni_hypercall
        .endr

ENTRY(compat_hypercall_args_table)
        .byte 1 /* compat_set_trap_table */ /* 0 */
        .byte 4 /* compat_mmu_update */
        .byte 2 /* compat_set_gdt */
        .byte 2 /* compat_stack_switch */
        .byte 4 /* compat_set_callbacks */
        .byte 1 /* compat_fpu_taskswitch */ /* 5 */
        .byte 2 /* compat_sched_op_compat */
        .byte 1 /* compat_platform_op */
        .byte 2 /* compat_set_debugreg */
        .byte 1 /* compat_get_debugreg */
        .byte 4 /* compat_update_descriptor */ /* 10 */
        .byte 0 /* compat_ni_hypercall */
        .byte 2 /* compat_memory_op */
        .byte 2 /* compat_multicall */
        .byte 4 /* compat_update_va_mapping */
        .byte 2 /* compat_set_timer_op */ /* 15 */
        .byte 1 /* compat_event_channel_op_compat */
        .byte 2 /* compat_xen_version */
        .byte 3 /* compat_console_io */
        .byte 1 /* compat_physdev_op_compat */
        .byte 3 /* compat_grant_table_op */ /* 20 */
        .byte 2 /* compat_vm_assist */
        .byte 5 /* compat_update_va_mapping_otherdomain */
        .byte 0 /* compat_iret */
        .byte 3 /* compat_vcpu_op */
        .byte 0 /* compat_ni_hypercall */ /* 25 */
        .byte 4 /* compat_mmuext_op */
        .byte 1 /* compat_acm_op */
        .byte 2 /* compat_nmi_op */
        .byte 2 /* compat_sched_op */
        .byte 2 /* compat_callback_op */ /* 30 */
        .byte 2 /* compat_xenoprof_op */
        .byte 2 /* compat_event_channel_op */
        .byte 2 /* compat_physdev_op */
        .byte 2 /* do_hvm_op */
        .byte 1 /* do_sysctl */ /* 35 */
        .byte 1 /* do_domctl */
        .byte 2 /* compat_kexec_op */
        .rept NR_hypercalls-(.-compat_hypercall_args_table)
        .byte 0 /* compat_ni_hypercall */
        .endr