xen/arch/x86/x86_64/compat/entry.S @ 13611:e68bf334ecb8 (ia64/xen-unstable)

32-on-64: All argument registers must be zero extended to 64 bits
(%rdx was missed).

Signed-off-by: Keir Fraser <keir@xensource.com>
Author:   kfraser@localhost.localdomain
Date:     Wed Jan 24 18:20:24 2007 +0000
Parents:  30af6cfdb05c
Children: 2a9b6b1f848f
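For context on the one-line change above: the compat (32-on-64) hypercall path takes its six arguments from the guest's 32-bit registers but passes them to 64-bit handlers, so each argument register must be zero-extended; this changeset adds the previously missing "movl %edx,%edx" for Arg 3. The snippet below is a minimal standalone sketch, not part of the Xen sources (the file name and build command, e.g. "gcc zext.S -o zext", are assumptions), showing the architectural rule that move relies on: writing a 32-bit register implicitly clears bits 63:32 of the full 64-bit register.

        # Standalone illustration only (not Xen code).  Exit status 0
        # shows that the 32-bit write cleared the upper half of %rdx.
        .text
        .globl  main
main:
        movq    $-1, %rdx            # %rdx = 0xffffffffffffffff (upper half non-zero)
        movl    %edx, %edx           # 32-bit write zero-extends into all of %rdx
        shrq    $32, %rdx            # isolate bits 63:32
        movl    %edx, %eax           # return value 0 => upper half was cleared
        ret

The same rule is why, in the release-build path below, xchgl %ecx,%esi both swaps and zero-extends Args 2 and 4 in one instruction, and why %edi (Arg 5) has to be copied into %r8d before %edi is reloaded with Arg 1 from the saved frame. The file as of this changeset follows.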
/*
 * Compatibility hypercall routines.
 */

#include <asm/desc.h>

.text

ENTRY(compat_hypercall)
        pushq $0
        movl  $TRAP_syscall,4(%rsp)
        SAVE_ALL
        GET_CURRENT(%rbx)

        cmpl  $NR_hypercalls,%eax
        jae   compat_bad_hypercall
#ifndef NDEBUG
        /* Deliberately corrupt parameter regs not used by this hypercall. */
        pushq UREGS_rbx(%rsp); pushq %rcx; pushq %rdx; pushq %rsi; pushq %rdi
        pushq UREGS_rbp+5*8(%rsp)
        leaq  compat_hypercall_args_table(%rip),%r10
        movq  $6,%rcx
        subb  (%r10,%rax,1),%cl
        movq  %rsp,%rdi
        movl  $0xDEADBEEF,%eax
        rep   stosq
        popq  %r9 ; popq  %r8 ; popq  %rcx; popq  %rdx; popq  %rsi; popq  %rdi
        movl  UREGS_rax(%rsp),%eax
        pushq %rax
        pushq UREGS_rip+8(%rsp)
#else
        /* Relocate argument registers and zero-extend to 64 bits. */
        movl  %eax,%eax              /* Hypercall #  */
        xchgl %ecx,%esi              /* Arg 2, Arg 4 */
        movl  %edx,%edx              /* Arg 3        */
        movl  %edi,%r8d              /* Arg 5        */
        movl  %ebp,%r9d              /* Arg 6        */
        movl  UREGS_rbx(%rsp),%edi   /* Arg 1 (loaded last: %edi above still holds Arg 5) */
#endif
        leaq  compat_hypercall_table(%rip),%r10
        PERFC_INCR(PERFC_hypercalls, %rax)
        callq *(%r10,%rax,8)
#ifndef NDEBUG
        /* Deliberately corrupt parameter regs used by this hypercall. */
        popq  %r10                   # Shadow RIP
        cmpq  %r10,UREGS_rip+8(%rsp)
        popq  %rcx                   # Shadow hypercall index
        jne   compat_skip_clobber    /* If RIP has changed then don't clobber. */
        leaq  compat_hypercall_args_table(%rip),%r10
        movb  (%r10,%rcx,1),%cl
        movl  $0xDEADBEEF,%r10d
        testb %cl,%cl;     jz compat_skip_clobber; movl %r10d,UREGS_rbx(%rsp)
        cmpb  $2, %cl; jb compat_skip_clobber; movl %r10d,UREGS_rcx(%rsp)
        cmpb  $3, %cl; jb compat_skip_clobber; movl %r10d,UREGS_rdx(%rsp)
        cmpb  $4, %cl; jb compat_skip_clobber; movl %r10d,UREGS_rsi(%rsp)
        cmpb  $5, %cl; jb compat_skip_clobber; movl %r10d,UREGS_rdi(%rsp)
        cmpb  $6, %cl; jb compat_skip_clobber; movl %r10d,UREGS_rbp(%rsp)
compat_skip_clobber:
#endif
        movl  %eax,UREGS_rax(%rsp)   # save the return value

/* %rbx: struct vcpu */
compat_test_all_events:
        cli                          # tests must not race interrupts
/*compat_test_softirqs:*/
        movl  VCPU_processor(%rbx),%eax
        shlq  $IRQSTAT_shift,%rax
        leaq  irq_stat(%rip),%rcx
        testl $~0,(%rcx,%rax,1)
        jnz   compat_process_softirqs
        btrq  $_VCPUF_nmi_pending,VCPU_flags(%rbx)
        jc    compat_process_nmi
compat_test_guest_events:
        movq  VCPU_vcpu_info(%rbx),%rax
        testb $0xFF,COMPAT_VCPUINFO_upcall_mask(%rax)
        jnz   compat_restore_all_guest
        testb $0xFF,COMPAT_VCPUINFO_upcall_pending(%rax)
        jz    compat_restore_all_guest
/*compat_process_guest_events:*/
        sti
        leaq  VCPU_trap_bounce(%rbx),%rdx
        movl  VCPU_event_addr(%rbx),%eax
        movl  %eax,TRAPBOUNCE_eip(%rdx)
        movl  VCPU_event_sel(%rbx),%eax
        movl  %eax,TRAPBOUNCE_cs(%rdx)
        movw  $TBF_INTERRUPT,TRAPBOUNCE_flags(%rdx)
        call  compat_create_bounce_frame
        jmp   compat_test_all_events

        ALIGN
/* %rbx: struct vcpu */
compat_process_softirqs:
        sti
        call  do_softirq
        jmp   compat_test_all_events

        ALIGN
/* %rbx: struct vcpu */
compat_process_nmi:
        movl  VCPU_nmi_addr(%rbx),%eax
        testl %eax,%eax
        jz    compat_test_all_events
        btsq  $_VCPUF_nmi_masked,VCPU_flags(%rbx)
        jc    1f
        sti
        leaq  VCPU_trap_bounce(%rbx),%rdx
        movl  %eax,TRAPBOUNCE_eip(%rdx)
        movl  $FLAT_COMPAT_KERNEL_CS,TRAPBOUNCE_cs(%rdx)
        movw  $TBF_INTERRUPT,TRAPBOUNCE_flags(%rdx)
        call  compat_create_bounce_frame
        jmp   compat_test_all_events
1:
        btsq  $_VCPUF_nmi_pending,VCPU_flags(%rbx)
        jmp   compat_test_guest_events

compat_bad_hypercall:
        movl  $-ENOSYS,UREGS_rax(%rsp)
        jmp   compat_test_all_events

/* %rbx: struct vcpu, interrupts disabled */
compat_restore_all_guest:
        RESTORE_ALL
        addq  $8,%rsp
CFLT0:  iretq

.section .fixup,"ax"
CFIX0:  popq  -15*8-8(%rsp)          # error_code/entry_vector
        SAVE_ALL                     # 15*8 bytes pushed
        movq  -8(%rsp),%rsi          # error_code/entry_vector
        sti                          # after stack abuse (-1024(%rsp))
        pushq $__HYPERVISOR_DS       # SS
        leaq  8(%rsp),%rax
        pushq %rax                   # RSP
        pushfq                       # RFLAGS
        pushq $__HYPERVISOR_CS       # CS
        leaq  CDBLFLT0(%rip),%rax
        pushq %rax                   # RIP
        pushq %rsi                   # error_code/entry_vector
        jmp   handle_exception
CDBLFLT0: GET_CURRENT(%rbx)
        jmp   compat_test_all_events
compat_failsafe_callback:
        GET_CURRENT(%rbx)
        leaq  VCPU_trap_bounce(%rbx),%rdx
        movl  VCPU_failsafe_addr(%rbx),%eax
        movl  %eax,TRAPBOUNCE_eip(%rdx)
        movl  VCPU_failsafe_sel(%rbx),%eax
        movl  %eax,TRAPBOUNCE_cs(%rdx)
        movw  $TBF_FAILSAFE,TRAPBOUNCE_flags(%rdx)
        btq   $_VGCF_failsafe_disables_events,VCPU_guest_context_flags(%rbx)
        jnc   1f
        orw   $TBF_INTERRUPT,TRAPBOUNCE_flags(%rdx)
1:
        call  compat_create_bounce_frame
        jmp   compat_test_all_events
.previous
.section __pre_ex_table,"a"
        .quad CFLT0,CFIX0
.previous
.section __ex_table,"a"
        .quad CDBLFLT0,compat_failsafe_callback
.previous

/* %rdx: trap_bounce, %rbx: struct vcpu */
compat_post_handle_exception:
        testb $TBF_EXCEPTION,TRAPBOUNCE_flags(%rdx)
        jz    compat_test_all_events
        call  compat_create_bounce_frame
        jmp   compat_test_all_events

/* CREATE A BASIC EXCEPTION FRAME ON GUEST OS (RING-1) STACK: */
/*   {[ERRCODE,] EIP, CS, EFLAGS, [ESP, SS]}                  */
/* %rdx: trap_bounce, %rbx: struct vcpu                       */
/* On return only %rbx is guaranteed non-clobbered.           */
compat_create_bounce_frame:
        mov   %fs,%edi
        testb $2,UREGS_cs+8(%rsp)
        jz    1f
        /* Push new frame at registered guest-OS stack base. */
        movl  VCPU_kernel_sp(%rbx),%esi
CFLT1:  mov   VCPU_kernel_ss(%rbx),%fs
        subl  $2*4,%esi
        movl  UREGS_rsp+8(%rsp),%eax
CFLT2:  movl  %eax,%fs:(%rsi)
        movl  UREGS_ss+8(%rsp),%eax
CFLT3:  movl  %eax,%fs:4(%rsi)
        jmp   2f
1:      /* In kernel context already: push new frame at existing %rsp. */
        movl  UREGS_rsp+8(%rsp),%esi
CFLT4:  mov   UREGS_ss+8(%rsp),%fs
2:
        movb  TRAPBOUNCE_flags(%rdx),%cl
        subl  $3*4,%esi
        movq  VCPU_vcpu_info(%rbx),%rax
        pushq COMPAT_VCPUINFO_upcall_mask(%rax)
        testb $TBF_INTERRUPT,%cl
        setnz %ch                    # TBF_INTERRUPT -> set upcall mask
        orb   %ch,COMPAT_VCPUINFO_upcall_mask(%rax)
        popq  %rax
        shll  $16,%eax               # Bits 16-23: saved_upcall_mask
        movw  UREGS_cs+8(%rsp),%ax   # Bits 0-15: CS
CFLT5:  movl  %eax,%fs:4(%rsi)       # CS / saved_upcall_mask
        shrl  $16,%eax
        testb %al,%al                # Bits 0-7: saved_upcall_mask
        setz  %ch                    # %ch == !saved_upcall_mask
        movl  UREGS_eflags+8(%rsp),%eax
        andl  $~X86_EFLAGS_IF,%eax
        shlb  $1,%ch                 # Bit 9 (EFLAGS.IF)
        orb   %ch,%ah                # Fold EFLAGS.IF into %eax
CFLT6:  movl  %eax,%fs:2*4(%rsi)     # EFLAGS
        movl  UREGS_rip+8(%rsp),%eax
CFLT7:  movl  %eax,%fs:(%rsi)        # EIP
        testb $TBF_EXCEPTION_ERRCODE,%cl
        jz    1f
        subl  $4,%esi
        movl  TRAPBOUNCE_error_code(%rdx),%eax
CFLT8:  movl  %eax,%fs:(%rsi)        # ERROR CODE
1:
        testb $TBF_FAILSAFE,%cl
        jz    2f
        subl  $4*4,%esi
        movl  %gs,%eax
CFLT9:  movl  %eax,%fs:3*4(%rsi)     # GS
CFLT10: movl  %edi,%fs:2*4(%rsi)     # FS
        movl  %es,%eax
CFLT11: movl  %eax,%fs:1*4(%rsi)     # ES
        movl  %ds,%eax
CFLT12: movl  %eax,%fs:0*4(%rsi)     # DS
2:
        /* Rewrite our stack frame and return to guest-OS mode. */
        /* IA32 Ref. Vol. 3: TF, VM, RF and NT flags are cleared on trap. */
        movl  $TRAP_syscall,UREGS_entry_vector+8(%rsp)
        andl  $~(X86_EFLAGS_VM|X86_EFLAGS_RF|\
                 X86_EFLAGS_NT|X86_EFLAGS_TF),UREGS_eflags+8(%rsp)
        mov   %fs,UREGS_ss+8(%rsp)
        movl  %esi,UREGS_rsp+8(%rsp)
CFLT13: mov   %edi,%fs
        movzwl TRAPBOUNCE_cs(%rdx),%eax
        /* Null selectors (0-3) are not allowed. */
        testl $~3,%eax
        jz    domain_crash_synchronous
        movl  %eax,UREGS_cs+8(%rsp)
        movl  TRAPBOUNCE_eip(%rdx),%eax
        movl  %eax,UREGS_rip+8(%rsp)
        movb  $0,TRAPBOUNCE_flags(%rdx)
        ret
.section .fixup,"ax"
CFIX13:
        xorl  %edi,%edi
        jmp   CFLT13
.previous
.section __ex_table,"a"
        .quad CFLT1,domain_crash_synchronous  , CFLT2,compat_crash_page_fault
        .quad CFLT3,compat_crash_page_fault_4 , CFLT4,domain_crash_synchronous
        .quad CFLT5,compat_crash_page_fault_4 , CFLT6,compat_crash_page_fault_8
        .quad CFLT7,compat_crash_page_fault   , CFLT8,compat_crash_page_fault
        .quad CFLT9,compat_crash_page_fault_12, CFLT10,compat_crash_page_fault_8
        .quad CFLT11,compat_crash_page_fault_4, CFLT12,compat_crash_page_fault
        .quad CFLT13,CFIX13
.previous

compat_crash_page_fault_12:
        addl  $4,%esi
compat_crash_page_fault_8:
        addl  $4,%esi
compat_crash_page_fault_4:
        addl  $4,%esi
compat_crash_page_fault:
CFLT14: mov   %edi,%fs
        movl  %esi,%edi
        call  show_page_walk
        jmp   domain_crash_synchronous
.section .fixup,"ax"
CFIX14:
        xorl  %edi,%edi
        jmp   CFLT14
.previous
.section __ex_table,"a"
        .quad CFLT14,CFIX14
.previous

compat_domctl:
compat_sysctl:

.section .rodata, "a", @progbits

ENTRY(compat_hypercall_table)
        .quad compat_set_trap_table     /* 0 */
        .quad do_mmu_update
        .quad compat_set_gdt
        .quad do_stack_switch
        .quad compat_set_callbacks
        .quad do_fpu_taskswitch         /* 5 */
        .quad do_sched_op_compat
        .quad compat_platform_op
        .quad do_set_debugreg
        .quad do_get_debugreg
        .quad compat_update_descriptor  /* 10 */
        .quad compat_ni_hypercall
        .quad compat_memory_op
        .quad compat_multicall
        .quad compat_update_va_mapping
        .quad compat_set_timer_op       /* 15 */
        .quad do_event_channel_op_compat
        .quad compat_xen_version
        .quad do_console_io
        .quad compat_physdev_op_compat
        .quad compat_grant_table_op     /* 20 */
        .quad compat_vm_assist
        .quad compat_update_va_mapping_otherdomain
        .quad compat_iret
        .quad compat_vcpu_op
        .quad compat_ni_hypercall       /* 25 */
        .quad compat_mmuext_op
        .quad compat_acm_op
        .quad compat_nmi_op
        .quad compat_sched_op
        .quad compat_callback_op        /* 30 */
        .quad compat_xenoprof_op
        .quad do_event_channel_op
        .quad compat_physdev_op
        .quad do_hvm_op
        .quad do_sysctl                 /* 35 */
        .quad do_domctl
        .quad compat_kexec_op
        .rept NR_hypercalls-((.-compat_hypercall_table)/8)
        .quad compat_ni_hypercall
        .endr

ENTRY(compat_hypercall_args_table)
        .byte 1 /* compat_set_trap_table */     /* 0 */
        .byte 4 /* compat_mmu_update */
        .byte 2 /* compat_set_gdt */
        .byte 2 /* compat_stack_switch */
        .byte 4 /* compat_set_callbacks */
        .byte 1 /* compat_fpu_taskswitch */     /* 5 */
        .byte 2 /* compat_sched_op_compat */
        .byte 1 /* compat_platform_op */
        .byte 2 /* compat_set_debugreg */
        .byte 1 /* compat_get_debugreg */
        .byte 4 /* compat_update_descriptor */  /* 10 */
        .byte 0 /* compat_ni_hypercall */
        .byte 2 /* compat_memory_op */
        .byte 2 /* compat_multicall */
        .byte 4 /* compat_update_va_mapping */
        .byte 2 /* compat_set_timer_op */       /* 15 */
        .byte 1 /* compat_event_channel_op_compat */
        .byte 2 /* compat_xen_version */
        .byte 3 /* compat_console_io */
        .byte 1 /* compat_physdev_op_compat */
        .byte 3 /* compat_grant_table_op */     /* 20 */
        .byte 2 /* compat_vm_assist */
        .byte 5 /* compat_update_va_mapping_otherdomain */
        .byte 0 /* compat_iret */
        .byte 3 /* compat_vcpu_op */
        .byte 0 /* compat_ni_hypercall */       /* 25 */
        .byte 4 /* compat_mmuext_op */
        .byte 1 /* compat_acm_op */
        .byte 2 /* compat_nmi_op */
        .byte 2 /* compat_sched_op */
        .byte 2 /* compat_callback_op */        /* 30 */
        .byte 2 /* compat_xenoprof_op */
        .byte 2 /* compat_event_channel_op */
        .byte 2 /* compat_physdev_op */
        .byte 2 /* do_hvm_op */
        .byte 1 /* compat_sysctl */             /* 35 */
        .byte 1 /* compat_domctl */
        .byte 2 /* compat_kexec_op */
        .rept NR_hypercalls-(.-compat_hypercall_args_table)
        .byte 0 /* compat_ni_hypercall */
        .endr