ia64/xen-unstable

view xen/arch/x86/x86_64/compat/entry.S @ 13608:30af6cfdb05c

Make domctl/sysctl interfaces 32-/64-bit invariant.
This kills off a fair amount of unpleasant CONFIG_COMPAT shimming and
avoids needing to keep the compat paths in sync as these interfaces
continue to develop.
Signed-off-by: Keir Fraser <keir@xensource.com>
author kfraser@localhost.localdomain
date Wed Jan 24 16:33:19 2007 +0000 (2007-01-24)
parents 23dcc167b97e
children e68bf334ecb8
line source
1 /*
2 * Compatibility hypercall routines.
3 */
5 #include <asm/desc.h>
7 .text
/*
 * Hypercall entry point for 32-bit ("compat") guests.
 * The guest passes its hypercall index in %eax and arguments in the
 * 32-bit convention (ebx,ecx,edx,esi,edi,ebp); the code below
 * remarshals them into the 64-bit C calling convention
 * (rdi,rsi,rdx,rcx,r8,r9) before indirecting through
 * compat_hypercall_table.  %rbx = current vcpu throughout.
 */
9 ENTRY(compat_hypercall)
10 pushq $0                       # low half of slot: error code 0
11 movl $TRAP_syscall,4(%rsp)    # high half of slot: entry vector
12 SAVE_ALL
13 GET_CURRENT(%rbx)             # %rbx = current struct vcpu
15 cmpl $NR_hypercalls,%eax      # bounds-check hypercall index
16 jae compat_bad_hypercall
17 #ifndef NDEBUG
18 /* Deliberately corrupt parameter regs not used by this hypercall. */
   /* Push the 6 remarshalled args so the pops below land them in the
    * 64-bit arg registers: ebx->rdi, ecx->rsi, edx->rdx, esi->rcx,
    * edi->r8, ebp->r9. */
19 pushq UREGS_rbx(%rsp); pushq %rcx; pushq %rdx; pushq %rsi; pushq %rdi; pushq UREGS_rbp+5*8(%rsp)
20 leaq compat_hypercall_args_table(%rip),%r10
21 movq $6,%rcx
22 subb (%r10,%rax,1),%cl        # %cl = 6 - #args this hypercall uses
23 movq %rsp,%rdi
24 movl $0xDEADBEEF,%eax         # poison value (zero-extended into %rax)
25 rep stosq                     # overwrite the unused arg slots
26 popq %r9 ; popq %r8 ; popq %rcx; popq %rdx; popq %rsi; popq %rdi
27 movl UREGS_rax(%rsp),%eax     # reload hypercall index
28 pushq %rax                    # shadow hypercall index (checked below)
29 pushq UREGS_rip+8(%rsp)       # shadow RIP (checked below)
30 #else
31 movl %eax,%eax                # zero-extend: clear top half of index
32 movl %ebp,%r9d                # arg6: ebp -> r9
33 movl %edi,%r8d                # arg5: edi -> r8
34 xchgl %ecx,%esi               # arg2/arg4: ecx<->esi -> rsi/rcx
35 movl UREGS_rbx(%rsp),%edi     # arg1: saved ebx -> rdi
36 #endif
37 leaq compat_hypercall_table(%rip),%r10
38 PERFC_INCR(PERFC_hypercalls, %rax)
39 callq *(%r10,%rax,8)          # dispatch to the 64-bit handler
40 #ifndef NDEBUG
41 /* Deliberately corrupt parameter regs used by this hypercall. */
42 popq %r10 # Shadow RIP
43 cmpq %r10,UREGS_rip+8(%rsp)
44 popq %rcx # Shadow hypercall index
45 jne compat_skip_clobber /* If RIP has changed then don't clobber. */
46 leaq compat_hypercall_args_table(%rip),%r10
47 movb (%r10,%rcx,1),%cl        # %cl = #args this hypercall uses
48 movl $0xDEADBEEF,%r10d
   /* Poison exactly the saved guest regs the handler consumed. */
49 testb %cl,%cl; jz compat_skip_clobber; movl %r10d,UREGS_rbx(%rsp)
50 cmpb $2, %cl; jb compat_skip_clobber; movl %r10d,UREGS_rcx(%rsp)
51 cmpb $3, %cl; jb compat_skip_clobber; movl %r10d,UREGS_rdx(%rsp)
52 cmpb $4, %cl; jb compat_skip_clobber; movl %r10d,UREGS_rsi(%rsp)
53 cmpb $5, %cl; jb compat_skip_clobber; movl %r10d,UREGS_rdi(%rsp)
54 cmpb $6, %cl; jb compat_skip_clobber; movl %r10d,UREGS_rbp(%rsp)
55 compat_skip_clobber:
56 #endif
57 movl %eax,UREGS_rax(%rsp) # save the return value
59 /* %rbx: struct vcpu */
   /*
    * Main exit-to-guest loop: with interrupts disabled, check (in order)
    * pending softirqs, a pending guest NMI, and pending event-channel
    * upcalls; deliver whichever is due, or fall through to
    * compat_restore_all_guest when nothing is pending.
    */
60 compat_test_all_events:
61 cli # tests must not race interrupts
62 /*compat_test_softirqs:*/
63 movl VCPU_processor(%rbx),%eax
64 shlq $IRQSTAT_shift,%rax       # scale CPU id to its irq_stat entry
65 leaq irq_stat(%rip),%rcx
66 testl $~0,(%rcx,%rax,1)        # any softirq bit pending on this CPU?
67 jnz compat_process_softirqs
68 btrq $_VCPUF_nmi_pending,VCPU_flags(%rbx)  # test-and-clear NMI pending
69 jc compat_process_nmi
70 compat_test_guest_events:
71 movq VCPU_vcpu_info(%rbx),%rax
72 testb $0xFF,COMPAT_VCPUINFO_upcall_mask(%rax)
73 jnz compat_restore_all_guest   # upcalls masked: just return to guest
74 testb $0xFF,COMPAT_VCPUINFO_upcall_pending(%rax)
75 jz compat_restore_all_guest    # nothing pending: return to guest
76 /*compat_process_guest_events:*/
77 sti
   /* Arrange a bounce to the guest's registered event callback. */
78 leaq VCPU_trap_bounce(%rbx),%rdx
79 movl VCPU_event_addr(%rbx),%eax
80 movl %eax,TRAPBOUNCE_eip(%rdx)
81 movl VCPU_event_sel(%rbx),%eax
82 movl %eax,TRAPBOUNCE_cs(%rdx)
83 movw $TBF_INTERRUPT,TRAPBOUNCE_flags(%rdx)
84 call compat_create_bounce_frame
85 jmp compat_test_all_events     # re-check before actually leaving
87 ALIGN
88 /* %rbx: struct vcpu */
   /* Run pending softirqs with interrupts enabled, then re-test. */
89 compat_process_softirqs:
90 sti
91 call do_softirq
92 jmp compat_test_all_events
94 ALIGN
95 /* %rbx: struct vcpu */
   /*
    * Deliver a pending virtual NMI to the guest.  Ignored if the guest
    * has not registered an NMI handler; deferred (pending flag re-set)
    * if NMIs are currently masked for this vcpu.
    */
96 compat_process_nmi:
97 movl VCPU_nmi_addr(%rbx),%eax
98 testl %eax,%eax
99 jz compat_test_all_events      # no handler registered: drop it
100 btsq $_VCPUF_nmi_masked,VCPU_flags(%rbx)  # test-and-set masked flag
101 jc 1f                         # already masked: defer delivery
102 sti
103 leaq VCPU_trap_bounce(%rbx),%rdx
104 movl %eax,TRAPBOUNCE_eip(%rdx)
105 movl $FLAT_COMPAT_KERNEL_CS,TRAPBOUNCE_cs(%rdx)
106 movw $TBF_INTERRUPT,TRAPBOUNCE_flags(%rdx)
107 call compat_create_bounce_frame
108 jmp compat_test_all_events
109 1:
110 btsq $_VCPUF_nmi_pending,VCPU_flags(%rbx)  # re-arm for later
111 jmp compat_test_guest_events
   /* Out-of-range hypercall index: fail the call with -ENOSYS. */
113 compat_bad_hypercall:
114 movl $-ENOSYS,UREGS_rax(%rsp)
115 jmp compat_test_all_events
117 /* %rbx: struct vcpu, interrupts disabled */
    /*
     * Restore guest register state and return to the guest via iretq.
     * If the iretq itself faults (e.g. bad guest selectors), the
     * __pre_ex_table entry below redirects to CFIX0, which rebuilds a
     * hypervisor exception frame and runs handle_exception; the
     * __ex_table entry for CDBLFLT0 then routes that failure into the
     * guest's registered failsafe callback.
     */
118 compat_restore_all_guest:
119 RESTORE_ALL
120 addq $8,%rsp                  # discard error_code/entry_vector slot
121 CFLT0: iretq                  # may fault: fixed up at CFIX0
123 .section .fixup,"ax"
124 CFIX0: popq -15*8-8(%rsp) # error_code/entry_vector
125 SAVE_ALL # 15*8 bytes pushed
126 movq -8(%rsp),%rsi # error_code/entry_vector
127 sti # after stack abuse (-1024(%rsp))
    /* Synthesize a hypervisor-context exception frame whose RIP is
     * CDBLFLT0, then enter the normal exception path. */
128 pushq $__HYPERVISOR_DS # SS
129 leaq 8(%rsp),%rax
130 pushq %rax # RSP
131 pushfq # RFLAGS
132 pushq $__HYPERVISOR_CS # CS
133 leaq CDBLFLT0(%rip),%rax
134 pushq %rax # RIP
135 pushq %rsi # error_code/entry_vector
136 jmp handle_exception
137 CDBLFLT0:GET_CURRENT(%rbx)
138 jmp compat_test_all_events
    /* Bounce the failed return into the guest's failsafe handler. */
139 compat_failsafe_callback:
140 GET_CURRENT(%rbx)
141 leaq VCPU_trap_bounce(%rbx),%rdx
142 movl VCPU_failsafe_addr(%rbx),%eax
143 movl %eax,TRAPBOUNCE_eip(%rdx)
144 movl VCPU_failsafe_sel(%rbx),%eax
145 movl %eax,TRAPBOUNCE_cs(%rdx)
146 movw $TBF_FAILSAFE,TRAPBOUNCE_flags(%rdx)
147 btq $_VGCF_failsafe_disables_events,VCPU_guest_context_flags(%rbx)
148 jnc 1f
149 orw $TBF_INTERRUPT,TRAPBOUNCE_flags(%rdx)  # also mask guest events
150 1:
151 call compat_create_bounce_frame
152 jmp compat_test_all_events
153 .previous
154 .section __pre_ex_table,"a"
155 .quad CFLT0,CFIX0             # fault at iretq -> CFIX0
156 .previous
157 .section __ex_table,"a"
158 .quad CDBLFLT0,compat_failsafe_callback
159 .previous
161 /* %rdx: trap_bounce, %rbx: struct vcpu */
    /* After exception handling: if the trap_bounce still carries a
     * pending exception, deliver it to the guest; then resume the
     * normal exit-to-guest loop. */
162 compat_post_handle_exception:
163 testb $TBF_EXCEPTION,TRAPBOUNCE_flags(%rdx)
164 jz compat_test_all_events
165 call compat_create_bounce_frame
166 jmp compat_test_all_events
168 /* CREATE A BASIC EXCEPTION FRAME ON GUEST OS (RING-1) STACK: */
169 /* {[ERRCODE,] EIP, CS, EFLAGS, [ESP, SS]} */
170 /* %rdx: trap_bounce, %rbx: struct vcpu */
171 /* On return only %rbx is guaranteed non-clobbered. */
    /*
     * Writes the frame through %fs (temporarily loaded with the guest
     * stack segment); %esi tracks the guest stack pointer; %edi holds
     * the original %fs for restoration at CFLT13.  Every guest-memory
     * access is a CFLTn label with an __ex_table entry mapping a fault
     * to the matching compat_crash_page_fault_* entry (or straight to
     * domain_crash_synchronous for segment-register loads).  Finally
     * the saved hypervisor frame is rewritten so the eventual iretq
     * enters the guest handler from TRAPBOUNCE_cs:TRAPBOUNCE_eip.
     */
172 compat_create_bounce_frame:
173 mov %fs,%edi                  # save current %fs for later restore
174 testb $2,UREGS_cs+8(%rsp)     # was the guest in kernel context?
175 jz 1f
176 /* Push new frame at registered guest-OS stack base. */
177 movl VCPU_kernel_sp(%rbx),%esi
178 CFLT1: mov VCPU_kernel_ss(%rbx),%fs
179 subl $2*4,%esi                # room for outer ESP, SS
180 movl UREGS_rsp+8(%rsp),%eax
181 CFLT2: movl %eax,%fs:(%rsi)   # outer ESP
182 movl UREGS_ss+8(%rsp),%eax
183 CFLT3: movl %eax,%fs:4(%rsi)  # outer SS
184 jmp 2f
185 1: /* In kernel context already: push new frame at existing %rsp. */
186 movl UREGS_rsp+8(%rsp),%esi
187 CFLT4: mov UREGS_ss+8(%rsp),%fs
188 2:
189 movb TRAPBOUNCE_flags(%rdx),%cl
190 subl $3*4,%esi                # room for EIP, CS, EFLAGS
191 movq VCPU_vcpu_info(%rbx),%rax
192 pushq COMPAT_VCPUINFO_upcall_mask(%rax)
193 testb $TBF_INTERRUPT,%cl
194 setnz %ch # TBF_INTERRUPT -> set upcall mask
195 orb %ch,COMPAT_VCPUINFO_upcall_mask(%rax)
196 popq %rax
197 shll $16,%eax # Bits 16-23: saved_upcall_mask
198 movw UREGS_cs+8(%rsp),%ax # Bits 0-15: CS
199 CFLT5: movl %eax,%fs:4(%rsi) # CS / saved_upcall_mask
200 shrl $16,%eax
201 testb %al,%al # Bits 0-7: saved_upcall_mask
202 setz %ch # %ch == !saved_upcall_mask
203 movl UREGS_eflags+8(%rsp),%eax
204 andl $~X86_EFLAGS_IF,%eax
205 shlb $1,%ch # Bit 9 (EFLAGS.IF)
206 orb %ch,%ah # Fold EFLAGS.IF into %eax
207 CFLT6: movl %eax,%fs:2*4(%rsi) # EFLAGS
208 movl UREGS_rip+8(%rsp),%eax
209 CFLT7: movl %eax,%fs:(%rsi) # EIP
210 testb $TBF_EXCEPTION_ERRCODE,%cl
211 jz 1f
212 subl $4,%esi
213 movl TRAPBOUNCE_error_code(%rdx),%eax
214 CFLT8: movl %eax,%fs:(%rsi) # ERROR CODE
215 1:
216 testb $TBF_FAILSAFE,%cl
217 jz 2f
    /* Failsafe frames additionally carry the data segment registers. */
218 subl $4*4,%esi
219 movl %gs,%eax
220 CFLT9: movl %eax,%fs:3*4(%rsi) # GS
221 CFLT10: movl %edi,%fs:2*4(%rsi) # FS
222 movl %es,%eax
223 CFLT11: movl %eax,%fs:1*4(%rsi) # ES
224 movl %ds,%eax
225 CFLT12: movl %eax,%fs:0*4(%rsi) # DS
226 2:
227 /* Rewrite our stack frame and return to guest-OS mode. */
228 /* IA32 Ref. Vol. 3: TF, VM, RF and NT flags are cleared on trap. */
229 movl $TRAP_syscall,UREGS_entry_vector+8(%rsp)
230 andl $~(X86_EFLAGS_VM|X86_EFLAGS_RF|\
231 X86_EFLAGS_NT|X86_EFLAGS_TF),UREGS_eflags+8(%rsp)
232 mov %fs,UREGS_ss+8(%rsp)      # guest resumes on the new stack
233 movl %esi,UREGS_rsp+8(%rsp)
234 CFLT13: mov %edi,%fs          # restore %fs (fixed up via CFIX13)
235 movzwl TRAPBOUNCE_cs(%rdx),%eax
236 /* Null selectors (0-3) are not allowed. */
237 testl $~3,%eax
238 jz domain_crash_synchronous
239 movl %eax,UREGS_cs+8(%rsp)
240 movl TRAPBOUNCE_eip(%rdx),%eax
241 movl %eax,UREGS_rip+8(%rsp)
242 movb $0,TRAPBOUNCE_flags(%rdx) # bounce consumed: clear it
243 ret
244 .section .fixup,"ax"
    /* Fault reloading %fs: fall back to a null selector and retry. */
245 CFIX13:
246 xorl %edi,%edi
247 jmp CFLT13
248 .previous
249 .section __ex_table,"a"
250 .quad CFLT1,domain_crash_synchronous , CFLT2,compat_crash_page_fault
251 .quad CFLT3,compat_crash_page_fault_4 , CFLT4,domain_crash_synchronous
252 .quad CFLT5,compat_crash_page_fault_4 , CFLT6,compat_crash_page_fault_8
253 .quad CFLT7,compat_crash_page_fault , CFLT8,compat_crash_page_fault
254 .quad CFLT9,compat_crash_page_fault_12, CFLT10,compat_crash_page_fault_8
255 .quad CFLT11,compat_crash_page_fault_4 , CFLT12,compat_crash_page_fault
256 .quad CFLT13,CFIX13
257 .previous
    /*
     * Crash paths for faults while writing the guest bounce frame.
     * The _12/_8/_4 variants add back the offset of the faulting write
     * so %esi holds the actual faulting guest address before falling
     * through to the common handler, which restores %fs (saved in
     * %edi), dumps the page walk, and crashes the domain.
     */
259 compat_crash_page_fault_12:
260 addl $4,%esi
261 compat_crash_page_fault_8:
262 addl $4,%esi
263 compat_crash_page_fault_4:
264 addl $4,%esi
265 compat_crash_page_fault:
266 CFLT14: mov %edi,%fs          # restore %fs (fixed up via CFIX14)
267 movl %esi,%edi                # arg1 = faulting guest address
268 call show_page_walk
269 jmp domain_crash_synchronous
270 .section .fixup,"ax"
    /* Fault reloading %fs: fall back to a null selector and retry. */
271 CFIX14:
272 xorl %edi,%edi
273 jmp CFLT14
274 .previous
275 .section __ex_table,"a"
276 .quad CFLT14,CFIX14
277 .previous
    /* NOTE(review): bare labels with no body — presumably retained so
     * the symbols still resolve after domctl/sysctl were made 32/64-bit
     * invariant (see changeset description); verify against callers. */
279 compat_domctl:
280 compat_sysctl:
    /*
     * Table of handler entry points indexed by hypercall number.
     * Dispatched from compat_hypercall via callq *(%r10,%rax,8);
     * padded out to NR_hypercalls with compat_ni_hypercall.
     */
282 .section .rodata, "a", @progbits
284 ENTRY(compat_hypercall_table)
285 .quad compat_set_trap_table /* 0 */
286 .quad do_mmu_update
287 .quad compat_set_gdt
288 .quad do_stack_switch
289 .quad compat_set_callbacks
290 .quad do_fpu_taskswitch /* 5 */
291 .quad do_sched_op_compat
292 .quad compat_platform_op
293 .quad do_set_debugreg
294 .quad do_get_debugreg
295 .quad compat_update_descriptor /* 10 */
296 .quad compat_ni_hypercall
297 .quad compat_memory_op
298 .quad compat_multicall
299 .quad compat_update_va_mapping
300 .quad compat_set_timer_op /* 15 */
301 .quad do_event_channel_op_compat
302 .quad compat_xen_version
303 .quad do_console_io
304 .quad compat_physdev_op_compat
305 .quad compat_grant_table_op /* 20 */
306 .quad compat_vm_assist
307 .quad compat_update_va_mapping_otherdomain
308 .quad compat_iret
309 .quad compat_vcpu_op
310 .quad compat_ni_hypercall /* 25 */
311 .quad compat_mmuext_op
312 .quad compat_acm_op
313 .quad compat_nmi_op
314 .quad compat_sched_op
315 .quad compat_callback_op /* 30 */
316 .quad compat_xenoprof_op
317 .quad do_event_channel_op
318 .quad compat_physdev_op
319 .quad do_hvm_op
320 .quad do_sysctl /* 35 */
321 .quad do_domctl
322 .quad compat_kexec_op
323 .rept NR_hypercalls-((.-compat_hypercall_table)/8)
324 .quad compat_ni_hypercall
325 .endr
    /*
     * Per-hypercall argument counts (0-6), indexed by hypercall number.
     * Used only by the !NDEBUG paths in compat_hypercall to poison the
     * argument registers/slots not used by each hypercall; padded out
     * to NR_hypercalls with zero entries.
     */
327 ENTRY(compat_hypercall_args_table)
328 .byte 1 /* compat_set_trap_table */ /* 0 */
329 .byte 4 /* compat_mmu_update */
330 .byte 2 /* compat_set_gdt */
331 .byte 2 /* compat_stack_switch */
332 .byte 4 /* compat_set_callbacks */
333 .byte 1 /* compat_fpu_taskswitch */ /* 5 */
334 .byte 2 /* compat_sched_op_compat */
335 .byte 1 /* compat_platform_op */
336 .byte 2 /* compat_set_debugreg */
337 .byte 1 /* compat_get_debugreg */
338 .byte 4 /* compat_update_descriptor */ /* 10 */
339 .byte 0 /* compat_ni_hypercall */
340 .byte 2 /* compat_memory_op */
341 .byte 2 /* compat_multicall */
342 .byte 4 /* compat_update_va_mapping */
343 .byte 2 /* compat_set_timer_op */ /* 15 */
344 .byte 1 /* compat_event_channel_op_compat */
345 .byte 2 /* compat_xen_version */
346 .byte 3 /* compat_console_io */
347 .byte 1 /* compat_physdev_op_compat */
348 .byte 3 /* compat_grant_table_op */ /* 20 */
349 .byte 2 /* compat_vm_assist */
350 .byte 5 /* compat_update_va_mapping_otherdomain */
351 .byte 0 /* compat_iret */
352 .byte 3 /* compat_vcpu_op */
353 .byte 0 /* compat_ni_hypercall */ /* 25 */
354 .byte 4 /* compat_mmuext_op */
355 .byte 1 /* compat_acm_op */
356 .byte 2 /* compat_nmi_op */
357 .byte 2 /* compat_sched_op */
358 .byte 2 /* compat_callback_op */ /* 30 */
359 .byte 2 /* compat_xenoprof_op */
360 .byte 2 /* compat_event_channel_op */
361 .byte 2 /* compat_physdev_op */
362 .byte 2 /* do_hvm_op */
363 .byte 1 /* compat_sysctl */ /* 35 */
364 .byte 1 /* compat_domctl */
365 .byte 2 /* compat_kexec_op */
366 .rept NR_hypercalls-(.-compat_hypercall_args_table)
367 .byte 0 /* compat_ni_hypercall */
368 .endr