/*
 * SVM STGI/CLGI are emitted as raw opcode bytes so the file assembles
 * with toolchains that predate these mnemonics.
 * (Leftover unified-diff residue removed: the r()/addr_of()/call_with_regs()
 * helper macros were deleted in favour of writing registers explicitly.)
 */
#define STGI  .byte 0x0F,0x01,0xDC
#define CLGI  .byte 0x0F,0x01,0xDD
/*
 * svm_asm_do_resume: (re)enter the current vcpu's SVM guest context.
 * Expects the guest cpu_user_regs frame at the top of the stack; %rsp is
 * passed to C helpers as the regs pointer. Falls through to VMRUN (entry
 * opcode sequence elided from this view) and re-dispatches the exit via
 * svm_vmexit_handler before looping back.
 */
ENTRY(svm_asm_do_resume)
        call svm_intr_assist
        mov  %rsp,%rdi                  /* arg: struct cpu_user_regs * */
        call nsvm_vcpu_switch
        ASSERT_NOT_IN_ATOMIC

        GET_CURRENT(%rbx)               /* %rbx = current vcpu, live below */
        CLGI                            /* mask interrupts until VMRUN/STGI */

        /* Any softirq pending on this pcpu?  irq_stat[cpu] != 0 */
        mov  VCPU_processor(%rbx),%eax
        shl  $IRQSTAT_shift,%rax
        lea  irq_stat(%rip),%rdx
        testl $~0,(%rdx,%rax,1)
        jnz  .Lsvm_process_softirqs

        /*
         * Nested-HAP consistency check.
         * NOTE(review): original read `testb $0,...` — (mem & 0) is always
         * zero, so the `nz` arm below was unreachable.  cmpb actually tests
         * the flag byte; UNLIKELY_START(nz,...) now fires when HAP enabled.
         */
        cmpb $0, VCPU_nsvm_hap_enabled(%rbx)
UNLIKELY_START(nz, nsvm_hap)
        mov  VCPU_nhvm_p2m(%rbx),%rax
        test %rax,%rax
        sete %al                        /* %al = (nhvm_p2m == NULL) */
        andb VCPU_nhvm_guestmode(%rbx),%al
        jnz  .Lsvm_nsvm_no_p2m          /* guest mode but no p2m: bail out */
UNLIKELY_END(nsvm_hap)

        call svm_asid_handle_vmrun

        cmpb $0,tb_init_done(%rip)
UNLIKELY_START(nz, svm_trace)
        call svm_trace_vmentry
UNLIKELY_END(svm_trace)

        /* Sync the architectural state VMRUN loads from the VMCB. */
        mov  VCPU_svm_vmcb(%rbx),%rcx
        mov  UREGS_rax(%rsp),%rax
        mov  %rax,VMCB_rax(%rcx)
        mov  UREGS_rip(%rsp),%rax
        mov  %rax,VMCB_rip(%rcx)
        mov  UREGS_rsp(%rsp),%rax
        mov  %rax,VMCB_rsp(%rcx)
        mov  UREGS_eflags(%rsp),%rax
        mov  %rax,VMCB_rflags(%rcx)

        mov  VCPU_svm_vmcb_pa(%rbx),%rax /* %rax = VMCB PA for VMRUN */

        /* VMRUN clobber sequence elided from this view. */
        pop  %r15
        pop  %r14
        push %r14
        push %r15

        /* --- VM exit path: pull guest state back out of the VMCB. --- */
        GET_CURRENT(%rbx)
        movb $0,VCPU_svm_vmcb_in_sync(%rbx) /* VMCB no longer in sync */
        mov  VCPU_svm_vmcb(%rbx),%rcx
        mov  VMCB_rax(%rcx),%rax
        mov  %rax,UREGS_rax(%rsp)
        mov  VMCB_rip(%rcx),%rax
        mov  %rax,UREGS_rip(%rsp)
        mov  VMCB_rsp(%rcx),%rax
        mov  %rax,UREGS_rsp(%rsp)
        mov  VMCB_rflags(%rcx),%rax
        mov  %rax,UREGS_eflags(%rsp)

#ifndef NDEBUG
        /* Poison frame fields SVM does not maintain, to catch misuse. */
        mov  $0xbeef,%ax
        mov  %ax,UREGS_error_code(%rsp)
        mov  %ax,UREGS_entry_vector(%rsp)
        mov  %ax,UREGS_saved_upcall_mask(%rsp)
        mov  %ax,UREGS_cs(%rsp)
        mov  %ax,UREGS_ds(%rsp)
        mov  %ax,UREGS_es(%rsp)
        mov  %ax,UREGS_fs(%rsp)
        mov  %ax,UREGS_gs(%rsp)
        mov  %ax,UREGS_ss(%rsp)
#endif

        STGI
.globl svm_stgi_label
svm_stgi_label:
        mov  %rsp,%rdi                  /* arg: struct cpu_user_regs * */
        call svm_vmexit_handler
        jmp  svm_asm_do_resume
.Lsvm_process_softirqs:
/*
 * VMCS field encodings used by the raw VMREAD/VMWRITE sequences below
 * (Intel SDM Vol. 3, Appendix B: natural-width guest-state fields).
 * GUEST_RSP is referenced by the code below but was not defined in this
 * block — added here; TODO(review): confirm no conflicting prior define.
 * (Leftover unified-diff residue of the removed helper macros dropped.)
 */
#define GUEST_RSP    0x681c
#define GUEST_RIP    0x681e
#define GUEST_RFLAGS 0x6820
ALIGN
.globl vmx_asm_vmexit_handler
vmx_asm_vmexit_handler:
push %r14
push %r15
- get_current(bx)
+ GET_CURRENT(%rbx)
- movb $1,VCPU_vmx_launched(r(bx))
+ movb $1,VCPU_vmx_launched(%rbx)
- lea UREGS_rip(r(sp)),r(di)
+ lea UREGS_rip(%rsp),%rdi
mov $GUEST_RIP,%eax
/*VMREAD(UREGS_rip)*/
- .byte 0x0f,0x78,0x07 /* vmread r(ax),(r(di)) */
+ .byte 0x0f,0x78,0x07 /* vmread %rax,(%rdi) */
mov $GUEST_RSP,%eax
VMREAD(UREGS_rsp)
mov $GUEST_RFLAGS,%eax
VMREAD(UREGS_eflags)
- mov %cr2,r(ax)
- mov r(ax),VCPU_hvm_guest_cr2(r(bx))
+ mov %cr2,%rax
+ mov %rax,VCPU_hvm_guest_cr2(%rbx)
#ifndef NDEBUG
mov $0xbeef,%ax
- mov %ax,UREGS_error_code(r(sp))
- mov %ax,UREGS_entry_vector(r(sp))
- mov %ax,UREGS_saved_upcall_mask(r(sp))
- mov %ax,UREGS_cs(r(sp))
- mov %ax,UREGS_ds(r(sp))
- mov %ax,UREGS_es(r(sp))
- mov %ax,UREGS_fs(r(sp))
- mov %ax,UREGS_gs(r(sp))
- mov %ax,UREGS_ss(r(sp))
+ mov %ax,UREGS_error_code(%rsp)
+ mov %ax,UREGS_entry_vector(%rsp)
+ mov %ax,UREGS_saved_upcall_mask(%rsp)
+ mov %ax,UREGS_cs(%rsp)
+ mov %ax,UREGS_ds(%rsp)
+ mov %ax,UREGS_es(%rsp)
+ mov %ax,UREGS_fs(%rsp)
+ mov %ax,UREGS_gs(%rsp)
+ mov %ax,UREGS_ss(%rsp)
#endif
- call_with_regs(vmx_vmexit_handler)
+ mov %rsp,%rdi
+ call vmx_vmexit_handler
/*
 * vmx_asm_do_vmentry: (re)enter the current vcpu's VMX guest context.
 * Checks for pending softirqs / emulation / realmode work, restores guest
 * %cr2, writes RIP/RSP/RFLAGS back into the VMCS, then selects
 * VMLAUNCH vs. VMRESUME based on vmx_launched (entry tail elided from
 * this view).
 */
.globl vmx_asm_do_vmentry
vmx_asm_do_vmentry:
        call nvmx_switch_guest
        ASSERT_NOT_IN_ATOMIC

        GET_CURRENT(%rbx)               /* %rbx = current vcpu, live below */
        cli                             /* no interrupts until VM entry */

        /* Any softirq pending on this pcpu?  irq_stat[cpu] != 0 */
        mov  VCPU_processor(%rbx),%eax
        shl  $IRQSTAT_shift,%rax
        lea  irq_stat(%rip),%rdx
        cmpl $0,(%rdx,%rax,1)
        jnz  .Lvmx_process_softirqs

        testb $0xff,VCPU_vmx_emulate(%rbx)
        jnz  .Lvmx_goto_emulator
        testb $0xff,VCPU_vmx_realmode(%rbx)
UNLIKELY_START(nz, realmode)
        /* Realmode with pending vm86 segment fixups must be emulated. */
        cmpw $0,VCPU_vm86_seg_mask(%rbx)
        jnz  .Lvmx_goto_emulator
        mov  %rsp,%rdi                  /* arg: struct cpu_user_regs * */
        call vmx_enter_realmode
UNLIKELY_END(realmode)

        call vmx_vmenter_helper

        /* Restore the guest's %cr2 before entry. */
        mov  VCPU_hvm_guest_cr2(%rbx),%rax
        mov  %rax,%cr2

        lea  UREGS_rip(%rsp),%rdi
        mov  $GUEST_RIP,%eax
        /*VMWRITE(UREGS_rip)*/
        .byte 0x0f,0x79,0x07 /* vmwrite (%rdi),%rax */
        mov  $GUEST_RSP,%eax
        VMWRITE(UREGS_rsp)
        mov  $GUEST_RFLAGS,%eax
        VMWRITE(UREGS_eflags)

        /* ZF from this cmpb selects VMLAUNCH vs. VMRESUME (tail elided). */
        cmpb $0,VCPU_vmx_launched(%rbx)
        pop  %r15
        pop  %r14
        pop  %r13
/* Slow path: hand the vcpu to the realmode/instruction emulator. */
.Lvmx_goto_emulator:
        sti                             /* emulator runs with irqs enabled */
        mov  %rsp,%rdi                  /* arg: struct cpu_user_regs * */
        call vmx_realmode
        jmp  vmx_asm_do_vmentry
.Lvmx_process_softirqs: