/*
 * Deferred machine-check delivery for a compat (32-bit PV) guest.
 * %rbx = struct vcpu. NOTE(review): the enclosing label starts above this
 * view (presumably compat_process_mce) — confirm in the full file.
 * If MCE delivery is masked, fall through to the NMI check instead.
 */
testb $1 << VCPU_TRAP_MCE,VCPU_async_exception_mask(%rbx)
jnz .Lcompat_test_guest_nmi
sti
- movb $0,VCPU_mce_pending(%rbx)
- call set_guest_machinecheck_trapbounce
- testl %eax,%eax
+ movb $0, VCPU_mce_pending(%rbx)
+ call set_guest_machinecheck_trapbounce
/*
 * Patch narrows the test from %eax to %al — presumably because
 * set_guest_machinecheck_trapbounce now returns bool, where the SysV ABI
 * only defines %al; TODO confirm against the helper's C prototype.
 */
+ test %al, %al
jz compat_test_all_events
movzbl VCPU_async_exception_mask(%rbx),%edx # save mask for the
movb %dl,VCPU_mce_old_mask(%rbx) # iret hypercall
/* %rbx: struct vcpu */
/*
 * Deferred NMI delivery for a compat (32-bit PV) guest. Mirrors the MCE
 * path above: skip if NMI delivery is masked, otherwise clear the pending
 * flag, set up the trap bounce, and record the old mask so the iret
 * hypercall can restore it.
 */
compat_process_nmi:
testb $1 << VCPU_TRAP_NMI,VCPU_async_exception_mask(%rbx)
/*
 * NOTE(review): this -/+ pair appears to differ only in whitespace after
 * the mnemonic — likely unintended churn in the patch; verify before
 * committing, as it inflates the diff with no functional change.
 */
- jnz compat_test_guest_events
+ jnz compat_test_guest_events
sti
- movb $0,VCPU_nmi_pending(%rbx)
+ movb $0, VCPU_nmi_pending(%rbx)
call set_guest_nmi_trapbounce
/*
 * %al-only test: set_guest_nmi_trapbounce presumably returns bool (only
 * %al defined by the ABI) — TODO confirm against its C prototype.
 */
- testl %eax,%eax
+ test %al, %al
jz compat_test_all_events
movzbl VCPU_async_exception_mask(%rbx),%edx # save mask for the
movb %dl,VCPU_nmi_old_mask(%rbx) # iret hypercall
/*
 * 64-bit PV guest MCE path tail (enclosing label is outside this view —
 * presumably process_mce; confirm in the full file). Same pattern as the
 * compat path: clear the pending flag, bounce into the guest handler,
 * save the old mask for the iret hypercall.
 */
sti
movb $0, VCPU_mce_pending(%rbx)
call set_guest_machinecheck_trapbounce
/*
 * Only %al is meaningful — presumably a bool return per the SysV ABI;
 * TODO confirm against the C prototype.
 */
- test %eax, %eax
+ test %al, %al
jz test_all_events
movzbl VCPU_async_exception_mask(%rbx), %edx # save mask for the
movb %dl, VCPU_mce_old_mask(%rbx) # iret hypercall
/*
 * 64-bit PV guest NMI path tail (enclosing label is outside this view —
 * presumably process_nmi; confirm in the full file). Mirrors the MCE
 * path directly above.
 */
sti
movb $0, VCPU_nmi_pending(%rbx)
call set_guest_nmi_trapbounce
/*
 * Only %al is meaningful — presumably a bool return per the SysV ABI;
 * TODO confirm against the C prototype.
 */
- test %eax, %eax
+ test %al, %al
jz test_all_events
movzbl VCPU_async_exception_mask(%rbx), %edx # save mask for the
movb %dl, VCPU_nmi_old_mask(%rbx) # iret hypercall