if ( rc != X86EMUL_OKAY )
return rc;
+ if ( hvmemul_ctxt->ctxt.retire.singlestep )
+ hvm_inject_hw_exception(TRAP_debug, X86_EVENT_NO_EC);
+
new_intr_shadow = hvmemul_ctxt->intr_shadow;
/* MOV-SS instruction toggles MOV-SS shadow, else we just clear it. */
if ( rc == X86EMUL_UNHANDLEABLE )
goto bail;
+ if ( ptwr_ctxt.ctxt.retire.singlestep )
+ pv_inject_hw_exception(TRAP_debug, X86_EVENT_NO_EC);
+
perfc_incr(ptwr_emulations);
return EXCRET_fault_fixed;
else
rc = x86_emulate(&ctxt, &mmio_ro_emulate_ops);
- return rc != X86EMUL_UNHANDLEABLE ? EXCRET_fault_fixed : 0;
+ if ( rc == X86EMUL_UNHANDLEABLE )
+ return 0;
+
+ if ( ctxt.retire.singlestep )
+ pv_inject_hw_exception(TRAP_debug, X86_EVENT_NO_EC);
+
+ return EXCRET_fault_fixed;
}
void *alloc_xen_pagetable(void)
v->arch.paging.last_write_emul_ok = 0;
#endif
+ if ( emul_ctxt.ctxt.retire.singlestep )
+ {
+ if ( has_hvm_container_domain(d) )
+ hvm_inject_hw_exception(TRAP_debug, X86_EVENT_NO_EC);
+ else
+ pv_inject_hw_exception(TRAP_debug, X86_EVENT_NO_EC);
+ }
+
#if GUEST_PAGING_LEVELS == 3 /* PAE guest */
- if ( r == X86EMUL_OKAY ) {
+ /*
+ * If there are no pending actions, emulate up to four extra instructions
+ * in the hope of catching the "second half" of a 64-bit pagetable write.
+ */
+ if ( r == X86EMUL_OKAY && !emul_ctxt.ctxt.retire.raw )
+ {
int i, emulation_count=0;
this_cpu(trace_emulate_initial_va) = va;
- /* Emulate up to four extra instructions in the hope of catching
- * the "second half" of a 64-bit pagetable write. */
+
for ( i = 0 ; i < 4 ; i++ )
{
shadow_continue_emulation(&emul_ctxt, regs);
v->arch.paging.last_write_was_pt = 0;
r = x86_emulate(&emul_ctxt.ctxt, emul_ops);
- if ( r == X86EMUL_OKAY )
+
+ /*
+ * Only continue the search for the second half if there are no
+ * exceptions or pending actions. Otherwise, give up and re-enter
+ * the guest.
+ */
+ if ( r == X86EMUL_OKAY && !emul_ctxt.ctxt.retire.raw )
{
emulation_count++;
if ( v->arch.paging.last_write_was_pt )
{
perfc_incr(shadow_em_ex_fail);
TRACE_SHADOW_PATH_FLAG(TRCE_SFLAG_EMULATION_LAST_FAILED);
+
+ if ( emul_ctxt.ctxt.retire.singlestep )
+ {
+ if ( has_hvm_container_domain(d) )
+ hvm_inject_hw_exception(TRAP_debug, X86_EVENT_NO_EC);
+ else
+ pv_inject_hw_exception(TRAP_debug, X86_EVENT_NO_EC);
+ }
+
break; /* Don't emulate again if we failed! */
}
}
struct x86_emulate_state state;
int rc;
uint8_t b, d;
- bool tf = ctxt->regs->eflags & EFLG_TF;
struct operand src = { .reg = PTR_POISON };
struct operand dst = { .reg = PTR_POISON };
enum x86_swint_type swint_type;
if ( !mode_64bit() )
_regs.eip = (uint32_t)_regs.eip;
- *ctxt->regs = _regs;
+ /* Was singlestepping active at the start of this instruction? */
+ if ( (rc == X86EMUL_OKAY) && (ctxt->regs->eflags & EFLG_TF) )
+ ctxt->retire.singlestep = true;
- /* Inject #DB if single-step tracing was enabled at instruction start. */
- if ( tf && (rc == X86EMUL_OKAY) && ops->inject_hw_exception )
- rc = ops->inject_hw_exception(EXC_DB, -1, ctxt) ? : X86EMUL_EXCEPTION;
+ *ctxt->regs = _regs;
done:
_put_fpu();
bool hlt:1; /* Instruction HLTed. */
bool mov_ss:1; /* Instruction sets MOV-SS irq shadow. */
bool sti:1; /* Instruction sets STI irq shadow. */
+ bool singlestep:1; /* Singlestepping was active. */
};
} retire;
};
struct x86_emulate_ctxt *ctxt,
const struct x86_emulate_ops *ops)
{
+ unsigned long orig_eip = ctxt->regs->eip;
int rc = x86_emulate(ctxt, ops);
/* Retire flags should only be set for successful instruction emulation. */
if ( rc != X86EMUL_OKAY )
ASSERT(ctxt->retire.raw == 0);
+ /* All cases returning X86EMUL_EXCEPTION should have fault semantics. */
+ if ( rc == X86EMUL_EXCEPTION )
+ ASSERT(ctxt->regs->eip == orig_eip);
+
return rc;
}