/*
 * Raise #UD (undefined opcode) unless the guest's CPUID reports the given
 * feature bit: vcpu_has() queries CPUID `leaf`, register `reg`, bit `bit`
 * through the emulation context/ops.
 */
#define vcpu_must_have(leaf, reg, bit) \
generate_exception_if(!vcpu_has(leaf, reg, bit, ctxt, ops), EXC_UD, -1)
/*
 * Convenience wrappers for CPUID leaf 0x00000001 EDX feature flags:
 * bit 0 = x87 FPU, bit 23 = MMX, bit 25 = SSE, bit 26 = SSE2.
 * NOTE(review): the leading '+' below is diff residue from the patch this
 * excerpt was taken from — it must not appear in the compiled source.
 */
+#define vcpu_must_have_fpu() vcpu_must_have(0x00000001, EDX, 0)
#define vcpu_must_have_mmx() vcpu_must_have(0x00000001, EDX, 23)
#define vcpu_must_have_sse() vcpu_must_have(0x00000001, EDX, 25)
#define vcpu_must_have_sse2() vcpu_must_have(0x00000001, EDX, 26)
/*
 * Emulate FWAIT/WAIT: first verify (new '+' line) that both host and guest
 * expose an x87 FPU, then acquire FPU state for the guest and execute a real
 * "fwait" so pending x87 exceptions are delivered.  The enclosing case label
 * is outside this excerpt — presumably opcode 0x9b; confirm in full source.
 */
{
struct fpu_insn_ctxt fic = { .insn_bytes = 1 };
+ host_and_vcpu_must_have(fpu);
/* get_fpu/put_fpu bracket guest FPU ownership around the host instruction. */
get_fpu(X86EMUL_FPU_wait, &fic);
asm volatile ( "fwait" ::: "memory" );
put_fpu(&fic);
}
/*
 * x87 escape opcodes 0xd8-0xdf.  The patch ('+' lines) adds a
 * host_and_vcpu_must_have(fpu) feature check at the top of every escape
 * byte's case, so emulation raises #UD when the FPU is not advertised —
 * presumably this macro combines a host capability check with
 * vcpu_must_have_fpu() above; confirm against its definition elsewhere in
 * the file.  Each case below is truncated in this excerpt: only the first
 * ModRM sub-case and a stray break; from a later one are visible.
 */
case 0xd8: /* FPU 0xd8 */
+ host_and_vcpu_must_have(fpu);
switch ( modrm )
{
case 0xc0 ... 0xc7: /* fadd %stN,%stN */
break;
case 0xd9: /* FPU 0xd9 */
+ host_and_vcpu_must_have(fpu);
switch ( modrm )
{
case 0xfb: /* fsincos */
break;
case 0xda: /* FPU 0xda */
+ host_and_vcpu_must_have(fpu);
switch ( modrm )
{
case 0xc0 ... 0xc7: /* fcmovb %stN */
break;
case 0xdb: /* FPU 0xdb */
+ host_and_vcpu_must_have(fpu);
switch ( modrm )
{
case 0xc0 ... 0xc7: /* fcmovnb %stN */
break;
case 0xdc: /* FPU 0xdc */
+ host_and_vcpu_must_have(fpu);
switch ( modrm )
{
case 0xc0 ... 0xc7: /* fadd %stN */
break;
case 0xdd: /* FPU 0xdd */
+ host_and_vcpu_must_have(fpu);
switch ( modrm )
{
case 0xc0 ... 0xc7: /* ffree %stN */
break;
case 0xde: /* FPU 0xde */
+ host_and_vcpu_must_have(fpu);
switch ( modrm )
{
case 0xc0 ... 0xc7: /* faddp %stN */
break;
case 0xdf: /* FPU 0xdf */
+ host_and_vcpu_must_have(fpu);
switch ( modrm )
{
case 0xe0: