};
int rc;
- stack_exec = emul_test_make_stack_executable();
+ stack_exec = emul_test_init();
if ( !stack_exec )
{
printf("Warning: Stack could not be made executable (%d).\n", errno);
}
instr = (char *)res + 0x100;
- stack_exec = emul_test_make_stack_executable();
+ stack_exec = emul_test_init();
if ( !stack_exec )
printf("Warning: Stack could not be made executable (%d).\n", errno);
else
printf("skipped\n");
+ printf("%-40s", "Testing stmxcsr (%edx)...");
+ if ( cpu_has_sse )
+ {
+ decl_insn(stmxcsr);
+
+ asm volatile ( put_insn(stmxcsr, "stmxcsr (%0)") :: "d" (NULL) );
+
+ res[0] = 0x12345678;
+ res[1] = 0x87654321;
+ asm ( "stmxcsr %0" : "=m" (res[2]) );
+ set_insn(stmxcsr);
+ regs.edx = (unsigned long)res;
+ rc = x86_emulate(&ctxt, &emulops);
+ if ( rc != X86EMUL_OKAY || !check_eip(stmxcsr) ||
+ res[0] != res[2] || res[1] != 0x87654321 )
+ goto fail;
+ printf("okay\n");
+ }
+ else
+ printf("skipped\n");
+
+ printf("%-40s", "Testing ldmxcsr 4(%ecx)...");
+ if ( cpu_has_sse )
+ {
+ decl_insn(ldmxcsr);
+
+ asm volatile ( put_insn(ldmxcsr, "ldmxcsr 4(%0)") :: "c" (NULL) );
+
+ set_insn(ldmxcsr);
+ res[1] = mxcsr_mask;
+ regs.ecx = (unsigned long)res;
+ rc = x86_emulate(&ctxt, &emulops);
+ asm ( "stmxcsr %0; ldmxcsr %1" : "=m" (res[0]) : "m" (res[2]) );
+ if ( rc != X86EMUL_OKAY || !check_eip(ldmxcsr) ||
+ res[0] != mxcsr_mask )
+ goto fail;
+ printf("okay\n");
+ }
+ else
+ printf("skipped\n");
+
+ printf("%-40s", "Testing vstmxcsr (%ecx)...");
+ if ( cpu_has_avx )
+ {
+ decl_insn(vstmxcsr);
+
+ asm volatile ( put_insn(vstmxcsr, "vstmxcsr (%0)") :: "c" (NULL) );
+
+ res[0] = 0x12345678;
+ res[1] = 0x87654321;
+ set_insn(vstmxcsr);
+ regs.ecx = (unsigned long)res;
+ rc = x86_emulate(&ctxt, &emulops);
+ if ( rc != X86EMUL_OKAY || !check_eip(vstmxcsr) ||
+ res[0] != res[2] || res[1] != 0x87654321 )
+ goto fail;
+ printf("okay\n");
+ }
+ else
+ printf("skipped\n");
+
+ printf("%-40s", "Testing vldmxcsr 4(%edx)...");
+ if ( cpu_has_avx )
+ {
+ decl_insn(vldmxcsr);
+
+ asm volatile ( put_insn(vldmxcsr, "vldmxcsr 4(%0)") :: "d" (NULL) );
+
+ set_insn(vldmxcsr);
+ res[1] = mxcsr_mask;
+ regs.edx = (unsigned long)res;
+ rc = x86_emulate(&ctxt, &emulops);
+ asm ( "stmxcsr %0; ldmxcsr %1" : "=m" (res[0]) : "m" (res[2]) );
+ if ( rc != X86EMUL_OKAY || !check_eip(vldmxcsr) ||
+ res[0] != mxcsr_mask )
+ goto fail;
+ printf("okay\n");
+ }
+ else
+ printf("skipped\n");
+
#undef decl_insn
#undef put_insn
#undef set_insn
})
#define put_stub(stb) ((stb).addr = 0)
-bool emul_test_make_stack_executable(void)
+/* MXCSR bits writable via LDMXCSR; default for CPUs reporting no mask. */
+uint32_t mxcsr_mask = 0x0000ffbf;
+
+/*
+ * One-time test-harness setup: probe the hardware MXCSR_MASK (when FXSR
+ * is available) and make the stack executable for the insn stubs.
+ */
+bool emul_test_init(void)
{
unsigned long sp;
+ if ( cpu_has_fxsr )
+ {
+ static union __attribute__((__aligned__(16))) {
+ char x[464];
+ struct {
+ uint32_t other[6];
+ uint32_t mxcsr;
+ uint32_t mxcsr_mask;
+ /* ... */
+ };
+ } fxs;
+
+ /* Buffer is 464 bytes: FXSAVE is specified not to write 464-511. */
+ asm ( "fxsave %0" : "=m" (fxs) );
+ /* A zero stored mask means "use default" - keep the 0xffbf fallback. */
+ if ( fxs.mxcsr_mask )
+ mxcsr_mask = fxs.mxcsr_mask;
+ }
+
+
/*
* Mark the entire stack executable so that the stub executions
* don't fault
#define is_canonical_address(x) (((int64_t)(x) >> 47) == ((int64_t)(x) >> 63))
+/* Writable MXCSR bits, probed at init; used to validate LDMXCSR values. */
+extern uint32_t mxcsr_mask;
+
#define MMAP_SZ 16384
-bool emul_test_make_stack_executable(void);
+bool emul_test_init(void);
#include "x86_emulate/x86_emulate.h"
(res.d & (1U << 23)) != 0; \
})
+/* CPUID leaf 1, EDX bit 24: FXSAVE/FXRSTOR supported. */
+#define cpu_has_fxsr ({ \
+ struct cpuid_leaf res; \
+ emul_test_cpuid(1, 0, &res, NULL); \
+ (res.d & (1U << 24)) != 0; \
+})
+
#define cpu_has_sse ({ \
struct cpuid_leaf res; \
emul_test_cpuid(1, 0, &res, NULL); \
case 0x50 ... 0x77:
case 0x79 ... 0x7d:
case 0x7f:
- case 0xae:
case 0xc2 ... 0xc3:
case 0xc5 ... 0xc6:
case 0xd0 ... 0xfe:
}
break;
+ /* Grp15: {,v}ldmxcsr read a 4-byte memory dword, {,v}stmxcsr write one. */
+ case 0xae:
+ /* Record the legacy prefix so execution can #UD on 66/F2/F3. */
+ ctxt->opcode |= MASK_INSR(vex.pfx, X86EMUL_OPC_PFX_MASK);
+ /* fall through */
+ case X86EMUL_OPC_VEX(0, 0xae):
+ switch ( modrm_reg & 7 )
+ {
+ case 2: /* {,v}ldmxcsr */
+ state->desc = DstImplicit | SrcMem | ModRM | Mov;
+ op_bytes = 4;
+ break;
+
+ case 3: /* {,v}stmxcsr */
+ state->desc = DstMem | SrcImplicit | ModRM | Mov;
+ op_bytes = 4;
+ break;
+ }
+ break;
+
case 0xb8: /* jmpe / popcnt */
if ( rep_prefix() )
ctxt->opcode |= MASK_INSR(vex.pfx, X86EMUL_OPC_PFX_MASK);
case X86EMUL_OPC(0x0f, 0xae): case X86EMUL_OPC_66(0x0f, 0xae): /* Grp15 */
switch ( modrm_reg & 7 )
{
+ case 2: /* ldmxcsr */
+ generate_exception_if(vex.pfx, EXC_UD);
+ vcpu_must_have(sse);
+ ldmxcsr: /* shared tail for the VEX (vldmxcsr) path below */
+ /* Register form of Grp15 /2 is undefined. */
+ generate_exception_if(src.type != OP_MEM, EXC_UD);
+ /* #GP(0) when setting any bit outside the supported mask. */
+ generate_exception_if(src.val & ~mxcsr_mask, EXC_GP, 0);
+ asm volatile ( "ldmxcsr %0" :: "m" (src.val) );
+ break;
+
+ case 3: /* stmxcsr */
+ generate_exception_if(vex.pfx, EXC_UD);
+ vcpu_must_have(sse);
+ stmxcsr: /* shared tail for the VEX (vstmxcsr) path below */
+ /* Register form of Grp15 /3 is undefined. */
+ generate_exception_if(dst.type != OP_MEM, EXC_UD);
+ asm volatile ( "stmxcsr %0" : "=m" (dst.val) );
+ break;
+
+
case 5: /* lfence */
fail_if(modrm_mod != 3);
generate_exception_if(vex.pfx, EXC_UD);
}
break;
+ case X86EMUL_OPC_VEX(0x0f, 0xae): /* Grp15 */
+ switch ( modrm_reg & 7 )
+ {
+ case 2: /* vldmxcsr */
+ /* VEX.L set or vvvv != 1111b is reserved -> #UD. */
+ generate_exception_if(vex.l || vex.reg != 0xf, EXC_UD);
+ vcpu_must_have(avx);
+ goto ldmxcsr;
+ case 3: /* vstmxcsr */
+ generate_exception_if(vex.l || vex.reg != 0xf, EXC_UD);
+ vcpu_must_have(avx);
+ goto stmxcsr;
+ }
+ /* Other /reg values have no VEX-encoded forms. */
+ goto cannot_emulate;
+
case X86EMUL_OPC_F3(0x0f, 0xae): /* Grp15 */
fail_if(modrm_mod != 3);
generate_exception_if((modrm_reg & 4) || !mode_64bit(), EXC_UD);
u64 __read_mostly xstate_align;
static unsigned int __read_mostly xstate_features;
-static uint32_t __read_mostly mxcsr_mask = 0x0000ffbf;
+/* Non-static: the insn emulator uses this to validate LDMXCSR operands. */
+uint32_t __read_mostly mxcsr_mask = 0x0000ffbf;
/* Cached xcr0 for fast read */
static DEFINE_PER_CPU(uint64_t, xcr0);
#define FCW_RESET 0x0040
#define MXCSR_DEFAULT 0x1f80
+/* Writable MXCSR bits; attempts to set others raise #GP(0) on load. */
+extern uint32_t mxcsr_mask;
+
#define XSTATE_CPUID 0x0000000d
#define XCR_XFEATURE_ENABLED_MASK 0x00000000 /* index of XCR0 */