return this_cpu(xcr0);
}
+/* Cached xss for fast read */
+static DEFINE_PER_CPU(uint64_t, xss);
+
+/*
+ * Write MSR_IA32_XSS, skipping the (comparatively expensive) WRMSR when
+ * the requested value already matches the per-CPU cached copy.  Mirrors
+ * the caching scheme used for XCR0 by set_xcr0()/get_xcr0().
+ */
+void set_msr_xss(u64 xss)
+{
+ u64 *this_xss = &this_cpu(xss);
+
+ if ( *this_xss != xss )
+ {
+ wrmsrl(MSR_IA32_XSS, xss);
+ *this_xss = xss;
+ }
+}
+
+/* Return the per-CPU cached MSR_IA32_XSS value, avoiding an RDMSR. */
+uint64_t get_msr_xss(void)
+{
+ return this_cpu(xss);
+}
+
void xsave(struct vcpu *v, uint64_t mask)
{
struct xsave_struct *ptr = v->arch.xsave_area;
#define MSR_IA32_BNDCFGS 0x00000D90
+/* Extended Supervisor State mask MSR, used by XSAVES/XRSTORS. */
+#define MSR_IA32_XSS 0x00000da0
+
#define MSR_MTRRfix64K_00000 0x00000250
#define MSR_MTRRfix16K_80000 0x00000258
#define MSR_MTRRfix16K_A0000 0x00000259
#define XCR_XFEATURE_ENABLED_MASK 0x00000000 /* index of XCR0 */
+/* XSAVE area layout: 512-byte legacy FXSAVE region, then 64-byte header. */
+#define XSAVE_HDR_SIZE 64
+#define XSAVE_SSE_OFFSET 160
#define XSTATE_YMM_SIZE 256
-#define XSTATE_AREA_MIN_SIZE (512 + 64) /* FP/SSE + XSAVE.HEADER */
+#define FXSAVE_SIZE 512
+#define XSAVE_HDR_OFFSET FXSAVE_SIZE
+#define XSTATE_AREA_MIN_SIZE (FXSAVE_SIZE + XSAVE_HDR_SIZE)
#define XSTATE_FP (1ULL << 0)
#define XSTATE_SSE (1ULL << 1)
#define XSTATE_ALL (~(1ULL << 63))
#define XSTATE_NONLAZY (XSTATE_LWP | XSTATE_BNDREGS | XSTATE_BNDCSR)
#define XSTATE_LAZY (XSTATE_ALL & ~XSTATE_NONLAZY)
+/* Bit 63 of xcomp_bv: save area uses the compacted format. */
+#define XSTATE_COMPACTION_ENABLED (1ULL << 63)
extern u64 xfeature_mask;
struct {
u64 xstate_bv;
- u64 reserved[7];
+ /*
+ * xcomp_bv: bit 63 selects the compacted format; the low bits give
+ * the compacted component bitmap.  Header remains 64 bytes in total.
+ */
+ u64 xcomp_bv;
+ u64 reserved[6];
} xsave_hdr; /* The 64-byte header */
struct { char x[XSTATE_YMM_SIZE]; } ymm; /* YMM */
/* extended state operations */
bool_t __must_check set_xcr0(u64 xfeatures);
uint64_t get_xcr0(void);
+/* Per-CPU cached accessors for MSR_IA32_XSS (write skips redundant WRMSR). */
+void set_msr_xss(u64 xss);
+uint64_t get_msr_xss(void);
void xsave(struct vcpu *v, uint64_t mask);
void xrstor(struct vcpu *v, uint64_t mask);
bool_t xsave_enabled(const struct vcpu *v);