    hvm_cpuid(1, &cpuid[0], &cpuid[1], &cpuid[2], &cpuid[3]);
    mtrr = !!(cpuid[3] & cpufeat_mask(X86_FEATURE_MTRR));

+    hvm_memory_event_msr(msr, msr_content);
+
    switch ( msr )
    {
    case MSR_EFER:
        break;
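(The hvm_cpuid()/mtrr lines above are existing context from hvm_msr_write_intercept() in xen/arch/x86/hvm/hvm.c: the new hook runs before the per-MSR switch, so a listener hears about every trapped WRMSR regardless of how the write is subsequently handled.)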
        case HVM_PARAM_MEMORY_EVENT_INT3:
        case HVM_PARAM_MEMORY_EVENT_SINGLE_STEP:
+        case HVM_PARAM_MEMORY_EVENT_MSR:
            if ( d == current->domain )
            {
                rc = -EPERM;
                break;
            }
                           value, old, 0, 0);
}

+void hvm_memory_event_msr(unsigned long msr, unsigned long value)
+{
+    hvm_memory_event_traps(current->domain->arch.hvm_domain
+                             .params[HVM_PARAM_MEMORY_EVENT_MSR],
+                           MEM_EVENT_REASON_MSR,
+                           value, ~value, 1, msr);
+}
+
int hvm_memory_event_int3(unsigned long gla)
{
    uint32_t pfec = PFEC_page_present;
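A note on the ~value argument: hvm_memory_event_traps() (unchanged by this patch, so not shown) drops an event when the param has HVMPME_onchangeonly set and the new value equals the old one. Since ~value can never equal value, MSR events bypass that filter, which is what the mem_event.h comment below means by "does NOT honour HVMPME_onchangeonly". A minimal standalone sketch of the idea, using the (1 << 2) bit that params.h assigns to HVMPME_onchangeonly:

#include <stdio.h>

#define HVMPME_onchangeonly (1 << 2)   /* bit encoding from public/hvm/params.h */

/* Sketch of the suppression test inside hvm_memory_event_traps():
 * an event is dropped when onchangeonly is set and nothing changed. */
static int suppressed(unsigned long param, unsigned long value,
                      unsigned long old)
{
    return (param & HVMPME_onchangeonly) && (value == old);
}

int main(void)
{
    unsigned long v = 0x1234;

    /* CR events pass the real old value, so rewriting an unchanged
     * value is suppressed... */
    printf("%d\n", suppressed(HVMPME_onchangeonly, v, v));   /* prints 1 */

    /* ...but MSR events pass ~value, which can never equal value,
     * so they are never suppressed. */
    printf("%d\n", suppressed(HVMPME_onchangeonly, v, ~v));  /* prints 0 */
    return 0;
}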
void hvm_memory_event_cr0(unsigned long value, unsigned long old);
void hvm_memory_event_cr3(unsigned long value, unsigned long old);
void hvm_memory_event_cr4(unsigned long value, unsigned long old);
+void hvm_memory_event_msr(unsigned long msr, unsigned long value);
/* Called for current VCPU on int3: returns -1 if no listener */
int hvm_memory_event_int3(unsigned long gla);
#define HVM_PARAM_MEMORY_EVENT_CR4 22
#define HVM_PARAM_MEMORY_EVENT_INT3 23
#define HVM_PARAM_MEMORY_EVENT_SINGLE_STEP 25
+#define HVM_PARAM_MEMORY_EVENT_MSR 30
#define HVMPME_MODE_MASK (3 << 0)
#define HVMPME_mode_disabled 0
#define HVM_PARAM_ACCESS_RING_PFN 28
#define HVM_PARAM_SHARING_RING_PFN 29
-#define HVM_NR_PARAMS 30
+#define HVM_NR_PARAMS 31
#endif /* __XEN_PUBLIC_HVM_PARAMS_H__ */
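For completeness, a hedged sketch of how a dom0 tool would turn the new parameter on: xc_set_hvm_param() is the standard libxc setter, HVMPME_mode_sync is the existing synchronous mode from this header, and xch/domid are assumed to come from the caller's setup.

#include <xenctrl.h>
#include <xen/hvm/params.h>

/* Sketch: request a synchronous mem_event (vCPU paused until the
 * listener responds) for every MSR write in the guest. */
static int enable_msr_events(xc_interface *xch, domid_t domid)
{
    return xc_set_hvm_param(xch, domid, HVM_PARAM_MEMORY_EVENT_MSR,
                            HVMPME_mode_sync);
}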
#define MEM_EVENT_REASON_CR4 4 /* CR4 was hit: gfn is CR4 value */
#define MEM_EVENT_REASON_INT3 5 /* int3 was hit: gla/gfn are RIP */
#define MEM_EVENT_REASON_SINGLESTEP 6 /* single step was invoked: gla/gfn are RIP */
+#define MEM_EVENT_REASON_MSR 7 /* MSR was hit: gfn is MSR value, gla is MSR address;
+                                  does NOT honour HVMPME_onchangeonly */

typedef struct mem_event_st {
    uint32_t flags;
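On the receiving end, the new reason code slots into the request-dispatch switch of a mem_event listener (tools/tests/xen-access is the in-tree model). A hedged sketch of just the decode step, assuming the request struct exposes reason, vcpu_id, gfn and gla as the comment above describes; ring setup and the response path are omitted.

#include <stdio.h>
#include <inttypes.h>
#include <xen/mem_event.h>

/* Sketch: handle one request pulled off the mem_event ring. */
static void handle_request(const mem_event_request_t *req)
{
    switch ( req->reason )
    {
    case MEM_EVENT_REASON_MSR:
        /* Per the comment above: gfn is the value written, gla the MSR. */
        printf("vcpu %u wrote 0x%"PRIx64" to MSR 0x%"PRIx64"\n",
               (unsigned)req->vcpu_id, (uint64_t)req->gfn, (uint64_t)req->gla);
        break;
    default:
        break;
    }
}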