return hvm_funcs.do_pmu_interrupt(regs);
}
+/*
+ * Return nonzero iff the HVM guest's virtual CPUID advertises SMEP
+ * (Supervisor Mode Execution Protection, CPUID leaf 7 subleaf 0, EBX).
+ * NOTE(review): hvm_cpuid() presumably returns the guest-visible CPUID
+ * policy rather than raw host CPUID — confirm against its definition.
+ */
+static inline bool_t hvm_vcpu_has_smep(void)
+{
+ /* ecx = 0 selects subleaf 0 for the leaf-7 query below. */
+ unsigned int eax, ebx, ecx = 0, dummy;
+
+ /* Leaf 0: EAX reports the highest supported standard CPUID leaf. */
+ hvm_cpuid(0, &eax, &dummy, &dummy, &dummy);
+
+ /* If leaf 7 is not implemented for this guest, SMEP cannot be exposed. */
+ if ( eax < 7 )
+ return 0;
+
+ hvm_cpuid(7, &dummy, &ebx, &ecx, &dummy);
+ /* Normalize the masked bit to 0/1 for the bool_t return. */
+ return !!(ebx & cpufeat_mask(X86_FEATURE_SMEP));
+}
+
/* These reserved bits in lower 32 remain 0 after any load of CR0 */
#define HVM_CR0_GUEST_RESERVED_BITS \
(~((unsigned long) \
X86_CR4_DE | X86_CR4_PSE | X86_CR4_PAE | \
X86_CR4_MCE | X86_CR4_PGE | X86_CR4_PCE | \
X86_CR4_OSFXSR | X86_CR4_OSXMMEXCPT | \
- (cpu_has_smep ? X86_CR4_SMEP : 0) | \
+ (hvm_vcpu_has_smep() ? X86_CR4_SMEP : 0) | \
(cpu_has_fsgsbase ? X86_CR4_FSGSBASE : 0) | \
((nestedhvm_enabled((_v)->domain) && cpu_has_vmx)\
? X86_CR4_VMXE : 0) | \