ia64/xen-unstable

changeset 18413:10e0e90831f0

amd: Extended migration support

This patch adds support for AMD's Extended Migration, i.e. masking of the
CPUID features and extended features that the processor reports.
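
For context, a host would boot with one of the new parameters set, either the
pre-canned revision string or explicit mask values (hypothetical command
lines; the exact bootloader syntax varies by installation), e.g.:

    cpuid_mask_cpu=fam_0f_rev_f

or

    cpuid_mask_ecx=0xffffffff cpuid_mask_edx=0xefffffff

so that every host in a migration pool advertises the same reduced feature
set.  The hexadecimal values above are illustrative only.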

Signed-off-by: Travis Betak <travis.betak@amd.com>
Signed-off-by: Keir Fraser <keir.fraser@citrix.com>
author Keir Fraser <keir.fraser@citrix.com>
date Fri Aug 29 11:13:41 2008 +0100 (2008-08-29)
parents 8623a537aff1
children 1721689cc834
files xen/arch/x86/cpu/amd.c xen/arch/x86/cpu/amd.h xen/include/asm-x86/msr-index.h
line diff
     1.1 --- a/xen/arch/x86/cpu/amd.c	Fri Aug 29 10:46:46 2008 +0100
     1.2 +++ b/xen/arch/x86/cpu/amd.c	Fri Aug 29 11:13:41 2008 +0100
     1.3 @@ -10,10 +10,144 @@
     1.4  #include <asm/hvm/support.h>
     1.5  
     1.6  #include "cpu.h"
     1.7 +#include "amd.h"
     1.8  
     1.9  int start_svm(struct cpuinfo_x86 *c);
    1.10  
    1.11  /*
    1.12 + * Pre-canned values for overriding the CPUID features 
    1.13 + * and extended features masks.
    1.14 + *
    1.15 + * Currently supported processors:
    1.16 + * 
    1.17 + * "fam_0f_rev_c"
    1.18 + * "fam_0f_rev_d"
    1.19 + * "fam_0f_rev_e"
    1.20 + * "fam_0f_rev_f"
    1.21 + * "fam_0f_rev_g"
    1.22 + * "fam_10_rev_b"
    1.23 + * "fam_10_rev_c"
    1.24 + * "fam_11_rev_b"
    1.25 + */
    1.26 +static char opt_famrev[14];
    1.27 +string_param("cpuid_mask_cpu", opt_famrev);
    1.28 +
    1.29 +/* Finer-grained CPUID feature control. */
    1.30 +static unsigned int opt_cpuid_mask_ecx, opt_cpuid_mask_edx;
    1.31 +integer_param("cpuid_mask_ecx", opt_cpuid_mask_ecx);
    1.32 +integer_param("cpuid_mask_edx", opt_cpuid_mask_edx);
    1.33 +static unsigned int opt_cpuid_mask_ext_ecx, opt_cpuid_mask_ext_edx;
     1.34 +integer_param("cpuid_mask_ext_ecx", opt_cpuid_mask_ext_ecx);
     1.35 +integer_param("cpuid_mask_ext_edx", opt_cpuid_mask_ext_edx);
    1.36 +
    1.37 +static inline void wrmsr_amd(unsigned int index, unsigned int lo, 
    1.38 +		unsigned int hi)
    1.39 +{
    1.40 +	asm volatile (
    1.41 +		"wrmsr"
    1.42 +		: /* No outputs */
    1.43 +		: "c" (index), "a" (lo), 
    1.44 +		"d" (hi), "D" (0x9c5a203a)
    1.45 +	);
    1.46 +}
    1.47 +
    1.48 +/*
    1.49 + * Mask the features and extended features returned by CPUID.  Parameters are
    1.50 + * set from the boot line via two methods:
    1.51 + *
    1.52 + *   1) Specific processor revision string
    1.53 + *   2) User-defined masks
    1.54 + *
     1.55 + * The processor revision string parameter has precedence.
    1.56 + */
    1.57 +static void __devinit set_cpuidmask(struct cpuinfo_x86 *c)
    1.58 +{
    1.59 +	static unsigned int feat_ecx, feat_edx;
    1.60 +	static unsigned int extfeat_ecx, extfeat_edx;
    1.61 +	static enum { not_parsed, no_mask, set_mask } status;
    1.62 +
    1.63 +	if (status == no_mask)
    1.64 +		return;
    1.65 +
    1.66 +	if (status == set_mask)
    1.67 +		goto setmask;
    1.68 +
    1.69 +	ASSERT((status == not_parsed) && (smp_processor_id() == 0));
    1.70 +	status = no_mask;
    1.71 +
    1.72 +	if (opt_cpuid_mask_ecx | opt_cpuid_mask_edx |
    1.73 +	    opt_cpuid_mask_ext_ecx | opt_cpuid_mask_ext_edx) {
    1.74 +		feat_ecx = opt_cpuid_mask_ecx ? : ~0U;
    1.75 +		feat_edx = opt_cpuid_mask_edx ? : ~0U;
    1.76 +		extfeat_ecx = opt_cpuid_mask_ext_ecx ? : ~0U;
    1.77 +		extfeat_edx = opt_cpuid_mask_ext_edx ? : ~0U;
    1.78 +	} else if (*opt_famrev == '\0') {
    1.79 +		return;
    1.80 +	} else if (!strcmp(opt_famrev, "fam_0f_rev_c")) {
    1.81 +		feat_ecx = AMD_FEATURES_K8_REV_C_ECX;
    1.82 +		feat_edx = AMD_FEATURES_K8_REV_C_EDX;
    1.83 +		extfeat_ecx = AMD_EXTFEATURES_K8_REV_C_ECX;
    1.84 +		extfeat_edx = AMD_EXTFEATURES_K8_REV_C_EDX;
    1.85 +	} else if (!strcmp(opt_famrev, "fam_0f_rev_d")) {
    1.86 +		feat_ecx = AMD_FEATURES_K8_REV_D_ECX;
    1.87 +		feat_edx = AMD_FEATURES_K8_REV_D_EDX;
    1.88 +		extfeat_ecx = AMD_EXTFEATURES_K8_REV_D_ECX;
    1.89 +		extfeat_edx = AMD_EXTFEATURES_K8_REV_D_EDX;
    1.90 +	} else if (!strcmp(opt_famrev, "fam_0f_rev_e")) {
    1.91 +		feat_ecx = AMD_FEATURES_K8_REV_E_ECX;
    1.92 +		feat_edx = AMD_FEATURES_K8_REV_E_EDX;
    1.93 +		extfeat_ecx = AMD_EXTFEATURES_K8_REV_E_ECX;
    1.94 +		extfeat_edx = AMD_EXTFEATURES_K8_REV_E_EDX;
    1.95 +	} else if (!strcmp(opt_famrev, "fam_0f_rev_f")) {
    1.96 +		feat_ecx = AMD_FEATURES_K8_REV_F_ECX;
    1.97 +		feat_edx = AMD_FEATURES_K8_REV_F_EDX;
    1.98 +		extfeat_ecx = AMD_EXTFEATURES_K8_REV_F_ECX;
    1.99 +		extfeat_edx = AMD_EXTFEATURES_K8_REV_F_EDX;
   1.100 +	} else if (!strcmp(opt_famrev, "fam_0f_rev_g")) {
   1.101 +		feat_ecx = AMD_FEATURES_K8_REV_G_ECX;
   1.102 +		feat_edx = AMD_FEATURES_K8_REV_G_EDX;
   1.103 +		extfeat_ecx = AMD_EXTFEATURES_K8_REV_G_ECX;
   1.104 +		extfeat_edx = AMD_EXTFEATURES_K8_REV_G_EDX;
   1.105 +	} else if (!strcmp(opt_famrev, "fam_10_rev_b")) {
   1.106 +		feat_ecx = AMD_FEATURES_FAM10h_REV_B_ECX;
   1.107 +		feat_edx = AMD_FEATURES_FAM10h_REV_B_EDX;
   1.108 +		extfeat_ecx = AMD_EXTFEATURES_FAM10h_REV_B_ECX;
   1.109 +		extfeat_edx = AMD_EXTFEATURES_FAM10h_REV_B_EDX;
   1.110 +	} else if (!strcmp(opt_famrev, "fam_10_rev_c")) {
   1.111 +		feat_ecx = AMD_FEATURES_FAM10h_REV_C_ECX;
   1.112 +		feat_edx = AMD_FEATURES_FAM10h_REV_C_EDX;
   1.113 +		extfeat_ecx = AMD_EXTFEATURES_FAM10h_REV_C_ECX;
   1.114 +		extfeat_edx = AMD_EXTFEATURES_FAM10h_REV_C_EDX;
   1.115 +	} else if (!strcmp(opt_famrev, "fam_11_rev_b")) {
   1.116 +		feat_ecx = AMD_FEATURES_FAM11h_REV_B_ECX;
   1.117 +		feat_edx = AMD_FEATURES_FAM11h_REV_B_EDX;
   1.118 +		extfeat_ecx = AMD_EXTFEATURES_FAM11h_REV_B_ECX;
   1.119 +		extfeat_edx = AMD_EXTFEATURES_FAM11h_REV_B_EDX;
   1.120 +	} else {
   1.121 +		printk("Invalid processor string: %s\n", opt_famrev);
   1.122 +		printk("CPUID will not be masked\n");
   1.123 +		return;
   1.124 +	}
   1.125 +
   1.126 +	status = set_mask;
   1.127 +	printk("Writing CPUID feature mask ECX:EDX -> %08Xh:%08Xh\n", 
   1.128 +	       feat_ecx, feat_edx);
   1.129 +	printk("Writing CPUID extended feature mask ECX:EDX -> %08Xh:%08Xh\n", 
   1.130 +	       extfeat_ecx, extfeat_edx);
   1.131 +
   1.132 + setmask:
   1.133 +	/* FIXME check if processor supports CPUID masking */
    1.134 +	/* AMD processors prior to family 10h require a 32-bit password */
   1.135 +	if (c->x86 >= 0x10) {
   1.136 +		wrmsr(MSR_K8_FEATURE_MASK, feat_edx, feat_ecx);
   1.137 +		wrmsr(MSR_K8_EXT_FEATURE_MASK, extfeat_edx, extfeat_ecx);
   1.138 +	} else if (c->x86 == 0x0f) {
   1.139 +		wrmsr_amd(MSR_K8_FEATURE_MASK, feat_edx, feat_ecx);
   1.140 +		wrmsr_amd(MSR_K8_EXT_FEATURE_MASK, extfeat_edx, extfeat_ecx);
   1.141 +	}
   1.142 +}
   1.143 +
   1.144 +/*
   1.145   * amd_flush_filter={on,off}. Forcibly Enable or disable the TLB flush
   1.146   * filter on AMD 64-bit processors.
   1.147   */
   1.148 @@ -115,7 +249,7 @@ static void check_disable_c1e(unsigned i
   1.149  		on_each_cpu(disable_c1e, NULL, 1, 1);
   1.150  }
   1.151  
   1.152 -static void __init init_amd(struct cpuinfo_x86 *c)
   1.153 +static void __devinit init_amd(struct cpuinfo_x86 *c)
   1.154  {
   1.155  	u32 l, h;
   1.156  	int mbytes = num_physpages >> (20-PAGE_SHIFT);
   1.157 @@ -368,6 +502,8 @@ static void __init init_amd(struct cpuin
   1.158  	if ((smp_processor_id() == 1) && c1_ramping_may_cause_clock_drift(c))
   1.159  		disable_c1_ramping();
   1.160  
   1.161 +	set_cpuidmask(c);
   1.162 +
   1.163  	start_svm(c);
   1.164  }
   1.165  
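
The two mask MSRs written in set_cpuidmask() filter what the CPUID instruction
reports in ECX:EDX for leaf 1 and leaf 0x80000001.  A minimal standalone sketch
(not part of the patch; GCC-style inline asm on x86-64 assumed) that dumps
those words, so the effect of a boot-time mask can be checked:

#include <stdint.h>
#include <stdio.h>

/* Execute CPUID for the given leaf and return all four result registers. */
static void cpuid(uint32_t leaf, uint32_t *a, uint32_t *b,
                  uint32_t *c, uint32_t *d)
{
	asm volatile ("cpuid"
	              : "=a" (*a), "=b" (*b), "=c" (*c), "=d" (*d)
	              : "a" (leaf));
}

int main(void)
{
	uint32_t a, b, c, d;

	cpuid(1, &a, &b, &c, &d);           /* standard feature words */
	printf("CPUID 00000001h: ECX=%08x EDX=%08x\n", c, d);

	cpuid(0x80000001, &a, &b, &c, &d);  /* extended feature words */
	printf("CPUID 80000001h: ECX=%08x EDX=%08x\n", c, d);
	return 0;
}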
     2.1 --- /dev/null	Thu Jan 01 00:00:00 1970 +0000
     2.2 +++ b/xen/arch/x86/cpu/amd.h	Fri Aug 29 11:13:41 2008 +0100
     2.3 @@ -0,0 +1,103 @@
     2.4 +/*
     2.5 + * amd.h - AMD processor specific definitions
     2.6 + */
     2.7 +
     2.8 +#ifndef __AMD_H__
     2.9 +#define __AMD_H__
    2.10 +
    2.11 +#include <asm/cpufeature.h>
    2.12 +
    2.13 +/* CPUID masked for use by AMD-V Extended Migration */
    2.14 +
    2.15 +#define X86_FEATURE_BITPOS(_feature_) ((_feature_) % 32)
    2.16 +#define __bit(_x_) (1U << X86_FEATURE_BITPOS(_x_))
    2.17 +
    2.18 +/* Family 0Fh, Revision C */
    2.19 +#define AMD_FEATURES_K8_REV_C_ECX  0
    2.20 +#define AMD_FEATURES_K8_REV_C_EDX (					\
    2.21 +	__bit(X86_FEATURE_FPU)      | __bit(X86_FEATURE_VME)   |	\
    2.22 +	__bit(X86_FEATURE_DE)       | __bit(X86_FEATURE_PSE)   |	\
    2.23 +	__bit(X86_FEATURE_TSC)      | __bit(X86_FEATURE_MSR)   |	\
    2.24 +	__bit(X86_FEATURE_PAE)      | __bit(X86_FEATURE_MCE)   |	\
    2.25 +	__bit(X86_FEATURE_CX8)      | __bit(X86_FEATURE_APIC)  |	\
    2.26 +	__bit(X86_FEATURE_SEP)      | __bit(X86_FEATURE_MTRR)  |	\
    2.27 +	__bit(X86_FEATURE_PGE)      | __bit(X86_FEATURE_MCA)   | 	\
    2.28 +	__bit(X86_FEATURE_CMOV)     | __bit(X86_FEATURE_PAT)   |	\
    2.29 +	__bit(X86_FEATURE_PSE36)    | __bit(X86_FEATURE_CLFLSH)|	\
    2.30 +	__bit(X86_FEATURE_MMX)      | __bit(X86_FEATURE_FXSR)  | 	\
    2.31 +	__bit(X86_FEATURE_XMM)      | __bit(X86_FEATURE_XMM2))
    2.32 +#define AMD_EXTFEATURES_K8_REV_C_ECX  0 
    2.33 +#define AMD_EXTFEATURES_K8_REV_C_EDX  (					\
    2.34 +	__bit(X86_FEATURE_FPU)      | __bit(X86_FEATURE_VME)   |	\
    2.35 +	__bit(X86_FEATURE_DE)       | __bit(X86_FEATURE_PSE)   |	\
    2.36 +	__bit(X86_FEATURE_TSC)      | __bit(X86_FEATURE_MSR)   |	\
    2.37 +	__bit(X86_FEATURE_PAE)      | __bit(X86_FEATURE_MCE)   |	\
    2.38 +	__bit(X86_FEATURE_CX8)      | __bit(X86_FEATURE_APIC)  |	\
    2.39 +	__bit(X86_FEATURE_SYSCALL)  | __bit(X86_FEATURE_MTRR)  |	\
    2.40 +	__bit(X86_FEATURE_PGE)      | __bit(X86_FEATURE_MCA)   |	\
    2.41 +	__bit(X86_FEATURE_CMOV)     | __bit(X86_FEATURE_PAT)   |	\
    2.42 +	__bit(X86_FEATURE_PSE36)    | __bit(X86_FEATURE_NX)    |	\
    2.43 +	__bit(X86_FEATURE_MMXEXT)   | __bit(X86_FEATURE_MMX)   |	\
    2.44 +	__bit(X86_FEATURE_FXSR)     | __bit(X86_FEATURE_LM)    |	\
    2.45 +	__bit(X86_FEATURE_3DNOWEXT) | __bit(X86_FEATURE_3DNOW))
    2.46 +
    2.47 +/* Family 0Fh, Revision D */
    2.48 +#define AMD_FEATURES_K8_REV_D_ECX         AMD_FEATURES_K8_REV_C_ECX
    2.49 +#define AMD_FEATURES_K8_REV_D_EDX         AMD_FEATURES_K8_REV_C_EDX
    2.50 +#define AMD_EXTFEATURES_K8_REV_D_ECX     (AMD_EXTFEATURES_K8_REV_C_ECX |\
    2.51 +	__bit(X86_FEATURE_LAHF_LM))
    2.52 +#define AMD_EXTFEATURES_K8_REV_D_EDX     (AMD_EXTFEATURES_K8_REV_C_EDX |\
    2.53 +	__bit(X86_FEATURE_FFXSR))
    2.54 +
    2.55 +/* Family 0Fh, Revision E */
    2.56 +#define AMD_FEATURES_K8_REV_E_ECX        (AMD_FEATURES_K8_REV_D_ECX |	\
    2.57 +	__bit(X86_FEATURE_XMM3))
    2.58 +#define AMD_FEATURES_K8_REV_E_EDX        (AMD_FEATURES_K8_REV_D_EDX | 	\
    2.59 +	__bit(X86_FEATURE_HT))
    2.60 +#define AMD_EXTFEATURES_K8_REV_E_ECX     (AMD_EXTFEATURES_K8_REV_D_ECX |\
    2.61 +	__bit(X86_FEATURE_CMP_LEGACY)) 
    2.62 +#define AMD_EXTFEATURES_K8_REV_E_EDX      AMD_EXTFEATURES_K8_REV_D_EDX
    2.63 +
    2.64 +/* Family 0Fh, Revision F */
    2.65 +#define AMD_FEATURES_K8_REV_F_ECX        (AMD_FEATURES_K8_REV_E_ECX | 	\
    2.66 +	__bit(X86_FEATURE_CX16))
    2.67 +#define AMD_FEATURES_K8_REV_F_EDX         AMD_FEATURES_K8_REV_E_EDX
    2.68 +#define AMD_EXTFEATURES_K8_REV_F_ECX     (AMD_EXTFEATURES_K8_REV_E_ECX |\
    2.69 +	__bit(X86_FEATURE_SVME) | __bit(X86_FEATURE_EXTAPICSPACE) |	\
    2.70 +	__bit(X86_FEATURE_ALTMOVCR))
    2.71 +#define AMD_EXTFEATURES_K8_REV_F_EDX     (AMD_EXTFEATURES_K8_REV_E_EDX |\
    2.72 +	__bit(X86_FEATURE_RDTSCP))
    2.73 +
    2.74 +/* Family 0Fh, Revision G */
    2.75 +#define AMD_FEATURES_K8_REV_G_ECX         AMD_FEATURES_K8_REV_F_ECX
    2.76 +#define AMD_FEATURES_K8_REV_G_EDX         AMD_FEATURES_K8_REV_F_EDX
    2.77 +#define AMD_EXTFEATURES_K8_REV_G_ECX     (AMD_EXTFEATURES_K8_REV_F_ECX |\
    2.78 +	__bit(X86_FEATURE_3DNOWPF))
    2.79 +#define AMD_EXTFEATURES_K8_REV_G_EDX      AMD_EXTFEATURES_K8_REV_F_EDX
    2.80 +
    2.81 +/* Family 10h, Revision B */
    2.82 +#define AMD_FEATURES_FAM10h_REV_B_ECX    (AMD_FEATURES_K8_REV_F_ECX | 	\
    2.83 +	__bit(X86_FEATURE_POPCNT) | __bit(X86_FEATURE_MWAIT))
    2.84 +#define AMD_FEATURES_FAM10h_REV_B_EDX     AMD_FEATURES_K8_REV_F_EDX
    2.85 +#define AMD_EXTFEATURES_FAM10h_REV_B_ECX (AMD_EXTFEATURES_K8_REV_F_ECX |\
    2.86 +	__bit(X86_FEATURE_ABM) | __bit(X86_FEATURE_SSE4A) | 		\
    2.87 +	__bit(X86_FEATURE_MISALIGNSSE) | __bit(X86_FEATURE_OSVW) | 	\
    2.88 +	__bit(X86_FEATURE_IBS))
    2.89 +#define AMD_EXTFEATURES_FAM10h_REV_B_EDX (AMD_EXTFEATURES_K8_REV_F_EDX |\
    2.90 +	__bit(X86_FEATURE_PAGE1GB))
    2.91 +
    2.92 +/* Family 10h, Revision C */
    2.93 +#define AMD_FEATURES_FAM10h_REV_C_ECX     AMD_FEATURES_FAM10h_REV_B_ECX
    2.94 +#define AMD_FEATURES_FAM10h_REV_C_EDX     AMD_FEATURES_FAM10h_REV_B_EDX
    2.95 +#define AMD_EXTFEATURES_FAM10h_REV_C_ECX (AMD_EXTFEATURES_FAM10h_REV_B_ECX |\
    2.96 +	__bit(X86_FEATURE_SKINIT) | __bit(X86_FEATURE_WDT))
    2.97 +#define AMD_EXTFEATURES_FAM10h_REV_C_EDX  AMD_EXTFEATURES_FAM10h_REV_B_EDX
    2.98 +
    2.99 +/* Family 11h, Revision B */
   2.100 +#define AMD_FEATURES_FAM11h_REV_B_ECX     AMD_FEATURES_K8_REV_G_ECX
   2.101 +#define AMD_FEATURES_FAM11h_REV_B_EDX     AMD_FEATURES_K8_REV_G_EDX
   2.102 +#define AMD_EXTFEATURES_FAM11h_REV_B_ECX (AMD_EXTFEATURES_K8_REV_G_ECX |\
   2.103 +	__bit(X86_FEATURE_SKINIT))
   2.104 +#define AMD_EXTFEATURES_FAM11h_REV_B_EDX  AMD_EXTFEATURES_K8_REV_G_EDX
   2.105 +
   2.106 +#endif /* __AMD_H__ */
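
The __bit()/X86_FEATURE_BITPOS() helpers above rely on Xen's X86_FEATURE_*
constants being encoded as (word * 32 + bit); taking the value modulo 32
discards the cpufeature word index and leaves only the bit position within the
32-bit CPUID register, which is what the mask MSRs need.  A standalone sketch
(the two feature encodings shown are assumptions matching the usual
asm/cpufeature.h layout):

#include <stdio.h>

#define X86_FEATURE_BITPOS(f) ((f) % 32)
#define __bit(x) (1U << X86_FEATURE_BITPOS(x))

/* Assumed encodings, word * 32 + bit, as in asm/cpufeature.h. */
#define X86_FEATURE_SEP     (0*32 + 11)  /* CPUID leaf 1 EDX, bit 11 */
#define X86_FEATURE_LAHF_LM (6*32 +  0)  /* CPUID leaf 0x80000001 ECX, bit 0 */

int main(void)
{
	/* Prints "00000800 00000001": the word index (0 vs. 6) is
	 * discarded, only the in-register bit position survives. */
	printf("%08x %08x\n",
	       __bit(X86_FEATURE_SEP), __bit(X86_FEATURE_LAHF_LM));
	return 0;
}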
     3.1 --- a/xen/include/asm-x86/msr-index.h	Fri Aug 29 10:46:46 2008 +0100
     3.2 +++ b/xen/include/asm-x86/msr-index.h	Fri Aug 29 11:13:41 2008 +0100
     3.3 @@ -187,6 +187,9 @@
     3.4  #define MSR_K8_VM_CR			0xc0010114
     3.5  #define MSR_K8_VM_HSAVE_PA		0xc0010117
     3.6  
     3.7 +#define MSR_K8_FEATURE_MASK		0xc0011004
     3.8 +#define MSR_K8_EXT_FEATURE_MASK		0xc0011005
     3.9 +
    3.10  /* MSR_K8_VM_CR bits: */
    3.11  #define _K8_VMCR_SVME_DISABLE		4
    3.12  #define K8_VMCR_SVME_DISABLE		(1 << _K8_VMCR_SVME_DISABLE)
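
The argument order used in set_cpuidmask() -- wrmsr(msr, lo, hi) with the EDX
mask as the low word and the ECX mask as the high word -- implies the 64-bit
layout of these two MSRs: bits 31:0 mask the CPUID EDX output and bits 63:32
mask the ECX output.  A small illustrative helper (not part of the patch; the
function name is ours) composing that value:

#include <stdint.h>

/* Illustrative only: build the 64-bit value that set_cpuidmask() writes to
 * MSR_K8_FEATURE_MASK / MSR_K8_EXT_FEATURE_MASK.  The ECX mask goes in the
 * high half and the EDX mask in the low half, matching the wrmsr(msr, lo, hi)
 * calls in amd.c. */
static inline uint64_t cpuid_mask_msr_value(uint32_t ecx_mask, uint32_t edx_mask)
{
	return ((uint64_t)ecx_mask << 32) | edx_mask;
}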