x86: use optimal NOPs to fill the SMEP/SMAP placeholders
author     Jan Beulich <jbeulich@suse.com>
           Fri, 13 May 2016 17:13:54 +0000 (18:13 +0100)
committer  Andrew Cooper <andrew.cooper3@citrix.com>
           Fri, 13 May 2016 17:15:55 +0000 (18:15 +0100)
The alternatives patching code picks the most suitable NOPs for the
running system, so simply use it to replace the pre-populated ones.

Use an arbitrary, always-available feature to key off of, but hide
this behind the new X86_FEATURE_ALWAYS alias.

Signed-off-by: Jan Beulich <jbeulich@suse.com>
Reviewed-by: Andrew Cooper <andrew.cooper3@citrix.com>
Release-acked-by: Wei Liu <wei.liu2@citrix.com>
xen/arch/x86/x86_64/compat/entry.S
xen/include/asm-x86/asm_defns.h
xen/include/asm-x86/cpufeature.h
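
Background for the diffs below: each altinstruction_entry emits a record
into the .altinstructions section, which the boot-time patcher walks. The
following is a simplified sketch of such a record and of the patching step;
the field and helper names (alt_instr, boot_cpu_has, add_nops, text_poke)
approximate the usual Linux/Xen layout and are not copied from Xen's
alternative.h.

    #include <stdbool.h>
    #include <stdint.h>
    #include <string.h>

    /* Assumed helpers, named after their usual counterparts. */
    extern bool boot_cpu_has(unsigned int feature);
    extern void add_nops(void *dst, unsigned int len);
    extern void text_poke(void *addr, const void *src, size_t len);

    struct alt_instr {
        int32_t  orig_offset;   /* original site (relative offset) */
        int32_t  repl_offset;   /* replacement code (relative offset) */
        uint16_t cpuid;         /* feature bit selecting the replacement */
        uint8_t  orig_len;      /* bytes available at the original site */
        uint8_t  repl_len;      /* replacement bytes to copy, <= orig_len */
    };

    static void apply_one(struct alt_instr *a)
    {
        uint8_t buf[255];
        uint8_t *orig = (uint8_t *)&a->orig_offset + a->orig_offset;
        const uint8_t *repl = (const uint8_t *)&a->repl_offset + a->repl_offset;

        if ( !boot_cpu_has(a->cpuid) )
            return;             /* feature absent: leave the site alone */

        memcpy(buf, repl, a->repl_len);
        /* Pad the tail with the NOP forms best suited to this CPU. */
        add_nops(buf + a->repl_len, a->orig_len - a->repl_len);
        text_poke(orig, buf, a->orig_len);
    }

An entry whose "replacement" is the site itself with repl_len 0 therefore
copies nothing and NOP-fills the whole placeholder; keyed off
X86_FEATURE_ALWAYS, it fires on every host.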

diff --git a/xen/arch/x86/x86_64/compat/entry.S b/xen/arch/x86/x86_64/compat/entry.S
index 52518c56710297c0188b3df86d00f93193df8fe6..272345507e5e020fc1fbdc4a91bb30dd9df1a10a 100644
@@ -175,12 +175,7 @@ compat_bad_hypercall:
 ENTRY(compat_restore_all_guest)
         ASSERT_INTERRUPTS_DISABLED
 .Lcr4_orig:
-        ASM_NOP8 /* testb $3,UREGS_cs(%rsp) */
-        ASM_NOP2 /* jpe   .Lcr4_alt_end */
-        ASM_NOP8 /* mov   CPUINFO_cr4...(%rsp), %rax */
-        ASM_NOP6 /* and   $..., %rax */
-        ASM_NOP8 /* mov   %rax, CPUINFO_cr4...(%rsp) */
-        ASM_NOP3 /* mov   %rax, %cr4 */
+        .skip (.Lcr4_alt_end - .Lcr4_alt) - (. - .Lcr4_orig), 0x90
 .Lcr4_orig_end:
         .pushsection .altinstr_replacement, "ax"
 .Lcr4_alt:
@@ -192,6 +187,7 @@ ENTRY(compat_restore_all_guest)
         mov   %rax, %cr4
 .Lcr4_alt_end:
         .section .altinstructions, "a"
+        altinstruction_entry .Lcr4_orig, .Lcr4_orig, X86_FEATURE_ALWAYS, (.Lcr4_orig_end - .Lcr4_orig), 0
         altinstruction_entry .Lcr4_orig, .Lcr4_alt, X86_FEATURE_SMEP, \
                              (.Lcr4_orig_end - .Lcr4_orig), \
                              (.Lcr4_alt_end - .Lcr4_alt)
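
The .skip directive above sizes the placeholder to exactly the replacement
block's length, filling it with single-byte NOPs (0x90) at assembly time; the
new X86_FEATURE_ALWAYS entry then lets the boot-time patcher rewrite that run
with the CPU's preferred long NOPs. A minimal sketch of such a filler, using
the standard multi-byte NOP encodings recommended in the Intel SDM (the table
is architectural; the function is illustrative, not Xen's exact add_nops):

    #include <string.h>

    /* Recommended multi-byte NOPs, 1 to 8 bytes (the "0f 1f" forms). */
    static const unsigned char p6_nops[8][8] = {
        { 0x90 },
        { 0x66, 0x90 },
        { 0x0f, 0x1f, 0x00 },
        { 0x0f, 0x1f, 0x40, 0x00 },
        { 0x0f, 0x1f, 0x44, 0x00, 0x00 },
        { 0x66, 0x0f, 0x1f, 0x44, 0x00, 0x00 },
        { 0x0f, 0x1f, 0x80, 0x00, 0x00, 0x00, 0x00 },
        { 0x0f, 0x1f, 0x84, 0x00, 0x00, 0x00, 0x00, 0x00 },
    };

    /* Cover 'len' bytes with as few (and as long) NOPs as possible. */
    static void add_nops(unsigned char *dst, unsigned int len)
    {
        while ( len )
        {
            unsigned int n = len < 8 ? len : 8;

            memcpy(dst, p6_nops[n - 1], n);
            dst += n;
            len -= n;
        }
    }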
diff --git a/xen/include/asm-x86/asm_defns.h b/xen/include/asm-x86/asm_defns.h
index 297bfdbe492c4830c2173c06af9d61794cf26e05..963e6eae91a1ff90a7d103aae71428cc10445cac 100644
@@ -204,6 +204,7 @@ void ret_from_intr(void);
         662: __ASM_##op;                                               \
         .popsection;                                                   \
         .pushsection .altinstructions, "a";                            \
+        altinstruction_entry 661b, 661b, X86_FEATURE_ALWAYS, 3, 0;     \
         altinstruction_entry 661b, 662b, X86_FEATURE_SMAP, 3, 3;       \
         .popsection
 
@@ -215,6 +216,7 @@ void ret_from_intr(void);
         .pushsection .altinstr_replacement, "ax";                  \
         668: call cr4_pv32_restore;                                \
         .section .altinstructions, "a";                            \
+        altinstruction_entry 667b, 667b, X86_FEATURE_ALWAYS, 5, 0; \
         altinstruction_entry 667b, 668b, X86_FEATURE_SMEP, 5, 5;   \
         altinstruction_entry 667b, 668b, X86_FEATURE_SMAP, 5, 5;   \
         .popsection
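
Entry order matters in both macros: records are applied in the order they
appear in .altinstructions, so the X86_FEATURE_ALWAYS entry, which always
fires, must come first; the SMEP/SMAP entries then overwrite the freshly
written NOPs on capable hardware. A worked fragment for the 3-byte ASM_STAC
site, reusing add_nops() from the sketch above (stac's encoding 0f 01 cb is
architectural; cpu_has_smap stands in for the real feature test):

    unsigned char site[3] = { 0x90, 0x90, 0x90 }; /* placeholder as assembled */

    add_nops(site, 3);                   /* ALWAYS entry: site becomes 0f 1f 00 */

    if ( cpu_has_smap )                  /* SMAP entry, capable hosts only */
        memcpy(site, "\x0f\x01\xcb", 3); /* stac */

Listing the entries the other way around would let the unconditional NOP
fill clobber the just-written stac.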
diff --git a/xen/include/asm-x86/cpufeature.h b/xen/include/asm-x86/cpufeature.h
index 97c7e9e1d19aa09e5e85ad4f9a87cd70f53cb6c3..9c492060c918333aa07078b2793698256de9b502 100644
@@ -30,6 +30,9 @@
 #define cpufeat_bit(idx)       ((idx) % 32)
 #define cpufeat_mask(idx)      (_AC(1, U) << cpufeat_bit(idx))
 
+/* An alias of a feature we know is always going to be present. */
+#define X86_FEATURE_ALWAYS      X86_FEATURE_LM
+
 #if !defined(__ASSEMBLY__) && !defined(X86_FEATURES_ONLY)
 #include <xen/bitops.h>
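
Long Mode is a natural choice for the alias: a 64-bit hypervisor cannot be
running at all unless CPUID advertises LM (leaf 0x80000001, EDX bit 29), so
the bit is set on every supported host. A small standalone demo of the alias
resolving through the helpers above (the word/bit placement of LM follows
Xen's layout, but main() and the words[] array are purely illustrative):

    #include <stdint.h>
    #include <stdio.h>

    #define cpufeat_word(idx)  ((idx) / 32)
    #define cpufeat_bit(idx)   ((idx) % 32)
    #define cpufeat_mask(idx)  (1u << cpufeat_bit(idx))

    #define X86_FEATURE_LM     (1 * 32 + 29)  /* word 1, bit 29 */
    #define X86_FEATURE_ALWAYS X86_FEATURE_LM

    int main(void)
    {
        /* Feature words as any x86-64 host would report them: LM set. */
        uint32_t words[2] = { 0, cpufeat_mask(X86_FEATURE_LM) };

        printf("X86_FEATURE_ALWAYS present: %d\n",
               !!(words[cpufeat_word(X86_FEATURE_ALWAYS)] &
                  cpufeat_mask(X86_FEATURE_ALWAYS)));
        return 0;
    }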