xenbits.xensource.com Git - xen.git/commitdiff
x86: use optimal NOPs to fill the SMEP/SMAP placeholders
author    Jan Beulich <jbeulich@suse.com>
          Fri, 27 May 2016 12:47:08 +0000 (14:47 +0200)
committer Jan Beulich <jbeulich@suse.com>
          Fri, 27 May 2016 12:47:08 +0000 (14:47 +0200)
The alternatives patching code picks the most suitable NOPs for the
running system, so simply use it to replace the pre-populated ones.

Key off an arbitrary, always-available feature, but hide this behind
the new X86_FEATURE_ALWAYS alias.
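For illustration, a minimal C sketch of the patching loop this relies
on (not Xen's verbatim implementation: boot_cpu_has(), add_nops(),
text_poke(), MAX_PATCH_LEN and the struct layout are assumptions made
for the sketch):

    #include <stdbool.h>
    #include <stddef.h>
    #include <stdint.h>
    #include <string.h>

    /* Assumed stand-ins for Xen's real helpers and limits. */
    #define MAX_PATCH_LEN 32
    extern bool boot_cpu_has(unsigned int feature);
    extern void add_nops(void *buf, unsigned int len); /* CPU's ideal NOPs */
    extern void text_poke(void *dst, const void *src, size_t len);

    struct alt_instr {
        int32_t  orig_offset; /* relative pointer to the placeholder */
        int32_t  repl_offset; /* relative pointer to the replacement */
        uint16_t cpuid;       /* feature bit gating this entry */
        uint8_t  orig_len;    /* bytes in the placeholder */
        uint8_t  repl_len;    /* bytes in the replacement (0 below) */
    };

    static void apply_alternatives(struct alt_instr *a, struct alt_instr *end)
    {
        for ( ; a < end; a++ )
        {
            uint8_t buf[MAX_PATCH_LEN];
            uint8_t *orig = (uint8_t *)&a->orig_offset + a->orig_offset;
            uint8_t *repl = (uint8_t *)&a->repl_offset + a->repl_offset;

            if ( !boot_cpu_has(a->cpuid) ) /* X86_FEATURE_ALWAYS never fails */
                continue;

            memcpy(buf, repl, a->repl_len);
            /* Zero-length replacement: the whole placeholder becomes NOPs. */
            add_nops(buf + a->repl_len, a->orig_len - a->repl_len);
            text_poke(orig, buf, a->orig_len);
        }
    }

Entries are processed in order, so the new zero-length
X86_FEATURE_ALWAYS entry first rewrites the whole placeholder with the
CPU's ideal NOPs; on SMEP/SMAP-capable hardware the later entries then
overwrite the same bytes with the real code.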

Signed-off-by: Jan Beulich <jbeulich@suse.com>
Reviewed-by: Andrew Cooper <andrew.cooper3@citrix.com>
x86/compat: correct SMEP/SMAP NOPs patching

Correct the number of single-byte NOPs we want to be replaced in case
neither SMEP nor SMAP is available.

Also simplify the expression adding these NOPs: at that location '.'
(the current position) equals .Lcr4_orig, so that part of the
expression is always zero, and removing it fixes a bogus ".space or
fill with negative value, ignored" warning from very old gas (which is
what actually made me look at these constructs again).
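A before/after sketch of that simplification (the pre-fix expression is
reconstructed from the description above, so treat it as an
assumption):

    /* Before: the (. - .Lcr4_orig) term is always zero here, yet very
       old gas mis-evaluates the difference and emits the warning. */
    .skip (.Lcr4_alt_end - .Lcr4_alt) - (. - .Lcr4_orig), 0x90

    /* After: the same number of 0x90 (single-byte NOP) fill bytes. */
    .skip .Lcr4_alt_end - .Lcr4_alt, 0x90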

Signed-off-by: Jan Beulich <jbeulich@suse.com>
Reviewed-by: Wei Liu <wei.liu2@citrix.com>
Reviewed-by: Andrew Cooper <andrew.cooper3@citrix.com>
master commit: 01a0bd0a7d72be638a359db3f8cf551123467d29
master date: 2016-05-13 18:15:55 +0100
master commit: f5610009529628314c9d1d52b00715fe855fcf06
master date: 2016-05-26 17:26:24 +0100

xen/arch/x86/x86_64/compat/entry.S
xen/include/asm-x86/asm_defns.h
xen/include/asm-x86/cpufeature.h

diff --git a/xen/arch/x86/x86_64/compat/entry.S b/xen/arch/x86/x86_64/compat/entry.S
index 2fe56e2b289f7fe0f74688f130864df69898fdd6..94bc3a3c75c1be7f73229fb5ab0066fb647eb412 100644
--- a/xen/arch/x86/x86_64/compat/entry.S
+++ b/xen/arch/x86/x86_64/compat/entry.S
@@ -175,14 +175,7 @@ compat_bad_hypercall:
 ENTRY(compat_restore_all_guest)
         ASSERT_INTERRUPTS_DISABLED
 .Lcr4_orig:
-        ASM_NOP8 /* testb $3,UREGS_cs(%rsp) */
-        ASM_NOP2 /* jpe   .Lcr4_alt_end */
-        ASM_NOP8 /* mov   CPUINFO_cr4...(%rsp), %rax */
-        ASM_NOP6 /* and   $..., %rax */
-        ASM_NOP8 /* mov   %rax, CPUINFO_cr4...(%rsp) */
-        ASM_NOP3 /* mov   %rax, %cr4 */
-        ASM_NOP8 /* cmp   %rax, CPUINFO_cr4...(%rsp) */
-        ASM_NOP2 /* jne   1b */
+        .skip .Lcr4_alt_end - .Lcr4_alt, 0x90
 .Lcr4_orig_end:
         .pushsection .altinstr_replacement, "ax"
 .Lcr4_alt:
@@ -207,6 +200,8 @@ ENTRY(compat_restore_all_guest)
         jne   1b
 .Lcr4_alt_end:
         .section .altinstructions, "a"
+        altinstruction_entry .Lcr4_orig, .Lcr4_orig, X86_FEATURE_ALWAYS, \
+                             (.Lcr4_orig_end - .Lcr4_orig), 0
         altinstruction_entry .Lcr4_orig, .Lcr4_alt, X86_FEATURE_SMEP, \
                              (.Lcr4_orig_end - .Lcr4_orig), \
                              (.Lcr4_alt_end - .Lcr4_alt)
diff --git a/xen/include/asm-x86/asm_defns.h b/xen/include/asm-x86/asm_defns.h
index 41ffeffe8039a6f8ab360f6c2dc30d28e9e6a49a..d59d46ad35d71f19c71a7df52a16ebe8c49795ab 100644
--- a/xen/include/asm-x86/asm_defns.h
+++ b/xen/include/asm-x86/asm_defns.h
@@ -173,6 +173,7 @@ void ret_from_intr(void);
         662: __ASM_##op;                                               \
         .popsection;                                                   \
         .pushsection .altinstructions, "a";                            \
+        altinstruction_entry 661b, 661b, X86_FEATURE_ALWAYS, 3, 0;     \
         altinstruction_entry 661b, 662b, X86_FEATURE_SMAP, 3, 3;       \
         .popsection
 
@@ -184,6 +185,7 @@ void ret_from_intr(void);
         .pushsection .altinstr_replacement, "ax";                  \
         668: call cr4_pv32_restore;                                \
         .section .altinstructions, "a";                            \
+        altinstruction_entry 667b, 667b, X86_FEATURE_ALWAYS, 5, 0; \
         altinstruction_entry 667b, 668b, X86_FEATURE_SMEP, 5, 5;   \
         altinstruction_entry 667b, 668b, X86_FEATURE_SMAP, 5, 5;   \
         .popsection
diff --git a/xen/include/asm-x86/cpufeature.h b/xen/include/asm-x86/cpufeature.h
index 4caa4e6b3419f27f8119def257c52976e18fd793..e861bcf0ad0ca82aad2bca28ef2a5e208a59b499 100644
--- a/xen/include/asm-x86/cpufeature.h
+++ b/xen/include/asm-x86/cpufeature.h
 #define X86_FEATURE_ADX                (7*32+19) /* ADCX, ADOX instructions */
 #define X86_FEATURE_SMAP       (7*32+20) /* Supervisor Mode Access Prevention */
 
+/* An alias of a feature we know is always going to be present. */
+#define X86_FEATURE_ALWAYS      X86_FEATURE_LM
+
 #ifndef __ASSEMBLY__
 #define cpu_has(c, bit)                test_bit(bit, (c)->x86_capability)
 #define boot_cpu_has(bit)      test_bit(bit, boot_cpu_data.x86_capability)
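
For concreteness, a hand-expanded sketch of the patched SMAP macro from
asm_defns.h above (the ASM_NOP3 placeholder at label 661 is an
assumption about the unshown surrounding context):

    661: ASM_NOP3                   /* 3-byte placeholder: stac/clac are 3 bytes */
    .pushsection .altinstr_replacement, "ax"
    662: stac                       /* 0f 01 cb */
    .popsection
    .pushsection .altinstructions, "a"
    /* Always applies; replacement length 0, so the patcher fills all
       3 placeholder bytes with the CPU's optimal NOP form. */
    altinstruction_entry 661b, 661b, X86_FEATURE_ALWAYS, 3, 0
    /* On SMAP hardware the same 3 bytes then become 'stac'. */
    altinstruction_entry 661b, 662b, X86_FEATURE_SMAP, 3, 3
    .popsection

The cr4_pv32_restore variant uses a length of 5 instead because its
replacement is a near call (e8 + rel32), which occupies five bytes.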