x86/alternatives: fully leverage automatic NOP filling
author Jan Beulich <jbeulich@suse.com>
Wed, 29 Aug 2018 14:30:54 +0000 (16:30 +0200)
committer Jan Beulich <jbeulich@suse.com>
Wed, 29 Aug 2018 14:30:54 +0000 (16:30 +0200)
As of commit 4008c71d7a ("x86/alt: Support for automatic padding
calculations") there's no point having explict ASM_NOPn instances in
alternatives anymore - drop them. As a result also drop the asm/nops.h
inclusion from alternative.h, adding explicit inclusions in the two
remaining C files needing them.

While touching it, also move the CR4_PV32_RESTORE definition out of the
SMAP-specific conditional into a more general one.

Signed-off-by: Jan Beulich <jbeulich@suse.com>
Acked-by: Andrew Cooper <andrew.cooper3@citrix.com>
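
For illustration, here is a minimal sketch of what the automatic padding buys:
the tools reserve enough room behind the original instructions to hold the
longest replacement, and any unused bytes end up as NOPs, so call sites no
longer have to spell out ASM_NOPn themselves. All names below are hypothetical
and heavily simplified; this is not Xen's actual macro or patcher code.

    #include <stdbool.h>
    #include <stddef.h>
    #include <stdint.h>
    #include <string.h>

    #define NOP_BYTE 0x90  /* single-byte x86 NOP */

    /* Padding needed so the patch site can hold the longer of the two. */
    static size_t alt_padding_sketch(size_t orig_len, size_t repl_len)
    {
        return repl_len > orig_len ? repl_len - orig_len : 0;
    }

    static void apply_alternative_sketch(uint8_t *site, size_t orig_len,
                                         const uint8_t *repl, size_t repl_len,
                                         bool feature_present)
    {
        size_t total = orig_len + alt_padding_sketch(orig_len, repl_len);

        if ( feature_present )
        {
            /* Install the replacement, NOP out the rest of the site. */
            memcpy(site, repl, repl_len);
            memset(site + repl_len, NOP_BYTE, total - repl_len);
        }
        /* else: keep the original; the build-time padding is already NOPs. */
    }

With an empty original instruction string (as in the hunks below), orig_len is
simply zero and the whole site is padding, which is either left as NOPs or
overwritten with the replacement when the feature is present.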
xen/arch/x86/alternative.c
xen/arch/x86/flushtlb.c
xen/include/asm-x86/asm_defns.h
xen/include/asm-x86/spec_ctrl.h

index 1ae49239d8afe2c025f4e034bd29c466aaa9099e..aec4d8db914b7b26adaa2d9aec2886060533214f 100644 (file)
@@ -24,6 +24,7 @@
 #include <asm/system.h>
 #include <asm/traps.h>
 #include <asm/nmi.h>
+#include <asm/nops.h>
 #include <xen/livepatch.h>
 
 #define MAX_PATCH_LEN (255-1)
index 8f04fc08d5ea861c7738fa095bf28c0df4cb5d79..7d79d9048d9024de38cfcb248ca17ced6ac36ce9 100644 (file)
@@ -12,6 +12,7 @@
 #include <xen/softirq.h>
 #include <asm/flushtlb.h>
 #include <asm/invpcid.h>
+#include <asm/nops.h>
 #include <asm/page.h>
 #include <asm/pv/domain.h>
 #include <asm/spec_ctrl.h>
@@ -208,7 +209,7 @@ unsigned int flush_area_local(const void *va, unsigned int flags)
              c->x86_clflush_size && c->x86_cache_size && sz &&
              ((sz >> 10) < c->x86_cache_size) )
         {
-            alternative(ASM_NOP3, "sfence", X86_FEATURE_CLFLUSHOPT);
+            alternative("", "sfence", X86_FEATURE_CLFLUSHOPT);
             for ( i = 0; i < sz; i += c->x86_clflush_size )
                 alternative_input(".byte " __stringify(NOP_DS_PREFIX) ";"
                                   " clflush %0",
index fad5ca5787e90cdf095a5f0fcb695d0af4891fbd..e0096834e6dbb60c26cb6ed9d108b3c03edb0181 100644 (file)
@@ -193,30 +193,19 @@ void ret_from_intr(void);
 #define __ASM_STAC      .byte 0x0f,0x01,0xcb
 
 #ifdef __ASSEMBLY__
-#define ASM_STAC                                        \
-    ALTERNATIVE __stringify(ASM_NOP3),                  \
-        __stringify(__ASM_STAC), X86_FEATURE_XEN_SMAP
-
-#define ASM_CLAC                                        \
-    ALTERNATIVE __stringify(ASM_NOP3),                  \
-        __stringify(__ASM_CLAC), X86_FEATURE_XEN_SMAP
-
-#define CR4_PV32_RESTORE                                \
-    ALTERNATIVE_2 __stringify(ASM_NOP5),                \
-        "call cr4_pv32_restore", X86_FEATURE_XEN_SMEP,  \
-        "call cr4_pv32_restore", X86_FEATURE_XEN_SMAP
-
+#define ASM_STAC ALTERNATIVE "", __stringify(__ASM_STAC), X86_FEATURE_XEN_SMAP
+#define ASM_CLAC ALTERNATIVE "", __stringify(__ASM_CLAC), X86_FEATURE_XEN_SMAP
 #else
 static always_inline void clac(void)
 {
     /* Note: a barrier is implicit in alternative() */
-    alternative(ASM_NOP3, __stringify(__ASM_CLAC), X86_FEATURE_XEN_SMAP);
+    alternative("", __stringify(__ASM_CLAC), X86_FEATURE_XEN_SMAP);
 }
 
 static always_inline void stac(void)
 {
     /* Note: a barrier is implicit in alternative() */
-    alternative(ASM_NOP3, __stringify(__ASM_STAC), X86_FEATURE_XEN_SMAP);
+    alternative("", __stringify(__ASM_STAC), X86_FEATURE_XEN_SMAP);
 }
 #endif
 
@@ -325,6 +314,11 @@ static always_inline void stac(void)
         subq  $-(UREGS_error_code-UREGS_r15+\adj), %rsp
 .endm
 
+#define CR4_PV32_RESTORE                               \
+    ALTERNATIVE_2 "",                                  \
+        "call cr4_pv32_restore", X86_FEATURE_XEN_SMEP, \
+        "call cr4_pv32_restore", X86_FEATURE_XEN_SMAP
+
 #endif
 
 #ifdef CONFIG_PERF_COUNTERS
index 8f8aad40bbd8994d5e402bd9335c4e03ccb6d12e..e7d946ecb61e1697bea517bd4c5a3cbe0bed764f 100644 (file)
@@ -72,7 +72,7 @@ static always_inline void spec_ctrl_enter_idle(struct cpu_info *info)
     barrier();
     info->spec_ctrl_flags |= SCF_use_shadow;
     barrier();
-    asm volatile ( ALTERNATIVE(ASM_NOP3, "wrmsr", X86_FEATURE_SC_MSR_IDLE)
+    asm volatile ( ALTERNATIVE("", "wrmsr", X86_FEATURE_SC_MSR_IDLE)
                    :: "a" (val), "c" (MSR_SPEC_CTRL), "d" (0) : "memory" );
 }
 
@@ -87,7 +87,7 @@ static always_inline void spec_ctrl_exit_idle(struct cpu_info *info)
      */
     info->spec_ctrl_flags &= ~SCF_use_shadow;
     barrier();
-    asm volatile ( ALTERNATIVE(ASM_NOP3, "wrmsr", X86_FEATURE_SC_MSR_IDLE)
+    asm volatile ( ALTERNATIVE("", "wrmsr", X86_FEATURE_SC_MSR_IDLE)
                    :: "a" (val), "c" (MSR_SPEC_CTRL), "d" (0) : "memory" );
 }