Alternatives patching code picks the most suitable NOPs for the
running system, so simply use it to replace the pre-populated ones.
Use an arbitrary, always-available feature to key off from, but
hide this behind the new X86_FEATURE_ALWAYS alias.
Signed-off-by: Jan Beulich <jbeulich@suse.com>
Reviewed-by: Andrew Cooper <andrew.cooper3@citrix.com>
x86/compat: correct SMEP/SMAP NOPs patching
Correct the number of single-byte NOPs we want to be replaced in case
neither SMEP nor SMAP are available.
Also simplify the expression adding these NOPs - at that location .
equals .Lcr4_orig, and removing that part of the expression fixes a
bogus ".space or fill with negative value, ignored" warning by very old
gas (which actually is what made me look at those constructs again).
Signed-off-by: Jan Beulich <jbeulich@suse.com>
Reviewed-by: Wei Liu <wei.liu2@citrix.com>
Reviewed-by: Andrew Cooper <andrew.cooper3@citrix.com>
master commit:
01a0bd0a7d72be638a359db3f8cf551123467d29
master date: 2016-05-13 18:15:55 +0100
master commit:
f5610009529628314c9d1d52b00715fe855fcf06
master date: 2016-05-26 17:26:24 +0100
ENTRY(compat_restore_all_guest)
ASSERT_INTERRUPTS_DISABLED
.Lcr4_orig:
- ASM_NOP8 /* testb $3,UREGS_cs(%rsp) */
- ASM_NOP2 /* jpe .Lcr4_alt_end */
- ASM_NOP8 /* mov CPUINFO_cr4...(%rsp), %rax */
- ASM_NOP6 /* and $..., %rax */
- ASM_NOP8 /* mov %rax, CPUINFO_cr4...(%rsp) */
- ASM_NOP3 /* mov %rax, %cr4 */
- ASM_NOP8 /* cmp %rax, CPUINFO_cr4...(%rsp) */
- ASM_NOP2 /* jne 1b */
+ .skip .Lcr4_alt_end - .Lcr4_alt, 0x90
.Lcr4_orig_end:
.pushsection .altinstr_replacement, "ax"
.Lcr4_alt:
jne 1b
.Lcr4_alt_end:
.section .altinstructions, "a"
+ altinstruction_entry .Lcr4_orig, .Lcr4_orig, X86_FEATURE_ALWAYS, \
+ (.Lcr4_orig_end - .Lcr4_orig), 0
altinstruction_entry .Lcr4_orig, .Lcr4_alt, X86_FEATURE_SMEP, \
(.Lcr4_orig_end - .Lcr4_orig), \
(.Lcr4_alt_end - .Lcr4_alt)
662: __ASM_##op; \
.popsection; \
.pushsection .altinstructions, "a"; \
+ altinstruction_entry 661b, 661b, X86_FEATURE_ALWAYS, 3, 0; \
altinstruction_entry 661b, 662b, X86_FEATURE_SMAP, 3, 3; \
.popsection
.pushsection .altinstr_replacement, "ax"; \
668: call cr4_pv32_restore; \
.section .altinstructions, "a"; \
+ altinstruction_entry 667b, 667b, X86_FEATURE_ALWAYS, 5, 0; \
altinstruction_entry 667b, 668b, X86_FEATURE_SMEP, 5, 5; \
altinstruction_entry 667b, 668b, X86_FEATURE_SMAP, 5, 5; \
.popsection
#define X86_FEATURE_ADX (7*32+19) /* ADCX, ADOX instructions */
#define X86_FEATURE_SMAP (7*32+20) /* Supervisor Mode Access Prevention */
+/* An alias of a feature we know is always going to be present. */
+#define X86_FEATURE_ALWAYS X86_FEATURE_LM
+
#ifndef __ASSEMBLY__
#define cpu_has(c, bit) test_bit(bit, (c)->x86_capability)
#define boot_cpu_has(bit) test_bit(bit, boot_cpu_data.x86_capability)