SPEC_CTRL_EXIT_TO_PV /* Req: a=spec_ctrl %rsp=regs/cpuinfo, Clob: cd */
RESTORE_ALL
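/*
 * After RESTORE_ALL, %rsp points at the error_code/entry_vector slot of
 * the guest frame.  The EFRAME_* constants name field offsets relative to
 * that slot (entry_vector at 4, RIP at 8, CS at 16, RFLAGS at 24, RSP at
 * 32), replacing the raw displacements on the lines they supersede.  A
 * sketch of their build-time generation follows the iretq path below.
 */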
- testw $TRAP_syscall,4(%rsp)
+ testw $TRAP_syscall, EFRAME_entry_vector(%rsp)
jz iret_exit_to_guest
- movq 24(%rsp),%r11 # RFLAGS
+ mov EFRAME_eflags(%rsp), %r11
andq $~(X86_EFLAGS_IOPL | X86_EFLAGS_VM), %r11
orq $X86_EFLAGS_IF,%r11
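/* SYSRET reloads RFLAGS from %r11: force IF on, and strip IOPL/VM so the
 * guest cannot resume with raised IO privilege. */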
/* Don't use SYSRET path if the return address is not canonical. */
- movq 8(%rsp),%rcx
+ mov EFRAME_rip(%rsp), %rcx
sarq $47,%rcx
incl %ecx
cmpl $1,%ecx
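/*
 * sarq smears bit 47 across the upper bits, yielding 0 or -1 exactly for
 * canonical addresses, hence 0 or 1 in %ecx after incl.  Anything above 1
 * (unsigned) is non-canonical and must take the IRET path instead, as
 * SYSRET with a non-canonical RIP faults in ring 0 on some CPUs.
 */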
ALTERNATIVE "", rag_clrssbsy, X86_FEATURE_XEN_SHSTK
#endif
- movq 8(%rsp), %rcx # RIP
- cmpw $FLAT_USER_CS32,16(%rsp)# CS
- movq 32(%rsp),%rsp # RSP
+ mov EFRAME_rip(%rsp), %rcx
+ cmpw $FLAT_USER_CS32, EFRAME_cs(%rsp)
+ mov EFRAME_rsp(%rsp), %rsp
je 1f
sysretq
1: sysretl
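/* The cmpw above chose the flavour: SYSRETL resumes a 32-bit flat CS,
 * SYSRETQ a 64-bit one.  Both take RIP from %rcx and RFLAGS from %r11. */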
ALIGN
.Lrestore_rcx_iret_exit_to_guest:
- movq 8(%rsp), %rcx # RIP
+ mov EFRAME_rip(%rsp), %rcx
/* No special register assumptions. */
iret_exit_to_guest:
- andl $~(X86_EFLAGS_IOPL | X86_EFLAGS_VM), 24(%rsp)
- orl $X86_EFLAGS_IF,24(%rsp)
+ andl $~(X86_EFLAGS_IOPL | X86_EFLAGS_VM), EFRAME_eflags(%rsp)
+ orl $X86_EFLAGS_IF, EFRAME_eflags(%rsp)
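/* Pop the error_code/entry_vector slot so the hardware frame (RIP first)
 * is at the top of the stack for iretq. */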
addq $8,%rsp
.Lft0: iretq
_ASM_PRE_EXTABLE(.Lft0, handle_exception)
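/* The extable fixup sends a faulting iretq (e.g. bad guest selectors) to handle_exception. */

The EFRAME_* values are generated at build time, so a wrong displacement becomes a build failure rather than silent frame corruption. Below is a minimal sketch of the usual asm-offsets technique; the abbreviated struct, the OFFSET_EF helper and the __dummy__ wrapper are illustrative assumptions, not Xen's literal code.

    /* asm-offsets.c-style generator (sketch): compile to assembly, then
     * scrape the ".ascii" strings into a header for the entry code.
     * The struct is abbreviated to the fields this path touches. */
    #include <stddef.h>

    struct cpu_user_regs {
        unsigned long r15, r14, r13, r12, rbp, rbx, r11, r10, r9, r8;
        unsigned long rax, rcx, rdx, rsi, rdi;
        unsigned int  error_code;   /* %rsp points here after RESTORE_ALL */
        unsigned int  entry_vector;
        unsigned long rip;
        unsigned int  cs, _pad0;
        unsigned long eflags;
        unsigned long rsp;
        unsigned int  ss, _pad1;
    };

    /* Emit "#define sym val" through the assembler; the build scrapes it
     * back out (stripping the '$' printed before the immediate). */
    #define DEFINE(sym, val) \
        asm volatile ( "\n.ascii \"==> #define " #sym " %0 <==\"" :: "i" (val) )

    /* EFRAME_* offsets are relative to error_code, not the frame start. */
    #define OFFSET_EF(sym, mem) \
        DEFINE(sym, offsetof(struct cpu_user_regs, mem) - \
                    offsetof(struct cpu_user_regs, error_code))

    void __dummy__(void)
    {
        OFFSET_EF(EFRAME_entry_vector, entry_vector);   /* 4  */
        OFFSET_EF(EFRAME_rip,          rip);            /* 8  */
        OFFSET_EF(EFRAME_cs,           cs);             /* 16 */
        OFFSET_EF(EFRAME_eflags,       eflags);         /* 24 */
        OFFSET_EF(EFRAME_rsp,          rsp);            /* 32 */
    }

/* The PV system call entry points below each finish building a
 * hardware-style frame (CS, RIP, zero error code) and tag entry_vector
 * with TRAP_syscall, which the testw $TRAP_syscall on the exit path above
 * looks for. */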
pushq $FLAT_KERNEL_CS64
pushq %rcx
pushq $0
- movl $TRAP_syscall, 4(%rsp)
+ movl $TRAP_syscall, EFRAME_entry_vector(%rsp)
SAVE_ALL
SPEC_CTRL_ENTRY_FROM_PV /* Req: %rsp=regs/cpuinfo, %rdx=0, Clob: acd */
pushq $FLAT_USER_CS32
pushq %rcx
pushq $0
- movl $TRAP_syscall, 4(%rsp)
+ movl $TRAP_syscall, EFRAME_entry_vector(%rsp)
SAVE_ALL
SPEC_CTRL_ENTRY_FROM_PV /* Req: %rsp=regs/cpuinfo, %rdx=0, Clob: acd */
pushq $3 /* ring 3 null cs */
pushq $0 /* null rip */
pushq $0
- movl $TRAP_syscall, 4(%rsp)
+ movl $TRAP_syscall, EFRAME_entry_vector(%rsp)
SAVE_ALL
SPEC_CTRL_ENTRY_FROM_PV /* Req: %rsp=regs/cpuinfo, %rdx=0, Clob: acd */
ENDBR64
ALTERNATIVE "", clac, X86_FEATURE_XEN_SMAP
pushq $0
- movl $0x80, 4(%rsp)
+ movl $0x80, EFRAME_entry_vector(%rsp)
SAVE_ALL
SPEC_CTRL_ENTRY_FROM_PV /* Req: %rsp=regs/cpuinfo, %rdx=0, Clob: acd */
.section .init.text, "ax", @progbits
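/* Boot-time only: .init.text is freed once initialisation completes. */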
ENTRY(early_page_fault)
ENDBR64
- movl $X86_EXC_PF, 4(%rsp)
+ movl $X86_EXC_PF, EFRAME_entry_vector(%rsp)
SAVE_ALL
movq %rsp, %rdi
call do_early_page_fault
ENTRY(entry_PF)
ENDBR64
- movl $X86_EXC_PF, 4(%rsp)
+ movl $X86_EXC_PF, EFRAME_entry_vector(%rsp)
/* No special register assumptions. */
GLOBAL(handle_exception)
ALTERNATIVE "", clac, X86_FEATURE_XEN_SMAP
ENTRY(entry_DE)
ENDBR64
pushq $0
- movl $X86_EXC_DE, 4(%rsp)
+ movl $X86_EXC_DE, EFRAME_entry_vector(%rsp)
jmp handle_exception
ENTRY(entry_MF)
ENDBR64
pushq $0
- movl $X86_EXC_MF, 4(%rsp)
+ movl $X86_EXC_MF, EFRAME_entry_vector(%rsp)
jmp handle_exception
ENTRY(entry_XM)
ENDBR64
pushq $0
- movl $X86_EXC_XM, 4(%rsp)
+ movl $X86_EXC_XM, EFRAME_entry_vector(%rsp)
jmp handle_exception
ENTRY(entry_NM)
ENDBR64
pushq $0
- movl $X86_EXC_NM, 4(%rsp)
+ movl $X86_EXC_NM, EFRAME_entry_vector(%rsp)
jmp handle_exception
ENTRY(entry_DB)
ENDBR64
pushq $0
- movl $X86_EXC_DB, 4(%rsp)
+ movl $X86_EXC_DB, EFRAME_entry_vector(%rsp)
jmp handle_ist_exception
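/* #DB is taken on an IST stack (as are NMI and #MC below), so it joins
 * handle_ist_exception rather than handle_exception. */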
ENTRY(entry_BP)
ENDBR64
pushq $0
- movl $X86_EXC_BP, 4(%rsp)
+ movl $X86_EXC_BP, EFRAME_entry_vector(%rsp)
jmp handle_exception
ENTRY(entry_OF)
ENDBR64
pushq $0
- movl $X86_EXC_OF, 4(%rsp)
+ movl $X86_EXC_OF, EFRAME_entry_vector(%rsp)
jmp handle_exception
ENTRY(entry_BR)
ENDBR64
pushq $0
- movl $X86_EXC_BR, 4(%rsp)
+ movl $X86_EXC_BR, EFRAME_entry_vector(%rsp)
jmp handle_exception
ENTRY(entry_UD)
ENDBR64
pushq $0
- movl $X86_EXC_UD, 4(%rsp)
+ movl $X86_EXC_UD, EFRAME_entry_vector(%rsp)
jmp handle_exception
ENTRY(entry_TS)
ENDBR64
- movl $X86_EXC_TS, 4(%rsp)
+ movl $X86_EXC_TS, EFRAME_entry_vector(%rsp)
jmp handle_exception
ENTRY(entry_NP)
ENDBR64
- movl $X86_EXC_NP, 4(%rsp)
+ movl $X86_EXC_NP, EFRAME_entry_vector(%rsp)
jmp handle_exception
ENTRY(entry_SS)
ENDBR64
- movl $X86_EXC_SS, 4(%rsp)
+ movl $X86_EXC_SS, EFRAME_entry_vector(%rsp)
jmp handle_exception
ENTRY(entry_GP)
ENDBR64
- movl $X86_EXC_GP, 4(%rsp)
+ movl $X86_EXC_GP, EFRAME_entry_vector(%rsp)
jmp handle_exception
ENTRY(entry_AC)
ENDBR64
- movl $X86_EXC_AC, 4(%rsp)
+ movl $X86_EXC_AC, EFRAME_entry_vector(%rsp)
jmp handle_exception
ENTRY(entry_CP)
ENDBR64
- movl $X86_EXC_CP, 4(%rsp)
+ movl $X86_EXC_CP, EFRAME_entry_vector(%rsp)
jmp handle_exception
ENTRY(entry_DF)
ENDBR64
- movl $X86_EXC_DF, 4(%rsp)
+ movl $X86_EXC_DF, EFRAME_entry_vector(%rsp)
/* Set AC to reduce the chance of further SMAP faults */
ALTERNATIVE "", stac, X86_FEATURE_XEN_SMAP
SAVE_ALL
ENTRY(entry_NMI)
ENDBR64
pushq $0
- movl $X86_EXC_NMI, 4(%rsp)
+ movl $X86_EXC_NMI, EFRAME_entry_vector(%rsp)
handle_ist_exception:
ALTERNATIVE "", clac, X86_FEATURE_XEN_SMAP
SAVE_ALL
ENTRY(entry_MC)
ENDBR64
pushq $0
- movl $X86_EXC_MC, 4(%rsp)
+ movl $X86_EXC_MC, EFRAME_entry_vector(%rsp)
jmp handle_ist_exception
/* No-op trap handler. Required for the kexec crash path. */
1:
ENDBR64
pushq $0
- movb $vec,4(%rsp)
+ movb $vec, EFRAME_entry_vector(%rsp)
jmp common_interrupt
entrypoint 1b
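/* "entrypoint 1b" records the stub's address for IDT setup.  The
 * exception-flavoured stubs below differ in one respect: only some vectors
 * push a hardware error code, so the stub probes the stack alignment to
 * decide whether to supply one. */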
test $8,%spl /* 64-bit exception frames are 16-byte aligned, but the word */
jz 2f /* size is 8 bytes. Check whether the processor gave us an */
pushq $0 /* error code, and insert an empty one if not. */
-2: movb $vec,4(%rsp)
+2: movb $vec, EFRAME_entry_vector(%rsp)
jmp handle_exception
entrypoint 1b