mov %eax, %ds
mov %eax, %es
- push %esp /* struct cpu_regs * */
+ mov %esp, %eax /* struct cpu_regs * */
call do_exception
- add $4, %esp
RESTORE_ALL
ret
ENDFUNC(entry_ret_to_kernel)
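The entry stubs now hand their struct cpu_regs pointer to the C handler in %eax instead of pushing it, which is also why the compensating "add $4, %esp" goes away. A minimal sketch of the handler declaration this relies on, assuming the 32bit build uses GCC's regparm(3) convention (the attribute spelling here is illustrative, not taken from this patch):

    /* Sketch only: under regparm(3) the first integer argument travels in
     * %eax, so "mov %esp, %eax" is all that is needed to pass the frame. */
    struct cpu_regs;

    __attribute__((regparm(3))) void do_exception(struct cpu_regs *regs);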
-ENTRY(exec_user_param)
- /*
- * 2*4(%esp) ulong p1
- * 1*4(%esp) ulong (*fn)(ulong)
- * 0*4(%esp) return address
- */
+ENTRY(exec_user_param) /* %eax = ulong (*fn)(ulong p1), %edx = ulong p1 */
push %ebp
/* Prepare to "call" exec_user_stub(). */
- mov (1+1)*4(%esp), %eax /* Pass fn() in %eax */
- mov (1+2)*4(%esp), %ecx /* Pass p1 in %ecx */
push $1f /* Fake return addr as if we'd called exec_user_stub(). */
mov %esp, %ebp /* Stash %esp for entry_ret_to_kernel(). */
pushf /* EFLAGS */
/* Apply and/or masks to eflags. */
- mov exec_user_efl_and_mask, %edx
- and %edx, (%esp)
- mov exec_user_efl_or_mask, %edx
- or %edx, (%esp)
+ mov exec_user_efl_and_mask, %ecx
+ and %ecx, (%esp)
+ mov exec_user_efl_or_mask, %ecx
+ or %ecx, (%esp)
push exec_user_cs /* CS */
push $exec_user_stub /* EIP */
ENDFUNC(exec_user_param)
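exec_user_param() no longer reads its arguments from the stack; the new entry comment records the register contract instead. A hedged caller-side view under the same regparm(3) assumption (prototype reconstructed from the old stack-layout comment, not copied from a header):

    /* Old layout: fn at 1*4(%esp), p1 at 2*4(%esp).  With regparm(3) they
     * arrive as %eax = fn and %edx = p1, which is also why the EFLAGS
     * and/or mask scratch register moves from %edx to %ecx. */
    unsigned long exec_user_param(unsigned long (*fn)(unsigned long), /* %eax */
                                  unsigned long p1);                  /* %edx */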
.pushsection .text.user, "ax", @progbits
-ENTRY(exec_user_stub)
- /*
- * For SMEP/SMAP safety, no shared stack can be used, so all
- * parameters are passed in registers.
- */
- push %ecx /* Push p1 for fn()'s call frame. */
- call *%eax /* fn(p1) */
+ENTRY(exec_user_stub) /* %eax = ulong (*fn)(ulong p1), %edx = ulong p1 */
+ xchg %eax, %edx /* Swap p1 to be first parameter to fn(). */
+ call *%edx /* fn(p1) */
int $X86_VEC_RET2KERN /* Return to kernel privilege. */
ENDFUNC(exec_user_stub)
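exec_user_stub() keeps everything in registers because, as the replaced comment noted, no shared stack can be used for SMEP/SMAP safety. Roughly what it computes, written as C under the same regparm(3) assumption (illustrative only; the name exec_user_stub_c is hypothetical):

    /* fn arrives in %eax and p1 in %edx; fn() wants its argument in %eax,
     * so the xchg swaps them and the indirect call goes through %edx.
     * The result comes back in %eax as usual. */
    static unsigned long exec_user_stub_c(unsigned long (*fn)(unsigned long),
                                          unsigned long p1)
    {
        return fn(p1);
    }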
mov %eax, %ds
mov %eax, %es
- push %esp /* struct cpu_regs * */
+ mov %esp, %eax /* struct cpu_regs * */
call do_evtchn
- add $4, %esp
RESTORE_ALL
mov %eax, %ds
mov %eax, %es
- push %esp /* struct cpu_regs * */
+ mov %esp, %eax /* struct cpu_regs * */
call do_syscall
- add $4, %esp
RESTORE_ALL
mov %eax, %ds
mov %eax, %es
- push %esp /* struct cpu_regs * */
+ mov %esp, %eax /* struct cpu_regs * */
call do_sysenter
- add $4, %esp
RESTORE_ALL
* Switch segment if necessary. The old segment is preserved on the
* stack for the duration of the test.
*/
- .local stack_adj
- stack_adj = 0
-
.if \load_seg
.ifeqs "\seg", "none"
push %ds
push $(GDTE_AVAIL1 << 3 | 3)
pop %\seg
.endif
- stack_adj = 1
.endif
#endif
- /* No exception if we don't fault. Also reused by the 64bit case. */
- xor %eax, %eax
-
/* The bottom bit of 'addr' encodes FEP. */
#ifdef __i386__
- testb $1, (1 + stack_adj)*4(%esp)
+ testb $1, %al
#else
testb $1, %dil
#endif
+
+ /*
+ * No exception if we don't fault.
+ * Reused by the 64bit case; careful not to clobber the flags from testb.
+ */
+ mov $0, %eax
+
jz 1f
_ASM_XEN_FEP
* Switch segment if necessary. The old segment is preserved on the
* stack for the duration of the test.
*/
- .local stack_adj
- stack_adj = 0
-
.if \load_seg
.ifeqs "\seg", "none"
push %ds
push $(GDTE_AVAIL1 << 3 | 3)
pop %\seg
.endif
- stack_adj = 1
.endif
#endif
/* Move 'addr' into \reg */
#ifdef __i386__
- mov (1 + stack_adj)*4(%esp), %\reg
+ mov %eax, %\reg
#else
mov %rdi, %\reg
#endif
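The same convention change removes the stack_adj bookkeeping from these stub macros: 'addr' is now taken straight from %eax on 32bit, mirroring the existing %rdi path on 64bit. A hedged caller-side sketch (the stub name is hypothetical):

    /* Each generated stub behaves like a function whose first argument is
     * 'addr': %eax with regparm(3) on 32bit, %rdi in the 64bit SysV ABI. */
    unsigned long stub(unsigned long addr);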