if ( IS_DEFINED(CONFIG_32BIT) )
ASSERT((regs->cs & 3) == 3);
- regs->_sp = regs->bp;
+ regs->_sp = regs->bx;
}
@@ ... @@ void do_syscall(struct cpu_regs *regs)
static void __user_text user_syscall(void)
{
asm volatile (/* Stash the stack pointer before clobbering it. */
- "mov %%" _ASM_SP ", %%" _ASM_BP ";"
+ "mov %%" _ASM_SP ", %%" _ASM_BX ";"
"btc $%c[bit], %%" _ASM_SP ";"
"mov %[ss], %%ss;"
[ss] "m" (user_ss),
"X" (ex_check_UD)
#ifdef __x86_64__
- : "rbp", "rcx", "r11"
+ : "rbx", "rcx", "r11"
#else
- : "ebp", "ecx", "edx"
+ : "ebx", "ecx", "edx"
#endif
);
}
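
The stubs stash the live stack pointer in a scratch register before deliberately corrupting it, and the change moves that stash from %rbp/%ebp to %rbx/%ebx, presumably so that the frame pointer register never has to appear in the clobber list. Below is a minimal standalone sketch of the same technique, not the test's code: it assumes x86-64 with GCC/Clang extended asm, and clobber_and_restore_sp() is a made-up name.

#include <stdio.h>

/* Stash %rsp in callee-saved %rbx across a sequence that clobbers it. */
static void clobber_and_restore_sp(void)
{
    asm volatile ("mov %%rsp, %%rbx;" /* stash the stack pointer       */
                  "btc $3, %%rsp;"    /* deliberately corrupt %rsp     */
                  "btc $3, %%rsp;"    /* undo the corruption           */
                  "mov %%rbx, %%rsp;" /* restore from the stashed copy */
                  ::: "rbx", "cc");
}

int main(void)
{
    clobber_and_restore_sp();
    puts("stack pointer survived");
    return 0;
}

The %rcx/%r11 entries in the 64-bit clobber list are unrelated to the stash: SYSCALL itself overwrites %rcx with the return %rip and %r11 with %rflags.
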
static void __user_text user_syscall_compat(void)
{
asm volatile (/* Stash the stack pointer before clobbering it. */
- "mov %%" _ASM_SP ", %%" _ASM_BP ";"
+ "mov %%" _ASM_SP ", %%" _ASM_BX ";"
/* Drop to a 32bit compat code segment. */
"push $%c[cs32];"
[cs64] "i" (__USER_CS),
"X" (ex_check_UD)
#ifdef __x86_64__
- : "rbp", "rcx", "r11"
+ : "rbx", "rcx", "r11"
#else
- : "ebp", "ecx", "edx"
+ : "ebx", "ecx", "edx"
#endif
);
}
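
Both stubs also embed compile-time constants into the instruction templates ("btc $%c[bit], ..." and "push $%c[cs32];") using the %c operand modifier, which prints an "i"-constrained operand as a bare constant so the template can supply its own '$'. A small standalone illustration, with EXAMPLE_SELECTOR being an arbitrary value chosen for the demo:

#include <stdio.h>

#define EXAMPLE_SELECTOR 0x2b

int main(void)
{
    unsigned long v;

    /* "%c[sel]" expands to the bare constant, so the template adds '$'. */
    asm ("mov $%c[sel], %[out]"
         : [out] "=r" (v)
         : [sel] "i" (EXAMPLE_SELECTOR));

    printf("%#lx\n", v); /* prints 0x2b */
    return 0;
}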