call *%ebx
call exit_thread
+ENTRY(__arch_switch_threads)
+ movl 4(%esp), %ecx /* prev */
+ movl 8(%esp), %edx /* next */
+ pushl %ebp
+ pushl %ebx
+ pushl %esi
+ pushl %edi
+ movl %esp, (%ecx) /* save ESP */
+ movl (%edx), %esp /* restore ESP */
+ movl $1f, 4(%ecx) /* save EIP */
+ pushl 4(%edx) /* restore EIP */
+ ret
+1:
+ popl %edi
+ popl %esi
+ popl %ebx
+ popl %ebp
+ ret
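
The 32-bit routine reads its two arguments off the stack, per the cdecl convention, before spilling the callee-saved registers (%ebp, %ebx, %esi, %edi). The switch itself stores the old stack pointer and the address of label 1 into the previous context, installs the next context's stack pointer, then pushes the next context's saved instruction pointer and executes ret to jump to it. This assumes a context whose first word is the stack pointer, immediately followed by the resume address; a minimal sketch of that layout in C (the struct name is hypothetical, the field names follow the accessor macro at the end of this patch):

    struct thread_ctx {
        unsigned long sp; /* offset 0: accessed as (%ecx) and (%edx) */
        unsigned long ip; /* offset 4: accessed as 4(%ecx) and 4(%edx) */
    };
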
call exit_thread
+ENTRY(__arch_switch_threads)
+ pushq %rbp
+ pushq %rbx
+ pushq %r12
+ pushq %r13
+ pushq %r14
+ pushq %r15
+ movq %rsp, (%rdi) /* save RSP */
+ movq (%rsi), %rsp /* restore RSP */
+ movq $1f, 8(%rdi) /* save RIP */
+ pushq 8(%rsi) /* restore RIP */
+ ret
+1:
+ popq %r15
+ popq %r14
+ popq %r13
+ popq %r12
+ popq %rbx
+ popq %rbp
+ ret
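
The 64-bit variant performs the same switch under the System V AMD64 ABI: prev and next already arrive in %rdi and %rsi, so no stack loads are needed, the callee-saved set grows to %rbp, %rbx and %r12 through %r15, and the saved instruction pointer moves to offset 8. One difference worth noting from the inline-asm macros removed below: neither routine saves the flags register. Both calling conventions treat the flags as clobbered across a call, so dropping the old pushfl/pushfq is safe for a switch that only ever happens at a call boundary.
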
return *current;
}
-#ifdef __i386__
-#define arch_switch_threads(prev, next) do { \
- unsigned long esi,edi; \
- __asm__ __volatile__("pushfl\n\t" \
- "pushl %%ebp\n\t" \
- "movl %%esp,%0\n\t" /* save ESP */ \
- "movl %4,%%esp\n\t" /* restore ESP */ \
- "movl $1f,%1\n\t" /* save EIP */ \
- "pushl %5\n\t" /* restore EIP */ \
- "ret\n\t" \
- "1:\t" \
- "popl %%ebp\n\t" \
- "popfl" \
- :"=m" (prev->sp),"=m" (prev->ip), \
- "=S" (esi),"=D" (edi) \
- :"m" (next->sp),"m" (next->ip), \
- "2" (prev), "d" (next)); \
-} while (0)
-#elif __x86_64__
-#define arch_switch_threads(prev, next) do { \
- unsigned long rsi,rdi; \
- __asm__ __volatile__("pushfq\n\t" \
- "pushq %%rbp\n\t" \
- "movq %%rsp,%0\n\t" /* save RSP */ \
- "movq %4,%%rsp\n\t" /* restore RSP */ \
- "movq $1f,%1\n\t" /* save RIP */ \
- "pushq %5\n\t" /* restore RIP */ \
- "ret\n\t" \
- "1:\t" \
- "popq %%rbp\n\t" \
- "popfq" \
- :"=m" (prev->sp),"=m" (prev->ip), \
- "=S" (rsi),"=D" (rdi) \
- :"m" (next->sp),"m" (next->ip), \
- "2" (prev), "d" (next)); \
-} while (0)
-#endif
+extern void __arch_switch_threads(unsigned long *prevctx, unsigned long *nextctx);
+#define arch_switch_threads(prev,next) __arch_switch_threads(&(prev)->sp, &(next)->sp)
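
To make the new interface concrete, a hedged sketch of a call site (everything here beyond the sp and ip fields is hypothetical; the assembly requires sp at offset 0 with ip directly after it, which is why the macro passes &(prev)->sp):

    struct thread {
        unsigned long sp; /* saved stack pointer */
        unsigned long ip; /* saved resume address, must directly follow sp */
        /* ... other per-thread scheduler state ... */
    };

    /* Saves prev's callee-saved registers and stack, then resumes next
     * at the point where it last called arch_switch_threads(). */
    static void switch_example(struct thread *prev, struct thread *next)
    {
        arch_switch_threads(prev, next);
    }
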