(0b1101 << 6) /* D, A, I, F: only IRQs unmasked */)
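For reference, the mask arithmetic above can be checked at compile time. A minimal sketch, assuming the standard AArch64 DAIF bit positions (D = bit 9, A = bit 8, I = bit 7, F = bit 6); the assertion is illustrative only and not part of the patch:

/* 0b1101 << 6 == 0x340 == (1 << 9) | (1 << 8) | (1 << 6):
 * D (Debug), A (SError) and F (FIQ) are masked, I (IRQ, bit 7) stays
 * clear, i.e. only IRQs remain unmasked.
 */
_Static_assert((0b1101 << 6) == ((1 << 9) | (1 << 8) | (1 << 6)),
	       "DAIF mask: D, A, F set, I clear");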
#define UK_SYSCALL_EXECENV_PROLOGUE_DEFINE(pname, fname, x, ...) \
- long __used __noreturn __attribute__((optimize("O3"))) \
- pname(UK_ARG_MAPx(x, UK_S_ARG_LONG_MAYBE_UNUSED, __VA_ARGS__)) \
- { \
- __asm__ __volatile__( \
+ long __used \
+ pname(UK_ARG_MAPx(x, UK_S_ARG_LONG_MAYBE_UNUSED, __VA_ARGS__)); \
+ __asm__ ( \
+ ".global " STRINGIFY(pname) "\n\t" \
+ "" STRINGIFY(pname) ":\n\t" \
"/* No IRQ's during register saving please */\n\t" \
"msr daifset, #2\n\t" \
"/* Use TPIDRRO_EL0 as a scratch register. This\n\t" \
"ldr x9, [sp, #" STRINGIFY(__SP_OFFSET) "]\n\t" \
"mov sp, x9\n\t" \
"ret\n\t" \
- :: \
- ); \
- }
+ );
#endif /* !__ASSEMBLY__ */
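The hunk above replaces a compiler-generated function body (previously forced with `__noreturn` and `optimize("O3")`) with a bare C prototype plus a file-scope `__asm__()` block that emits the symbol itself, so no compiler prologue/epilogue can be interposed. A minimal standalone sketch of that pattern, with a hypothetical symbol name and a placeholder body (section/type directives omitted for brevity):

/* Prototype for C callers only; the symbol is defined in asm below. */
long my_prologue(long arg);

__asm__ (
	".global my_prologue\n\t"
	"my_prologue:\n\t"
	/* Placeholder body (AArch64): the argument is already in x0,
	 * which is also the return register, so just return it.
	 */
	"ret\n\t"
);

Since file-scope asm takes no operands, it is basic (not extended) asm, which is why the x86 variant below can drop the `%%` escaping on register names.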
#include <uk/essentials.h>
#define UK_SYSCALL_EXECENV_PROLOGUE_DEFINE(pname, fname, x, ...) \
- long __used __naked __noreturn \
- pname(UK_ARG_MAPx(x, UK_S_ARG_LONG_MAYBE_UNUSED, __VA_ARGS__)) \
- { \
- __asm__ __volatile__( \
+ long __used \
+ pname(UK_ARG_MAPx(x, UK_S_ARG_LONG_MAYBE_UNUSED, __VA_ARGS__)); \
+ __asm__ ( \
+ ".global " STRINGIFY(pname) "\n\t" \
+ "" STRINGIFY(pname) ":\n\t" \
"cli\n\t" \
"/* Switch to the per-CPU auxiliary stack */\n\t" \
"/* AMD64 SysV ABI: r11 is scratch register */\n\t" \
- "movq %%rsp, %%r11\n\t" \
- "movq %%gs:(" STRINGIFY(LCPU_AUXSP_OFFSET) "), %%rsp\n\t"\
+ "movq %rsp, %r11\n\t" \
+ "movq %gs:(" STRINGIFY(LCPU_AUXSP_OFFSET) "), %rsp\n\t"\
"/* Auxiliary stack is already ECTX aligned */\n\t" \
"/* Make room for `struct UKARCH_EXECENV` */\n\t" \
"subq $(" STRINGIFY(UKARCH_EXECENV_SIZE - \
- __REGS_SIZEOF)" ), %%rsp\n\t" \
+ __REGS_SIZEOF)" ), %rsp\n\t" \
"/* Now build stack frame beginning with 5 pointers\n\t"\
" * in the classical iretq/`struct __regs` format\n\t" \
" */\n\t" \
" */\n\t" \
"pushq $(0x10)\n\t" \
"/* Push saving original rsp stored in r11 */\n\t" \
- "pushq %%r11\n\t" \
+ "pushq %r11\n\t" \
"/* Push EFLAGS register. Additionally, since we\n\t" \
" * pushed it with IRQs disabled, it won't have\n\t" \
" * the corresponding bit flag set, making it look\n\t" \
" * manually set the flag.\n\t" \
" */\n\t" \
"pushfq\n\t" \
- "orq $(" STRINGIFY(X86_EFLAGS_IF) "), 0(%%rsp)\n\t" \
+ "orq $(" STRINGIFY(X86_EFLAGS_IF) "), 0(%rsp)\n\t" \
"/* Push code segment, GDT code segment selector:\n\t" \
" * [15: 3]: Selector Index - first GDT entry\n\t" \
" * [ 2: 2]: Table Indicator - GDT, table 0\n\t" \
" * [ 1: 0]: Requestor Privilege Level - ring 0\n\t" \
" */\n\t" \
"pushq $(0x8)\n\t" \
- "/* Save caller next rip, this part here\n\t" \
- " * is the reason why we depend on `__naked`, as we\n\t"\
- " * rely on the aforementioned rip being placed at\n\t" \
+ "/* Save caller next rip, this part here.\n\t" \
+ " * Rely on the aforementioned rip being placed at\n\t" \
" * rsp + 8 initially w.r.t. `call` instruction.\n\t" \
" */\n\t" \
- "movq (%%r11), %%r11\n\t" \
- "pushq %%r11\n\t" \
+ "movq (%r11), %r11\n\t" \
+ "pushq %r11\n\t" \
"/* Now just push the rest of `struct __regs` */\n\t" \
- "pushq %%rax\n\t" \
- "pushq %%rdi\n\t" \
- "pushq %%rsi\n\t" \
- "pushq %%rdx\n\t" \
- "pushq %%rcx\n\t" \
- "pushq %%rax\n\t" \
- "pushq %%r8\n\t" \
- "pushq %%r9\n\t" \
- "pushq %%r10\n\t" \
- "pushq %%r11\n\t" \
- "pushq %%rbx\n\t" \
- "pushq %%rbp\n\t" \
- "pushq %%r12\n\t" \
- "pushq %%r13\n\t" \
- "pushq %%r14\n\t" \
- "pushq %%r15\n\t" \
- "subq $(" STRINGIFY(__REGS_PAD_SIZE) "), %%rsp\n\t" \
+ "pushq %rax\n\t" \
+ "pushq %rdi\n\t" \
+ "pushq %rsi\n\t" \
+ "pushq %rdx\n\t" \
+ "pushq %rcx\n\t" \
+ "pushq %rax\n\t" \
+ "pushq %r8\n\t" \
+ "pushq %r9\n\t" \
+ "pushq %r10\n\t" \
+ "pushq %r11\n\t" \
+ "pushq %rbx\n\t" \
+ "pushq %rbp\n\t" \
+ "pushq %r12\n\t" \
+ "pushq %r13\n\t" \
+ "pushq %r14\n\t" \
+ "pushq %r15\n\t" \
+ "subq $(" STRINGIFY(__REGS_PAD_SIZE) "), %rsp\n\t" \
"/* ECTX at slot w.r.t. `struct UKARCH_EXECENV` */\n\t" \
- "movq %%rsp, %%rdi\n\t" \
+ "movq %rsp, %rdi\n\t" \
"addq $(" STRINGIFY(__REGS_SIZEOF + \
- UKARCH_SYSCTX_SIZE) "), %%rdi\n\t" \
+ UKARCH_SYSCTX_SIZE) "), %rdi\n\t" \
"call ukarch_ectx_store\n\t" \
"/* SYSCTX at slot w.r.t. `struct UKARCH_EXECENV` */\n\t"\
- "movq %%rsp, %%rdi\n\t" \
- "addq $(" STRINGIFY(__REGS_SIZEOF) "), %%rdi\n\t" \
+ "movq %rsp, %rdi\n\t" \
+ "addq $(" STRINGIFY(__REGS_SIZEOF) "), %rdi\n\t" \
"call ukarch_sysctx_store\n\t" \
- "movq %%rsp, %%rdi\n\t" \
+ "movq %rsp, %rdi\n\t" \
"sti\n\t" \
"call " STRINGIFY(fname) "\n\t" \
- "addq $(" STRINGIFY(__REGS_PAD_SIZE) "), %%rsp\n\t" \
+ "addq $(" STRINGIFY(__REGS_PAD_SIZE) "), %rsp\n\t" \
"/* Only restore callee preserved regs (ABI) */\n\t" \
- "popq %%r15\n\t" \
- "popq %%r14\n\t" \
- "popq %%r13\n\t" \
- "popq %%r12\n\t" \
- "popq %%rbp\n\t" \
- "popq %%rbx\n\t" \
+ "popq %r15\n\t" \
+ "popq %r14\n\t" \
+ "popq %r13\n\t" \
+ "popq %r12\n\t" \
+ "popq %rbp\n\t" \
+ "popq %rbx\n\t" \
"/* Restore rsp from where it was stored */\n\t" \
- "movq 104(%%rsp), %%rsp\n\t" \
+ "movq 104(%rsp), %rsp\n\t" \
"ret\n\t" \
- :: \
- ); \
- }
+ );
#endif /* !__ASSEMBLY__ */
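The final `movq 104(%rsp), %rsp` depends on the exact frame the pushes above lay out: after re-adding `__REGS_PAD_SIZE` and popping the six callee-saved registers, `%rsp` points at the saved-r11 slot, and the saved stack pointer sits 13 slots (104 bytes) above it. A compile-time sketch of that arithmetic, using a hypothetical mirror struct rather than the real `struct __regs`:

#include <stddef.h>

/* Slots from the post-pop %rsp (the r11 slot) upward, mirroring the
 * push order above in reverse; illustrative only, not the Unikraft
 * `struct __regs` definition.
 */
struct regs_frame_sketch {
	unsigned long r11, r10, r9, r8, rax, rcx, rdx, rsi, rdi;
	unsigned long orig_rax, rip, cs, eflags, rsp, ss;
};

_Static_assert(offsetof(struct regs_frame_sketch, rsp) == 104,
	       "saved rsp is 13 * 8 == 104 bytes above the r11 slot");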