From: Sergiu Moga
Date: Mon, 10 Mar 2025 16:24:54 +0000 (+0200)
Subject: lib/syscall_shim: Use global scope inline asm for execenv prologue
X-Git-Url: http://xenbits.xensource.com/gitweb?a=commitdiff_plain;h=f38bce203c352de98ba81da2b53561e77cbf74c9;p=unikraft%2Funikraft.git

lib/syscall_shim: Use global scope inline asm for execenv prologue

Avoid having to use weird and unconventional function attributes by
writing the inline assembly at global scope. This is especially useful
for ARM64, since GCC does not support the naked attribute on that
architecture.

Signed-off-by: Sergiu Moga
Approved-by: Andrei Tatar
Reviewed-by: Andrei Tatar
Reviewed-by: Michalis Pappas
GitHub-Closes: #1598
---

diff --git a/lib/syscall_shim/arch/arm64/include/arch/syscall_prologue.h b/lib/syscall_shim/arch/arm64/include/arch/syscall_prologue.h
index 2212d5f1f..c04659bf1 100644
--- a/lib/syscall_shim/arch/arm64/include/arch/syscall_prologue.h
+++ b/lib/syscall_shim/arch/arm64/include/arch/syscall_prologue.h
@@ -38,10 +38,11 @@
 				(0b1101 << 6) /* D, A, I, F, only IRQ's unmasked */)
 
 #define UK_SYSCALL_EXECENV_PROLOGUE_DEFINE(pname, fname, x, ...)	\
-	long __used __noreturn __attribute__((optimize("O3")))		\
-	pname(UK_ARG_MAPx(x, UK_S_ARG_LONG_MAYBE_UNUSED, __VA_ARGS__))	\
-	{								\
-		__asm__ __volatile__(					\
+	long __used							\
+	pname(UK_ARG_MAPx(x, UK_S_ARG_LONG_MAYBE_UNUSED, __VA_ARGS__));\
+	__asm__ (							\
+		".global " STRINGIFY(pname) "\n\t"			\
+		"" STRINGIFY(pname) ":\n\t"				\
 		"/* No IRQ's during register saving please */\n\t"	\
 		"msr daifset, #2\n\t"					\
 		"/* Use TPIDRRO_EL0 as a scratch register. This\n\t"	\
@@ -137,8 +138,6 @@
 		"ldr x9, [sp, #" STRINGIFY(__SP_OFFSET) "]\n\t"	\
 		"mov sp, x9\n\t"					\
 		"ret\n\t"						\
-		::							\
-		);							\
-	}
+	);
 
 #endif /* !__ASSEMBLY__ */
diff --git a/lib/syscall_shim/arch/x86_64/include/arch/syscall_prologue.h b/lib/syscall_shim/arch/x86_64/include/arch/syscall_prologue.h
index 43a0737ae..395fb66f2 100644
--- a/lib/syscall_shim/arch/x86_64/include/arch/syscall_prologue.h
+++ b/lib/syscall_shim/arch/x86_64/include/arch/syscall_prologue.h
@@ -24,19 +24,20 @@
 #include
 
 #define UK_SYSCALL_EXECENV_PROLOGUE_DEFINE(pname, fname, x, ...)	\
-	long __used __naked __noreturn					\
-	pname(UK_ARG_MAPx(x, UK_S_ARG_LONG_MAYBE_UNUSED, __VA_ARGS__))	\
-	{								\
-		__asm__ __volatile__(					\
+	long __used							\
+	pname(UK_ARG_MAPx(x, UK_S_ARG_LONG_MAYBE_UNUSED, __VA_ARGS__));\
+	__asm__ (							\
+		".global " STRINGIFY(pname) "\n\t"			\
+		"" STRINGIFY(pname) ":\n\t"				\
 		"cli\n\t"						\
 		"/* Switch to the per-CPU auxiliary stack */\n\t"	\
 		"/* AMD64 SysV ABI: r11 is scratch register */\n\t"	\
-		"movq %%rsp, %%r11\n\t"					\
-		"movq %%gs:(" STRINGIFY(LCPU_AUXSP_OFFSET) "), %%rsp\n\t"\
+		"movq %rsp, %r11\n\t"					\
+		"movq %gs:(" STRINGIFY(LCPU_AUXSP_OFFSET) "), %rsp\n\t"\
 		"/* Auxiliary stack is already ECTX aligned */\n\t"	\
 		"/* Make room for `struct UKARCH_EXECENV` */\n\t"	\
 		"subq $(" STRINGIFY(UKARCH_EXECENV_SIZE -		\
-				    __REGS_SIZEOF)" ), %%rsp\n\t"	\
+				    __REGS_SIZEOF)" ), %rsp\n\t"	\
 		"/* Now build stack frame beginning with 5 pointers\n\t"\
 		" * in the classical iretq/`struct __regs` format\n\t"	\
 		" */\n\t"						\
@@ -47,7 +48,7 @@
 		" */\n\t"						\
 		"pushq $(0x10)\n\t"					\
 		"/* Push saving original rsp stored in r11 */\n\t"	\
-		"pushq %%r11\n\t"					\
+		"pushq %r11\n\t"					\
 		"/* Push EFLAGS register. Additionally, since we\n\t"	\
 		" * pushed it with IRQs disabled, it won't have\n\t"	\
 		" * the corresponding bit flag set, making it look\n\t"	\
@@ -56,63 +57,60 @@
 		" * manually set the flag.\n\t"				\
 		" */\n\t"						\
 		"pushfq\n\t"						\
-		"orq $(" STRINGIFY(X86_EFLAGS_IF) "), 0(%%rsp)\n\t"	\
+		"orq $(" STRINGIFY(X86_EFLAGS_IF) "), 0(%rsp)\n\t"	\
 		"/* Push code segment, GDT code segment selector:\n\t"	\
 		" * [15: 3]: Selector Index - first GDT entry\n\t"	\
 		" * [ 2: 2]: Table Indicator - GDT, table 0\n\t"	\
 		" * [ 1: 0]: Requestor Privilege Level - ring 0\n\t"	\
 		" */\n\t"						\
 		"pushq $(0x8)\n\t"					\
-		"/* Save caller next rip, this part here\n\t"		\
-		" * is the reason why we depend on `__naked`, as we\n\t"\
-		" * rely on the aforementioned rip being placed at\n\t"	\
+		"/* Save caller next rip, this part here.\n\t"		\
+		" * Rely on the aforementioned rip being placed at\n\t"	\
 		" * rsp + 8 initially w.r.t. `call` instruction.\n\t"	\
 		" */\n\t"						\
-		"movq (%%r11), %%r11\n\t"				\
-		"pushq %%r11\n\t"					\
+		"movq (%r11), %r11\n\t"					\
+		"pushq %r11\n\t"					\
 		"/* Now just push the rest of `struct __regs` */\n\t"	\
-		"pushq %%rax\n\t"					\
-		"pushq %%rdi\n\t"					\
-		"pushq %%rsi\n\t"					\
-		"pushq %%rdx\n\t"					\
-		"pushq %%rcx\n\t"					\
-		"pushq %%rax\n\t"					\
-		"pushq %%r8\n\t"					\
-		"pushq %%r9\n\t"					\
-		"pushq %%r10\n\t"					\
-		"pushq %%r11\n\t"					\
-		"pushq %%rbx\n\t"					\
-		"pushq %%rbp\n\t"					\
-		"pushq %%r12\n\t"					\
-		"pushq %%r13\n\t"					\
-		"pushq %%r14\n\t"					\
-		"pushq %%r15\n\t"					\
-		"subq $(" STRINGIFY(__REGS_PAD_SIZE) "), %%rsp\n\t"	\
+		"pushq %rax\n\t"					\
+		"pushq %rdi\n\t"					\
+		"pushq %rsi\n\t"					\
+		"pushq %rdx\n\t"					\
+		"pushq %rcx\n\t"					\
+		"pushq %rax\n\t"					\
+		"pushq %r8\n\t"						\
+		"pushq %r9\n\t"						\
+		"pushq %r10\n\t"					\
+		"pushq %r11\n\t"					\
+		"pushq %rbx\n\t"					\
+		"pushq %rbp\n\t"					\
+		"pushq %r12\n\t"					\
+		"pushq %r13\n\t"					\
+		"pushq %r14\n\t"					\
+		"pushq %r15\n\t"					\
+		"subq $(" STRINGIFY(__REGS_PAD_SIZE) "), %rsp\n\t"	\
 		"/* ECTX at slot w.r.t. `struct UKARCH_EXECENV` */\n\t"	\
-		"movq %%rsp, %%rdi\n\t"					\
+		"movq %rsp, %rdi\n\t"					\
 		"addq $(" STRINGIFY(__REGS_SIZEOF +			\
-				    UKARCH_SYSCTX_SIZE) "), %%rdi\n\t"	\
+				    UKARCH_SYSCTX_SIZE) "), %rdi\n\t"	\
 		"call ukarch_ectx_store\n\t"				\
 		"/* SYSCTX at slot w.r.t. `struct UKARCH_EXECENV` */\n\t"\
-		"movq %%rsp, %%rdi\n\t"					\
-		"addq $(" STRINGIFY(__REGS_SIZEOF) "), %%rdi\n\t"	\
+		"movq %rsp, %rdi\n\t"					\
+		"addq $(" STRINGIFY(__REGS_SIZEOF) "), %rdi\n\t"	\
 		"call ukarch_sysctx_store\n\t"				\
-		"movq %%rsp, %%rdi\n\t"					\
+		"movq %rsp, %rdi\n\t"					\
 		"sti\n\t"						\
 		"call " STRINGIFY(fname) "\n\t"				\
-		"addq $(" STRINGIFY(__REGS_PAD_SIZE) "), %%rsp\n\t"	\
+		"addq $(" STRINGIFY(__REGS_PAD_SIZE) "), %rsp\n\t"	\
 		"/* Only restore callee preserved regs (ABI) */\n\t"	\
-		"popq %%r15\n\t"					\
-		"popq %%r14\n\t"					\
-		"popq %%r13\n\t"					\
-		"popq %%r12\n\t"					\
-		"popq %%rbp\n\t"					\
-		"popq %%rbx\n\t"					\
+		"popq %r15\n\t"						\
+		"popq %r14\n\t"						\
+		"popq %r13\n\t"						\
+		"popq %r12\n\t"						\
+		"popq %rbp\n\t"						\
+		"popq %rbx\n\t"						\
 		"/* Restore rsp from where it was stored */\n\t"	\
-		"movq 104(%%rsp), %%rsp\n\t"				\
+		"movq 104(%rsp), %rsp\n\t"				\
 		"ret\n\t"						\
-		::							\
-		);							\
-	}
+	);
 
 #endif /* !__ASSEMBLY__ */
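
For readers unfamiliar with the pattern the patch switches to, here is a
minimal, self-contained sketch of it outside Unikraft: declare a C prototype,
then define the symbol with global-scope (file-scope) inline assembly instead
of a `naked` C function. The names my_stub and my_handler are hypothetical,
and an ELF x86_64 target built with GCC or Clang is assumed; this is not
Unikraft code.

/* global_asm_sketch.c - hypothetical, minimal illustration (not Unikraft code).
 * Assumes an ELF x86_64 (SysV ABI) target built with GCC or Clang.
 */
#include <stdio.h>

#define STRINGIFY_(x) #x
#define STRINGIFY(x)  STRINGIFY_(x)

/* Ordinary C handler that the assembly stub forwards to. */
long my_handler(long a, long b)
{
	return a + b;
}

/* C-level prototype only; the symbol itself is defined in assembly below. */
long my_stub(long a, long b);

/* Global-scope inline asm: emitted verbatim into the translation unit's
 * assembly output, so no compiler-generated prologue/epilogue surrounds it
 * and no function attributes (naked, optimize(...)) are needed.
 */
__asm__ (
	".text\n\t"
	".global " STRINGIFY(my_stub) "\n\t"
	".type " STRINGIFY(my_stub) ", @function\n\t"
	STRINGIFY(my_stub) ":\n\t"
	/* Arguments already sit in %rdi/%rsi per the SysV ABI; tail-jump to
	 * the C handler, which returns directly to my_stub's caller.
	 */
	"jmp " STRINGIFY(my_handler) "\n\t"
);

int main(void)
{
	printf("%ld\n", my_stub(2, 3)); /* prints 5 */
	return 0;
}

Because the assembly lives at file scope, the compiler never wraps it in a
generated prologue/epilogue, which is what the removed __naked and
optimize("O3") attributes were working around; the trade-off is that section
placement and symbol visibility have to be spelled out by hand (".text",
".global"), exactly as the patch does with STRINGIFY(pname).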