         movq $0, STACK_CPUINFO_FIELD(xen_cr3)(%rbx)
 .Lcstar_cr3_okay:
-        GET_CURRENT(bx)
+        movq STACK_CPUINFO_FIELD(current_vcpu)(%rbx), %rbx
         movq VCPU_domain(%rbx),%rcx
         cmpb $0,DOMAIN_is_32bit_pv(%rcx)
         je switch_to_kernel
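In this CSTAR entry hunk, %rbx still holds the per-CPU stack end from the xen_cr3 handling just above, so GET_CURRENT(bx), which re-derives that value via GET_STACK_END() (see the macro hunk at the end, from asm_defns.h), would do redundant work; open-coding just the current_vcpu load avoids it.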
         movq $0, STACK_CPUINFO_FIELD(xen_cr3)(%rbx)
 .Llstar_cr3_okay:
-        __GET_CURRENT(bx)
+        movq STACK_CPUINFO_FIELD(current_vcpu)(%rbx), %rbx
         testb $TF_kernel_mode,VCPU_thread_flags(%rbx)
         jz switch_to_kernel
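The LSTAR path already used the bare __GET_CURRENT(bx), which is exactly this movq, so here the change is purely mechanical: the macro body is substituted in place so the helper can be removed in the final hunk.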
         movq $0, STACK_CPUINFO_FIELD(xen_cr3)(%rbx)
 .Lsyse_cr3_okay:
-        __GET_CURRENT(bx)
+        movq STACK_CPUINFO_FIELD(current_vcpu)(%rbx), %rbx
         cmpb $0,VCPU_sysenter_disables_events(%rbx)
         movq VCPU_sysenter_addr(%rbx),%rax
         setne %cl
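The SYSENTER path gets the same mechanical expansion; as above, %rbx was left pointing at the stack end by the preceding cr3 handling.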
         call check_for_unexpected_msi
         UNLIKELY_END(msi_check)
-        __GET_CURRENT(bx)
+        movq STACK_CPUINFO_FIELD(current_vcpu)(%rbx), %rbx
         /* Check that the callback is non-null. */
         leaq VCPU_int80_bounce(%rbx),%rdx
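In the int80 path the load follows a call, but %rbx is callee-saved under the SysV AMD64 ABI, so it still holds the stack end across check_for_unexpected_msi.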
         GET_STACK_END(ax)
         leaq STACK_CPUINFO_FIELD(guest_cpu_user_regs)(%rax),%rsp
         # create_bounce_frame() temporarily clobbers CS.RPL. Fix up.
-        __GET_CURRENT(ax)
+        movq STACK_CPUINFO_FIELD(current_vcpu)(%rax), %rax
         movq VCPU_domain(%rax),%rax
         testb $1,DOMAIN_is_32bit_pv(%rax)
         setz %al
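This instance goes through %rax instead, which GET_STACK_END(ax) has just loaded with the stack end a few lines earlier.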
         GET_STACK_END(reg); \
         addq $STACK_CPUINFO_FIELD(field), %r##reg
 
-#define __GET_CURRENT(reg) \
-        movq STACK_CPUINFO_FIELD(current_vcpu)(%r##reg), %r##reg
 #define GET_CURRENT(reg) \
         GET_STACK_END(reg); \
-        __GET_CURRENT(reg)
+        movq STACK_CPUINFO_FIELD(current_vcpu)(%r##reg), %r##reg
 
 #ifndef NDEBUG
 #define ASSERT_NOT_IN_ATOMIC \
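With every open-coded user converted above, __GET_CURRENT() has no caller left other than GET_CURRENT() itself, so its body is folded in and the helper is dropped. The expansion of GET_CURRENT() is unchanged by this; as a sketch (with GET_STACK_END()'s own body elided):

        GET_STACK_END(bx)                                   /* %rbx := end of the per-CPU stack */
        movq STACK_CPUINFO_FIELD(current_vcpu)(%rbx), %rbx  /* %rbx := pointer to the current vcpu */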