/* this function must always be used to load data in the segment
cache: it synchronizes the hflags with the segment cache values */
static inline void cpu_x86_load_seg_cache(CPUX86State *env,
- int seg_reg, unsigned int selector,
+ X86Seg seg_reg, unsigned int selector,
target_ulong base,
unsigned int limit,
unsigned int flags)
/* cpu-exec.c */
/* the following helpers are only usable in user mode simulation as
they can trigger unexpected exceptions */
-void cpu_x86_load_seg(CPUX86State *s, int seg_reg, int selector);
+void cpu_x86_load_seg(CPUX86State *s, X86Seg seg_reg, int selector);
void cpu_x86_fsave(CPUX86State *s, target_ulong ptr, int data32);
void cpu_x86_frstor(CPUX86State *s, target_ulong ptr, int data32);
void cpu_x86_fxsave(CPUX86State *s, target_ulong ptr);
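/* Illustrative sketch (not part of the patch): these helpers are meant for
   linux-user style callers, e.g. when a new task's flat segments are set up;
   __USER_CS/__USER_DS are the selector values used by the linux-user code
   and are shown here purely as an example. */
cpu_x86_load_seg(env, R_CS, __USER_CS);
cpu_x86_load_seg(env, R_SS, __USER_DS);
cpu_x86_load_seg(env, R_DS, __USER_DS);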
return 0;
}
-static int x86_cpu_gdb_load_seg(X86CPU *cpu, int sreg, uint8_t *mem_buf)
+static int x86_cpu_gdb_load_seg(X86CPU *cpu, X86Seg sreg, uint8_t *mem_buf)
{
CPUX86State *env = &cpu->env;
uint16_t selector = ldl_p(mem_buf);
}
}
-static void tss_load_seg(CPUX86State *env, int seg_reg, int selector, int cpl,
- uintptr_t retaddr)
+static void tss_load_seg(CPUX86State *env, X86Seg seg_reg, int selector,
+ int cpl, uintptr_t retaddr)
{
uint32_t e1, e2;
int rpl, dpl;
env->hflags2 &= ~HF2_NMI_MASK;
}
-static inline void validate_seg(CPUX86State *env, int seg_reg, int cpl)
+static inline void validate_seg(CPUX86State *env, X86Seg seg_reg, int cpl)
{
int dpl;
uint32_t e2;
}
#if defined(CONFIG_USER_ONLY)
-void cpu_x86_load_seg(CPUX86State *env, int seg_reg, int selector)
+void cpu_x86_load_seg(CPUX86State *env, X86Seg seg_reg, int selector)
{
if (!(env->cr[0] & CR0_PE_MASK) || (env->eflags & VM_MASK)) {
int dpl = (env->eflags & VM_MASK) ? 3 : 0;
}
}
-static inline void gen_op_movl_T0_seg(DisasContext *s, int seg_reg)
+static inline void gen_op_movl_T0_seg(DisasContext *s, X86Seg seg_reg)
{
tcg_gen_ld32u_tl(s->T0, cpu_env,
offsetof(CPUX86State,segs[seg_reg].selector));
}
-static inline void gen_op_movl_seg_T0_vm(DisasContext *s, int seg_reg)
+static inline void gen_op_movl_seg_T0_vm(DisasContext *s, X86Seg seg_reg)
{
tcg_gen_ext16u_tl(s->T0, s->T0);
tcg_gen_st32_tl(s->T0, cpu_env,
/* move T0 to seg_reg and compute whether the CPU state may change. Never
   call this function with seg_reg == R_CS */
-static void gen_movl_seg_T0(DisasContext *s, int seg_reg)
+static void gen_movl_seg_T0(DisasContext *s, X86Seg seg_reg)
{
if (s->pe && !s->vm86) {
tcg_gen_trunc_tl_i32(s->tmp2_i32, s->T0);
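/* Illustrative sketch (not part of the patch): a translator call site such as
   the "mov Sreg, Ev" decode must reject R_CS before reaching this helper.
   reg here is a placeholder for the segment register field decoded from the
   modrm byte. */
if (reg >= 6 || reg == R_CS) {
    goto illegal_op;
}
gen_movl_seg_T0(s, reg);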