static int
protmode_load_seg(
enum x86_segment seg,
- uint16_t sel,
+ uint16_t sel, bool_t is_ret,
struct x86_emulate_ctxt *ctxt,
const struct x86_emulate_ops *ops)
{
/* Code segment? */
if ( !(desc.b & (1u<<11)) )
goto raise_exn;
- /* Non-conforming segment: check DPL against RPL. */
- if ( ((desc.b & (6u<<9)) != (6u<<9)) && (dpl != rpl) )
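+ /* Loading CS: far RET/IRET (is_ret) and other far transfers need different privilege checks. */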
+ if ( is_ret
+ ? /*
+ * Really rpl < cpl, but our sole caller doesn't handle
+ * privilege level changes.
+ */
+ rpl != cpl || (desc.b & (1 << 10) ? dpl > rpl : dpl != rpl)
+ : desc.b & (1 << 10)
+ /* Conforming segment: check DPL against CPL. */
+ ? dpl > cpl
+ /* Non-conforming segment: check RPL and DPL against CPL. */
+ : rpl > cpl || dpl != cpl )
+ goto raise_exn;
+ /* 64-bit code segments (L bit set) must have D bit clear. */
+ if ( in_longmode(ctxt, ops) &&
+ (desc.b & (1 << 21)) && (desc.b & (1 << 22)) )
goto raise_exn;
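+ /* Force the CS selector's RPL (its low two bits) to the current CPL. */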
+ sel = (sel ^ rpl) | cpl;
break;
case x86_seg_ss:
/* Writable data segment? */
static int
load_seg(
enum x86_segment seg,
- uint16_t sel,
+ uint16_t sel, bool_t is_ret,
struct x86_emulate_ctxt *ctxt,
const struct x86_emulate_ops *ops)
{
if ( (ops->read_segment == NULL) || (ops->write_segment == NULL) )
return X86EMUL_UNHANDLEABLE;
if ( in_protmode(ctxt, ops) )
- return protmode_load_seg(seg, sel, ctxt, ops);
+ return protmode_load_seg(seg, sel, is_ret, ctxt, ops);
return realmode_load_seg(seg, sel, ctxt, ops);
}
if ( (rc = read_ulong(x86_seg_ss, sp_post_inc(op_bytes),
&dst.val, op_bytes, ctxt, ops)) != 0 )
goto done;
- if ( (rc = load_seg(src.val, (uint16_t)dst.val, ctxt, ops)) != 0 )
+ if ( (rc = load_seg(src.val, dst.val, 0, ctxt, ops)) != 0 )
return rc;
break;
enum x86_segment seg = decode_segment(modrm_reg);
generate_exception_if(seg == decode_segment_failed, EXC_UD, -1);
generate_exception_if(seg == x86_seg_cs, EXC_UD, -1);
- if ( (rc = load_seg(seg, (uint16_t)src.val, ctxt, ops)) != 0 )
+ if ( (rc = load_seg(seg, src.val, 0, ctxt, ops)) != 0 )
goto done;
if ( seg == x86_seg_ss )
ctxt->retire.flags.mov_ss = 1;
&_regs.eip, op_bytes, ctxt)) )
goto done;
- if ( (rc = load_seg(x86_seg_cs, sel, ctxt, ops)) != 0 )
+ if ( (rc = load_seg(x86_seg_cs, sel, 0, ctxt, ops)) != 0 )
goto done;
_regs.eip = eip;
break;
if ( (rc = read_ulong(src.mem.seg, src.mem.off + src.bytes,
&sel, 2, ctxt, ops)) != 0 )
goto done;
- if ( (rc = load_seg(dst.val, (uint16_t)sel, ctxt, ops)) != 0 )
+ if ( (rc = load_seg(dst.val, sel, 0, ctxt, ops)) != 0 )
goto done;
dst.val = src.val;
break;
&dst.val, op_bytes, ctxt, ops)) ||
(rc = read_ulong(x86_seg_ss, sp_post_inc(op_bytes + offset),
&src.val, op_bytes, ctxt, ops)) ||
- (rc = load_seg(x86_seg_cs, (uint16_t)src.val, ctxt, ops)) )
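+ /* is_ret=1: far RET uses the return form of the CS privilege checks. */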
+ (rc = load_seg(x86_seg_cs, src.val, 1, ctxt, ops)) )
goto done;
_regs.eip = dst.val;
break;
_regs.eflags &= mask;
_regs.eflags |= (uint32_t)(eflags & ~mask) | 0x02;
_regs.eip = eip;
- if ( (rc = load_seg(x86_seg_cs, (uint16_t)cs, ctxt, ops)) != 0 )
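+ /* is_ret=1: IRET uses the same return form of the CS privilege checks. */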
+ if ( (rc = load_seg(x86_seg_cs, cs, 1, ctxt, ops)) != 0 )
goto done;
break;
}
generate_exception_if(mode_64bit(), EXC_UD, -1);
eip = insn_fetch_bytes(op_bytes);
sel = insn_fetch_type(uint16_t);
- if ( (rc = load_seg(x86_seg_cs, sel, ctxt, ops)) != 0 )
+ if ( (rc = load_seg(x86_seg_cs, sel, 0, ctxt, ops)) != 0 )
goto done;
_regs.eip = eip;
break;
goto done;
}
- if ( (rc = load_seg(x86_seg_cs, sel, ctxt, ops)) != 0 )
+ if ( (rc = load_seg(x86_seg_cs, sel, 0, ctxt, ops)) != 0 )
goto done;
_regs.eip = dst.val;
generate_exception_if(!in_protmode(ctxt, ops), EXC_UD, -1);
generate_exception_if(!mode_ring0(), EXC_GP, 0);
if ( (rc = load_seg((modrm_reg & 1) ? x86_seg_tr : x86_seg_ldtr,
- src.val, ctxt, ops)) != 0 )
+ src.val, 0, ctxt, ops)) != 0 )
goto done;
break;