ia64/xen-unstable

changeset 13334:9518bff7c762

[XEN] Fill in some more opcode space in the emulator.
Signed-off-by: Keir Fraser <keir@xensource.com>
author kfraser@localhost.localdomain
date Tue Jan 09 12:04:41 2007 +0000 (2007-01-09)
parents 717f64715f32
children 2dba70eb5bd5
files xen/arch/x86/x86_emulate.c
line diff
     1.1 --- a/xen/arch/x86/x86_emulate.c	Tue Jan 09 11:52:15 2007 +0000
     1.2 +++ b/xen/arch/x86/x86_emulate.c	Tue Jan 09 12:04:41 2007 +0000
     1.3 @@ -62,19 +62,19 @@ static uint8_t opcode_table[256] = {
     1.4      /* 0x20 - 0x27 */
     1.5      ByteOp|DstMem|SrcReg|ModRM, DstMem|SrcReg|ModRM,
     1.6      ByteOp|DstReg|SrcMem|ModRM, DstReg|SrcMem|ModRM,
     1.7 -    ByteOp|DstReg|SrcImm, DstReg|SrcImm, 0, 0,
     1.8 +    ByteOp|DstReg|SrcImm, DstReg|SrcImm, 0, ImplicitOps,
     1.9      /* 0x28 - 0x2F */
    1.10      ByteOp|DstMem|SrcReg|ModRM, DstMem|SrcReg|ModRM,
    1.11      ByteOp|DstReg|SrcMem|ModRM, DstReg|SrcMem|ModRM,
    1.12 -    ByteOp|DstReg|SrcImm, DstReg|SrcImm, 0, 0,
    1.13 +    ByteOp|DstReg|SrcImm, DstReg|SrcImm, 0, ImplicitOps,
    1.14      /* 0x30 - 0x37 */
    1.15      ByteOp|DstMem|SrcReg|ModRM, DstMem|SrcReg|ModRM,
    1.16      ByteOp|DstReg|SrcMem|ModRM, DstReg|SrcMem|ModRM,
    1.17 -    ByteOp|DstReg|SrcImm, DstReg|SrcImm, 0, 0,
    1.18 +    ByteOp|DstReg|SrcImm, DstReg|SrcImm, 0, ImplicitOps,
    1.19      /* 0x38 - 0x3F */
    1.20      ByteOp|DstMem|SrcReg|ModRM, DstMem|SrcReg|ModRM,
    1.21      ByteOp|DstReg|SrcMem|ModRM, DstReg|SrcMem|ModRM,
    1.22 -    ByteOp|DstReg|SrcImm, DstReg|SrcImm, 0, 0,
    1.23 +    ByteOp|DstReg|SrcImm, DstReg|SrcImm, 0, ImplicitOps,
    1.24      /* 0x40 - 0x4F */
    1.25      ImplicitOps, ImplicitOps, ImplicitOps, ImplicitOps,
    1.26      ImplicitOps, ImplicitOps, ImplicitOps, ImplicitOps,
    1.27 @@ -113,7 +113,8 @@ static uint8_t opcode_table[256] = {
    1.28      ByteOp|ImplicitOps|Mov, ImplicitOps|Mov,
    1.29      ByteOp|ImplicitOps|Mov, ImplicitOps|Mov, 0, 0,
    1.30      /* 0xA8 - 0xAF */
    1.31 -    0, 0, ByteOp|ImplicitOps|Mov, ImplicitOps|Mov,
    1.32 +    ByteOp|DstReg|SrcImm, DstReg|SrcImm,
    1.33 +    ByteOp|ImplicitOps|Mov, ImplicitOps|Mov,
    1.34      ByteOp|ImplicitOps|Mov, ImplicitOps|Mov, 0, 0,
    1.35      /* 0xB0 - 0xB7 */
    1.36      ByteOp|DstReg|SrcImm|Mov, ByteOp|DstReg|SrcImm|Mov,
    1.37 @@ -131,7 +132,7 @@ static uint8_t opcode_table[256] = {
    1.38      /* 0xD0 - 0xD7 */
    1.39      ByteOp|DstMem|SrcImplicit|ModRM, DstMem|SrcImplicit|ModRM, 
    1.40      ByteOp|DstMem|SrcImplicit|ModRM, DstMem|SrcImplicit|ModRM, 
    1.41 -    0, 0, 0, 0,
    1.42 +    ImplicitOps, ImplicitOps, ImplicitOps, ImplicitOps,
    1.43      /* 0xD8 - 0xDF */
    1.44      0, 0, 0, 0, 0, 0, 0, 0,
    1.45      /* 0xE0 - 0xE7 */
    1.46 @@ -228,6 +229,9 @@ struct operand {
    1.47  #define EFLG_PF (1<<2)
    1.48  #define EFLG_CF (1<<0)
    1.49  
    1.50 +/* Exception definitions. */
    1.51 +#define EXC_DE 0
    1.52 +
    1.53  /*
    1.54   * Instruction emulation:
    1.55   * Most instructions are emulated directly via a fragment of inline assembly
    1.56 @@ -408,6 +412,23 @@ do{ __asm__ __volatile__ (              
    1.57  
    1.58  #define mode_64bit() (def_ad_bytes == 8)
    1.59  
    1.60 +#define fail_if(p)                              \
    1.61 +do {                                            \
    1.62 +    rc = (p) ? X86EMUL_UNHANDLEABLE : 0;        \
    1.63 +    if ( rc ) goto done;                        \
    1.64 +} while (0)
    1.65 +
    1.66 +/* In future we will be able to generate arbitrary exceptions. */
    1.67 +#define generate_exception_if(p, e) fail_if(p)
    1.68 +
    1.69 +/* Given byte has even parity (even number of 1s)? */
    1.70 +static int even_parity(uint8_t v)
    1.71 +{
    1.72 +    __asm__ ( "test %%al,%%al; setp %%al"
    1.73 +              : "=a" (v) : "0" (v) );
    1.74 +    return v;
    1.75 +}
    1.76 +
    1.77  /* Update address held in a register, based on addressing mode. */
    1.78  #define _register_address_increment(reg, inc, byte_width)               \
    1.79  do {                                                                    \
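
Aside: the even_parity() helper above relies on the x86 PF flag via SETP. As an illustrative sketch only (a hypothetical helper, not part of this changeset), the same test can be written portably by folding the byte's bits:

    /* Sketch: portable equivalent of even_parity(), without inline asm. */
    static int even_parity_portable(uint8_t v)
    {
        v ^= v >> 4;         /* fold the high nibble into the low nibble */
        v ^= v >> 2;
        v ^= v >> 1;         /* bit 0 is now the XOR of all eight bits   */
        return !(v & 1);     /* 1 when the number of set bits is even    */
    }
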
    1.80 @@ -942,6 +963,9 @@ x86_emulate(
    1.81          }
    1.82          break;
    1.83  
    1.84 +    case 0xa8 ... 0xa9: /* test imm,%%eax */
    1.85 +        dst.reg = (unsigned long *)&_regs.eax;
    1.86 +        dst.val = dst.orig_val = _regs.eax;
    1.87      case 0x84 ... 0x85: test: /* test */
    1.88          emulate_2op_SrcV("test", src, dst, _regs.eflags);
    1.89          break;
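
Aside: opcodes 0xA8/0xA9 are TEST AL,imm8 and TEST eAX,imm; the new cases above point dst at %eax and fall through into the shared TEST handler. TEST is an AND evaluated purely for its flag effects. As a hedged sketch (a hypothetical helper using the EFLG_* masks and even_parity() defined earlier in this file, not emulator code), the 8-bit flag computation amounts to:

    /* Sketch: arithmetic flags produced by an 8-bit TEST. */
    static unsigned long test8_flags(uint8_t a, uint8_t b)
    {
        uint8_t r = a & b;                  /* TEST discards this result */
        unsigned long flags = 0;            /* CF and OF are left clear  */
        if ( r == 0 )
            flags |= EFLG_ZF;
        if ( (int8_t)r < 0 )
            flags |= EFLG_SF;
        if ( even_parity(r) )
            flags |= EFLG_PF;
        return flags;
    }
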
    1.90 @@ -960,8 +984,9 @@ x86_emulate(
    1.91          lock_prefix = 1;
    1.92          break;
    1.93  
    1.94 +    case 0xc6 ... 0xc7: /* mov (sole member of Grp11) */
    1.95 +        fail_if((modrm_reg & 7) != 0);
    1.96      case 0x88 ... 0x8b: /* mov */
    1.97 -    case 0xc6 ... 0xc7: /* mov (sole member of Grp11) */
    1.98          dst.val = src.val;
    1.99          break;
   1.100  
   1.101 @@ -970,6 +995,7 @@ x86_emulate(
   1.102          break;
   1.103  
   1.104      case 0x8f: /* pop (sole member of Grp1a) */
   1.105 +        fail_if((modrm_reg & 7) != 0);
   1.106          /* 64-bit mode: POP defaults to a 64-bit operand. */
   1.107          if ( mode_64bit() && (dst.bytes == 4) )
   1.108              dst.bytes = 8;
   1.109 @@ -1056,7 +1082,9 @@ x86_emulate(
   1.110          }
   1.111          break;
   1.112  
   1.113 -    case 0xfe ... 0xff: /* Grp4/Grp5 */
   1.114 +    case 0xfe: /* Grp4 */
   1.115 +        fail_if((modrm_reg & 7) >= 2);
   1.116 +    case 0xff: /* Grp5 */
   1.117          switch ( modrm_reg & 7 )
   1.118          {
   1.119          case 0: /* inc */
   1.120 @@ -1080,6 +1108,8 @@ x86_emulate(
   1.121                  goto done;
   1.122              dst.val = dst.orig_val; /* skanky: disable writeback */
   1.123              break;
   1.124 +        case 7:
   1.125 +            fail_if(1);
   1.126          default:
   1.127              goto cannot_emulate;
   1.128          }
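
Aside: the new fail_if() checks above gate on the ModRM /digit sub-opcode: Grp4 (0xFE) only defines /0 (inc) and /1 (dec), and /7 of Grp5 (0xFF) is undefined. For reference, a minimal sketch (hypothetical helper, not emulator code) of where that sub-opcode lives in the ModRM byte:

    /* Sketch: the /digit sub-opcode is the reg field, bits 5:3 of ModRM
     * (mod is bits 7:6, rm is bits 2:0). */
    static unsigned int modrm_reg_field(uint8_t modrm)
    {
        return (modrm >> 3) & 7;
    }
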
   1.129 @@ -1142,6 +1172,43 @@ x86_emulate(
   1.130  
   1.131      switch ( b )
   1.132      {
   1.133 +    case 0x27: /* daa */ {
   1.134 +        uint8_t al = _regs.eax;
   1.135 +        unsigned long tmp;
   1.136 +        fail_if(mode_64bit());
   1.137 +        __asm__ __volatile__ (
   1.138 +            _PRE_EFLAGS("0","4","2") "daa; " _POST_EFLAGS("0","4","2")
   1.139 +            : "=m" (_regs.eflags), "=a" (al), "=&r" (tmp)
   1.140 +            : "a" (al), "i" (EFLAGS_MASK) );
    1.141 +        *(uint8_t *)&_regs.eax = al;
   1.142 +        break;
   1.143 +    }
   1.144 +
   1.145 +    case 0x2f: /* das */ {
   1.146 +        uint8_t al = _regs.eax;
   1.147 +        unsigned long tmp;
   1.148 +        fail_if(mode_64bit());
   1.149 +        __asm__ __volatile__ (
   1.150 +            _PRE_EFLAGS("0","4","2") "das; " _POST_EFLAGS("0","4","2")
   1.151 +            : "=m" (_regs.eflags), "=a" (al), "=&r" (tmp)
   1.152 +            : "a" (al), "i" (EFLAGS_MASK) );
    1.153 +        *(uint8_t *)&_regs.eax = al;
   1.154 +        break;
   1.155 +    }
   1.156 +
   1.157 +    case 0x37: /* aaa */
   1.158 +    case 0x3f: /* aas */
   1.159 +        fail_if(mode_64bit());
   1.160 +        _regs.eflags &= ~EFLG_CF;
   1.161 +        if ( ((uint8_t)_regs.eax > 9) || (_regs.eflags & EFLG_AF) )
   1.162 +        {
   1.163 +            ((uint8_t *)&_regs.eax)[0] += (b == 0x37) ? 6 : -6;
   1.164 +            ((uint8_t *)&_regs.eax)[1] += (b == 0x37) ? 1 : -1;
   1.165 +            _regs.eflags |= EFLG_CF | EFLG_AF;
   1.166 +        }
   1.167 +        ((uint8_t *)&_regs.eax)[0] &= 0x0f;
   1.168 +        break;
   1.169 +
   1.170      case 0x40 ... 0x4f: /* inc/dec reg */
   1.171          dst.type  = OP_REG;
   1.172          dst.reg   = decode_register(b & 7, &_regs, 0);
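
Aside: the AAA/AAS cases above adjust AL and AH in place and then mask AL to its low nibble. As an illustrative, non-authoritative sketch of the same adjustment pulled out into a hypothetical helper (add is +1 for AAA at 0x37 and -1 for AAS at 0x3F; flag handling beyond CF/AF is omitted, as in the code above):

    /* Sketch: the unpacked-BCD adjustment performed by AAA/AAS on AX. */
    static uint16_t ascii_adjust(uint16_t ax, int add, unsigned long *eflags)
    {
        uint8_t al = (uint8_t)ax, ah = (uint8_t)(ax >> 8);
        *eflags &= ~EFLG_CF;
        if ( (al > 9) || (*eflags & EFLG_AF) )
        {
            al += 6 * add;                 /* +6 for AAA, -6 for AAS    */
            ah += add;                     /* carry into/borrow from AH */
            *eflags |= EFLG_CF | EFLG_AF;
        }
        al &= 0x0f;
        return (uint16_t)((ah << 8) | al);
    }
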
   1.173 @@ -1253,6 +1320,45 @@ x86_emulate(
   1.174              _regs.esi, (_regs.eflags & EFLG_DF) ? -dst.bytes : dst.bytes);
   1.175          break;
   1.176  
   1.177 +    case 0xd4: /* aam */ {
   1.178 +        unsigned int base = insn_fetch_type(uint8_t);
   1.179 +        uint8_t al = _regs.eax;
   1.180 +        fail_if(mode_64bit());
   1.181 +        generate_exception_if(base == 0, EXC_DE);
   1.182 +        *(uint16_t *)&_regs.eax = ((al / base) << 8) | (al % base);
   1.183 +        _regs.eflags &= ~(EFLG_SF|EFLG_ZF|EFLG_PF);
   1.184 +        _regs.eflags |= ((uint8_t)_regs.eax == 0) ? EFLG_ZF : 0;
   1.185 +        _regs.eflags |= (( int8_t)_regs.eax <  0) ? EFLG_SF : 0;
   1.186 +        _regs.eflags |= even_parity(_regs.eax) ? EFLG_PF : 0;
   1.187 +        break;
   1.188 +    }
   1.189 +
   1.190 +    case 0xd5: /* aad */ {
   1.191 +        unsigned int base = insn_fetch_type(uint8_t);
   1.192 +        uint16_t ax = _regs.eax;
   1.193 +        fail_if(mode_64bit());
   1.194 +        *(uint16_t *)&_regs.eax = (uint8_t)(ax + ((ax >> 8) * base));
   1.195 +        _regs.eflags &= ~(EFLG_SF|EFLG_ZF|EFLG_PF);
   1.196 +        _regs.eflags |= ((uint8_t)_regs.eax == 0) ? EFLG_ZF : 0;
   1.197 +        _regs.eflags |= (( int8_t)_regs.eax <  0) ? EFLG_SF : 0;
   1.198 +        _regs.eflags |= even_parity(_regs.eax) ? EFLG_PF : 0;
   1.199 +        break;
   1.200 +    }
   1.201 +
   1.202 +    case 0xd6: /* salc */
   1.203 +        fail_if(mode_64bit());
   1.204 +        *(uint8_t *)&_regs.eax = (_regs.eflags & EFLG_CF) ? 0xff : 0x00;
   1.205 +        break;
   1.206 +
   1.207 +    case 0xd7: /* xlat */ {
   1.208 +        unsigned long al = (uint8_t)_regs.eax;
   1.209 +        if ( (rc = ops->read(ea.mem.seg, truncate_ea(_regs.ebx + al),
   1.210 +                             &al, 1, ctxt)) != 0 )
   1.211 +            goto done;
   1.212 +        *(uint8_t *)&_regs.eax = al;
   1.213 +        break;
   1.214 +    }
   1.215 +
   1.216      case 0xe3: /* jcxz/jecxz (short) */ {
   1.217          int rel = insn_fetch_type(int8_t);
   1.218          if ( (ad_bytes == 2) ? !(uint16_t)_regs.ecx :