xen/arch/x86/x86_emulate.c @ 13283:a22258c9fe75

[XEN] Remove an out-of-date comment.
Signed-off-by: Keir Fraser <keir@xensource.com>
author: kfraser@localhost.localdomain
date:   Fri Jan 05 15:57:23 2007 +0000
/******************************************************************************
 * x86_emulate.c
 *
 * Generic x86 (32-bit and 64-bit) instruction decoder and emulator.
 *
 * Copyright (c) 2005 Keir Fraser
 */
#ifndef __XEN__
#include <stddef.h>
#include <stdint.h>
#include <public/xen.h>
#else
#include <xen/config.h>
#include <xen/types.h>
#include <xen/lib.h>
#include <asm/regs.h>
#undef cmpxchg
#endif
#include <asm-x86/x86_emulate.h>
/* Operand sizes: 8-bit operands or specified/overridden size. */
#define ByteOp      (1<<0) /* 8-bit operands. */
/* Destination operand type. */
#define DstBitBase  (0<<1) /* Memory operand, bit string. */
#define ImplicitOps (1<<1) /* Implicit in opcode. No generic decode. */
#define DstReg      (2<<1) /* Register operand. */
#define DstMem      (3<<1) /* Memory operand. */
#define DstMask     (3<<1)
/* Source operand type. */
#define SrcNone     (0<<3) /* No source operand. */
#define SrcImplicit (0<<3) /* Source operand is implicit in the opcode. */
#define SrcReg      (1<<3) /* Register operand. */
#define SrcMem      (2<<3) /* Memory operand. */
#define SrcMem16    (3<<3) /* Memory operand (16-bit). */
#define SrcMem32    (4<<3) /* Memory operand (32-bit). */
#define SrcImm      (5<<3) /* Immediate operand. */
#define SrcImmByte  (6<<3) /* 8-bit sign-extended immediate operand. */
#define SrcMask     (7<<3)
/* Generic ModRM decode. */
#define ModRM       (1<<6)
/* Destination is only written; never read. */
#define Mov         (1<<7)
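
/*
 * Illustrative bit layout: all of the above pack into one byte, e.g.
 * ByteOp|DstMem|SrcReg|ModRM == 0x4F (bit 0 set, dst field == 3,
 * src field == 1, ModRM bit set).
 */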
static uint8_t opcode_table[256] = {
    /* 0x00 - 0x07 */
    ByteOp|DstMem|SrcReg|ModRM, DstMem|SrcReg|ModRM,
    ByteOp|DstReg|SrcMem|ModRM, DstReg|SrcMem|ModRM,
    ByteOp|DstReg|SrcImm, DstReg|SrcImm, 0, 0,
    /* 0x08 - 0x0F */
    ByteOp|DstMem|SrcReg|ModRM, DstMem|SrcReg|ModRM,
    ByteOp|DstReg|SrcMem|ModRM, DstReg|SrcMem|ModRM,
    ByteOp|DstReg|SrcImm, DstReg|SrcImm, 0, 0,
    /* 0x10 - 0x17 */
    ByteOp|DstMem|SrcReg|ModRM, DstMem|SrcReg|ModRM,
    ByteOp|DstReg|SrcMem|ModRM, DstReg|SrcMem|ModRM,
    ByteOp|DstReg|SrcImm, DstReg|SrcImm, 0, 0,
    /* 0x18 - 0x1F */
    ByteOp|DstMem|SrcReg|ModRM, DstMem|SrcReg|ModRM,
    ByteOp|DstReg|SrcMem|ModRM, DstReg|SrcMem|ModRM,
    ByteOp|DstReg|SrcImm, DstReg|SrcImm, 0, 0,
    /* 0x20 - 0x27 */
    ByteOp|DstMem|SrcReg|ModRM, DstMem|SrcReg|ModRM,
    ByteOp|DstReg|SrcMem|ModRM, DstReg|SrcMem|ModRM,
    ByteOp|DstReg|SrcImm, DstReg|SrcImm, 0, 0,
    /* 0x28 - 0x2F */
    ByteOp|DstMem|SrcReg|ModRM, DstMem|SrcReg|ModRM,
    ByteOp|DstReg|SrcMem|ModRM, DstReg|SrcMem|ModRM,
    ByteOp|DstReg|SrcImm, DstReg|SrcImm, 0, 0,
    /* 0x30 - 0x37 */
    ByteOp|DstMem|SrcReg|ModRM, DstMem|SrcReg|ModRM,
    ByteOp|DstReg|SrcMem|ModRM, DstReg|SrcMem|ModRM,
    ByteOp|DstReg|SrcImm, DstReg|SrcImm, 0, 0,
    /* 0x38 - 0x3F */
    ByteOp|DstMem|SrcReg|ModRM, DstMem|SrcReg|ModRM,
    ByteOp|DstReg|SrcMem|ModRM, DstReg|SrcMem|ModRM,
    ByteOp|DstReg|SrcImm, DstReg|SrcImm, 0, 0,
    /* 0x40 - 0x4F */
    ImplicitOps, ImplicitOps, ImplicitOps, ImplicitOps,
    ImplicitOps, ImplicitOps, ImplicitOps, ImplicitOps,
    ImplicitOps, ImplicitOps, ImplicitOps, ImplicitOps,
    ImplicitOps, ImplicitOps, ImplicitOps, ImplicitOps,
    /* 0x50 - 0x5F */
    ImplicitOps|Mov, ImplicitOps|Mov, ImplicitOps|Mov, ImplicitOps|Mov,
    ImplicitOps|Mov, ImplicitOps|Mov, ImplicitOps|Mov, ImplicitOps|Mov,
    ImplicitOps|Mov, ImplicitOps|Mov, ImplicitOps|Mov, ImplicitOps|Mov,
    ImplicitOps|Mov, ImplicitOps|Mov, ImplicitOps|Mov, ImplicitOps|Mov,
    /* 0x60 - 0x6F */
    0, 0, 0, DstReg|SrcMem32|ModRM|Mov /* movsxd (x86/64) */,
    0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
    /* 0x70 - 0x7F */
    0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
    /* 0x80 - 0x87 */
    ByteOp|DstMem|SrcImm|ModRM, DstMem|SrcImm|ModRM,
    ByteOp|DstMem|SrcImm|ModRM, DstMem|SrcImmByte|ModRM,
    ByteOp|DstMem|SrcReg|ModRM, DstMem|SrcReg|ModRM,
    ByteOp|DstMem|SrcReg|ModRM, DstMem|SrcReg|ModRM,
    /* 0x88 - 0x8F */
    ByteOp|DstMem|SrcReg|ModRM|Mov, DstMem|SrcReg|ModRM|Mov,
    ByteOp|DstReg|SrcMem|ModRM|Mov, DstReg|SrcMem|ModRM|Mov,
    0, DstReg|SrcNone|ModRM, 0, DstMem|SrcNone|ModRM|Mov,
    /* 0x90 - 0x97 */
    ImplicitOps, ImplicitOps, ImplicitOps, ImplicitOps,
    ImplicitOps, ImplicitOps, ImplicitOps, ImplicitOps,
    /* 0x98 - 0x9F */
    0, 0, 0, 0, 0, 0, 0, 0,
    /* 0xA0 - 0xA7 */
    ByteOp|ImplicitOps|Mov, ImplicitOps|Mov,
    ByteOp|ImplicitOps|Mov, ImplicitOps|Mov,
    ByteOp|ImplicitOps|Mov, ImplicitOps|Mov, 0, 0,
    /* 0xA8 - 0xAF */
    0, 0, ByteOp|ImplicitOps|Mov, ImplicitOps|Mov,
    ByteOp|ImplicitOps|Mov, ImplicitOps|Mov, 0, 0,
    /* 0xB0 - 0xBF */
    0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
    /* 0xC0 - 0xC7 */
    ByteOp|DstMem|SrcImm|ModRM, DstMem|SrcImmByte|ModRM, 0, 0,
    0, 0, ByteOp|DstMem|SrcImm|ModRM|Mov, DstMem|SrcImm|ModRM|Mov,
    /* 0xC8 - 0xCF */
    0, 0, 0, 0, 0, 0, 0, 0,
    /* 0xD0 - 0xD7 */
    ByteOp|DstMem|SrcImplicit|ModRM, DstMem|SrcImplicit|ModRM,
    ByteOp|DstMem|SrcImplicit|ModRM, DstMem|SrcImplicit|ModRM,
    0, 0, 0, 0,
    /* 0xD8 - 0xDF */
    0, 0, 0, 0, 0, 0, 0, 0,
    /* 0xE0 - 0xEF */
    0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
    /* 0xF0 - 0xF7 */
    0, 0, 0, 0,
    0, ImplicitOps, ByteOp|DstMem|SrcNone|ModRM, DstMem|SrcNone|ModRM,
    /* 0xF8 - 0xFF */
    ImplicitOps, ImplicitOps, 0, 0,
    ImplicitOps, ImplicitOps, ByteOp|DstMem|SrcNone|ModRM, DstMem|SrcNone|ModRM
};
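
/*
 * Example lookup (for illustration): opcode 0x88 (MOV r/m8,r8) decodes via
 * opcode_table[0x88] == ByteOp|DstMem|SrcReg|ModRM|Mov, so the generic
 * decode below fetches a ModRM byte and treats the destination as
 * write-only memory.
 */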
static uint8_t twobyte_table[256] = {
    /* 0x00 - 0x0F */
    0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, ImplicitOps|ModRM, 0, 0,
    /* 0x10 - 0x1F */
    0, 0, 0, 0, 0, 0, 0, 0, ImplicitOps|ModRM, 0, 0, 0, 0, 0, 0, 0,
    /* 0x20 - 0x2F */
    0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
    /* 0x30 - 0x3F */
    0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
    /* 0x40 - 0x47 */
    DstReg|SrcMem|ModRM|Mov, DstReg|SrcMem|ModRM|Mov,
    DstReg|SrcMem|ModRM|Mov, DstReg|SrcMem|ModRM|Mov,
    DstReg|SrcMem|ModRM|Mov, DstReg|SrcMem|ModRM|Mov,
    DstReg|SrcMem|ModRM|Mov, DstReg|SrcMem|ModRM|Mov,
    /* 0x48 - 0x4F */
    DstReg|SrcMem|ModRM|Mov, DstReg|SrcMem|ModRM|Mov,
    DstReg|SrcMem|ModRM|Mov, DstReg|SrcMem|ModRM|Mov,
    DstReg|SrcMem|ModRM|Mov, DstReg|SrcMem|ModRM|Mov,
    DstReg|SrcMem|ModRM|Mov, DstReg|SrcMem|ModRM|Mov,
    /* 0x50 - 0x5F */
    0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
    /* 0x60 - 0x6F */
    0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
    /* 0x70 - 0x7F */
    0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
    /* 0x80 - 0x8F */
    0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
    /* 0x90 - 0x9F */
    0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
    /* 0xA0 - 0xA7 */
    0, 0, 0, DstBitBase|SrcReg|ModRM, 0, 0, 0, 0,
    /* 0xA8 - 0xAF */
    0, 0, 0, DstBitBase|SrcReg|ModRM, 0, 0, 0, 0,
    /* 0xB0 - 0xB7 */
    ByteOp|DstMem|SrcReg|ModRM, DstMem|SrcReg|ModRM,
    0, DstBitBase|SrcReg|ModRM,
    0, 0, ByteOp|DstReg|SrcMem|ModRM|Mov, DstReg|SrcMem16|ModRM|Mov,
    /* 0xB8 - 0xBF */
    0, 0, DstBitBase|SrcImmByte|ModRM, DstBitBase|SrcReg|ModRM,
    0, 0, ByteOp|DstReg|SrcMem|ModRM|Mov, DstReg|SrcMem16|ModRM|Mov,
    /* 0xC0 - 0xC7 */
    ByteOp|DstMem|SrcReg|ModRM, DstMem|SrcReg|ModRM, 0, 0,
    0, 0, 0, ImplicitOps|ModRM,
    /* 0xC8 - 0xCF */
    0, 0, 0, 0, 0, 0, 0, 0,
    /* 0xD0 - 0xDF */
    0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
    /* 0xE0 - 0xEF */
    0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
    /* 0xF0 - 0xFF */
    0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0
};
/* Type, address-of, and value of an instruction's operand. */
struct operand {
    enum { OP_REG, OP_MEM, OP_IMM } type;
    unsigned int  bytes;
    unsigned long val, orig_val;
    union {
        /* OP_REG: Pointer to register field. */
        unsigned long *reg;
        /* OP_MEM: Segment and offset. */
        struct {
            enum x86_segment seg;
            unsigned long    off;
        } mem;
    };
};
/* EFLAGS bit definitions. */
#define EFLG_OF (1<<11)
#define EFLG_DF (1<<10)
#define EFLG_SF (1<<7)
#define EFLG_ZF (1<<6)
#define EFLG_AF (1<<4)
#define EFLG_PF (1<<2)
#define EFLG_CF (1<<0)
/*
 * Instruction emulation:
 * Most instructions are emulated directly via a fragment of inline assembly
 * code. This allows us to save/restore EFLAGS and thus very easily pick up
 * any modified flags.
 */

#if defined(__x86_64__)
#define _LO32 "k"          /* force 32-bit operand */
#define _STK  "%%rsp"      /* stack pointer */
#elif defined(__i386__)
#define _LO32 ""           /* force 32-bit operand */
#define _STK  "%%esp"      /* stack pointer */
#endif

/*
 * These EFLAGS bits are restored from saved value during emulation, and
 * any changes are written back to the saved value after emulation.
 */
#define EFLAGS_MASK (EFLG_OF|EFLG_SF|EFLG_ZF|EFLG_AF|EFLG_PF|EFLG_CF)
/* Before executing instruction: restore necessary bits in EFLAGS. */
#define _PRE_EFLAGS(_sav, _msk, _tmp)                   \
/* EFLAGS = (_sav & _msk) | (EFLAGS & ~_msk); */        \
"push %"_sav"; "                                        \
"movl %"_msk",%"_LO32 _tmp"; "                          \
"andl %"_LO32 _tmp",("_STK"); "                         \
"pushf; "                                               \
"notl %"_LO32 _tmp"; "                                  \
"andl %"_LO32 _tmp",("_STK"); "                         \
"pop %"_tmp"; "                                         \
"orl %"_LO32 _tmp",("_STK"); "                          \
"popf; "                                                \
/* _sav &= ~msk; */                                     \
"movl %"_msk",%"_LO32 _tmp"; "                          \
"notl %"_LO32 _tmp"; "                                  \
"andl %"_LO32 _tmp",%"_sav"; "

/* After executing instruction: write-back necessary bits in EFLAGS. */
#define _POST_EFLAGS(_sav, _msk, _tmp)  \
/* _sav |= EFLAGS & _msk; */            \
"pushf; "                               \
"pop %"_tmp"; "                         \
"andl %"_msk",%"_LO32 _tmp"; "          \
"orl %"_LO32 _tmp",%"_sav"; "
/* Raw emulation: instruction has two explicit operands. */
#define __emulate_2op_nobyte(_op,_src,_dst,_eflags,_wx,_wy,_lx,_ly,_qx,_qy)\
do{ unsigned long _tmp;                                                    \
    switch ( (_dst).bytes )                                                \
    {                                                                      \
    case 2:                                                                \
        __asm__ __volatile__ (                                             \
            _PRE_EFLAGS("0","4","2")                                       \
            _op"w %"_wx"3,%1; "                                            \
            _POST_EFLAGS("0","4","2")                                      \
            : "=m" (_eflags), "=m" ((_dst).val), "=&r" (_tmp)              \
            : _wy ((_src).val), "i" (EFLAGS_MASK) );                       \
        break;                                                             \
    case 4:                                                                \
        __asm__ __volatile__ (                                             \
            _PRE_EFLAGS("0","4","2")                                       \
            _op"l %"_lx"3,%1; "                                            \
            _POST_EFLAGS("0","4","2")                                      \
            : "=m" (_eflags), "=m" ((_dst).val), "=&r" (_tmp)              \
            : _ly ((_src).val), "i" (EFLAGS_MASK) );                       \
        break;                                                             \
    case 8:                                                                \
        __emulate_2op_8byte(_op, _src, _dst, _eflags, _qx, _qy);           \
        break;                                                             \
    }                                                                      \
} while (0)
#define __emulate_2op(_op,_src,_dst,_eflags,_bx,_by,_wx,_wy,_lx,_ly,_qx,_qy)\
do{ unsigned long _tmp;                                                    \
    switch ( (_dst).bytes )                                                \
    {                                                                      \
    case 1:                                                                \
        __asm__ __volatile__ (                                             \
            _PRE_EFLAGS("0","4","2")                                       \
            _op"b %"_bx"3,%1; "                                            \
            _POST_EFLAGS("0","4","2")                                      \
            : "=m" (_eflags), "=m" ((_dst).val), "=&r" (_tmp)              \
            : _by ((_src).val), "i" (EFLAGS_MASK) );                       \
        break;                                                             \
    default:                                                               \
        __emulate_2op_nobyte(_op,_src,_dst,_eflags,_wx,_wy,_lx,_ly,_qx,_qy);\
        break;                                                             \
    }                                                                      \
} while (0)
/* Source operand is byte-sized and may be restricted to just %cl. */
#define emulate_2op_SrcB(_op, _src, _dst, _eflags)      \
    __emulate_2op(_op, _src, _dst, _eflags,             \
                  "b", "c", "b", "c", "b", "c", "b", "c")
/* Source operand is byte, word, long or quad sized. */
#define emulate_2op_SrcV(_op, _src, _dst, _eflags)      \
    __emulate_2op(_op, _src, _dst, _eflags,             \
                  "b", "q", "w", "r", _LO32, "r", "", "r")
/* Source operand is word, long or quad sized. */
#define emulate_2op_SrcV_nobyte(_op, _src, _dst, _eflags) \
    __emulate_2op_nobyte(_op, _src, _dst, _eflags,        \
                         "w", "r", _LO32, "r", "", "r")
/* Instruction has only one explicit operand (no source operand). */
#define emulate_1op(_op,_dst,_eflags)                                      \
do{ unsigned long _tmp;                                                    \
    switch ( (_dst).bytes )                                                \
    {                                                                      \
    case 1:                                                                \
        __asm__ __volatile__ (                                             \
            _PRE_EFLAGS("0","3","2")                                       \
            _op"b %1; "                                                    \
            _POST_EFLAGS("0","3","2")                                      \
            : "=m" (_eflags), "=m" ((_dst).val), "=&r" (_tmp)              \
            : "i" (EFLAGS_MASK) );                                         \
        break;                                                             \
    case 2:                                                                \
        __asm__ __volatile__ (                                             \
            _PRE_EFLAGS("0","3","2")                                       \
            _op"w %1; "                                                    \
            _POST_EFLAGS("0","3","2")                                      \
            : "=m" (_eflags), "=m" ((_dst).val), "=&r" (_tmp)              \
            : "i" (EFLAGS_MASK) );                                         \
        break;                                                             \
    case 4:                                                                \
        __asm__ __volatile__ (                                             \
            _PRE_EFLAGS("0","3","2")                                       \
            _op"l %1; "                                                    \
            _POST_EFLAGS("0","3","2")                                      \
            : "=m" (_eflags), "=m" ((_dst).val), "=&r" (_tmp)              \
            : "i" (EFLAGS_MASK) );                                         \
        break;                                                             \
    case 8:                                                                \
        __emulate_1op_8byte(_op, _dst, _eflags);                           \
        break;                                                             \
    }                                                                      \
} while (0)
/* Emulate an instruction with quadword operands (x86/64 only). */
#if defined(__x86_64__)
#define __emulate_2op_8byte(_op, _src, _dst, _eflags, _qx, _qy)         \
do{ __asm__ __volatile__ (                                              \
        _PRE_EFLAGS("0","4","2")                                        \
        _op"q %"_qx"3,%1; "                                             \
        _POST_EFLAGS("0","4","2")                                       \
        : "=m" (_eflags), "=m" ((_dst).val), "=&r" (_tmp)               \
        : _qy ((_src).val), "i" (EFLAGS_MASK) );                        \
} while (0)
#define __emulate_1op_8byte(_op, _dst, _eflags)                         \
do{ __asm__ __volatile__ (                                              \
        _PRE_EFLAGS("0","3","2")                                        \
        _op"q %1; "                                                     \
        _POST_EFLAGS("0","3","2")                                       \
        : "=m" (_eflags), "=m" ((_dst).val), "=&r" (_tmp)               \
        : "i" (EFLAGS_MASK) );                                          \
} while (0)
#elif defined(__i386__)
#define __emulate_2op_8byte(_op, _src, _dst, _eflags, _qx, _qy)
#define __emulate_1op_8byte(_op, _dst, _eflags)
#endif /* __i386__ */
/* Fetch next part of the instruction being emulated. */
#define insn_fetch_bytes(_size)                                         \
({ unsigned long _x;                                                    \
   rc = ops->insn_fetch(x86_seg_cs, _regs.eip, &_x, (_size), ctxt);     \
   if ( rc != 0 )                                                       \
       goto done;                                                       \
   _regs.eip += (_size);                                                \
   _x;                                                                  \
})
#define insn_fetch_type(_type) ((_type)insn_fetch_bytes(sizeof(_type)))
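
/*
 * e.g. a sign-extended 16-bit displacement is pulled from the instruction
 * stream with insn_fetch_type(int16_t); the macro also advances _regs.eip
 * past the bytes it consumed and bails out to 'done' on a fetch failure.
 */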
#define truncate_ea(ea)                                 \
({  unsigned long __ea = (ea);                          \
    ((ad_bytes == sizeof(unsigned long)) ? __ea :       \
     (__ea & ((1UL << (ad_bytes << 3)) - 1)));          \
})
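
/*
 * e.g. with ad_bytes == 2, truncate_ea(0x12345678) == 0x5678; when
 * ad_bytes matches the host word size the address passes through unchanged.
 */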
/* Update address held in a register, based on addressing mode. */
#define register_address_increment(reg, inc)                            \
do {                                                                    \
    int _inc = (inc); /* signed type ensures sign extension to long */  \
    if ( ad_bytes == sizeof(unsigned long) )                            \
        (reg) += _inc;                                                  \
    else                                                                \
        (reg) = ((reg) & ~((1UL << (ad_bytes << 3)) - 1)) |             \
                (((reg) + _inc) & ((1UL << (ad_bytes << 3)) - 1));      \
} while (0)
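
/*
 * e.g. with ad_bytes == 2, incrementing SP == 0xffff by 2 wraps the low
 * word to 0x0001 while leaving the upper bits of the register untouched.
 */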
void *
decode_register(
    uint8_t modrm_reg, struct cpu_user_regs *regs, int highbyte_regs)
{
    void *p;

    switch ( modrm_reg )
    {
    case  0: p = &regs->eax; break;
    case  1: p = &regs->ecx; break;
    case  2: p = &regs->edx; break;
    case  3: p = &regs->ebx; break;
    case  4: p = (highbyte_regs ?
                  ((unsigned char *)&regs->eax + 1) :
                  (unsigned char *)&regs->esp); break;
    case  5: p = (highbyte_regs ?
                  ((unsigned char *)&regs->ecx + 1) :
                  (unsigned char *)&regs->ebp); break;
    case  6: p = (highbyte_regs ?
                  ((unsigned char *)&regs->edx + 1) :
                  (unsigned char *)&regs->esi); break;
    case  7: p = (highbyte_regs ?
                  ((unsigned char *)&regs->ebx + 1) :
                  (unsigned char *)&regs->edi); break;
#if defined(__x86_64__)
    case  8: p = &regs->r8;  break;
    case  9: p = &regs->r9;  break;
    case 10: p = &regs->r10; break;
    case 11: p = &regs->r11; break;
    case 12: p = &regs->r12; break;
    case 13: p = &regs->r13; break;
    case 14: p = &regs->r14; break;
    case 15: p = &regs->r15; break;
#endif
    default: p = NULL; break;
    }

    return p;
}
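
/*
 * Usage examples: decode_register(1, regs, 0) yields &regs->ecx, while
 * decode_register(4, regs, 1) yields the address of AH (byte 1 of
 * regs->eax), matching the legacy high-byte register encodings.
 */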
int
x86_emulate(
    struct x86_emulate_ctxt *ctxt,
    struct x86_emulate_ops  *ops)
{
    /* Shadow copy of register state. Committed on successful emulation. */
    struct cpu_user_regs _regs = *ctxt->regs;

    uint8_t b, d, sib, sib_index, sib_base, twobyte = 0, rex_prefix = 0;
    uint8_t modrm, modrm_mod = 0, modrm_reg = 0, modrm_rm = 0;
    unsigned int op_bytes, ad_bytes, lock_prefix = 0, rep_prefix = 0, i;
    int rc = 0;
    struct operand src, dst;
    int mode = ctxt->mode;

    /* Data operand effective address (usually computed from ModRM). */
    struct operand ea;

    /* Default is a memory operand relative to segment DS. */
    ea.type    = OP_MEM;
    ea.mem.seg = x86_seg_ds;
    ea.mem.off = 0;

    switch ( mode )
    {
    case X86EMUL_MODE_REAL:
    case X86EMUL_MODE_PROT16:
        op_bytes = ad_bytes = 2;
        break;
    case X86EMUL_MODE_PROT32:
        op_bytes = ad_bytes = 4;
        break;
#ifdef __x86_64__
    case X86EMUL_MODE_PROT64:
        op_bytes = 4;
        ad_bytes = 8;
        break;
#endif
    default:
        return -1;
    }
    /* Prefix bytes. */
    for ( i = 0; i < 8; i++ )
    {
        switch ( b = insn_fetch_type(uint8_t) )
        {
        case 0x66: /* operand-size override */
            op_bytes ^= 6;      /* switch between 2/4 bytes */
            break;
        case 0x67: /* address-size override */
            if ( mode == X86EMUL_MODE_PROT64 )
                ad_bytes ^= 12; /* switch between 4/8 bytes */
            else
                ad_bytes ^= 6;  /* switch between 2/4 bytes */
            break;
        case 0x2e: /* CS override */
            ea.mem.seg = x86_seg_cs;
            break;
        case 0x3e: /* DS override */
            ea.mem.seg = x86_seg_ds;
            break;
        case 0x26: /* ES override */
            ea.mem.seg = x86_seg_es;
            break;
        case 0x64: /* FS override */
            ea.mem.seg = x86_seg_fs;
            break;
        case 0x65: /* GS override */
            ea.mem.seg = x86_seg_gs;
            break;
        case 0x36: /* SS override */
            ea.mem.seg = x86_seg_ss;
            break;
        case 0xf0: /* LOCK */
            lock_prefix = 1;
            break;
        case 0xf2: /* REPNE/REPNZ */
        case 0xf3: /* REP/REPE/REPZ */
            rep_prefix = 1;
            break;
        case 0x40 ... 0x4f: /* REX */
            if ( mode != X86EMUL_MODE_PROT64 )
                goto done_prefixes;
            rex_prefix = b;
            continue;
        default:
            goto done_prefixes;
        }

        /* Any legacy prefix after a REX prefix nullifies its effect. */
        rex_prefix = 0;
    }
 done_prefixes:

    if ( rex_prefix & 8 ) /* REX.W */
        op_bytes = 8;
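
    /*
     * e.g. the byte sequence 66 89 08 in 32-bit mode: the 0x66 prefix flips
     * op_bytes from 4 to 2, leaving "mov %cx,(%eax)" for the decode below.
     */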
    /* Opcode byte(s). */
    d = opcode_table[b];
    if ( d == 0 )
    {
        /* Two-byte opcode? */
        if ( b == 0x0f )
        {
            twobyte = 1;
            b = insn_fetch_type(uint8_t);
            d = twobyte_table[b];
        }

        /* Unrecognised? */
        if ( d == 0 )
            goto cannot_emulate;
    }
    /* ModRM and SIB bytes. */
    if ( d & ModRM )
    {
        modrm = insn_fetch_type(uint8_t);
        modrm_mod = (modrm & 0xc0) >> 6;
        modrm_reg = ((rex_prefix & 4) << 1) | ((modrm & 0x38) >> 3);
        modrm_rm  = modrm & 0x07;

        if ( modrm_mod == 3 )
        {
            modrm_rm |= (rex_prefix & 1) << 3;
            ea.type = OP_REG;
            ea.reg  = decode_register(
                modrm_rm, &_regs, (d & ByteOp) && (rex_prefix == 0));
        }
        else if ( ad_bytes == 2 )
        {
            /* 16-bit ModR/M decode. */
            switch ( modrm_rm )
            {
            case 0: ea.mem.off = _regs.ebx + _regs.esi; break;
            case 1: ea.mem.off = _regs.ebx + _regs.edi; break;
            case 2: ea.mem.off = _regs.ebp + _regs.esi; break;
            case 3: ea.mem.off = _regs.ebp + _regs.edi; break;
            case 4: ea.mem.off = _regs.esi; break;
            case 5: ea.mem.off = _regs.edi; break;
            case 6: ea.mem.off = _regs.ebp; break;
            case 7: ea.mem.off = _regs.ebx; break;
            }
            switch ( modrm_mod )
            {
            case 0:
                if ( modrm_rm == 6 )
                    ea.mem.off = insn_fetch_type(int16_t);
                break;
            case 1:
                ea.mem.off += insn_fetch_type(int8_t);
                break;
            case 2:
                ea.mem.off += insn_fetch_type(int16_t);
                break;
            }
            ea.mem.off = truncate_ea(ea.mem.off);
        }
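        /*
         * Worked example: modrm bytes 46 12 (mod=1, rm=6) decode above to
         * ea.mem.off = bp + 0x12, truncated to 16 bits.
         */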
        else
        {
            /* 32/64-bit ModR/M decode. */
            if ( modrm_rm == 4 )
            {
                sib = insn_fetch_type(uint8_t);
                sib_index = ((sib >> 3) & 7) | ((rex_prefix << 2) & 8);
                sib_base  = (sib & 7) | ((rex_prefix << 3) & 8);
                if ( sib_index != 4 )
                    ea.mem.off = *(long*)decode_register(sib_index, &_regs, 0);
                ea.mem.off <<= (sib >> 6) & 3;
                if ( (modrm_mod == 0) && ((sib_base & 7) == 5) )
                    ea.mem.off += insn_fetch_type(int32_t);
                else if ( (sib_base == 4) && !twobyte && (b == 0x8f) )
                    /* POP <rm> must have its EA calculated post increment. */
                    ea.mem.off += _regs.esp +
                        (((mode == X86EMUL_MODE_PROT64) && (op_bytes == 4))
                         ? 8 : op_bytes);
                else
                    ea.mem.off += *(long*)decode_register(sib_base, &_regs, 0);
            }
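            /*
             * e.g. SIB byte 0x88 (scale=2, index=1, base=0) decodes above to
             * ea.mem.off = eax + ecx*4, before any displacement is added.
             */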
            else
            {
                modrm_rm |= (rex_prefix & 1) << 3;
                ea.mem.off = *(long *)decode_register(modrm_rm, &_regs, 0);
            }
            switch ( modrm_mod )
            {
            case 0:
                if ( (modrm_rm & 7) != 5 )
                    break;
                ea.mem.off = insn_fetch_type(int32_t);
                if ( mode != X86EMUL_MODE_PROT64 )
                    break;
                /* Relative to RIP of next instruction. Argh! */
                ea.mem.off += _regs.eip;
                if ( (d & SrcMask) == SrcImm )
                    ea.mem.off += (d & ByteOp) ? 1 :
                        ((op_bytes == 8) ? 4 : op_bytes);
                else if ( (d & SrcMask) == SrcImmByte )
                    ea.mem.off += 1;
                else if ( ((b == 0xf6) || (b == 0xf7)) &&
                          ((modrm_reg & 7) <= 1) )
                    /* Special case in Grp3: test has immediate operand. */
                    ea.mem.off += (d & ByteOp) ? 1
                        : ((op_bytes == 8) ? 4 : op_bytes);
                break;
            case 1:
                ea.mem.off += insn_fetch_type(int8_t);
                break;
            case 2:
                ea.mem.off += insn_fetch_type(int32_t);
                break;
            }
            ea.mem.off = truncate_ea(ea.mem.off);
        }
    }

    /* Special instructions do their own operand decoding. */
    if ( (d & DstMask) == ImplicitOps )
        goto special_insn;
    /* Decode and fetch the source operand: register, memory or immediate. */
    switch ( d & SrcMask )
    {
    case SrcNone:
        break;
    case SrcReg:
        src.type = OP_REG;
        if ( d & ByteOp )
        {
            src.reg = decode_register(modrm_reg, &_regs, (rex_prefix == 0));
            src.val = *(uint8_t *)src.reg;
            src.bytes = 1;
        }
        else
        {
            src.reg = decode_register(modrm_reg, &_regs, 0);
            switch ( (src.bytes = op_bytes) )
            {
            case 2: src.val = *(uint16_t *)src.reg; break;
            case 4: src.val = *(uint32_t *)src.reg; break;
            case 8: src.val = *(uint64_t *)src.reg; break;
            }
        }
        break;
    case SrcMem16:
        ea.bytes = 2;
        goto srcmem_common;
    case SrcMem32:
        ea.bytes = 4;
        goto srcmem_common;
    case SrcMem:
        ea.bytes = (d & ByteOp) ? 1 : op_bytes;
    srcmem_common:
        src = ea;
        if ( src.type == OP_REG )
        {
            switch ( src.bytes )
            {
            case 1: src.val = *(uint8_t  *)src.reg; break;
            case 2: src.val = *(uint16_t *)src.reg; break;
            case 4: src.val = *(uint32_t *)src.reg; break;
            case 8: src.val = *(uint64_t *)src.reg; break;
            }
        }
        else if ( (rc = ops->read(src.mem.seg, src.mem.off,
                                  &src.val, src.bytes, ctxt)) )
            goto done;
        break;
    case SrcImm:
        src.type  = OP_IMM;
        src.bytes = (d & ByteOp) ? 1 : op_bytes;
        if ( src.bytes == 8 ) src.bytes = 4;
        /* NB. Immediates are sign-extended as necessary. */
        switch ( src.bytes )
        {
        case 1: src.val = insn_fetch_type(int8_t);  break;
        case 2: src.val = insn_fetch_type(int16_t); break;
        case 4: src.val = insn_fetch_type(int32_t); break;
        }
        break;
    case SrcImmByte:
        src.type  = OP_IMM;
        src.bytes = 1;
        src.val   = insn_fetch_type(int8_t);
        break;
    }
    /* Decode and fetch the destination operand: register or memory. */
    switch ( d & DstMask )
    {
    case DstReg:
        dst.type = OP_REG;
        if ( d & ByteOp )
        {
            dst.reg = decode_register(modrm_reg, &_regs, (rex_prefix == 0));
            dst.val = *(uint8_t *)dst.reg;
            dst.bytes = 1;
        }
        else
        {
            dst.reg = decode_register(modrm_reg, &_regs, 0);
            switch ( (dst.bytes = op_bytes) )
            {
            case 2: dst.val = *(uint16_t *)dst.reg; break;
            case 4: dst.val = *(uint32_t *)dst.reg; break;
            case 8: dst.val = *(uint64_t *)dst.reg; break;
            }
        }
        break;
    case DstBitBase:
        if ( ((d & SrcMask) == SrcImmByte) || (ea.type == OP_REG) )
        {
            src.val &= (op_bytes << 3) - 1;
        }
        else
        {
            /*
             * EA       += BitOffset DIV op_bytes*8
             * BitOffset = BitOffset MOD op_bytes*8
             * DIV truncates towards negative infinity.
             * MOD always produces a positive result.
             */
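            /*
             * Worked example: a 32-bit BT with bit offset -1 gives
             * byte_offset = 4 below, so the EA drops by 4 bytes and the bit
             * index becomes 31: the top bit of the dword immediately below
             * the original EA.
             */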
            if ( op_bytes == 2 )
                src.val = (int16_t)src.val;
            else if ( op_bytes == 4 )
                src.val = (int32_t)src.val;
            if ( (long)src.val < 0 )
            {
                unsigned long byte_offset;
                byte_offset = op_bytes + (((-src.val-1) >> 3) & ~(op_bytes-1));
                ea.mem.off -= byte_offset;
                src.val = (byte_offset << 3) + src.val;
            }
            else
            {
                ea.mem.off += (src.val >> 3) & ~(op_bytes - 1);
                src.val &= (op_bytes << 3) - 1;
            }
        }
        /* Becomes a normal DstMem operation from here on. */
        d = (d & ~DstMask) | DstMem;
    case DstMem:
        ea.bytes = (d & ByteOp) ? 1 : op_bytes;
        dst = ea;
        if ( dst.type == OP_REG )
        {
            switch ( dst.bytes )
            {
            case 1: dst.val = *(uint8_t  *)dst.reg; break;
            case 2: dst.val = *(uint16_t *)dst.reg; break;
            case 4: dst.val = *(uint32_t *)dst.reg; break;
            case 8: dst.val = *(uint64_t *)dst.reg; break;
            }
        }
        else if ( !(d & Mov) && /* optimisation - avoid slow emulated read */
                  (rc = ops->read(dst.mem.seg, dst.mem.off,
                                  &dst.val, dst.bytes, ctxt)) )
            goto done;
        break;
    }
    dst.orig_val = dst.val;
    if ( twobyte )
        goto twobyte_insn;

    switch ( b )
    {
    case 0x04 ... 0x05: /* add imm,%%eax */
        dst.reg = (unsigned long *)&_regs.eax;
        dst.val = dst.orig_val = _regs.eax;
    case 0x00 ... 0x03: add: /* add */
        emulate_2op_SrcV("add", src, dst, _regs.eflags);
        break;
    case 0x0c ... 0x0d: /* or imm,%%eax */
        dst.reg = (unsigned long *)&_regs.eax;
        dst.val = dst.orig_val = _regs.eax;
    case 0x08 ... 0x0b: or: /* or */
        emulate_2op_SrcV("or", src, dst, _regs.eflags);
        break;
    case 0x14 ... 0x15: /* adc imm,%%eax */
        dst.reg = (unsigned long *)&_regs.eax;
        dst.val = dst.orig_val = _regs.eax;
    case 0x10 ... 0x13: adc: /* adc */
        emulate_2op_SrcV("adc", src, dst, _regs.eflags);
        break;
    case 0x1c ... 0x1d: /* sbb imm,%%eax */
        dst.reg = (unsigned long *)&_regs.eax;
        dst.val = dst.orig_val = _regs.eax;
    case 0x18 ... 0x1b: sbb: /* sbb */
        emulate_2op_SrcV("sbb", src, dst, _regs.eflags);
        break;
    case 0x24 ... 0x25: /* and imm,%%eax */
        dst.reg = (unsigned long *)&_regs.eax;
        dst.val = dst.orig_val = _regs.eax;
    case 0x20 ... 0x23: and: /* and */
        emulate_2op_SrcV("and", src, dst, _regs.eflags);
        break;
    case 0x2c ... 0x2d: /* sub imm,%%eax */
        dst.reg = (unsigned long *)&_regs.eax;
        dst.val = dst.orig_val = _regs.eax;
    case 0x28 ... 0x2b: sub: /* sub */
        emulate_2op_SrcV("sub", src, dst, _regs.eflags);
        break;
    case 0x34 ... 0x35: /* xor imm,%%eax */
        dst.reg = (unsigned long *)&_regs.eax;
        dst.val = dst.orig_val = _regs.eax;
    case 0x30 ... 0x33: xor: /* xor */
        emulate_2op_SrcV("xor", src, dst, _regs.eflags);
        break;
    case 0x3c ... 0x3d: /* cmp imm,%%eax */
        dst.reg = (unsigned long *)&_regs.eax;
        dst.val = dst.orig_val = _regs.eax;
    case 0x38 ... 0x3b: cmp: /* cmp */
        emulate_2op_SrcV("cmp", src, dst, _regs.eflags);
        break;
    case 0x63: /* movsxd */
        if ( mode != X86EMUL_MODE_PROT64 )
            goto cannot_emulate;
        dst.val = (int32_t)src.val;
        break;
    case 0x80 ... 0x83: /* Grp1 */
        switch ( modrm_reg & 7 )
        {
        case 0: goto add;
        case 1: goto or;
        case 2: goto adc;
        case 3: goto sbb;
        case 4: goto and;
        case 5: goto sub;
        case 6: goto xor;
        case 7: goto cmp;
        }
        break;
    case 0x84 ... 0x85: test: /* test */
        emulate_2op_SrcV("test", src, dst, _regs.eflags);
        break;
    case 0x86 ... 0x87: xchg: /* xchg */
        /* Write back the register source. */
        switch ( dst.bytes )
        {
        case 1: *(uint8_t  *)src.reg = (uint8_t)dst.val; break;
        case 2: *(uint16_t *)src.reg = (uint16_t)dst.val; break;
        case 4: *src.reg = (uint32_t)dst.val; break; /* 64b reg: zero-extend */
        case 8: *src.reg = dst.val; break;
        }
        /* Write back the memory destination with implicit LOCK prefix. */
        dst.val = src.val;
        lock_prefix = 1;
        break;
    case 0x88 ... 0x8b: /* mov */
    case 0xc6 ... 0xc7: /* mov (sole member of Grp11) */
        dst.val = src.val;
        break;
    case 0x8d: /* lea */
        dst.val = ea.mem.off;
        break;
    case 0x8f: /* pop (sole member of Grp1a) */
        /* 64-bit mode: POP defaults to a 64-bit operand. */
        if ( (mode == X86EMUL_MODE_PROT64) && (dst.bytes == 4) )
            dst.bytes = 8;
        if ( (rc = ops->read(x86_seg_ss, truncate_ea(_regs.esp),
                             &dst.val, dst.bytes, ctxt)) != 0 )
            goto done;
        register_address_increment(_regs.esp, dst.bytes);
        break;
    case 0xc0 ... 0xc1: grp2: /* Grp2 */
        switch ( modrm_reg & 7 )
        {
        case 0: /* rol */
            emulate_2op_SrcB("rol", src, dst, _regs.eflags);
            break;
        case 1: /* ror */
            emulate_2op_SrcB("ror", src, dst, _regs.eflags);
            break;
        case 2: /* rcl */
            emulate_2op_SrcB("rcl", src, dst, _regs.eflags);
            break;
        case 3: /* rcr */
            emulate_2op_SrcB("rcr", src, dst, _regs.eflags);
            break;
        case 4: /* sal/shl */
        case 6: /* sal/shl */
            emulate_2op_SrcB("sal", src, dst, _regs.eflags);
            break;
        case 5: /* shr */
            emulate_2op_SrcB("shr", src, dst, _regs.eflags);
            break;
        case 7: /* sar */
            emulate_2op_SrcB("sar", src, dst, _regs.eflags);
            break;
        }
        break;
    case 0xd0 ... 0xd1: /* Grp2 */
        src.val = 1;
        goto grp2;
    case 0xd2 ... 0xd3: /* Grp2 */
        src.val = _regs.ecx;
        goto grp2;
    case 0xf6 ... 0xf7: /* Grp3 */
        switch ( modrm_reg & 7 )
        {
        case 0 ... 1: /* test */
            /* Special case in Grp3: test has an immediate source operand. */
            src.type  = OP_IMM;
            src.bytes = (d & ByteOp) ? 1 : op_bytes;
            if ( src.bytes == 8 ) src.bytes = 4;
            switch ( src.bytes )
            {
            case 1: src.val = insn_fetch_type(int8_t);  break;
            case 2: src.val = insn_fetch_type(int16_t); break;
            case 4: src.val = insn_fetch_type(int32_t); break;
            }
            goto test;
        case 2: /* not */
            dst.val = ~dst.val;
            break;
        case 3: /* neg */
            emulate_1op("neg", dst, _regs.eflags);
            break;
        default:
            goto cannot_emulate;
        }
        break;
    case 0xfe ... 0xff: /* Grp4/Grp5 */
        switch ( modrm_reg & 7 )
        {
        case 0: /* inc */
            emulate_1op("inc", dst, _regs.eflags);
            break;
        case 1: /* dec */
            emulate_1op("dec", dst, _regs.eflags);
            break;
        case 6: /* push */
            /* 64-bit mode: PUSH defaults to a 64-bit operand. */
            if ( (mode == X86EMUL_MODE_PROT64) && (dst.bytes == 4) )
            {
                dst.bytes = 8;
                if ( (rc = ops->read(dst.mem.seg, dst.mem.off,
                                     &dst.val, 8, ctxt)) != 0 )
                    goto done;
            }
            register_address_increment(_regs.esp, -dst.bytes);
            if ( (rc = ops->write(x86_seg_ss, truncate_ea(_regs.esp),
                                  dst.val, dst.bytes, ctxt)) != 0 )
                goto done;
            dst.val = dst.orig_val; /* skanky: disable writeback */
            break;
        default:
            goto cannot_emulate;
        }
        break;
    }
 writeback:
    if ( (d & Mov) || (dst.orig_val != dst.val) )
    {
        switch ( dst.type )
        {
        case OP_REG:
            /* The 4-byte case *is* correct: in 64-bit mode we zero-extend. */
            switch ( dst.bytes )
            {
            case 1: *(uint8_t  *)dst.reg = (uint8_t)dst.val; break;
            case 2: *(uint16_t *)dst.reg = (uint16_t)dst.val; break;
            case 4: *dst.reg = (uint32_t)dst.val; break; /* 64b: zero-ext */
            case 8: *dst.reg = dst.val; break;
            }
            break;
        case OP_MEM:
            if ( lock_prefix )
                rc = ops->cmpxchg(
                    dst.mem.seg, dst.mem.off, dst.orig_val,
                    dst.val, dst.bytes, ctxt);
            else
                rc = ops->write(
                    dst.mem.seg, dst.mem.off, dst.val, dst.bytes, ctxt);
            if ( rc != 0 )
                goto done;
        default:
            break;
        }
    }

    /* Commit shadow register state. */
    *ctxt->regs = _regs;

 done:
    return (rc == X86EMUL_UNHANDLEABLE) ? -1 : 0;
 special_insn:
    /* Default action: disable writeback. There may be no dest operand. */
    dst.orig_val = dst.val;
    if ( twobyte )
        goto twobyte_special_insn;
    if ( rep_prefix )
    {
        if ( _regs.ecx == 0 )
        {
            ctxt->regs->eip = _regs.eip;
            goto done;
        }
        _regs.ecx--;
        _regs.eip = ctxt->regs->eip;
    }
    switch ( b )
    {
    case 0x40 ... 0x4f: /* inc/dec reg */
        dst.type  = OP_REG;
        dst.reg   = decode_register(b & 7, &_regs, 0);
        dst.bytes = op_bytes;
        dst.orig_val = dst.val = *dst.reg;
        if ( b & 8 )
            emulate_1op("dec", dst, _regs.eflags);
        else
            emulate_1op("inc", dst, _regs.eflags);
        break;
    case 0x50 ... 0x57: /* push reg */
        dst.type  = OP_MEM;
        dst.bytes = op_bytes;
        if ( (mode == X86EMUL_MODE_PROT64) && (dst.bytes == 4) )
            dst.bytes = 8;
        dst.val = *(unsigned long *)decode_register(
            (b & 7) | ((rex_prefix & 1) << 3), &_regs, 0);
        register_address_increment(_regs.esp, -dst.bytes);
        dst.mem.seg = x86_seg_ss;
        dst.mem.off = truncate_ea(_regs.esp);
        break;
    case 0x58 ... 0x5f: /* pop reg */
        dst.type = OP_REG;
        dst.reg  = decode_register(
            (b & 7) | ((rex_prefix & 1) << 3), &_regs, 0);
        dst.bytes = op_bytes;
        if ( (mode == X86EMUL_MODE_PROT64) && (dst.bytes == 4) )
            dst.bytes = 8;
        if ( (rc = ops->read(x86_seg_ss, truncate_ea(_regs.esp),
                             &dst.val, dst.bytes, ctxt)) != 0 )
            goto done;
        register_address_increment(_regs.esp, dst.bytes);
        break;
    case 0x90: /* nop / xchg %%r8,%%rax */
        if ( !(rex_prefix & 1) )
            break; /* nop */
    case 0x91 ... 0x97: /* xchg reg,%%rax */
        src.type = OP_REG;
        src.reg  = (unsigned long *)&_regs.eax;
        src.val  = *src.reg;
        dst.type = OP_REG;
        dst.reg  = decode_register(
            (b & 7) | ((rex_prefix & 1) << 3), &_regs, 0);
        dst.val  = dst.orig_val = *dst.reg;
        goto xchg;
    case 0xa0 ... 0xa1: /* mov mem.offs,{%al,%ax,%eax,%rax} */
        /* Source EA is not encoded via ModRM. */
        dst.type  = OP_REG;
        dst.reg   = (unsigned long *)&_regs.eax;
        dst.bytes = (d & ByteOp) ? 1 : op_bytes;
        if ( (rc = ops->read(ea.mem.seg, insn_fetch_bytes(ad_bytes),
                             &dst.val, dst.bytes, ctxt)) != 0 )
            goto done;
        break;
    case 0xa2 ... 0xa3: /* mov {%al,%ax,%eax,%rax},mem.offs */
        /* Destination EA is not encoded via ModRM. */
        dst.type    = OP_MEM;
        dst.mem.seg = ea.mem.seg;
        dst.mem.off = insn_fetch_bytes(ad_bytes);
        dst.bytes   = (d & ByteOp) ? 1 : op_bytes;
        dst.val     = (unsigned long)_regs.eax;
        break;
    case 0xa4 ... 0xa5: /* movs */
        dst.type    = OP_MEM;
        dst.bytes   = (d & ByteOp) ? 1 : op_bytes;
        dst.mem.seg = x86_seg_es;
        dst.mem.off = truncate_ea(_regs.edi);
        if ( (rc = ops->read(ea.mem.seg, truncate_ea(_regs.esi),
                             &dst.val, dst.bytes, ctxt)) != 0 )
            goto done;
        register_address_increment(
            _regs.esi, (_regs.eflags & EFLG_DF) ? -dst.bytes : dst.bytes);
        register_address_increment(
            _regs.edi, (_regs.eflags & EFLG_DF) ? -dst.bytes : dst.bytes);
        break;
    case 0xaa ... 0xab: /* stos */
        dst.type    = OP_MEM;
        dst.bytes   = (d & ByteOp) ? 1 : op_bytes;
        dst.mem.seg = x86_seg_es;
        dst.mem.off = truncate_ea(_regs.edi);
        dst.val     = _regs.eax;
        register_address_increment(
            _regs.edi, (_regs.eflags & EFLG_DF) ? -dst.bytes : dst.bytes);
        break;
    case 0xac ... 0xad: /* lods */
        dst.type  = OP_REG;
        dst.bytes = (d & ByteOp) ? 1 : op_bytes;
        dst.reg   = (unsigned long *)&_regs.eax;
        if ( (rc = ops->read(ea.mem.seg, truncate_ea(_regs.esi),
                             &dst.val, dst.bytes, ctxt)) != 0 )
            goto done;
        register_address_increment(
            _regs.esi, (_regs.eflags & EFLG_DF) ? -dst.bytes : dst.bytes);
        break;
    case 0xf5: /* cmc */
        _regs.eflags ^= EFLG_CF;
        break;
    case 0xf8: /* clc */
        _regs.eflags &= ~EFLG_CF;
        break;
    case 0xf9: /* stc */
        _regs.eflags |= EFLG_CF;
        break;
    case 0xfc: /* cld */
        _regs.eflags &= ~EFLG_DF;
        break;
    case 0xfd: /* std */
        _regs.eflags |= EFLG_DF;
        break;
    }
    goto writeback;
 twobyte_insn:
    switch ( b )
    {
    case 0x40 ... 0x4f: /* cmov */
        dst.val = dst.orig_val = src.val;
        d &= ~Mov; /* default to no move */
        /* First, assume we're decoding an even cmov opcode (lsb == 0). */
        switch ( (b & 15) >> 1 )
        {
        case 0: /* cmovo */
            d |= (_regs.eflags & EFLG_OF) ? Mov : 0;
            break;
        case 1: /* cmovb/cmovc/cmovnae */
            d |= (_regs.eflags & EFLG_CF) ? Mov : 0;
            break;
        case 2: /* cmovz/cmove */
            d |= (_regs.eflags & EFLG_ZF) ? Mov : 0;
            break;
        case 3: /* cmovbe/cmovna */
            d |= (_regs.eflags & (EFLG_CF|EFLG_ZF)) ? Mov : 0;
            break;
        case 4: /* cmovs */
            d |= (_regs.eflags & EFLG_SF) ? Mov : 0;
            break;
        case 5: /* cmovp/cmovpe */
            d |= (_regs.eflags & EFLG_PF) ? Mov : 0;
            break;
        case 7: /* cmovle/cmovng */
            d |= (_regs.eflags & EFLG_ZF) ? Mov : 0;
            /* fall through */
        case 6: /* cmovl/cmovnge */
            d |= (!(_regs.eflags & EFLG_SF) != !(_regs.eflags & EFLG_OF)) ?
                Mov : 0;
            break;
        }
        /* Odd cmov opcodes (lsb == 1) have inverted sense. */
        d ^= (b & 1) ? Mov : 0;
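        /* e.g. 0x44 (cmove) sets Mov when ZF is set; 0x45 (cmovne) reuses
         * the same test with the sense flipped by this XOR. */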
        break;
    case 0xb0 ... 0xb1: /* cmpxchg */
        /* Save real source value, then compare EAX against destination. */
        src.orig_val = src.val;
        src.val = _regs.eax;
        emulate_2op_SrcV("cmp", src, dst, _regs.eflags);
        /* Always write back. The question is: where to? */
        d |= Mov;
        if ( _regs.eflags & EFLG_ZF )
        {
            /* Success: write back to memory. */
            dst.val = src.orig_val;
        }
        else
        {
            /* Failure: write the value we saw to EAX. */
            dst.type = OP_REG;
            dst.reg  = (unsigned long *)&_regs.eax;
        }
        break;
    case 0xa3: bt: /* bt */
        emulate_2op_SrcV_nobyte("bt", src, dst, _regs.eflags);
        break;
    case 0xb3: btr: /* btr */
        emulate_2op_SrcV_nobyte("btr", src, dst, _regs.eflags);
        break;
    case 0xab: bts: /* bts */
        emulate_2op_SrcV_nobyte("bts", src, dst, _regs.eflags);
        break;
    case 0xb6: /* movzx rm8,r{16,32,64} */
        /* Recompute DstReg as we may have decoded AH/BH/CH/DH. */
        dst.reg   = decode_register(modrm_reg, &_regs, 0);
        dst.bytes = op_bytes;
        dst.val   = (uint8_t)src.val;
        break;
    case 0xb7: /* movzx rm16,r{16,32,64} */
        dst.val = (uint16_t)src.val;
        break;
    case 0xbb: btc: /* btc */
        emulate_2op_SrcV_nobyte("btc", src, dst, _regs.eflags);
        break;
    case 0xba: /* Grp8 */
        switch ( modrm_reg & 3 )
        {
        case 0: goto bt;
        case 1: goto bts;
        case 2: goto btr;
        case 3: goto btc;
        }
        break;
    case 0xbe: /* movsx rm8,r{16,32,64} */
        /* Recompute DstReg as we may have decoded AH/BH/CH/DH. */
        dst.reg   = decode_register(modrm_reg, &_regs, 0);
        dst.bytes = op_bytes;
        dst.val   = (int8_t)src.val;
        break;
    case 0xbf: /* movsx rm16,r{16,32,64} */
        dst.val = (int16_t)src.val;
        break;
    case 0xc0 ... 0xc1: /* xadd */
        /* Write back the register source. */
        switch ( dst.bytes )
        {
        case 1: *(uint8_t  *)src.reg = (uint8_t)dst.val; break;
        case 2: *(uint16_t *)src.reg = (uint16_t)dst.val; break;
        case 4: *src.reg = (uint32_t)dst.val; break; /* 64b reg: zero-extend */
        case 8: *src.reg = dst.val; break;
        }
        goto add;
    }
    goto writeback;
 twobyte_special_insn:
    switch ( b )
    {
    case 0x0d: /* GrpP (prefetch) */
    case 0x18: /* Grp16 (prefetch/nop) */
        break;
    case 0xc7: /* Grp9 (cmpxchg8b) */
#if defined(__i386__)
    {
        unsigned long old_lo, old_hi;
        if ( (rc = ops->read(ea.mem.seg, ea.mem.off+0, &old_lo, 4, ctxt)) ||
             (rc = ops->read(ea.mem.seg, ea.mem.off+4, &old_hi, 4, ctxt)) )
            goto done;
        if ( (old_lo != _regs.eax) || (old_hi != _regs.edx) )
        {
            _regs.eax = old_lo;
            _regs.edx = old_hi;
            _regs.eflags &= ~EFLG_ZF;
        }
        else if ( ops->cmpxchg8b == NULL )
        {
            rc = X86EMUL_UNHANDLEABLE;
            goto done;
        }
        else
        {
            if ( (rc = ops->cmpxchg8b(ea.mem.seg, ea.mem.off, old_lo, old_hi,
                                      _regs.ebx, _regs.ecx, ctxt)) != 0 )
                goto done;
            _regs.eflags |= EFLG_ZF;
        }
        break;
    }
#elif defined(__x86_64__)
    {
        unsigned long old, new;
        if ( (rc = ops->read(ea.mem.seg, ea.mem.off, &old, 8, ctxt)) != 0 )
            goto done;
        if ( ((uint32_t)(old>>0)  != (uint32_t)_regs.eax) ||
             ((uint32_t)(old>>32) != (uint32_t)_regs.edx) )
        {
            _regs.eax = (uint32_t)(old>>0);
            _regs.edx = (uint32_t)(old>>32);
            _regs.eflags &= ~EFLG_ZF;
        }
        else
        {
            new = (_regs.ecx<<32)|(uint32_t)_regs.ebx;
            if ( (rc = ops->cmpxchg(ea.mem.seg, ea.mem.off, old,
                                    new, 8, ctxt)) != 0 )
                goto done;
            _regs.eflags |= EFLG_ZF;
        }
        break;
    }
#endif
    }
    goto writeback;
 cannot_emulate:
#ifdef __XEN__
    gdprintk(XENLOG_DEBUG, "Instr:");
    for ( ea.mem.off = ctxt->regs->eip; ea.mem.off < _regs.eip; ea.mem.off++ )
    {
        unsigned long x;
        ops->insn_fetch(x86_seg_cs, ea.mem.off, &x, 1, ctxt);
        printk(" %02x", (uint8_t)x);
    }
    printk("\n");
#endif
    return -1;