ia64/xen-unstable

view xen/arch/x86/x86_emulate.c @ 13321:9ba91a854787

[XEN] Emulate MOV imm->reg.
Signed-off-by: Keir Fraser <keir@xensource.com>
author kaf24@localhost.localdomain
date Sat Jan 06 15:56:52 2007 +0000 (2007-01-06)
parents b648a579b0b4
children f240c09f08d2
line source
1 /******************************************************************************
2 * x86_emulate.c
3 *
4 * Generic x86 (32-bit and 64-bit) instruction decoder and emulator.
5 *
6 * Copyright (c) 2005 Keir Fraser
7 */
9 #ifndef __XEN__
10 #include <stddef.h>
11 #include <stdint.h>
12 #include <public/xen.h>
13 #else
14 #include <xen/config.h>
15 #include <xen/types.h>
16 #include <xen/lib.h>
17 #include <asm/regs.h>
18 #undef cmpxchg
19 #endif
20 #include <asm-x86/x86_emulate.h>
/* Operand sizes: 8-bit operands or specified/overridden size. */
#define ByteOp      (1<<0) /* 8-bit operands. */
/* Destination operand type (bits 1-2). */
#define DstBitBase  (0<<1) /* Memory operand, bit string. */
#define ImplicitOps (1<<1) /* Implicit in opcode. No generic decode. */
#define DstReg      (2<<1) /* Register operand. */
#define DstMem      (3<<1) /* Memory operand. */
#define DstMask     (3<<1)
/* Source operand type (bits 3-5). */
#define SrcNone     (0<<3) /* No source operand. */
#define SrcImplicit (0<<3) /* Source operand is implicit in the opcode. */
#define SrcReg      (1<<3) /* Register operand. */
#define SrcMem      (2<<3) /* Memory operand. */
#define SrcMem16    (3<<3) /* Memory operand (16-bit). */
#define SrcMem32    (4<<3) /* Memory operand (32-bit). */
#define SrcImm      (5<<3) /* Immediate operand. */
#define SrcImmByte  (6<<3) /* 8-bit sign-extended immediate operand. */
#define SrcMask     (7<<3)
/* Generic ModRM decode. */
#define ModRM       (1<<6)
/* Destination is only written; never read. */
#define Mov         (1<<7)
45 static uint8_t opcode_table[256] = {
46 /* 0x00 - 0x07 */
47 ByteOp|DstMem|SrcReg|ModRM, DstMem|SrcReg|ModRM,
48 ByteOp|DstReg|SrcMem|ModRM, DstReg|SrcMem|ModRM,
49 ByteOp|DstReg|SrcImm, DstReg|SrcImm, 0, 0,
50 /* 0x08 - 0x0F */
51 ByteOp|DstMem|SrcReg|ModRM, DstMem|SrcReg|ModRM,
52 ByteOp|DstReg|SrcMem|ModRM, DstReg|SrcMem|ModRM,
53 ByteOp|DstReg|SrcImm, DstReg|SrcImm, 0, 0,
54 /* 0x10 - 0x17 */
55 ByteOp|DstMem|SrcReg|ModRM, DstMem|SrcReg|ModRM,
56 ByteOp|DstReg|SrcMem|ModRM, DstReg|SrcMem|ModRM,
57 ByteOp|DstReg|SrcImm, DstReg|SrcImm, 0, 0,
58 /* 0x18 - 0x1F */
59 ByteOp|DstMem|SrcReg|ModRM, DstMem|SrcReg|ModRM,
60 ByteOp|DstReg|SrcMem|ModRM, DstReg|SrcMem|ModRM,
61 ByteOp|DstReg|SrcImm, DstReg|SrcImm, 0, 0,
62 /* 0x20 - 0x27 */
63 ByteOp|DstMem|SrcReg|ModRM, DstMem|SrcReg|ModRM,
64 ByteOp|DstReg|SrcMem|ModRM, DstReg|SrcMem|ModRM,
65 ByteOp|DstReg|SrcImm, DstReg|SrcImm, 0, 0,
66 /* 0x28 - 0x2F */
67 ByteOp|DstMem|SrcReg|ModRM, DstMem|SrcReg|ModRM,
68 ByteOp|DstReg|SrcMem|ModRM, DstReg|SrcMem|ModRM,
69 ByteOp|DstReg|SrcImm, DstReg|SrcImm, 0, 0,
70 /* 0x30 - 0x37 */
71 ByteOp|DstMem|SrcReg|ModRM, DstMem|SrcReg|ModRM,
72 ByteOp|DstReg|SrcMem|ModRM, DstReg|SrcMem|ModRM,
73 ByteOp|DstReg|SrcImm, DstReg|SrcImm, 0, 0,
74 /* 0x38 - 0x3F */
75 ByteOp|DstMem|SrcReg|ModRM, DstMem|SrcReg|ModRM,
76 ByteOp|DstReg|SrcMem|ModRM, DstReg|SrcMem|ModRM,
77 ByteOp|DstReg|SrcImm, DstReg|SrcImm, 0, 0,
78 /* 0x40 - 0x4F */
79 ImplicitOps, ImplicitOps, ImplicitOps, ImplicitOps,
80 ImplicitOps, ImplicitOps, ImplicitOps, ImplicitOps,
81 ImplicitOps, ImplicitOps, ImplicitOps, ImplicitOps,
82 ImplicitOps, ImplicitOps, ImplicitOps, ImplicitOps,
83 /* 0x50 - 0x5F */
84 ImplicitOps|Mov, ImplicitOps|Mov, ImplicitOps|Mov, ImplicitOps|Mov,
85 ImplicitOps|Mov, ImplicitOps|Mov, ImplicitOps|Mov, ImplicitOps|Mov,
86 ImplicitOps|Mov, ImplicitOps|Mov, ImplicitOps|Mov, ImplicitOps|Mov,
87 ImplicitOps|Mov, ImplicitOps|Mov, ImplicitOps|Mov, ImplicitOps|Mov,
88 /* 0x60 - 0x6F */
89 0, 0, 0, DstReg|SrcMem32|ModRM|Mov /* movsxd (x86/64) */,
90 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
91 /* 0x70 - 0x7F */
92 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
93 /* 0x80 - 0x87 */
94 ByteOp|DstMem|SrcImm|ModRM, DstMem|SrcImm|ModRM,
95 ByteOp|DstMem|SrcImm|ModRM, DstMem|SrcImmByte|ModRM,
96 ByteOp|DstMem|SrcReg|ModRM, DstMem|SrcReg|ModRM,
97 ByteOp|DstMem|SrcReg|ModRM, DstMem|SrcReg|ModRM,
98 /* 0x88 - 0x8F */
99 ByteOp|DstMem|SrcReg|ModRM|Mov, DstMem|SrcReg|ModRM|Mov,
100 ByteOp|DstReg|SrcMem|ModRM|Mov, DstReg|SrcMem|ModRM|Mov,
101 0, DstReg|SrcNone|ModRM, 0, DstMem|SrcNone|ModRM|Mov,
102 /* 0x90 - 0x97 */
103 ImplicitOps, ImplicitOps, ImplicitOps, ImplicitOps,
104 ImplicitOps, ImplicitOps, ImplicitOps, ImplicitOps,
105 /* 0x98 - 0x9F */
106 0, 0, 0, 0, 0, 0, 0, 0,
107 /* 0xA0 - 0xA7 */
108 ByteOp|ImplicitOps|Mov, ImplicitOps|Mov,
109 ByteOp|ImplicitOps|Mov, ImplicitOps|Mov,
110 ByteOp|ImplicitOps|Mov, ImplicitOps|Mov, 0, 0,
111 /* 0xA8 - 0xAF */
112 0, 0, ByteOp|ImplicitOps|Mov, ImplicitOps|Mov,
113 ByteOp|ImplicitOps|Mov, ImplicitOps|Mov, 0, 0,
114 /* 0xB0 - 0xB7 */
115 ByteOp|DstReg|SrcImm|Mov, ByteOp|DstReg|SrcImm|Mov,
116 ByteOp|DstReg|SrcImm|Mov, ByteOp|DstReg|SrcImm|Mov,
117 ByteOp|DstReg|SrcImm|Mov, ByteOp|DstReg|SrcImm|Mov,
118 ByteOp|DstReg|SrcImm|Mov, ByteOp|DstReg|SrcImm|Mov,
119 /* 0xB8 - 0xBF */
120 DstReg|SrcImm|Mov, DstReg|SrcImm|Mov, DstReg|SrcImm|Mov, DstReg|SrcImm|Mov,
121 DstReg|SrcImm|Mov, DstReg|SrcImm|Mov, DstReg|SrcImm|Mov, DstReg|SrcImm|Mov,
122 /* 0xC0 - 0xC7 */
123 ByteOp|DstMem|SrcImm|ModRM, DstMem|SrcImmByte|ModRM, 0, 0,
124 0, 0, ByteOp|DstMem|SrcImm|ModRM|Mov, DstMem|SrcImm|ModRM|Mov,
125 /* 0xC8 - 0xCF */
126 0, 0, 0, 0, 0, 0, 0, 0,
127 /* 0xD0 - 0xD7 */
128 ByteOp|DstMem|SrcImplicit|ModRM, DstMem|SrcImplicit|ModRM,
129 ByteOp|DstMem|SrcImplicit|ModRM, DstMem|SrcImplicit|ModRM,
130 0, 0, 0, 0,
131 /* 0xD8 - 0xDF */
132 0, 0, 0, 0, 0, 0, 0, 0,
133 /* 0xE0 - 0xEF */
134 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
135 /* 0xF0 - 0xF7 */
136 0, 0, 0, 0,
137 0, ImplicitOps, ByteOp|DstMem|SrcNone|ModRM, DstMem|SrcNone|ModRM,
138 /* 0xF8 - 0xFF */
139 ImplicitOps, ImplicitOps, 0, 0,
140 ImplicitOps, ImplicitOps, ByteOp|DstMem|SrcNone|ModRM, DstMem|SrcNone|ModRM
141 };
143 static uint8_t twobyte_table[256] = {
144 /* 0x00 - 0x0F */
145 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, ImplicitOps|ModRM, 0, 0,
146 /* 0x10 - 0x1F */
147 0, 0, 0, 0, 0, 0, 0, 0, ImplicitOps|ModRM, 0, 0, 0, 0, 0, 0, 0,
148 /* 0x20 - 0x2F */
149 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
150 /* 0x30 - 0x3F */
151 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
152 /* 0x40 - 0x47 */
153 DstReg|SrcMem|ModRM|Mov, DstReg|SrcMem|ModRM|Mov,
154 DstReg|SrcMem|ModRM|Mov, DstReg|SrcMem|ModRM|Mov,
155 DstReg|SrcMem|ModRM|Mov, DstReg|SrcMem|ModRM|Mov,
156 DstReg|SrcMem|ModRM|Mov, DstReg|SrcMem|ModRM|Mov,
157 /* 0x48 - 0x4F */
158 DstReg|SrcMem|ModRM|Mov, DstReg|SrcMem|ModRM|Mov,
159 DstReg|SrcMem|ModRM|Mov, DstReg|SrcMem|ModRM|Mov,
160 DstReg|SrcMem|ModRM|Mov, DstReg|SrcMem|ModRM|Mov,
161 DstReg|SrcMem|ModRM|Mov, DstReg|SrcMem|ModRM|Mov,
162 /* 0x50 - 0x5F */
163 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
164 /* 0x60 - 0x6F */
165 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
166 /* 0x70 - 0x7F */
167 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
168 /* 0x80 - 0x8F */
169 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
170 /* 0x90 - 0x9F */
171 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
172 /* 0xA0 - 0xA7 */
173 0, 0, 0, DstBitBase|SrcReg|ModRM, 0, 0, 0, 0,
174 /* 0xA8 - 0xAF */
175 0, 0, 0, DstBitBase|SrcReg|ModRM, 0, 0, 0, 0,
176 /* 0xB0 - 0xB7 */
177 ByteOp|DstMem|SrcReg|ModRM, DstMem|SrcReg|ModRM,
178 0, DstBitBase|SrcReg|ModRM,
179 0, 0, ByteOp|DstReg|SrcMem|ModRM|Mov, DstReg|SrcMem16|ModRM|Mov,
180 /* 0xB8 - 0xBF */
181 0, 0, DstBitBase|SrcImmByte|ModRM, DstBitBase|SrcReg|ModRM,
182 0, 0, ByteOp|DstReg|SrcMem|ModRM|Mov, DstReg|SrcMem16|ModRM|Mov,
183 /* 0xC0 - 0xC7 */
184 ByteOp|DstMem|SrcReg|ModRM, DstMem|SrcReg|ModRM, 0, 0,
185 0, 0, 0, ImplicitOps|ModRM,
186 /* 0xC8 - 0xCF */
187 0, 0, 0, 0, 0, 0, 0, 0,
188 /* 0xD0 - 0xDF */
189 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
190 /* 0xE0 - 0xEF */
191 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
192 /* 0xF0 - 0xFF */
193 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0
194 };
196 /* Type, address-of, and value of an instruction's operand. */
197 struct operand {
198 enum { OP_REG, OP_MEM, OP_IMM } type;
199 unsigned int bytes;
200 unsigned long val, orig_val;
201 union {
202 /* OP_REG: Pointer to register field. */
203 unsigned long *reg;
204 /* OP_MEM: Segment and offset. */
205 struct {
206 enum x86_segment seg;
207 unsigned long off;
208 } mem;
209 };
210 };
/* EFLAGS bit definitions. */
#define EFLG_OF (1<<11) /* Overflow. */
#define EFLG_DF (1<<10) /* Direction. */
#define EFLG_SF (1<<7)  /* Sign. */
#define EFLG_ZF (1<<6)  /* Zero. */
#define EFLG_AF (1<<4)  /* Auxiliary carry. */
#define EFLG_PF (1<<2)  /* Parity. */
#define EFLG_CF (1<<0)  /* Carry. */
/*
 * Instruction emulation:
 * Most instructions are emulated directly via a fragment of inline assembly
 * code. This allows us to save/restore EFLAGS and thus very easily pick up
 * any modified flags.
 */

#if defined(__x86_64__)
#define _LO32 "k"       /* force 32-bit operand */
#define _STK  "%%rsp"   /* stack pointer */
#elif defined(__i386__)
#define _LO32 ""        /* force 32-bit operand */
#define _STK  "%%esp"   /* stack pointer */
#endif

/*
 * These EFLAGS bits are restored from saved value during emulation, and
 * any changes are written back to the saved value after emulation.
 */
#define EFLAGS_MASK (EFLG_OF|EFLG_SF|EFLG_ZF|EFLG_AF|EFLG_PF|EFLG_CF)
/*
 * Before executing instruction: restore necessary bits in EFLAGS.
 * Loads the caller-saved flags (_sav) masked by _msk into the live EFLAGS,
 * and clears those bits in _sav; _tmp names a scratch register operand.
 * The arguments are asm operand-number strings, spliced into the template.
 */
#define _PRE_EFLAGS(_sav, _msk, _tmp)                   \
    /* EFLAGS = (_sav & _msk) | (EFLAGS & ~_msk); */    \
    "push %"_sav"; "                                    \
    "movl %"_msk",%"_LO32 _tmp"; "                      \
    "andl %"_LO32 _tmp",("_STK"); "                     \
    "pushf; "                                           \
    "notl %"_LO32 _tmp"; "                              \
    "andl %"_LO32 _tmp",("_STK"); "                     \
    "pop %"_tmp"; "                                     \
    "orl %"_LO32 _tmp",("_STK"); "                      \
    "popf; "                                            \
    /* _sav &= ~msk; */                                 \
    "movl %"_msk",%"_LO32 _tmp"; "                      \
    "notl %"_LO32 _tmp"; "                              \
    "andl %"_LO32 _tmp",%"_sav"; "

/*
 * After executing instruction: write-back necessary bits in EFLAGS.
 * ORs the live EFLAGS bits selected by _msk back into _sav.
 */
#define _POST_EFLAGS(_sav, _msk, _tmp)  \
    /* _sav |= EFLAGS & _msk; */        \
    "pushf; "                           \
    "pop %"_tmp"; "                     \
    "andl %"_msk",%"_LO32 _tmp"; "      \
    "orl %"_LO32 _tmp",%"_sav"; "
/*
 * Raw emulation: instruction has two explicit operands.
 * Executes the real instruction (_op) on (_src).val/(_dst).val with flags
 * restored from and saved back to _eflags.  The _wx/_wy/_lx/_ly/_qx/_qy
 * arguments are per-width operand-prefix and constraint strings.
 */
#define __emulate_2op_nobyte(_op,_src,_dst,_eflags,_wx,_wy,_lx,_ly,_qx,_qy)\
do{ unsigned long _tmp;                                                    \
    switch ( (_dst).bytes )                                                \
    {                                                                      \
    case 2:                                                                \
        __asm__ __volatile__ (                                             \
            _PRE_EFLAGS("0","4","2")                                       \
            _op"w %"_wx"3,%1; "                                            \
            _POST_EFLAGS("0","4","2")                                      \
            : "=m" (_eflags), "=m" ((_dst).val), "=&r" (_tmp)              \
            : _wy ((_src).val), "i" (EFLAGS_MASK) );                       \
        break;                                                             \
    case 4:                                                                \
        __asm__ __volatile__ (                                             \
            _PRE_EFLAGS("0","4","2")                                       \
            _op"l %"_lx"3,%1; "                                            \
            _POST_EFLAGS("0","4","2")                                      \
            : "=m" (_eflags), "=m" ((_dst).val), "=&r" (_tmp)              \
            : _ly ((_src).val), "i" (EFLAGS_MASK) );                       \
        break;                                                             \
    case 8:                                                                \
        /* Quadword case handled separately: x86/64 builds only. */       \
        __emulate_2op_8byte(_op, _src, _dst, _eflags, _qx, _qy);           \
        break;                                                             \
    }                                                                      \
} while (0)

/* As above, but also handles the byte-operand (ByteOp) case. */
#define __emulate_2op(_op,_src,_dst,_eflags,_bx,_by,_wx,_wy,_lx,_ly,_qx,_qy)\
do{ unsigned long _tmp;                                                    \
    switch ( (_dst).bytes )                                                \
    {                                                                      \
    case 1:                                                                \
        __asm__ __volatile__ (                                             \
            _PRE_EFLAGS("0","4","2")                                       \
            _op"b %"_bx"3,%1; "                                            \
            _POST_EFLAGS("0","4","2")                                      \
            : "=m" (_eflags), "=m" ((_dst).val), "=&r" (_tmp)              \
            : _by ((_src).val), "i" (EFLAGS_MASK) );                       \
        break;                                                             \
    default:                                                               \
        __emulate_2op_nobyte(_op,_src,_dst,_eflags,_wx,_wy,_lx,_ly,_qx,_qy);\
        break;                                                             \
    }                                                                      \
} while (0)

/* Source operand is byte-sized and may be restricted to just %cl. */
#define emulate_2op_SrcB(_op, _src, _dst, _eflags)      \
    __emulate_2op(_op, _src, _dst, _eflags,             \
                  "b", "c", "b", "c", "b", "c", "b", "c")
/* Source operand is byte, word, long or quad sized. */
#define emulate_2op_SrcV(_op, _src, _dst, _eflags)      \
    __emulate_2op(_op, _src, _dst, _eflags,             \
                  "b", "q", "w", "r", _LO32, "r", "", "r")
/* Source operand is word, long or quad sized. */
#define emulate_2op_SrcV_nobyte(_op, _src, _dst, _eflags)   \
    __emulate_2op_nobyte(_op, _src, _dst, _eflags,          \
                         "w", "r", _LO32, "r", "", "r")
/*
 * Instruction has only one explicit operand (no source operand).
 * Executes _op on (_dst).val at the operand width given by (_dst).bytes,
 * restoring flags from and saving them back to _eflags.
 */
#define emulate_1op(_op,_dst,_eflags)                                      \
do{ unsigned long _tmp;                                                    \
    switch ( (_dst).bytes )                                                \
    {                                                                      \
    case 1:                                                                \
        __asm__ __volatile__ (                                             \
            _PRE_EFLAGS("0","3","2")                                       \
            _op"b %1; "                                                    \
            _POST_EFLAGS("0","3","2")                                      \
            : "=m" (_eflags), "=m" ((_dst).val), "=&r" (_tmp)              \
            : "i" (EFLAGS_MASK) );                                         \
        break;                                                             \
    case 2:                                                                \
        __asm__ __volatile__ (                                             \
            _PRE_EFLAGS("0","3","2")                                       \
            _op"w %1; "                                                    \
            _POST_EFLAGS("0","3","2")                                      \
            : "=m" (_eflags), "=m" ((_dst).val), "=&r" (_tmp)              \
            : "i" (EFLAGS_MASK) );                                         \
        break;                                                             \
    case 4:                                                                \
        __asm__ __volatile__ (                                             \
            _PRE_EFLAGS("0","3","2")                                       \
            _op"l %1; "                                                    \
            _POST_EFLAGS("0","3","2")                                      \
            : "=m" (_eflags), "=m" ((_dst).val), "=&r" (_tmp)              \
            : "i" (EFLAGS_MASK) );                                         \
        break;                                                             \
    case 8:                                                                \
        /* Quadword case handled separately: x86/64 builds only. */       \
        __emulate_1op_8byte(_op, _dst, _eflags);                           \
        break;                                                             \
    }                                                                      \
} while (0)
/*
 * Emulate an instruction with quadword operands (x86/64 only).
 * On i386 these expand to nothing: the case-8 callers are unreachable
 * there, but must still compile.
 */
#if defined(__x86_64__)
#define __emulate_2op_8byte(_op, _src, _dst, _eflags, _qx, _qy)     \
do{ __asm__ __volatile__ (                                          \
        _PRE_EFLAGS("0","4","2")                                    \
        _op"q %"_qx"3,%1; "                                         \
        _POST_EFLAGS("0","4","2")                                   \
        : "=m" (_eflags), "=m" ((_dst).val), "=&r" (_tmp)           \
        : _qy ((_src).val), "i" (EFLAGS_MASK) );                    \
} while (0)
#define __emulate_1op_8byte(_op, _dst, _eflags)                     \
do{ __asm__ __volatile__ (                                          \
        _PRE_EFLAGS("0","3","2")                                    \
        _op"q %1; "                                                 \
        _POST_EFLAGS("0","3","2")                                   \
        : "=m" (_eflags), "=m" ((_dst).val), "=&r" (_tmp)           \
        : "i" (EFLAGS_MASK) );                                      \
} while (0)
#elif defined(__i386__)
#define __emulate_2op_8byte(_op, _src, _dst, _eflags, _qx, _qy)
#define __emulate_1op_8byte(_op, _dst, _eflags)
#endif /* __i386__ */
/*
 * Fetch next part of the instruction being emulated.  Reads _size bytes
 * at CS:eIP via the callback table, advances the shadow eIP, and yields
 * the raw value.  On fetch failure, sets rc and jumps to the enclosing
 * function's 'done' label (expects rc, ops, ctxt, _regs in scope).
 */
#define insn_fetch_bytes(_size)                                         \
({ unsigned long _x;                                                    \
   rc = ops->insn_fetch(x86_seg_cs, _regs.eip, &_x, (_size), ctxt);     \
   if ( rc != 0 )                                                       \
       goto done;                                                       \
   _regs.eip += (_size);                                                \
   _x;                                                                  \
})
/* Typed fetch: fetch sizeof(_type) bytes and convert (sign/zero extend). */
#define insn_fetch_type(_type) ((_type)insn_fetch_bytes(sizeof(_type)))

/* Mask an effective address down to the current address width (ad_bytes). */
#define truncate_ea(ea)                                 \
({ unsigned long __ea = (ea);                           \
   ((ad_bytes == sizeof(unsigned long)) ? __ea :        \
    (__ea & ((1UL << (ad_bytes << 3)) - 1)));           \
})

/* Update address held in a register, based on addressing mode. */
#define register_address_increment(reg, inc)                            \
do {                                                                    \
    int _inc = (inc); /* signed type ensures sign extension to long */  \
    if ( ad_bytes == sizeof(unsigned long) )                            \
        (reg) += _inc;                                                  \
    else                                                                \
        /* Narrow address size: wrap within the low ad_bytes*8 bits. */ \
        (reg) = ((reg) & ~((1UL << (ad_bytes << 3)) - 1)) |             \
                (((reg) + _inc) & ((1UL << (ad_bytes << 3)) - 1));      \
} while (0)
409 void *
410 decode_register(
411 uint8_t modrm_reg, struct cpu_user_regs *regs, int highbyte_regs)
412 {
413 void *p;
415 switch ( modrm_reg )
416 {
417 case 0: p = &regs->eax; break;
418 case 1: p = &regs->ecx; break;
419 case 2: p = &regs->edx; break;
420 case 3: p = &regs->ebx; break;
421 case 4: p = (highbyte_regs ?
422 ((unsigned char *)&regs->eax + 1) :
423 (unsigned char *)&regs->esp); break;
424 case 5: p = (highbyte_regs ?
425 ((unsigned char *)&regs->ecx + 1) :
426 (unsigned char *)&regs->ebp); break;
427 case 6: p = (highbyte_regs ?
428 ((unsigned char *)&regs->edx + 1) :
429 (unsigned char *)&regs->esi); break;
430 case 7: p = (highbyte_regs ?
431 ((unsigned char *)&regs->ebx + 1) :
432 (unsigned char *)&regs->edi); break;
433 #if defined(__x86_64__)
434 case 8: p = &regs->r8; break;
435 case 9: p = &regs->r9; break;
436 case 10: p = &regs->r10; break;
437 case 11: p = &regs->r11; break;
438 case 12: p = &regs->r12; break;
439 case 13: p = &regs->r13; break;
440 case 14: p = &regs->r14; break;
441 case 15: p = &regs->r15; break;
442 #endif
443 default: p = NULL; break;
444 }
446 return p;
447 }
449 int
450 x86_emulate(
451 struct x86_emulate_ctxt *ctxt,
452 struct x86_emulate_ops *ops)
453 {
454 /* Shadow copy of register state. Committed on successful emulation. */
455 struct cpu_user_regs _regs = *ctxt->regs;
457 uint8_t b, d, sib, sib_index, sib_base, twobyte = 0, rex_prefix = 0;
458 uint8_t modrm, modrm_mod = 0, modrm_reg = 0, modrm_rm = 0;
459 unsigned int op_bytes, ad_bytes, lock_prefix = 0, rep_prefix = 0, i;
460 int rc = 0;
461 struct operand src, dst;
462 int mode = ctxt->mode;
464 /* Data operand effective address (usually computed from ModRM). */
465 struct operand ea;
467 /* Default is a memory operand relative to segment DS. */
468 ea.type = OP_MEM;
469 ea.mem.seg = x86_seg_ds;
470 ea.mem.off = 0;
472 switch ( mode )
473 {
474 case X86EMUL_MODE_REAL:
475 case X86EMUL_MODE_PROT16:
476 op_bytes = ad_bytes = 2;
477 break;
478 case X86EMUL_MODE_PROT32:
479 op_bytes = ad_bytes = 4;
480 break;
481 #ifdef __x86_64__
482 case X86EMUL_MODE_PROT64:
483 op_bytes = 4;
484 ad_bytes = 8;
485 break;
486 #endif
487 default:
488 return -1;
489 }
491 /* Prefix bytes. */
492 for ( i = 0; i < 8; i++ )
493 {
494 switch ( b = insn_fetch_type(uint8_t) )
495 {
496 case 0x66: /* operand-size override */
497 op_bytes ^= 6; /* switch between 2/4 bytes */
498 break;
499 case 0x67: /* address-size override */
500 if ( mode == X86EMUL_MODE_PROT64 )
501 ad_bytes ^= 12; /* switch between 4/8 bytes */
502 else
503 ad_bytes ^= 6; /* switch between 2/4 bytes */
504 break;
505 case 0x2e: /* CS override */
506 ea.mem.seg = x86_seg_cs;
507 break;
508 case 0x3e: /* DS override */
509 ea.mem.seg = x86_seg_ds;
510 break;
511 case 0x26: /* ES override */
512 ea.mem.seg = x86_seg_es;
513 break;
514 case 0x64: /* FS override */
515 ea.mem.seg = x86_seg_fs;
516 break;
517 case 0x65: /* GS override */
518 ea.mem.seg = x86_seg_gs;
519 break;
520 case 0x36: /* SS override */
521 ea.mem.seg = x86_seg_ss;
522 break;
523 case 0xf0: /* LOCK */
524 lock_prefix = 1;
525 break;
526 case 0xf2: /* REPNE/REPNZ */
527 case 0xf3: /* REP/REPE/REPZ */
528 rep_prefix = 1;
529 break;
530 case 0x40 ... 0x4f: /* REX */
531 if ( mode != X86EMUL_MODE_PROT64 )
532 goto done_prefixes;
533 rex_prefix = b;
534 continue;
535 default:
536 goto done_prefixes;
537 }
539 /* Any legacy prefix after a REX prefix nullifies its effect. */
540 rex_prefix = 0;
541 }
542 done_prefixes:
544 if ( rex_prefix & 8 ) /* REX.W */
545 op_bytes = 8;
547 /* Opcode byte(s). */
548 d = opcode_table[b];
549 if ( d == 0 )
550 {
551 /* Two-byte opcode? */
552 if ( b == 0x0f )
553 {
554 twobyte = 1;
555 b = insn_fetch_type(uint8_t);
556 d = twobyte_table[b];
557 }
559 /* Unrecognised? */
560 if ( d == 0 )
561 goto cannot_emulate;
562 }
564 /* ModRM and SIB bytes. */
565 if ( d & ModRM )
566 {
567 modrm = insn_fetch_type(uint8_t);
568 modrm_mod = (modrm & 0xc0) >> 6;
569 modrm_reg = ((rex_prefix & 4) << 1) | ((modrm & 0x38) >> 3);
570 modrm_rm = modrm & 0x07;
572 if ( modrm_mod == 3 )
573 {
574 modrm_rm |= (rex_prefix & 1) << 3;
575 ea.type = OP_REG;
576 ea.reg = decode_register(
577 modrm_rm, &_regs, (d & ByteOp) && (rex_prefix == 0));
578 }
579 else if ( ad_bytes == 2 )
580 {
581 /* 16-bit ModR/M decode. */
582 switch ( modrm_rm )
583 {
584 case 0: ea.mem.off = _regs.ebx + _regs.esi; break;
585 case 1: ea.mem.off = _regs.ebx + _regs.edi; break;
586 case 2: ea.mem.off = _regs.ebp + _regs.esi; break;
587 case 3: ea.mem.off = _regs.ebp + _regs.edi; break;
588 case 4: ea.mem.off = _regs.esi; break;
589 case 5: ea.mem.off = _regs.edi; break;
590 case 6: ea.mem.off = _regs.ebp; break;
591 case 7: ea.mem.off = _regs.ebx; break;
592 }
593 switch ( modrm_mod )
594 {
595 case 0:
596 if ( modrm_rm == 6 )
597 ea.mem.off = insn_fetch_type(int16_t);
598 break;
599 case 1:
600 ea.mem.off += insn_fetch_type(int8_t);
601 break;
602 case 2:
603 ea.mem.off += insn_fetch_type(int16_t);
604 break;
605 }
606 ea.mem.off = truncate_ea(ea.mem.off);
607 }
608 else
609 {
610 /* 32/64-bit ModR/M decode. */
611 if ( modrm_rm == 4 )
612 {
613 sib = insn_fetch_type(uint8_t);
614 sib_index = ((sib >> 3) & 7) | ((rex_prefix << 2) & 8);
615 sib_base = (sib & 7) | ((rex_prefix << 3) & 8);
616 if ( sib_index != 4 )
617 ea.mem.off = *(long*)decode_register(sib_index, &_regs, 0);
618 ea.mem.off <<= (sib >> 6) & 3;
619 if ( (modrm_mod == 0) && ((sib_base & 7) == 5) )
620 ea.mem.off += insn_fetch_type(int32_t);
621 else if ( (sib_base == 4) && !twobyte && (b == 0x8f) )
622 /* POP <rm> must have its EA calculated post increment. */
623 ea.mem.off += _regs.esp +
624 (((mode == X86EMUL_MODE_PROT64) && (op_bytes == 4))
625 ? 8 : op_bytes);
626 else
627 ea.mem.off += *(long*)decode_register(sib_base, &_regs, 0);
628 }
629 else
630 {
631 modrm_rm |= (rex_prefix & 1) << 3;
632 ea.mem.off = *(long *)decode_register(modrm_rm, &_regs, 0);
633 }
634 switch ( modrm_mod )
635 {
636 case 0:
637 if ( (modrm_rm & 7) != 5 )
638 break;
639 ea.mem.off = insn_fetch_type(int32_t);
640 if ( mode != X86EMUL_MODE_PROT64 )
641 break;
642 /* Relative to RIP of next instruction. Argh! */
643 ea.mem.off += _regs.eip;
644 if ( (d & SrcMask) == SrcImm )
645 ea.mem.off += (d & ByteOp) ? 1 :
646 ((op_bytes == 8) ? 4 : op_bytes);
647 else if ( (d & SrcMask) == SrcImmByte )
648 ea.mem.off += 1;
649 else if ( ((b == 0xf6) || (b == 0xf7)) &&
650 ((modrm_reg & 7) <= 1) )
651 /* Special case in Grp3: test has immediate operand. */
652 ea.mem.off += (d & ByteOp) ? 1
653 : ((op_bytes == 8) ? 4 : op_bytes);
654 break;
655 case 1:
656 ea.mem.off += insn_fetch_type(int8_t);
657 break;
658 case 2:
659 ea.mem.off += insn_fetch_type(int32_t);
660 break;
661 }
662 ea.mem.off = truncate_ea(ea.mem.off);
663 }
664 }
666 /* Special instructions do their own operand decoding. */
667 if ( (d & DstMask) == ImplicitOps )
668 goto special_insn;
670 /* Decode and fetch the source operand: register, memory or immediate. */
671 switch ( d & SrcMask )
672 {
673 case SrcNone:
674 break;
675 case SrcReg:
676 src.type = OP_REG;
677 if ( d & ByteOp )
678 {
679 src.reg = decode_register(modrm_reg, &_regs, (rex_prefix == 0));
680 src.val = *(uint8_t *)src.reg;
681 src.bytes = 1;
682 }
683 else
684 {
685 src.reg = decode_register(modrm_reg, &_regs, 0);
686 switch ( (src.bytes = op_bytes) )
687 {
688 case 2: src.val = *(uint16_t *)src.reg; break;
689 case 4: src.val = *(uint32_t *)src.reg; break;
690 case 8: src.val = *(uint64_t *)src.reg; break;
691 }
692 }
693 break;
694 case SrcMem16:
695 ea.bytes = 2;
696 goto srcmem_common;
697 case SrcMem32:
698 ea.bytes = 4;
699 goto srcmem_common;
700 case SrcMem:
701 ea.bytes = (d & ByteOp) ? 1 : op_bytes;
702 srcmem_common:
703 src = ea;
704 if ( src.type == OP_REG )
705 {
706 switch ( src.bytes )
707 {
708 case 1: src.val = *(uint8_t *)src.reg; break;
709 case 2: src.val = *(uint16_t *)src.reg; break;
710 case 4: src.val = *(uint32_t *)src.reg; break;
711 case 8: src.val = *(uint64_t *)src.reg; break;
712 }
713 }
714 else if ( (rc = ops->read(src.mem.seg, src.mem.off,
715 &src.val, src.bytes, ctxt)) )
716 goto done;
717 break;
718 case SrcImm:
719 src.type = OP_IMM;
720 src.bytes = (d & ByteOp) ? 1 : op_bytes;
721 if ( src.bytes == 8 ) src.bytes = 4;
722 /* NB. Immediates are sign-extended as necessary. */
723 switch ( src.bytes )
724 {
725 case 1: src.val = insn_fetch_type(int8_t); break;
726 case 2: src.val = insn_fetch_type(int16_t); break;
727 case 4: src.val = insn_fetch_type(int32_t); break;
728 }
729 break;
730 case SrcImmByte:
731 src.type = OP_IMM;
732 src.bytes = 1;
733 src.val = insn_fetch_type(int8_t);
734 break;
735 }
737 /* Decode and fetch the destination operand: register or memory. */
738 switch ( d & DstMask )
739 {
740 case DstReg:
741 dst.type = OP_REG;
742 if ( d & ByteOp )
743 {
744 dst.reg = decode_register(modrm_reg, &_regs, (rex_prefix == 0));
745 dst.val = *(uint8_t *)dst.reg;
746 dst.bytes = 1;
747 }
748 else
749 {
750 dst.reg = decode_register(modrm_reg, &_regs, 0);
751 switch ( (dst.bytes = op_bytes) )
752 {
753 case 2: dst.val = *(uint16_t *)dst.reg; break;
754 case 4: dst.val = *(uint32_t *)dst.reg; break;
755 case 8: dst.val = *(uint64_t *)dst.reg; break;
756 }
757 }
758 break;
759 case DstBitBase:
760 if ( ((d & SrcMask) == SrcImmByte) || (ea.type == OP_REG) )
761 {
762 src.val &= (op_bytes << 3) - 1;
763 }
764 else
765 {
766 /*
767 * EA += BitOffset DIV op_bytes*8
768 * BitOffset = BitOffset MOD op_byte*8
769 * DIV truncates towards negative infinity.
770 * MOD always produces a positive result.
771 */
772 if ( op_bytes == 2 )
773 src.val = (int16_t)src.val;
774 else if ( op_bytes == 4 )
775 src.val = (int32_t)src.val;
776 if ( (long)src.val < 0 )
777 {
778 unsigned long byte_offset;
779 byte_offset = op_bytes + (((-src.val-1) >> 3) & ~(op_bytes-1));
780 ea.mem.off -= byte_offset;
781 src.val = (byte_offset << 3) + src.val;
782 }
783 else
784 {
785 ea.mem.off += (src.val >> 3) & ~(op_bytes - 1);
786 src.val &= (op_bytes << 3) - 1;
787 }
788 }
789 /* Becomes a normal DstMem operation from here on. */
790 d = (d & ~DstMask) | DstMem;
791 case DstMem:
792 ea.bytes = (d & ByteOp) ? 1 : op_bytes;
793 dst = ea;
794 if ( dst.type == OP_REG )
795 {
796 switch ( dst.bytes )
797 {
798 case 1: dst.val = *(uint8_t *)dst.reg; break;
799 case 2: dst.val = *(uint16_t *)dst.reg; break;
800 case 4: dst.val = *(uint32_t *)dst.reg; break;
801 case 8: dst.val = *(uint64_t *)dst.reg; break;
802 }
803 }
804 else if ( !(d & Mov) && /* optimisation - avoid slow emulated read */
805 (rc = ops->read(dst.mem.seg, dst.mem.off,
806 &dst.val, dst.bytes, ctxt)) )
807 goto done;
808 break;
809 }
810 dst.orig_val = dst.val;
812 if ( twobyte )
813 goto twobyte_insn;
815 switch ( b )
816 {
817 case 0x04 ... 0x05: /* add imm,%%eax */
818 dst.reg = (unsigned long *)&_regs.eax;
819 dst.val = dst.orig_val = _regs.eax;
820 case 0x00 ... 0x03: add: /* add */
821 emulate_2op_SrcV("add", src, dst, _regs.eflags);
822 break;
823 case 0x0c ... 0x0d: /* or imm,%%eax */
824 dst.reg = (unsigned long *)&_regs.eax;
825 dst.val = dst.orig_val = _regs.eax;
826 case 0x08 ... 0x0b: or: /* or */
827 emulate_2op_SrcV("or", src, dst, _regs.eflags);
828 break;
829 case 0x14 ... 0x15: /* adc imm,%%eax */
830 dst.reg = (unsigned long *)&_regs.eax;
831 dst.val = dst.orig_val = _regs.eax;
832 case 0x10 ... 0x13: adc: /* adc */
833 emulate_2op_SrcV("adc", src, dst, _regs.eflags);
834 break;
835 case 0x1c ... 0x1d: /* sbb imm,%%eax */
836 dst.reg = (unsigned long *)&_regs.eax;
837 dst.val = dst.orig_val = _regs.eax;
838 case 0x18 ... 0x1b: sbb: /* sbb */
839 emulate_2op_SrcV("sbb", src, dst, _regs.eflags);
840 break;
841 case 0x24 ... 0x25: /* and imm,%%eax */
842 dst.reg = (unsigned long *)&_regs.eax;
843 dst.val = dst.orig_val = _regs.eax;
844 case 0x20 ... 0x23: and: /* and */
845 emulate_2op_SrcV("and", src, dst, _regs.eflags);
846 break;
847 case 0x2c ... 0x2d: /* sub imm,%%eax */
848 dst.reg = (unsigned long *)&_regs.eax;
849 dst.val = dst.orig_val = _regs.eax;
850 case 0x28 ... 0x2b: sub: /* sub */
851 emulate_2op_SrcV("sub", src, dst, _regs.eflags);
852 break;
853 case 0x34 ... 0x35: /* xor imm,%%eax */
854 dst.reg = (unsigned long *)&_regs.eax;
855 dst.val = dst.orig_val = _regs.eax;
856 case 0x30 ... 0x33: xor: /* xor */
857 emulate_2op_SrcV("xor", src, dst, _regs.eflags);
858 break;
859 case 0x3c ... 0x3d: /* cmp imm,%%eax */
860 dst.reg = (unsigned long *)&_regs.eax;
861 dst.val = dst.orig_val = _regs.eax;
862 case 0x38 ... 0x3b: cmp: /* cmp */
863 emulate_2op_SrcV("cmp", src, dst, _regs.eflags);
864 break;
865 case 0x63: /* movsxd */
866 if ( mode != X86EMUL_MODE_PROT64 )
867 goto cannot_emulate;
868 dst.val = (int32_t)src.val;
869 break;
870 case 0x80 ... 0x83: /* Grp1 */
871 switch ( modrm_reg & 7 )
872 {
873 case 0: goto add;
874 case 1: goto or;
875 case 2: goto adc;
876 case 3: goto sbb;
877 case 4: goto and;
878 case 5: goto sub;
879 case 6: goto xor;
880 case 7: goto cmp;
881 }
882 break;
883 case 0x84 ... 0x85: test: /* test */
884 emulate_2op_SrcV("test", src, dst, _regs.eflags);
885 break;
886 case 0x86 ... 0x87: xchg: /* xchg */
887 /* Write back the register source. */
888 switch ( dst.bytes )
889 {
890 case 1: *(uint8_t *)src.reg = (uint8_t)dst.val; break;
891 case 2: *(uint16_t *)src.reg = (uint16_t)dst.val; break;
892 case 4: *src.reg = (uint32_t)dst.val; break; /* 64b reg: zero-extend */
893 case 8: *src.reg = dst.val; break;
894 }
895 /* Write back the memory destination with implicit LOCK prefix. */
896 dst.val = src.val;
897 lock_prefix = 1;
898 break;
899 case 0x88 ... 0x8b: /* mov */
900 case 0xc6 ... 0xc7: /* mov (sole member of Grp11) */
901 dst.val = src.val;
902 break;
903 case 0x8d: /* lea */
904 dst.val = ea.mem.off;
905 break;
906 case 0x8f: /* pop (sole member of Grp1a) */
907 /* 64-bit mode: POP defaults to a 64-bit operand. */
908 if ( (mode == X86EMUL_MODE_PROT64) && (dst.bytes == 4) )
909 dst.bytes = 8;
910 if ( (rc = ops->read(x86_seg_ss, truncate_ea(_regs.esp),
911 &dst.val, dst.bytes, ctxt)) != 0 )
912 goto done;
913 register_address_increment(_regs.esp, dst.bytes);
914 break;
915 case 0xb0 ... 0xb7: /* mov imm8,r8 */
916 dst.reg = decode_register(
917 (b & 7) | ((rex_prefix & 1) << 3), &_regs, (rex_prefix == 0));
918 dst.val = src.val;
919 break;
920 case 0xb8 ... 0xbf: /* mov imm{16,32,64},r{16,32,64} */
921 if ( dst.bytes == 8 ) /* Fetch more bytes to obtain imm64 */
922 src.val = ((uint32_t)src.val |
923 ((uint64_t)insn_fetch_type(uint32_t) << 32));
924 dst.reg = decode_register(
925 (b & 7) | ((rex_prefix & 1) << 3), &_regs, 0);
926 dst.val = src.val;
927 break;
928 case 0xc0 ... 0xc1: grp2: /* Grp2 */
929 switch ( modrm_reg & 7 )
930 {
931 case 0: /* rol */
932 emulate_2op_SrcB("rol", src, dst, _regs.eflags);
933 break;
934 case 1: /* ror */
935 emulate_2op_SrcB("ror", src, dst, _regs.eflags);
936 break;
937 case 2: /* rcl */
938 emulate_2op_SrcB("rcl", src, dst, _regs.eflags);
939 break;
940 case 3: /* rcr */
/*
 * NOTE(review): this span is the interior/tail of the x86_emulate main
 * opcode switch plus its exit labels (writeback/done/special_insn/
 * twobyte_insn/twobyte_special_insn/cannot_emulate). The function header
 * and earlier cases are outside this excerpt. The extraction this text
 * came from dropped brace-only lines (the embedded source line numbers
 * skip, e.g. 1002->1004, 1017->1019) -- compare against the original
 * changeset before editing code here.
 */
941 emulate_2op_SrcB("rcr", src, dst, _regs.eflags);
942 break;
943 case 4: /* sal/shl */
944 case 6: /* sal/shl */
945 emulate_2op_SrcB("sal", src, dst, _regs.eflags);
946 break;
947 case 5: /* shr */
948 emulate_2op_SrcB("shr", src, dst, _regs.eflags);
949 break;
950 case 7: /* sar */
951 emulate_2op_SrcB("sar", src, dst, _regs.eflags);
952 break;
953 }
954 break;
/* Grp2 with fixed shift count of 1 (opcodes D0/D1). */
955 case 0xd0 ... 0xd1: /* Grp2 */
956 src.val = 1;
957 goto grp2;
/* Grp2 with shift count taken from CL (opcodes D2/D3). */
958 case 0xd2 ... 0xd3: /* Grp2 */
959 src.val = _regs.ecx;
960 goto grp2;
/* Grp3 (F6/F7): sub-opcode selected by ModRM reg field. */
961 case 0xf6 ... 0xf7: /* Grp3 */
962 switch ( modrm_reg & 7 )
963 {
964 case 0 ... 1: /* test */
965 /* Special case in Grp3: test has an immediate source operand. */
966 src.type = OP_IMM;
967 src.bytes = (d & ByteOp) ? 1 : op_bytes;
/* 64-bit test takes a 32-bit immediate (sign-extended), never 8 bytes. */
968 if ( src.bytes == 8 ) src.bytes = 4;
969 switch ( src.bytes )
970 {
971 case 1: src.val = insn_fetch_type(int8_t); break;
972 case 2: src.val = insn_fetch_type(int16_t); break;
973 case 4: src.val = insn_fetch_type(int32_t); break;
974 }
975 goto test;
976 case 2: /* not */
977 dst.val = ~dst.val;
978 break;
979 case 3: /* neg */
980 emulate_1op("neg", dst, _regs.eflags);
981 break;
982 default:
/* mul/imul/div/idiv (reg 4..7) are not handled by this emulator. */
983 goto cannot_emulate;
984 }
985 break;
/* Grp4/Grp5 (FE/FF): sub-opcode selected by ModRM reg field. */
986 case 0xfe ... 0xff: /* Grp4/Grp5 */
987 switch ( modrm_reg & 7 )
988 {
989 case 0: /* inc */
990 emulate_1op("inc", dst, _regs.eflags);
991 break;
992 case 1: /* dec */
993 emulate_1op("dec", dst, _regs.eflags);
994 break;
995 case 6: /* push */
996 /* 64-bit mode: PUSH defaults to a 64-bit operand. */
997 if ( (mode == X86EMUL_MODE_PROT64) && (dst.bytes == 4) )
998 {
999 dst.bytes = 8;
/* Re-read the full 8-byte operand; the earlier decode read only 4. */
1000 if ( (rc = ops->read(dst.mem.seg, dst.mem.off,
1001 &dst.val, 8, ctxt)) != 0 )
1002 goto done;
1004 register_address_increment(_regs.esp, -dst.bytes);
1005 if ( (rc = ops->write(x86_seg_ss, truncate_ea(_regs.esp),
1006 dst.val, dst.bytes, ctxt)) != 0 )
1007 goto done;
1008 dst.val = dst.orig_val; /* skanky: disable writeback */
1009 break;
1010 default:
1011 goto cannot_emulate;
1013 break;
/*
 * Common write-back path: commit the destination operand. Skipped when
 * the value is unchanged unless the opcode is a pure move (Mov flag).
 */
1016 writeback:
1017 if ( (d & Mov) || (dst.orig_val != dst.val) )
1019 switch ( dst.type )
1021 case OP_REG:
1022 /* The 4-byte case *is* correct: in 64-bit mode we zero-extend. */
1023 switch ( dst.bytes )
1025 case 1: *(uint8_t *)dst.reg = (uint8_t)dst.val; break;
1026 case 2: *(uint16_t *)dst.reg = (uint16_t)dst.val; break;
1027 case 4: *dst.reg = (uint32_t)dst.val; break; /* 64b: zero-ext */
1028 case 8: *dst.reg = dst.val; break;
1030 break;
1031 case OP_MEM:
/* A LOCK prefix requires an atomic compare-and-swap, not a plain write. */
1032 if ( lock_prefix )
1033 rc = ops->cmpxchg(
1034 dst.mem.seg, dst.mem.off, dst.orig_val,
1035 dst.val, dst.bytes, ctxt);
1036 else
1037 rc = ops->write(
1038 dst.mem.seg, dst.mem.off, dst.val, dst.bytes, ctxt);
1039 if ( rc != 0 )
1040 goto done;
1041 default:
1042 break;
1046 /* Commit shadow register state. */
1047 *ctxt->regs = _regs;
/* Single exit point: fold any handler failure into -1, success into 0. */
1049 done:
1050 return (rc == X86EMUL_UNHANDLEABLE) ? -1 : 0;
/*
 * Opcodes with no generic ModRM destination decode. REP-prefixed string
 * ops emulate exactly ONE iteration per call: ECX is decremented and EIP
 * is rewound to the start of the instruction so it re-faults and repeats.
 */
1052 special_insn:
1053 /* Default action: disable writeback. There may be no dest operand. */
1054 dst.orig_val = dst.val;
1055 if ( twobyte )
1056 goto twobyte_special_insn;
1057 if ( rep_prefix )
1059 if ( _regs.ecx == 0 )
1061 ctxt->regs->eip = _regs.eip;
1062 goto done;
1064 _regs.ecx--;
1065 _regs.eip = ctxt->regs->eip;
1067 switch ( b )
1069 case 0x40 ... 0x4f: /* inc/dec reg */
1070 dst.type = OP_REG;
1071 dst.reg = decode_register(b & 7, &_regs, 0);
1072 dst.bytes = op_bytes;
1073 dst.orig_val = dst.val = *dst.reg;
/* Opcodes 0x48-0x4f are dec; 0x40-0x47 are inc. */
1074 if ( b & 8 )
1075 emulate_1op("dec", dst, _regs.eflags);
1076 else
1077 emulate_1op("inc", dst, _regs.eflags);
1078 break;
1079 case 0x50 ... 0x57: /* push reg */
1080 dst.type = OP_MEM;
1081 dst.bytes = op_bytes;
/* 64-bit mode: PUSH defaults to a 64-bit operand. */
1082 if ( (mode == X86EMUL_MODE_PROT64) && (dst.bytes == 4) )
1083 dst.bytes = 8;
/* REX.B extends the register number for r8-r15. */
1084 dst.val = *(unsigned long *)decode_register(
1085 (b & 7) | ((rex_prefix & 1) << 3), &_regs, 0);
1086 register_address_increment(_regs.esp, -dst.bytes);
1087 dst.mem.seg = x86_seg_ss;
1088 dst.mem.off = truncate_ea(_regs.esp);
1089 break;
1090 case 0x58 ... 0x5f: /* pop reg */
1091 dst.type = OP_REG;
1092 dst.reg = decode_register(
1093 (b & 7) | ((rex_prefix & 1) << 3), &_regs, 0);
1094 dst.bytes = op_bytes;
/* 64-bit mode: POP defaults to a 64-bit operand. */
1095 if ( (mode == X86EMUL_MODE_PROT64) && (dst.bytes == 4) )
1096 dst.bytes = 8;
1097 if ( (rc = ops->read(x86_seg_ss, truncate_ea(_regs.esp),
1098 &dst.val, dst.bytes, ctxt)) != 0 )
1099 goto done;
1100 register_address_increment(_regs.esp, dst.bytes);
1101 break;
1102 case 0x90: /* nop / xchg %%r8,%%rax */
/* Without REX.B this is plain NOP; with it, fall through to xchg r8. */
1103 if ( !(rex_prefix & 1) )
1104 break; /* nop */
1105 case 0x91 ... 0x97: /* xchg reg,%%rax */
1106 src.type = dst.type = OP_REG;
1107 src.bytes = dst.bytes = op_bytes;
1108 src.reg = (unsigned long *)&_regs.eax;
1109 src.val = *src.reg;
1110 dst.reg = decode_register(
1111 (b & 7) | ((rex_prefix & 1) << 3), &_regs, 0);
1112 dst.val = dst.orig_val = *dst.reg;
1113 goto xchg;
1114 case 0xa0 ... 0xa1: /* mov mem.offs,{%al,%ax,%eax,%rax} */
1115 /* Source EA is not encoded via ModRM. */
1116 dst.type = OP_REG;
1117 dst.reg = (unsigned long *)&_regs.eax;
1118 dst.bytes = (d & ByteOp) ? 1 : op_bytes;
/* Moffs operand: an address-size-wide absolute offset follows the opcode. */
1119 if ( (rc = ops->read(ea.mem.seg, insn_fetch_bytes(ad_bytes),
1120 &dst.val, dst.bytes, ctxt)) != 0 )
1121 goto done;
1122 break;
1123 case 0xa2 ... 0xa3: /* mov {%al,%ax,%eax,%rax},mem.offs */
1124 /* Destination EA is not encoded via ModRM. */
1125 dst.type = OP_MEM;
1126 dst.mem.seg = ea.mem.seg;
1127 dst.mem.off = insn_fetch_bytes(ad_bytes);
1128 dst.bytes = (d & ByteOp) ? 1 : op_bytes;
1129 dst.val = (unsigned long)_regs.eax;
1130 break;
1131 case 0xa4 ... 0xa5: /* movs */
/* Read from DS:rSI (or segment override), write to ES:rDI. */
1132 dst.type = OP_MEM;
1133 dst.bytes = (d & ByteOp) ? 1 : op_bytes;
1134 dst.mem.seg = x86_seg_es;
1135 dst.mem.off = truncate_ea(_regs.edi);
1136 if ( (rc = ops->read(ea.mem.seg, truncate_ea(_regs.esi),
1137 &dst.val, dst.bytes, ctxt)) != 0 )
1138 goto done;
/* DF selects direction: decrement pointers when set, else increment. */
1139 register_address_increment(
1140 _regs.esi, (_regs.eflags & EFLG_DF) ? -dst.bytes : dst.bytes);
1141 register_address_increment(
1142 _regs.edi, (_regs.eflags & EFLG_DF) ? -dst.bytes : dst.bytes);
1143 break;
1144 case 0xaa ... 0xab: /* stos */
1145 dst.type = OP_MEM;
1146 dst.bytes = (d & ByteOp) ? 1 : op_bytes;
1147 dst.mem.seg = x86_seg_es;
1148 dst.mem.off = truncate_ea(_regs.edi);
1149 dst.val = _regs.eax;
1150 register_address_increment(
1151 _regs.edi, (_regs.eflags & EFLG_DF) ? -dst.bytes : dst.bytes);
1152 break;
1153 case 0xac ... 0xad: /* lods */
1154 dst.type = OP_REG;
1155 dst.bytes = (d & ByteOp) ? 1 : op_bytes;
1156 dst.reg = (unsigned long *)&_regs.eax;
1157 if ( (rc = ops->read(ea.mem.seg, truncate_ea(_regs.esi),
1158 &dst.val, dst.bytes, ctxt)) != 0 )
1159 goto done;
1160 register_address_increment(
1161 _regs.esi, (_regs.eflags & EFLG_DF) ? -dst.bytes : dst.bytes);
1162 break;
1163 case 0xf5: /* cmc */
1164 _regs.eflags ^= EFLG_CF;
1165 break;
1166 case 0xf8: /* clc */
1167 _regs.eflags &= ~EFLG_CF;
1168 break;
1169 case 0xf9: /* stc */
1170 _regs.eflags |= EFLG_CF;
1171 break;
1172 case 0xfc: /* cld */
1173 _regs.eflags &= ~EFLG_DF;
1174 break;
1175 case 0xfd: /* std */
1176 _regs.eflags |= EFLG_DF;
1177 break;
1179 goto writeback;
/* Two-byte (0x0F-prefixed) opcodes with generic operand decode. */
1181 twobyte_insn:
1182 switch ( b )
1184 case 0x40 ... 0x4f: /* cmov */
1185 dst.val = dst.orig_val = src.val;
1186 d &= ~Mov; /* default to no move */
1187 /* First, assume we're decoding an even cmov opcode (lsb == 0). */
1188 switch ( (b & 15) >> 1 )
1190 case 0: /* cmovo */
1191 d |= (_regs.eflags & EFLG_OF) ? Mov : 0;
1192 break;
1193 case 1: /* cmovb/cmovc/cmovnae */
1194 d |= (_regs.eflags & EFLG_CF) ? Mov : 0;
1195 break;
1196 case 2: /* cmovz/cmove */
1197 d |= (_regs.eflags & EFLG_ZF) ? Mov : 0;
1198 break;
1199 case 3: /* cmovbe/cmovna */
1200 d |= (_regs.eflags & (EFLG_CF|EFLG_ZF)) ? Mov : 0;
1201 break;
1202 case 4: /* cmovs */
1203 d |= (_regs.eflags & EFLG_SF) ? Mov : 0;
1204 break;
1205 case 5: /* cmovp/cmovpe */
1206 d |= (_regs.eflags & EFLG_PF) ? Mov : 0;
1207 break;
1208 case 7: /* cmovle/cmovng */
/* cmovle moves on ZF=1 OR SF!=OF, hence the fall-through below. */
1209 d |= (_regs.eflags & EFLG_ZF) ? Mov : 0;
1210 /* fall through */
1211 case 6: /* cmovl/cmovnge */
1212 d |= (!(_regs.eflags & EFLG_SF) != !(_regs.eflags & EFLG_OF)) ?
1213 Mov : 0;
1214 break;
1216 /* Odd cmov opcodes (lsb == 1) have inverted sense. */
1217 d ^= (b & 1) ? Mov : 0;
1218 break;
1219 case 0xb0 ... 0xb1: /* cmpxchg */
1220 /* Save real source value, then compare EAX against destination. */
1221 src.orig_val = src.val;
1222 src.val = _regs.eax;
1223 emulate_2op_SrcV("cmp", src, dst, _regs.eflags);
1224 /* Always write back. The question is: where to? */
1225 d |= Mov;
1226 if ( _regs.eflags & EFLG_ZF )
1228 /* Success: write back to memory. */
1229 dst.val = src.orig_val;
1231 else
1233 /* Failure: write the value we saw to EAX. */
1234 dst.type = OP_REG;
1235 dst.reg = (unsigned long *)&_regs.eax;
1237 break;
1238 case 0xa3: bt: /* bt */
1239 emulate_2op_SrcV_nobyte("bt", src, dst, _regs.eflags);
1240 break;
1241 case 0xb3: btr: /* btr */
1242 emulate_2op_SrcV_nobyte("btr", src, dst, _regs.eflags);
1243 break;
1244 case 0xab: bts: /* bts */
1245 emulate_2op_SrcV_nobyte("bts", src, dst, _regs.eflags);
1246 break;
1247 case 0xb6: /* movzx rm8,r{16,32,64} */
1248 /* Recompute DstReg as we may have decoded AH/BH/CH/DH. */
1249 dst.reg = decode_register(modrm_reg, &_regs, 0);
1250 dst.bytes = op_bytes;
1251 dst.val = (uint8_t)src.val;
1252 break;
1253 case 0xb7: /* movzx rm16,r{16,32,64} */
1254 dst.val = (uint16_t)src.val;
1255 break;
1256 case 0xbb: btc: /* btc */
1257 emulate_2op_SrcV_nobyte("btc", src, dst, _regs.eflags);
1258 break;
/* Grp8 (0F BA): bit-test with immediate; reg field picks bt/bts/btr/btc. */
1259 case 0xba: /* Grp8 */
1260 switch ( modrm_reg & 3 )
1262 case 0: goto bt;
1263 case 1: goto bts;
1264 case 2: goto btr;
1265 case 3: goto btc;
1267 break;
1268 case 0xbe: /* movsx rm8,r{16,32,64} */
1269 /* Recompute DstReg as we may have decoded AH/BH/CH/DH. */
1270 dst.reg = decode_register(modrm_reg, &_regs, 0);
1271 dst.bytes = op_bytes;
1272 dst.val = (int8_t)src.val;
1273 break;
1274 case 0xbf: /* movsx rm16,r{16,32,64} */
1275 dst.val = (int16_t)src.val;
1276 break;
1277 case 0xc0 ... 0xc1: /* xadd */
1278 /* Write back the register source. */
1279 switch ( dst.bytes )
1281 case 1: *(uint8_t *)src.reg = (uint8_t)dst.val; break;
1282 case 2: *(uint16_t *)src.reg = (uint16_t)dst.val; break;
1283 case 4: *src.reg = (uint32_t)dst.val; break; /* 64b reg: zero-extend */
1284 case 8: *src.reg = dst.val; break;
/* Then perform the add and fall into the common writeback for the dest. */
1286 goto add;
1288 goto writeback;
/* Two-byte opcodes with no generic decode (prefetch hints, cmpxchg8b). */
1290 twobyte_special_insn:
1291 switch ( b )
1293 case 0x0d: /* GrpP (prefetch) */
1294 case 0x18: /* Grp16 (prefetch/nop) */
1295 break;
1296 case 0xc7: /* Grp9 (cmpxchg8b) */
1297 #if defined(__i386__)
/* i386: read the old 8-byte value as two 4-byte halves. */
1299 unsigned long old_lo, old_hi;
1300 if ( (rc = ops->read(ea.mem.seg, ea.mem.off+0, &old_lo, 4, ctxt)) ||
1301 (rc = ops->read(ea.mem.seg, ea.mem.off+4, &old_hi, 4, ctxt)) )
1302 goto done;
1303 if ( (old_lo != _regs.eax) || (old_hi != _regs.edx) )
/* Mismatch: return the observed value in EDX:EAX and clear ZF. */
1305 _regs.eax = old_lo;
1306 _regs.edx = old_hi;
1307 _regs.eflags &= ~EFLG_ZF;
1309 else if ( ops->cmpxchg8b == NULL )
/* Match, but the handler lacks an 8-byte cmpxchg: cannot proceed. */
1311 rc = X86EMUL_UNHANDLEABLE;
1312 goto done;
1314 else
1316 if ( (rc = ops->cmpxchg8b(ea.mem.seg, ea.mem.off, old_lo, old_hi,
1317 _regs.ebx, _regs.ecx, ctxt)) != 0 )
1318 goto done;
1319 _regs.eflags |= EFLG_ZF;
1321 break;
1323 #elif defined(__x86_64__)
/* x86_64: a single 8-byte read/cmpxchg suffices. */
1325 unsigned long old, new;
1326 if ( (rc = ops->read(ea.mem.seg, ea.mem.off, &old, 8, ctxt)) != 0 )
1327 goto done;
1328 if ( ((uint32_t)(old>>0) != (uint32_t)_regs.eax) ||
1329 ((uint32_t)(old>>32) != (uint32_t)_regs.edx) )
1331 _regs.eax = (uint32_t)(old>>0);
1332 _regs.edx = (uint32_t)(old>>32);
1333 _regs.eflags &= ~EFLG_ZF;
1335 else
1337 new = (_regs.ecx<<32)|(uint32_t)_regs.ebx;
1338 if ( (rc = ops->cmpxchg(ea.mem.seg, ea.mem.off, old,
1339 new, 8, ctxt)) != 0 )
1340 goto done;
1341 _regs.eflags |= EFLG_ZF;
1343 break;
1345 #endif
1347 goto writeback;
/*
 * Undecodable instruction: dump its raw bytes (Xen build only) and fail.
 * ea.mem.off is reused as a scratch cursor from the original to the
 * post-decode instruction pointer.
 */
1349 cannot_emulate:
1350 #ifdef __XEN__
1351 gdprintk(XENLOG_DEBUG, "Instr:");
1352 for ( ea.mem.off = ctxt->regs->eip; ea.mem.off < _regs.eip; ea.mem.off++ )
1354 unsigned long x;
1355 ops->insn_fetch(x86_seg_cs, ea.mem.off, &x, 1, ctxt);
1356 printk(" %02x", (uint8_t)x);
1358 printk("\n");
1359 #endif
1360 return -1;