ia64/xen-unstable

view xen/arch/x86/x86_emulate.c @ 13425:e079f1ff6744

[TESTS] Fix native jump to 1MB absolute address in emulator test.
Fix typo in emulator comment.
Signed-off-by: Keir Fraser <keir@xensource.com>
author kaf24@localhost.localdomain
date Sat Jan 13 21:36:31 2007 +0000 (2007-01-13)
parents ecf6a0a05350
children eb19c2745b80
/******************************************************************************
 * x86_emulate.c
 *
 * Generic x86 (32-bit and 64-bit) instruction decoder and emulator.
 *
 * Copyright (c) 2005 Keir Fraser
 */

#ifndef __XEN__
#include <stddef.h>
#include <stdint.h>
#include <public/xen.h>
#else
#include <xen/config.h>
#include <xen/types.h>
#include <xen/lib.h>
#include <asm/regs.h>
#undef cmpxchg
#endif
#include <asm-x86/x86_emulate.h>

/* Operand sizes: 8-bit operands or specified/overridden size. */
#define ByteOp      (1<<0) /* 8-bit operands. */
/* Destination operand type. */
#define DstBitBase  (0<<1) /* Memory operand, bit string. */
#define ImplicitOps (1<<1) /* Implicit in opcode. No generic decode. */
#define DstReg      (2<<1) /* Register operand. */
#define DstMem      (3<<1) /* Memory operand. */
#define DstMask     (3<<1)
/* Source operand type. */
#define SrcNone     (0<<3) /* No source operand. */
#define SrcImplicit (0<<3) /* Source operand is implicit in the opcode. */
#define SrcReg      (1<<3) /* Register operand. */
#define SrcMem      (2<<3) /* Memory operand. */
#define SrcMem16    (3<<3) /* Memory operand (16-bit). */
#define SrcMem32    (4<<3) /* Memory operand (32-bit). */
#define SrcImm      (5<<3) /* Immediate operand. */
#define SrcImmByte  (6<<3) /* 8-bit sign-extended immediate operand. */
#define SrcMask     (7<<3)
/* Generic ModRM decode. */
#define ModRM       (1<<6)
/* Destination is only written; never read. */
#define Mov         (1<<7)
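
/*
 * Example decode: opcode_table[0x00] below is ByteOp|DstMem|SrcReg|ModRM,
 * i.e. ADD r/m8,r8 -- an 8-bit, ModRM-encoded memory/register destination
 * with a register source.
 */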
static uint8_t opcode_table[256] = {
    /* 0x00 - 0x07 */
    ByteOp|DstMem|SrcReg|ModRM, DstMem|SrcReg|ModRM,
    ByteOp|DstReg|SrcMem|ModRM, DstReg|SrcMem|ModRM,
    ByteOp|DstReg|SrcImm, DstReg|SrcImm, 0, 0,
    /* 0x08 - 0x0F */
    ByteOp|DstMem|SrcReg|ModRM, DstMem|SrcReg|ModRM,
    ByteOp|DstReg|SrcMem|ModRM, DstReg|SrcMem|ModRM,
    ByteOp|DstReg|SrcImm, DstReg|SrcImm, 0, 0,
    /* 0x10 - 0x17 */
    ByteOp|DstMem|SrcReg|ModRM, DstMem|SrcReg|ModRM,
    ByteOp|DstReg|SrcMem|ModRM, DstReg|SrcMem|ModRM,
    ByteOp|DstReg|SrcImm, DstReg|SrcImm, 0, 0,
    /* 0x18 - 0x1F */
    ByteOp|DstMem|SrcReg|ModRM, DstMem|SrcReg|ModRM,
    ByteOp|DstReg|SrcMem|ModRM, DstReg|SrcMem|ModRM,
    ByteOp|DstReg|SrcImm, DstReg|SrcImm, 0, 0,
    /* 0x20 - 0x27 */
    ByteOp|DstMem|SrcReg|ModRM, DstMem|SrcReg|ModRM,
    ByteOp|DstReg|SrcMem|ModRM, DstReg|SrcMem|ModRM,
    ByteOp|DstReg|SrcImm, DstReg|SrcImm, 0, ImplicitOps,
    /* 0x28 - 0x2F */
    ByteOp|DstMem|SrcReg|ModRM, DstMem|SrcReg|ModRM,
    ByteOp|DstReg|SrcMem|ModRM, DstReg|SrcMem|ModRM,
    ByteOp|DstReg|SrcImm, DstReg|SrcImm, 0, ImplicitOps,
    /* 0x30 - 0x37 */
    ByteOp|DstMem|SrcReg|ModRM, DstMem|SrcReg|ModRM,
    ByteOp|DstReg|SrcMem|ModRM, DstReg|SrcMem|ModRM,
    ByteOp|DstReg|SrcImm, DstReg|SrcImm, 0, ImplicitOps,
    /* 0x38 - 0x3F */
    ByteOp|DstMem|SrcReg|ModRM, DstMem|SrcReg|ModRM,
    ByteOp|DstReg|SrcMem|ModRM, DstReg|SrcMem|ModRM,
    ByteOp|DstReg|SrcImm, DstReg|SrcImm, 0, ImplicitOps,
    /* 0x40 - 0x4F */
    ImplicitOps, ImplicitOps, ImplicitOps, ImplicitOps,
    ImplicitOps, ImplicitOps, ImplicitOps, ImplicitOps,
    ImplicitOps, ImplicitOps, ImplicitOps, ImplicitOps,
    ImplicitOps, ImplicitOps, ImplicitOps, ImplicitOps,
    /* 0x50 - 0x5F */
    ImplicitOps|Mov, ImplicitOps|Mov, ImplicitOps|Mov, ImplicitOps|Mov,
    ImplicitOps|Mov, ImplicitOps|Mov, ImplicitOps|Mov, ImplicitOps|Mov,
    ImplicitOps|Mov, ImplicitOps|Mov, ImplicitOps|Mov, ImplicitOps|Mov,
    ImplicitOps|Mov, ImplicitOps|Mov, ImplicitOps|Mov, ImplicitOps|Mov,
    /* 0x60 - 0x67 */
    0, 0, 0, DstReg|SrcMem32|ModRM|Mov /* movsxd (x86/64) */,
    0, 0, 0, 0,
    /* 0x68 - 0x6F */
    ImplicitOps|Mov, 0, ImplicitOps|Mov, 0, 0, 0, 0, 0,
    /* 0x70 - 0x77 */
    ImplicitOps, ImplicitOps, ImplicitOps, ImplicitOps,
    ImplicitOps, ImplicitOps, ImplicitOps, ImplicitOps,
    /* 0x78 - 0x7F */
    ImplicitOps, ImplicitOps, ImplicitOps, ImplicitOps,
    ImplicitOps, ImplicitOps, ImplicitOps, ImplicitOps,
    /* 0x80 - 0x87 */
    ByteOp|DstMem|SrcImm|ModRM, DstMem|SrcImm|ModRM,
    ByteOp|DstMem|SrcImm|ModRM, DstMem|SrcImmByte|ModRM,
    ByteOp|DstMem|SrcReg|ModRM, DstMem|SrcReg|ModRM,
    ByteOp|DstMem|SrcReg|ModRM, DstMem|SrcReg|ModRM,
    /* 0x88 - 0x8F */
    ByteOp|DstMem|SrcReg|ModRM|Mov, DstMem|SrcReg|ModRM|Mov,
    ByteOp|DstReg|SrcMem|ModRM|Mov, DstReg|SrcMem|ModRM|Mov,
    0, DstReg|SrcNone|ModRM, 0, DstMem|SrcNone|ModRM|Mov,
    /* 0x90 - 0x97 */
    ImplicitOps, ImplicitOps, ImplicitOps, ImplicitOps,
    ImplicitOps, ImplicitOps, ImplicitOps, ImplicitOps,
    /* 0x98 - 0x9F */
    ImplicitOps, ImplicitOps, 0, 0, 0, 0, ImplicitOps, ImplicitOps,
    /* 0xA0 - 0xA7 */
    ByteOp|ImplicitOps|Mov, ImplicitOps|Mov,
    ByteOp|ImplicitOps|Mov, ImplicitOps|Mov,
    ByteOp|ImplicitOps|Mov, ImplicitOps|Mov, 0, 0,
    /* 0xA8 - 0xAF */
    ByteOp|DstReg|SrcImm, DstReg|SrcImm,
    ByteOp|ImplicitOps|Mov, ImplicitOps|Mov,
    ByteOp|ImplicitOps|Mov, ImplicitOps|Mov, 0, 0,
    /* 0xB0 - 0xB7 */
    ByteOp|DstReg|SrcImm|Mov, ByteOp|DstReg|SrcImm|Mov,
    ByteOp|DstReg|SrcImm|Mov, ByteOp|DstReg|SrcImm|Mov,
    ByteOp|DstReg|SrcImm|Mov, ByteOp|DstReg|SrcImm|Mov,
    ByteOp|DstReg|SrcImm|Mov, ByteOp|DstReg|SrcImm|Mov,
    /* 0xB8 - 0xBF */
    DstReg|SrcImm|Mov, DstReg|SrcImm|Mov, DstReg|SrcImm|Mov, DstReg|SrcImm|Mov,
    DstReg|SrcImm|Mov, DstReg|SrcImm|Mov, DstReg|SrcImm|Mov, DstReg|SrcImm|Mov,
    /* 0xC0 - 0xC7 */
    ByteOp|DstMem|SrcImm|ModRM, DstMem|SrcImmByte|ModRM,
    ImplicitOps, ImplicitOps,
    0, 0, ByteOp|DstMem|SrcImm|ModRM|Mov, DstMem|SrcImm|ModRM|Mov,
    /* 0xC8 - 0xCF */
    0, 0, 0, 0, 0, 0, 0, 0,
    /* 0xD0 - 0xD7 */
    ByteOp|DstMem|SrcImplicit|ModRM, DstMem|SrcImplicit|ModRM,
    ByteOp|DstMem|SrcImplicit|ModRM, DstMem|SrcImplicit|ModRM,
    ImplicitOps, ImplicitOps, ImplicitOps, ImplicitOps,
    /* 0xD8 - 0xDF */
    0, 0, 0, 0, 0, 0, 0, 0,
    /* 0xE0 - 0xE7 */
    ImplicitOps, ImplicitOps, ImplicitOps, ImplicitOps, 0, 0, 0, 0,
    /* 0xE8 - 0xEF */
    ImplicitOps, ImplicitOps, 0, ImplicitOps, 0, 0, 0, 0,
    /* 0xF0 - 0xF7 */
    0, 0, 0, 0,
    0, ImplicitOps, ByteOp|DstMem|SrcNone|ModRM, DstMem|SrcNone|ModRM,
    /* 0xF8 - 0xFF */
    ImplicitOps, ImplicitOps, 0, 0,
    ImplicitOps, ImplicitOps, ByteOp|DstMem|SrcNone|ModRM, DstMem|SrcNone|ModRM
};

static uint8_t twobyte_table[256] = {
    /* 0x00 - 0x0F */
    0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, ImplicitOps|ModRM, 0, 0,
    /* 0x10 - 0x1F */
    0, 0, 0, 0, 0, 0, 0, 0, ImplicitOps|ModRM, 0, 0, 0, 0, 0, 0, 0,
    /* 0x20 - 0x2F */
    0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
    /* 0x30 - 0x3F */
    0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
    /* 0x40 - 0x47 */
    DstReg|SrcMem|ModRM|Mov, DstReg|SrcMem|ModRM|Mov,
    DstReg|SrcMem|ModRM|Mov, DstReg|SrcMem|ModRM|Mov,
    DstReg|SrcMem|ModRM|Mov, DstReg|SrcMem|ModRM|Mov,
    DstReg|SrcMem|ModRM|Mov, DstReg|SrcMem|ModRM|Mov,
    /* 0x48 - 0x4F */
    DstReg|SrcMem|ModRM|Mov, DstReg|SrcMem|ModRM|Mov,
    DstReg|SrcMem|ModRM|Mov, DstReg|SrcMem|ModRM|Mov,
    DstReg|SrcMem|ModRM|Mov, DstReg|SrcMem|ModRM|Mov,
    DstReg|SrcMem|ModRM|Mov, DstReg|SrcMem|ModRM|Mov,
    /* 0x50 - 0x5F */
    0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
    /* 0x60 - 0x6F */
    0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
    /* 0x70 - 0x7F */
    0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
    /* 0x80 - 0x87 */
    ImplicitOps, ImplicitOps, ImplicitOps, ImplicitOps,
    ImplicitOps, ImplicitOps, ImplicitOps, ImplicitOps,
    /* 0x88 - 0x8F */
    ImplicitOps, ImplicitOps, ImplicitOps, ImplicitOps,
    ImplicitOps, ImplicitOps, ImplicitOps, ImplicitOps,
    /* 0x90 - 0x97 */
    ByteOp|DstMem|SrcNone|ModRM|Mov, ByteOp|DstMem|SrcNone|ModRM|Mov,
    ByteOp|DstMem|SrcNone|ModRM|Mov, ByteOp|DstMem|SrcNone|ModRM|Mov,
    ByteOp|DstMem|SrcNone|ModRM|Mov, ByteOp|DstMem|SrcNone|ModRM|Mov,
    ByteOp|DstMem|SrcNone|ModRM|Mov, ByteOp|DstMem|SrcNone|ModRM|Mov,
    /* 0x98 - 0x9F */
    ByteOp|DstMem|SrcNone|ModRM|Mov, ByteOp|DstMem|SrcNone|ModRM|Mov,
    ByteOp|DstMem|SrcNone|ModRM|Mov, ByteOp|DstMem|SrcNone|ModRM|Mov,
    ByteOp|DstMem|SrcNone|ModRM|Mov, ByteOp|DstMem|SrcNone|ModRM|Mov,
    ByteOp|DstMem|SrcNone|ModRM|Mov, ByteOp|DstMem|SrcNone|ModRM|Mov,
    /* 0xA0 - 0xA7 */
    0, 0, 0, DstBitBase|SrcReg|ModRM, 0, 0, 0, 0,
    /* 0xA8 - 0xAF */
    0, 0, 0, DstBitBase|SrcReg|ModRM, 0, 0, 0, 0,
    /* 0xB0 - 0xB7 */
    ByteOp|DstMem|SrcReg|ModRM, DstMem|SrcReg|ModRM,
    0, DstBitBase|SrcReg|ModRM,
    0, 0, ByteOp|DstReg|SrcMem|ModRM|Mov, DstReg|SrcMem16|ModRM|Mov,
    /* 0xB8 - 0xBF */
    0, 0, DstBitBase|SrcImmByte|ModRM, DstBitBase|SrcReg|ModRM,
    0, 0, ByteOp|DstReg|SrcMem|ModRM|Mov, DstReg|SrcMem16|ModRM|Mov,
    /* 0xC0 - 0xC7 */
    ByteOp|DstMem|SrcReg|ModRM, DstMem|SrcReg|ModRM, 0, 0,
    0, 0, 0, ImplicitOps|ModRM,
    /* 0xC8 - 0xCF */
    ImplicitOps, ImplicitOps, ImplicitOps, ImplicitOps,
    ImplicitOps, ImplicitOps, ImplicitOps, ImplicitOps,
    /* 0xD0 - 0xDF */
    0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
    /* 0xE0 - 0xEF */
    0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
    /* 0xF0 - 0xFF */
    0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0
};

/* Type, address-of, and value of an instruction's operand. */
struct operand {
    enum { OP_REG, OP_MEM, OP_IMM, OP_NONE } type;
    unsigned int bytes;
    unsigned long val, orig_val;
    union {
        /* OP_REG: Pointer to register field. */
        unsigned long *reg;
        /* OP_MEM: Segment and offset. */
        struct {
            enum x86_segment seg;
            unsigned long off;
        } mem;
    };
};

/* EFLAGS bit definitions. */
#define EFLG_OF (1<<11)
#define EFLG_DF (1<<10)
#define EFLG_SF (1<<7)
#define EFLG_ZF (1<<6)
#define EFLG_AF (1<<4)
#define EFLG_PF (1<<2)
#define EFLG_CF (1<<0)

/* Exception definitions. */
#define EXC_DE 0
#define EXC_GP 13

/*
 * Instruction emulation:
 * Most instructions are emulated directly via a fragment of inline assembly
 * code. This allows us to save/restore EFLAGS and thus very easily pick up
 * any modified flags.
 */

#if defined(__x86_64__)
#define _LO32 "k"     /* force 32-bit operand */
#define _STK  "%%rsp" /* stack pointer */
#elif defined(__i386__)
#define _LO32 ""      /* force 32-bit operand */
#define _STK  "%%esp" /* stack pointer */
#endif

/*
 * These EFLAGS bits are restored from saved value during emulation, and
 * any changes are written back to the saved value after emulation.
 */
#define EFLAGS_MASK (EFLG_OF|EFLG_SF|EFLG_ZF|EFLG_AF|EFLG_PF|EFLG_CF)

/* Before executing instruction: restore necessary bits in EFLAGS. */
#define _PRE_EFLAGS(_sav, _msk, _tmp)           \
/* EFLAGS = (_sav & _msk) | (EFLAGS & ~_msk); */\
"push %"_sav"; "                                \
"movl %"_msk",%"_LO32 _tmp"; "                  \
"andl %"_LO32 _tmp",("_STK"); "                 \
"pushf; "                                       \
"notl %"_LO32 _tmp"; "                          \
"andl %"_LO32 _tmp",("_STK"); "                 \
"pop %"_tmp"; "                                 \
"orl %"_LO32 _tmp",("_STK"); "                  \
"popf; "                                        \
/* _sav &= ~msk; */                             \
"movl %"_msk",%"_LO32 _tmp"; "                  \
"notl %"_LO32 _tmp"; "                          \
"andl %"_LO32 _tmp",%"_sav"; "

/* After executing instruction: write-back necessary bits in EFLAGS. */
#define _POST_EFLAGS(_sav, _msk, _tmp) \
/* _sav |= EFLAGS & _msk; */           \
"pushf; "                              \
"pop %"_tmp"; "                        \
"andl %"_msk",%"_LO32 _tmp"; "         \
"orl %"_LO32 _tmp",%"_sav"; "

/* Raw emulation: instruction has two explicit operands. */
#define __emulate_2op_nobyte(_op,_src,_dst,_eflags,_wx,_wy,_lx,_ly,_qx,_qy)\
do{ unsigned long _tmp;                                                   \
    switch ( (_dst).bytes )                                               \
    {                                                                     \
    case 2:                                                               \
        __asm__ __volatile__ (                                            \
            _PRE_EFLAGS("0","4","2")                                      \
            _op"w %"_wx"3,%1; "                                           \
            _POST_EFLAGS("0","4","2")                                     \
            : "=m" (_eflags), "=m" ((_dst).val), "=&r" (_tmp)             \
            : _wy ((_src).val), "i" (EFLAGS_MASK) );                      \
        break;                                                            \
    case 4:                                                               \
        __asm__ __volatile__ (                                            \
            _PRE_EFLAGS("0","4","2")                                      \
            _op"l %"_lx"3,%1; "                                           \
            _POST_EFLAGS("0","4","2")                                     \
            : "=m" (_eflags), "=m" ((_dst).val), "=&r" (_tmp)             \
            : _ly ((_src).val), "i" (EFLAGS_MASK) );                      \
        break;                                                            \
    case 8:                                                               \
        __emulate_2op_8byte(_op, _src, _dst, _eflags, _qx, _qy);          \
        break;                                                            \
    }                                                                     \
} while (0)
#define __emulate_2op(_op,_src,_dst,_eflags,_bx,_by,_wx,_wy,_lx,_ly,_qx,_qy)\
do{ unsigned long _tmp;                                                   \
    switch ( (_dst).bytes )                                               \
    {                                                                     \
    case 1:                                                               \
        __asm__ __volatile__ (                                            \
            _PRE_EFLAGS("0","4","2")                                      \
            _op"b %"_bx"3,%1; "                                           \
            _POST_EFLAGS("0","4","2")                                     \
            : "=m" (_eflags), "=m" ((_dst).val), "=&r" (_tmp)             \
            : _by ((_src).val), "i" (EFLAGS_MASK) );                      \
        break;                                                            \
    default:                                                              \
        __emulate_2op_nobyte(_op,_src,_dst,_eflags,_wx,_wy,_lx,_ly,_qx,_qy);\
        break;                                                            \
    }                                                                     \
} while (0)
/* Source operand is byte-sized and may be restricted to just %cl. */
#define emulate_2op_SrcB(_op, _src, _dst, _eflags)      \
    __emulate_2op(_op, _src, _dst, _eflags,             \
                  "b", "c", "b", "c", "b", "c", "b", "c")
/* Source operand is byte, word, long or quad sized. */
#define emulate_2op_SrcV(_op, _src, _dst, _eflags)      \
    __emulate_2op(_op, _src, _dst, _eflags,             \
                  "b", "q", "w", "r", _LO32, "r", "", "r")
/* Source operand is word, long or quad sized. */
#define emulate_2op_SrcV_nobyte(_op, _src, _dst, _eflags) \
    __emulate_2op_nobyte(_op, _src, _dst, _eflags,        \
                         "w", "r", _LO32, "r", "", "r")

/* Instruction has only one explicit operand (no source operand). */
#define emulate_1op(_op,_dst,_eflags)                                     \
do{ unsigned long _tmp;                                                   \
    switch ( (_dst).bytes )                                               \
    {                                                                     \
    case 1:                                                               \
        __asm__ __volatile__ (                                            \
            _PRE_EFLAGS("0","3","2")                                      \
            _op"b %1; "                                                   \
            _POST_EFLAGS("0","3","2")                                     \
            : "=m" (_eflags), "=m" ((_dst).val), "=&r" (_tmp)             \
            : "i" (EFLAGS_MASK) );                                        \
        break;                                                            \
    case 2:                                                               \
        __asm__ __volatile__ (                                            \
            _PRE_EFLAGS("0","3","2")                                      \
            _op"w %1; "                                                   \
            _POST_EFLAGS("0","3","2")                                     \
            : "=m" (_eflags), "=m" ((_dst).val), "=&r" (_tmp)             \
            : "i" (EFLAGS_MASK) );                                        \
        break;                                                            \
    case 4:                                                               \
        __asm__ __volatile__ (                                            \
            _PRE_EFLAGS("0","3","2")                                      \
            _op"l %1; "                                                   \
            _POST_EFLAGS("0","3","2")                                     \
            : "=m" (_eflags), "=m" ((_dst).val), "=&r" (_tmp)             \
            : "i" (EFLAGS_MASK) );                                        \
        break;                                                            \
    case 8:                                                               \
        __emulate_1op_8byte(_op, _dst, _eflags);                          \
        break;                                                            \
    }                                                                     \
} while (0)

/* Emulate an instruction with quadword operands (x86/64 only). */
#if defined(__x86_64__)
#define __emulate_2op_8byte(_op, _src, _dst, _eflags, _qx, _qy)           \
do{ __asm__ __volatile__ (                                                \
        _PRE_EFLAGS("0","4","2")                                          \
        _op"q %"_qx"3,%1; "                                               \
        _POST_EFLAGS("0","4","2")                                         \
        : "=m" (_eflags), "=m" ((_dst).val), "=&r" (_tmp)                 \
        : _qy ((_src).val), "i" (EFLAGS_MASK) );                          \
} while (0)
#define __emulate_1op_8byte(_op, _dst, _eflags)                           \
do{ __asm__ __volatile__ (                                                \
        _PRE_EFLAGS("0","3","2")                                          \
        _op"q %1; "                                                       \
        _POST_EFLAGS("0","3","2")                                         \
        : "=m" (_eflags), "=m" ((_dst).val), "=&r" (_tmp)                 \
        : "i" (EFLAGS_MASK) );                                            \
} while (0)
#elif defined(__i386__)
#define __emulate_2op_8byte(_op, _src, _dst, _eflags, _qx, _qy)
#define __emulate_1op_8byte(_op, _dst, _eflags)
#endif /* __i386__ */

/* Fetch next part of the instruction being emulated. */
#define insn_fetch_bytes(_size)                                           \
({ unsigned long _x, _eip = _regs.eip;                                    \
   if ( !mode_64bit() ) _eip = (uint32_t)_eip; /* ignore upper dword */   \
   _regs.eip += (_size); /* real hardware doesn't truncate */             \
   generate_exception_if((uint8_t)(_regs.eip - ctxt->regs->eip) > 15,     \
                         EXC_GP);                                         \
   rc = ops->insn_fetch(x86_seg_cs, _eip, &_x, (_size), ctxt);            \
   if ( rc ) goto done;                                                   \
   _x;                                                                    \
})
#define insn_fetch_type(_type) ((_type)insn_fetch_bytes(sizeof(_type)))

#define _truncate_ea(ea, byte_width)                  \
({ unsigned long __ea = (ea);                         \
   (((byte_width) == sizeof(unsigned long)) ? __ea :  \
    (__ea & ((1UL << ((byte_width) << 3)) - 1)));     \
})
#define truncate_ea(ea) _truncate_ea((ea), ad_bytes)
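
/*
 * Example: with a 2-byte address size, truncate_ea(0x12345678) yields
 * 0x5678 -- only the low 16 bits of the effective address are
 * significant.
 */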

#define mode_64bit() (def_ad_bytes == 8)

#define fail_if(p)                              \
do {                                            \
    rc = (p) ? X86EMUL_UNHANDLEABLE : 0;        \
    if ( rc ) goto done;                        \
} while (0)

/* In future we will be able to generate arbitrary exceptions. */
#define generate_exception_if(p, e) fail_if(p)

/* Given byte has even parity (even number of 1s)? */
static int even_parity(uint8_t v)
{
    __asm__ ( "test %%al,%%al; setp %%al"
              : "=a" (v) : "0" (v) );
    return v;
}
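
/*
 * Example: even_parity(0x03) == 1 (two bits set, so the TEST sets PF),
 * while even_parity(0x07) == 0 (three bits set).
 */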

/* Update address held in a register, based on addressing mode. */
#define _register_address_increment(reg, inc, byte_width)               \
do {                                                                    \
    int _inc = (inc); /* signed type ensures sign extension to long */  \
    if ( (byte_width) == sizeof(unsigned long) )                        \
        (reg) += _inc;                                                  \
    else if ( mode_64bit() )                                            \
        (reg) = ((reg) + _inc) & ((1UL << ((byte_width) << 3)) - 1);    \
    else                                                                \
        (reg) = ((reg) & ~((1UL << ((byte_width) << 3)) - 1)) |         \
                (((reg) + _inc) & ((1UL << ((byte_width) << 3)) - 1));  \
} while (0)
#define register_address_increment(reg, inc) \
    _register_address_increment((reg), (inc), ad_bytes)
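
/*
 * Example: with a 2-byte address size outside 64-bit mode, stepping
 * 0x0001ffff by +1 gives 0x00010000 -- only the low 16 bits wrap and
 * the upper bits of the register are preserved.
 */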

#define jmp_rel(rel)                                                    \
do {                                                                    \
    _regs.eip += (int)(rel);                                            \
    if ( !mode_64bit() )                                                \
        _regs.eip = ((op_bytes == 2)                                    \
                     ? (uint16_t)_regs.eip : (uint32_t)_regs.eip);      \
} while (0)
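
/*
 * Example: with a 2-byte operand size, a jump of +1 from IP 0xffff
 * wraps to 0x0000 -- outside 64-bit mode the new IP is truncated to
 * the operand size.
 */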

static int
test_cc(
    unsigned int condition, unsigned int flags)
{
    int rc = 0;

    switch ( (condition & 15) >> 1 )
    {
    case 0: /* o */
        rc |= (flags & EFLG_OF);
        break;
    case 1: /* b/c/nae */
        rc |= (flags & EFLG_CF);
        break;
    case 2: /* z/e */
        rc |= (flags & EFLG_ZF);
        break;
    case 3: /* be/na */
        rc |= (flags & (EFLG_CF|EFLG_ZF));
        break;
    case 4: /* s */
        rc |= (flags & EFLG_SF);
        break;
    case 5: /* p/pe */
        rc |= (flags & EFLG_PF);
        break;
    case 7: /* le/ng */
        rc |= (flags & EFLG_ZF);
        /* fall through */
    case 6: /* l/nge */
        rc |= (!(flags & EFLG_SF) != !(flags & EFLG_OF));
        break;
    }

    /* Odd condition identifiers (lsb == 1) have inverted sense. */
    return (!!rc ^ (condition & 1));
}
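
/*
 * Example: for JZ (opcode 0x74) the low nibble is 4, so case 2 tests
 * ZF; JNZ (0x75) has the low bit set and so inverts the result.
 */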

void *
decode_register(
    uint8_t modrm_reg, struct cpu_user_regs *regs, int highbyte_regs)
{
    void *p;

    switch ( modrm_reg )
    {
    case  0: p = &regs->eax; break;
    case  1: p = &regs->ecx; break;
    case  2: p = &regs->edx; break;
    case  3: p = &regs->ebx; break;
    case  4: p = (highbyte_regs ?
                  ((unsigned char *)&regs->eax + 1) :
                  (unsigned char *)&regs->esp); break;
    case  5: p = (highbyte_regs ?
                  ((unsigned char *)&regs->ecx + 1) :
                  (unsigned char *)&regs->ebp); break;
    case  6: p = (highbyte_regs ?
                  ((unsigned char *)&regs->edx + 1) :
                  (unsigned char *)&regs->esi); break;
    case  7: p = (highbyte_regs ?
                  ((unsigned char *)&regs->ebx + 1) :
                  (unsigned char *)&regs->edi); break;
#if defined(__x86_64__)
    case  8: p = &regs->r8;  break;
    case  9: p = &regs->r9;  break;
    case 10: p = &regs->r10; break;
    case 11: p = &regs->r11; break;
    case 12: p = &regs->r12; break;
    case 13: p = &regs->r13; break;
    case 14: p = &regs->r14; break;
    case 15: p = &regs->r15; break;
#endif
    default: p = NULL; break;
    }

    return p;
}
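
/*
 * Example: decode_register(4, regs, 1) returns the address of AH
 * (byte 1 of EAX), while with highbyte_regs == 0 the same encoding
 * names ESP.
 */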

int
x86_emulate(
    struct x86_emulate_ctxt *ctxt,
    struct x86_emulate_ops  *ops)
{
    /* Shadow copy of register state. Committed on successful emulation. */
    struct cpu_user_regs _regs = *ctxt->regs;

    uint8_t b, d, sib, sib_index, sib_base, twobyte = 0, rex_prefix = 0;
    uint8_t modrm, modrm_mod = 0, modrm_reg = 0, modrm_rm = 0;
    unsigned int op_bytes, def_op_bytes, ad_bytes, def_ad_bytes;
    unsigned int lock_prefix = 0, rep_prefix = 0;
    int rc = 0;
    struct operand src, dst;

    /* Data operand effective address (usually computed from ModRM). */
    struct operand ea;

    /* Default is a memory operand relative to segment DS. */
    ea.type = OP_MEM;
    ea.mem.seg = x86_seg_ds;
    ea.mem.off = 0;

    op_bytes = def_op_bytes = ad_bytes = def_ad_bytes = ctxt->address_bytes;
    if ( op_bytes == 8 )
    {
        op_bytes = def_op_bytes = 4;
#ifndef __x86_64__
        return -1;
#endif
    }

    /* Prefix bytes. */
    for ( ; ; )
    {
        switch ( b = insn_fetch_type(uint8_t) )
        {
        case 0x66: /* operand-size override */
            op_bytes = def_op_bytes ^ 6;
            break;
        case 0x67: /* address-size override */
            ad_bytes = def_ad_bytes ^ (mode_64bit() ? 12 : 6);
            break;
        case 0x2e: /* CS override */
            ea.mem.seg = x86_seg_cs;
            break;
        case 0x3e: /* DS override */
            ea.mem.seg = x86_seg_ds;
            break;
        case 0x26: /* ES override */
            ea.mem.seg = x86_seg_es;
            break;
        case 0x64: /* FS override */
            ea.mem.seg = x86_seg_fs;
            break;
        case 0x65: /* GS override */
            ea.mem.seg = x86_seg_gs;
            break;
        case 0x36: /* SS override */
            ea.mem.seg = x86_seg_ss;
            break;
        case 0xf0: /* LOCK */
            lock_prefix = 1;
            break;
        case 0xf2: /* REPNE/REPNZ */
        case 0xf3: /* REP/REPE/REPZ */
            rep_prefix = 1;
            break;
        case 0x40 ... 0x4f: /* REX */
            if ( !mode_64bit() )
                goto done_prefixes;
            rex_prefix = b;
            continue;
        default:
            goto done_prefixes;
        }

        /* Any legacy prefix after a REX prefix nullifies its effect. */
        rex_prefix = 0;
    }
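
    /*
     * Note on the overrides above: XOR with 6 toggles the operand size
     * between 2 and 4 bytes; XOR with 12 toggles the address size
     * between 8 and 4 bytes in 64-bit mode.
     */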
 done_prefixes:

    if ( rex_prefix & 8 ) /* REX.W */
        op_bytes = 8;

    /* Opcode byte(s). */
    d = opcode_table[b];
    if ( d == 0 )
    {
        /* Two-byte opcode? */
        if ( b == 0x0f )
        {
            twobyte = 1;
            b = insn_fetch_type(uint8_t);
            d = twobyte_table[b];
        }

        /* Unrecognised? */
        if ( d == 0 )
            goto cannot_emulate;
    }

    /* Lock prefix is allowed only on RMW instructions. */
    generate_exception_if((d & Mov) && lock_prefix, EXC_GP);

    /* ModRM and SIB bytes. */
    if ( d & ModRM )
    {
        modrm = insn_fetch_type(uint8_t);
        modrm_mod = (modrm & 0xc0) >> 6;
        modrm_reg = ((rex_prefix & 4) << 1) | ((modrm & 0x38) >> 3);
        modrm_rm  = modrm & 0x07;

        if ( modrm_mod == 3 )
        {
            modrm_rm |= (rex_prefix & 1) << 3;
            ea.type = OP_REG;
            ea.reg  = decode_register(
                modrm_rm, &_regs, (d & ByteOp) && (rex_prefix == 0));
        }
        else if ( ad_bytes == 2 )
        {
            /* 16-bit ModR/M decode. */
            switch ( modrm_rm )
            {
            case 0: ea.mem.off = _regs.ebx + _regs.esi; break;
            case 1: ea.mem.off = _regs.ebx + _regs.edi; break;
            case 2: ea.mem.off = _regs.ebp + _regs.esi; break;
            case 3: ea.mem.off = _regs.ebp + _regs.edi; break;
            case 4: ea.mem.off = _regs.esi; break;
            case 5: ea.mem.off = _regs.edi; break;
            case 6: ea.mem.off = _regs.ebp; break;
            case 7: ea.mem.off = _regs.ebx; break;
            }
            switch ( modrm_mod )
            {
            case 0:
                if ( modrm_rm == 6 )
                    ea.mem.off = insn_fetch_type(int16_t);
                break;
            case 1:
                ea.mem.off += insn_fetch_type(int8_t);
                break;
            case 2:
                ea.mem.off += insn_fetch_type(int16_t);
                break;
            }
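            /*
             * Example: ModRM 0x42 (mod 1, rm 2) gives
             * EA = BP + SI + disp8, truncated below to 16 bits.
             */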
            ea.mem.off = truncate_ea(ea.mem.off);
        }
        else
        {
            /* 32/64-bit ModR/M decode. */
            if ( modrm_rm == 4 )
            {
                sib = insn_fetch_type(uint8_t);
                sib_index = ((sib >> 3) & 7) | ((rex_prefix << 2) & 8);
                sib_base  = (sib & 7) | ((rex_prefix << 3) & 8);
                if ( sib_index != 4 )
                    ea.mem.off = *(long*)decode_register(sib_index, &_regs, 0);
                ea.mem.off <<= (sib >> 6) & 3;
                if ( (modrm_mod == 0) && ((sib_base & 7) == 5) )
                    ea.mem.off += insn_fetch_type(int32_t);
                else if ( (sib_base == 4) && !twobyte && (b == 0x8f) )
                    /* POP <rm> must have its EA calculated post increment. */
                    ea.mem.off += _regs.esp +
                        ((mode_64bit() && (op_bytes == 4)) ? 8 : op_bytes);
                else
                    ea.mem.off += *(long*)decode_register(sib_base, &_regs, 0);
            }
            else
            {
                modrm_rm |= (rex_prefix & 1) << 3;
                ea.mem.off = *(long *)decode_register(modrm_rm, &_regs, 0);
            }
            switch ( modrm_mod )
            {
            case 0:
                if ( (modrm_rm & 7) != 5 )
                    break;
                ea.mem.off = insn_fetch_type(int32_t);
                if ( !mode_64bit() )
                    break;
                /* Relative to RIP of next instruction. Argh! */
                ea.mem.off += _regs.eip;
                if ( (d & SrcMask) == SrcImm )
                    ea.mem.off += (d & ByteOp) ? 1 :
                        ((op_bytes == 8) ? 4 : op_bytes);
                else if ( (d & SrcMask) == SrcImmByte )
                    ea.mem.off += 1;
                else if ( ((b == 0xf6) || (b == 0xf7)) &&
                          ((modrm_reg & 7) <= 1) )
                    /* Special case in Grp3: test has immediate operand. */
                    ea.mem.off += (d & ByteOp) ? 1
                        : ((op_bytes == 8) ? 4 : op_bytes);
                break;
            case 1:
                ea.mem.off += insn_fetch_type(int8_t);
                break;
            case 2:
                ea.mem.off += insn_fetch_type(int32_t);
                break;
            }
            ea.mem.off = truncate_ea(ea.mem.off);
        }
    }
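
    /*
     * Example: ModRM 0x44 with SIB 0x24 (index 4 = none, base 4 = ESP)
     * takes the generic base case above and yields EA = ESP + disp8.
     */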

    /* Special instructions do their own operand decoding. */
    if ( (d & DstMask) == ImplicitOps )
        goto special_insn;

    /* Decode and fetch the source operand: register, memory or immediate. */
    switch ( d & SrcMask )
    {
    case SrcNone:
        break;
    case SrcReg:
        src.type = OP_REG;
        if ( d & ByteOp )
        {
            src.reg = decode_register(modrm_reg, &_regs, (rex_prefix == 0));
            src.val = *(uint8_t *)src.reg;
            src.bytes = 1;
        }
        else
        {
            src.reg = decode_register(modrm_reg, &_regs, 0);
            switch ( (src.bytes = op_bytes) )
            {
            case 2: src.val = *(uint16_t *)src.reg; break;
            case 4: src.val = *(uint32_t *)src.reg; break;
            case 8: src.val = *(uint64_t *)src.reg; break;
            }
        }
        break;
    case SrcMem16:
        ea.bytes = 2;
        goto srcmem_common;
    case SrcMem32:
        ea.bytes = 4;
        goto srcmem_common;
    case SrcMem:
        ea.bytes = (d & ByteOp) ? 1 : op_bytes;
    srcmem_common:
        src = ea;
        if ( src.type == OP_REG )
        {
            switch ( src.bytes )
            {
            case 1: src.val = *(uint8_t  *)src.reg; break;
            case 2: src.val = *(uint16_t *)src.reg; break;
            case 4: src.val = *(uint32_t *)src.reg; break;
            case 8: src.val = *(uint64_t *)src.reg; break;
            }
        }
        else if ( (rc = ops->read(src.mem.seg, src.mem.off,
                                  &src.val, src.bytes, ctxt)) )
            goto done;
        break;
    case SrcImm:
        src.type = OP_IMM;
        src.bytes = (d & ByteOp) ? 1 : op_bytes;
        if ( src.bytes == 8 ) src.bytes = 4;
        /* NB. Immediates are sign-extended as necessary. */
        switch ( src.bytes )
        {
        case 1: src.val = insn_fetch_type(int8_t);  break;
        case 2: src.val = insn_fetch_type(int16_t); break;
        case 4: src.val = insn_fetch_type(int32_t); break;
        }
        break;
    case SrcImmByte:
        src.type = OP_IMM;
        src.bytes = 1;
        src.val = insn_fetch_type(int8_t);
        break;
    }

    /* Decode and fetch the destination operand: register or memory. */
    switch ( d & DstMask )
    {
    case DstReg:
        dst.type = OP_REG;
        if ( d & ByteOp )
        {
            dst.reg = decode_register(modrm_reg, &_regs, (rex_prefix == 0));
            dst.val = *(uint8_t *)dst.reg;
            dst.bytes = 1;
        }
        else
        {
            dst.reg = decode_register(modrm_reg, &_regs, 0);
            switch ( (dst.bytes = op_bytes) )
            {
            case 2: dst.val = *(uint16_t *)dst.reg; break;
            case 4: dst.val = *(uint32_t *)dst.reg; break;
            case 8: dst.val = *(uint64_t *)dst.reg; break;
            }
        }
        break;
    case DstBitBase:
        if ( ((d & SrcMask) == SrcImmByte) || (ea.type == OP_REG) )
        {
            src.val &= (op_bytes << 3) - 1;
        }
        else
        {
            /*
             * EA       += BitOffset DIV op_bytes*8
             * BitOffset = BitOffset MOD op_bytes*8
             * DIV truncates towards negative infinity.
             * MOD always produces a positive result.
             */
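            /*
             * Worked example (op_bytes == 2): a bit offset of -1 maps
             * to byte_offset = 2, so EA moves down two bytes and the
             * offset becomes bit 15 of that lower word.
             */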
            if ( op_bytes == 2 )
                src.val = (int16_t)src.val;
            else if ( op_bytes == 4 )
                src.val = (int32_t)src.val;
            if ( (long)src.val < 0 )
            {
                unsigned long byte_offset;
                byte_offset = op_bytes + (((-src.val-1) >> 3) & ~(op_bytes-1));
                ea.mem.off -= byte_offset;
                src.val = (byte_offset << 3) + src.val;
            }
            else
            {
                ea.mem.off += (src.val >> 3) & ~(op_bytes - 1);
                src.val &= (op_bytes << 3) - 1;
            }
        }
        /* Becomes a normal DstMem operation from here on. */
        d = (d & ~DstMask) | DstMem;
    case DstMem:
        ea.bytes = (d & ByteOp) ? 1 : op_bytes;
        dst = ea;
        if ( dst.type == OP_REG )
        {
            switch ( dst.bytes )
            {
            case 1: dst.val = *(uint8_t  *)dst.reg; break;
            case 2: dst.val = *(uint16_t *)dst.reg; break;
            case 4: dst.val = *(uint32_t *)dst.reg; break;
            case 8: dst.val = *(uint64_t *)dst.reg; break;
            }
        }
        else if ( !(d & Mov) ) /* optimisation - avoid slow emulated read */
        {
            if ( (rc = ops->read(dst.mem.seg, dst.mem.off,
                                 &dst.val, dst.bytes, ctxt)) )
                goto done;
            dst.orig_val = dst.val;
        }
        break;
    }

    /* LOCK prefix allowed only on instructions with memory destination. */
    generate_exception_if(lock_prefix && (dst.type != OP_MEM), EXC_GP);

    if ( twobyte )
        goto twobyte_insn;

    switch ( b )
    {
    case 0x04 ... 0x05: /* add imm,%%eax */
        dst.reg = (unsigned long *)&_regs.eax;
        dst.val = _regs.eax;
    case 0x00 ... 0x03: add: /* add */
        emulate_2op_SrcV("add", src, dst, _regs.eflags);
        break;

    case 0x0c ... 0x0d: /* or imm,%%eax */
        dst.reg = (unsigned long *)&_regs.eax;
        dst.val = _regs.eax;
    case 0x08 ... 0x0b: or: /* or */
        emulate_2op_SrcV("or", src, dst, _regs.eflags);
        break;

    case 0x14 ... 0x15: /* adc imm,%%eax */
        dst.reg = (unsigned long *)&_regs.eax;
        dst.val = _regs.eax;
    case 0x10 ... 0x13: adc: /* adc */
        emulate_2op_SrcV("adc", src, dst, _regs.eflags);
        break;

    case 0x1c ... 0x1d: /* sbb imm,%%eax */
        dst.reg = (unsigned long *)&_regs.eax;
        dst.val = _regs.eax;
    case 0x18 ... 0x1b: sbb: /* sbb */
        emulate_2op_SrcV("sbb", src, dst, _regs.eflags);
        break;

    case 0x24 ... 0x25: /* and imm,%%eax */
        dst.reg = (unsigned long *)&_regs.eax;
        dst.val = _regs.eax;
    case 0x20 ... 0x23: and: /* and */
        emulate_2op_SrcV("and", src, dst, _regs.eflags);
        break;

    case 0x2c ... 0x2d: /* sub imm,%%eax */
        dst.reg = (unsigned long *)&_regs.eax;
        dst.val = _regs.eax;
    case 0x28 ... 0x2b: sub: /* sub */
        emulate_2op_SrcV("sub", src, dst, _regs.eflags);
        break;

    case 0x34 ... 0x35: /* xor imm,%%eax */
        dst.reg = (unsigned long *)&_regs.eax;
        dst.val = _regs.eax;
    case 0x30 ... 0x33: xor: /* xor */
        emulate_2op_SrcV("xor", src, dst, _regs.eflags);
        break;

    case 0x3c ... 0x3d: /* cmp imm,%%eax */
        dst.reg = (unsigned long *)&_regs.eax;
        dst.val = _regs.eax;
    case 0x38 ... 0x3b: cmp: /* cmp */
        emulate_2op_SrcV("cmp", src, dst, _regs.eflags);
        break;

    case 0x63: /* movsxd */
        if ( !mode_64bit() )
            goto cannot_emulate;
        dst.val = (int32_t)src.val;
        break;

    case 0x80 ... 0x83: /* Grp1 */
        switch ( modrm_reg & 7 )
        {
        case 0: goto add;
        case 1: goto or;
        case 2: goto adc;
        case 3: goto sbb;
        case 4: goto and;
        case 5: goto sub;
        case 6: goto xor;
        case 7: goto cmp;
        }
        break;

    case 0xa8 ... 0xa9: /* test imm,%%eax */
        dst.reg = (unsigned long *)&_regs.eax;
        dst.val = _regs.eax;
    case 0x84 ... 0x85: test: /* test */
        emulate_2op_SrcV("test", src, dst, _regs.eflags);
        break;

    case 0x86 ... 0x87: xchg: /* xchg */
        /* Write back the register source. */
        switch ( dst.bytes )
        {
        case 1: *(uint8_t  *)src.reg = (uint8_t)dst.val; break;
        case 2: *(uint16_t *)src.reg = (uint16_t)dst.val; break;
        case 4: *src.reg = (uint32_t)dst.val; break; /* 64b reg: zero-extend */
        case 8: *src.reg = dst.val; break;
        }
        /* Write back the memory destination with implicit LOCK prefix. */
        dst.val = src.val;
        lock_prefix = 1;
        break;

    case 0xc6 ... 0xc7: /* mov (sole member of Grp11) */
        fail_if((modrm_reg & 7) != 0);
    case 0x88 ... 0x8b: /* mov */
        dst.val = src.val;
        break;

    case 0x8d: /* lea */
        dst.val = ea.mem.off;
        break;

    case 0x8f: /* pop (sole member of Grp1a) */
        fail_if((modrm_reg & 7) != 0);
        /* 64-bit mode: POP defaults to a 64-bit operand. */
        if ( mode_64bit() && (dst.bytes == 4) )
            dst.bytes = 8;
        if ( (rc = ops->read(x86_seg_ss, truncate_ea(_regs.esp),
                             &dst.val, dst.bytes, ctxt)) != 0 )
            goto done;
        register_address_increment(_regs.esp, dst.bytes);
        break;

    case 0xb0 ... 0xb7: /* mov imm8,r8 */
        dst.reg = decode_register(
            (b & 7) | ((rex_prefix & 1) << 3), &_regs, (rex_prefix == 0));
        dst.val = src.val;
        break;

    case 0xb8 ... 0xbf: /* mov imm{16,32,64},r{16,32,64} */
        if ( dst.bytes == 8 ) /* Fetch more bytes to obtain imm64 */
            src.val = ((uint32_t)src.val |
                       ((uint64_t)insn_fetch_type(uint32_t) << 32));
        dst.reg = decode_register(
            (b & 7) | ((rex_prefix & 1) << 3), &_regs, 0);
        dst.val = src.val;
        break;

    case 0xc0 ... 0xc1: grp2: /* Grp2 */
        switch ( modrm_reg & 7 )
        {
        case 0: /* rol */
            emulate_2op_SrcB("rol", src, dst, _regs.eflags);
            break;
        case 1: /* ror */
            emulate_2op_SrcB("ror", src, dst, _regs.eflags);
            break;
        case 2: /* rcl */
            emulate_2op_SrcB("rcl", src, dst, _regs.eflags);
            break;
        case 3: /* rcr */
            emulate_2op_SrcB("rcr", src, dst, _regs.eflags);
            break;
        case 4: /* sal/shl */
        case 6: /* sal/shl */
            emulate_2op_SrcB("sal", src, dst, _regs.eflags);
            break;
        case 5: /* shr */
            emulate_2op_SrcB("shr", src, dst, _regs.eflags);
            break;
        case 7: /* sar */
            emulate_2op_SrcB("sar", src, dst, _regs.eflags);
            break;
        }
        break;

    case 0xd0 ... 0xd1: /* Grp2 */
        src.val = 1;
        goto grp2;

    case 0xd2 ... 0xd3: /* Grp2 */
        src.val = _regs.ecx;
        goto grp2;

    case 0xf6 ... 0xf7: /* Grp3 */
        switch ( modrm_reg & 7 )
        {
        case 0 ... 1: /* test */
            /* Special case in Grp3: test has an immediate source operand. */
            src.type = OP_IMM;
            src.bytes = (d & ByteOp) ? 1 : op_bytes;
            if ( src.bytes == 8 ) src.bytes = 4;
            switch ( src.bytes )
            {
            case 1: src.val = insn_fetch_type(int8_t);  break;
            case 2: src.val = insn_fetch_type(int16_t); break;
            case 4: src.val = insn_fetch_type(int32_t); break;
            }
            goto test;
        case 2: /* not */
            dst.val = ~dst.val;
            break;
        case 3: /* neg */
            emulate_1op("neg", dst, _regs.eflags);
            break;
        default:
            goto cannot_emulate;
        }
        break;

    case 0xfe: /* Grp4 */
        fail_if((modrm_reg & 7) >= 2);
    case 0xff: /* Grp5 */
        switch ( modrm_reg & 7 )
        {
        case 0: /* inc */
            emulate_1op("inc", dst, _regs.eflags);
            break;
        case 1: /* dec */
            emulate_1op("dec", dst, _regs.eflags);
            break;
        case 2: /* call (near) */
        case 3: /* jmp (near) */
            if ( ((op_bytes = dst.bytes) != 8) && mode_64bit() )
            {
                dst.bytes = op_bytes = 8;
                if ( (rc = ops->read(dst.mem.seg, dst.mem.off,
                                     &dst.val, 8, ctxt)) != 0 )
                    goto done;
            }
            src.val = _regs.eip;
            _regs.eip = dst.val;
            if ( (modrm_reg & 7) == 2 )
                goto push; /* call */
            break;
        case 6: /* push */
            /* 64-bit mode: PUSH defaults to a 64-bit operand. */
            if ( mode_64bit() && (dst.bytes == 4) )
            {
                dst.bytes = 8;
                if ( (rc = ops->read(dst.mem.seg, dst.mem.off,
                                     &dst.val, 8, ctxt)) != 0 )
                    goto done;
            }
            register_address_increment(_regs.esp, -dst.bytes);
            if ( (rc = ops->write(x86_seg_ss, truncate_ea(_regs.esp),
                                  dst.val, dst.bytes, ctxt)) != 0 )
                goto done;
            dst.type = OP_NONE;
            break;
        case 7:
            fail_if(1);
        default:
            goto cannot_emulate;
        }
        break;
    }

 writeback:
    switch ( dst.type )
    {
    case OP_REG:
        /* The 4-byte case *is* correct: in 64-bit mode we zero-extend. */
        switch ( dst.bytes )
        {
        case 1: *(uint8_t  *)dst.reg = (uint8_t)dst.val; break;
        case 2: *(uint16_t *)dst.reg = (uint16_t)dst.val; break;
        case 4: *dst.reg = (uint32_t)dst.val; break; /* 64b: zero-ext */
        case 8: *dst.reg = dst.val; break;
        }
        break;
    case OP_MEM:
        if ( !(d & Mov) && (dst.orig_val == dst.val) )
            /* nothing to do */;
        else if ( lock_prefix )
            rc = ops->cmpxchg(
                dst.mem.seg, dst.mem.off, dst.orig_val,
                dst.val, dst.bytes, ctxt);
        else
            rc = ops->write(
                dst.mem.seg, dst.mem.off, dst.val, dst.bytes, ctxt);
        if ( rc != 0 )
            goto done;
    default:
        break;
    }

    /* Commit shadow register state. */
    *ctxt->regs = _regs;

 done:
    return (rc == X86EMUL_UNHANDLEABLE) ? -1 : 0;

 special_insn:
    dst.type = OP_NONE;

    /*
     * The only implicit-operands instruction allowed a LOCK prefix is
     * CMPXCHG{8,16}B.
     */
    generate_exception_if(lock_prefix && (b != 0xc7), EXC_GP);

    if ( twobyte )
        goto twobyte_special_insn;

    if ( rep_prefix )
    {
        if ( _regs.ecx == 0 )
        {
            ctxt->regs->eip = _regs.eip;
            goto done;
        }
        _regs.ecx--;
        _regs.eip = ctxt->regs->eip;
    }
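
    /*
     * Each pass through the block above emulates one iteration of a
     * REP-prefixed instruction: eCX is decremented and eIP is wound
     * back to the start of the instruction so it is fetched and
     * emulated again; once eCX reaches zero the advanced eIP is
     * committed and emulation completes.
     */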

    switch ( b )
    {
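    /*
     * Example for the BCD adjusts below: after 0x29 + 0x38 = 0x61 with
     * AF set, DAA adds 6 to AL giving 0x67 -- the packed-BCD sum of
     * 29 + 38.
     */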
    case 0x27: /* daa */ {
        uint8_t al = _regs.eax;
        unsigned long eflags = _regs.eflags;
        fail_if(mode_64bit());
        _regs.eflags &= ~(EFLG_CF|EFLG_AF);
        if ( ((al & 0x0f) > 9) || (eflags & EFLG_AF) )
        {
            *(uint8_t *)&_regs.eax += 6;
            _regs.eflags |= EFLG_AF;
        }
        if ( (al > 0x99) || (eflags & EFLG_CF) )
        {
            *(uint8_t *)&_regs.eax += 0x60;
            _regs.eflags |= EFLG_CF;
        }
        _regs.eflags &= ~(EFLG_SF|EFLG_ZF|EFLG_PF);
        _regs.eflags |= ((uint8_t)_regs.eax == 0) ? EFLG_ZF : 0;
        _regs.eflags |= (( int8_t)_regs.eax < 0) ? EFLG_SF : 0;
        _regs.eflags |= even_parity(_regs.eax) ? EFLG_PF : 0;
        break;
    }

    case 0x2f: /* das */ {
        uint8_t al = _regs.eax;
        unsigned long eflags = _regs.eflags;
        fail_if(mode_64bit());
        _regs.eflags &= ~(EFLG_CF|EFLG_AF);
        if ( ((al & 0x0f) > 9) || (eflags & EFLG_AF) )
        {
            _regs.eflags |= EFLG_AF;
            if ( (al < 6) || (eflags & EFLG_CF) )
                _regs.eflags |= EFLG_CF;
            *(uint8_t *)&_regs.eax -= 6;
        }
        if ( (al > 0x99) || (eflags & EFLG_CF) )
        {
            *(uint8_t *)&_regs.eax -= 0x60;
            _regs.eflags |= EFLG_CF;
        }
        _regs.eflags &= ~(EFLG_SF|EFLG_ZF|EFLG_PF);
        _regs.eflags |= ((uint8_t)_regs.eax == 0) ? EFLG_ZF : 0;
        _regs.eflags |= (( int8_t)_regs.eax < 0) ? EFLG_SF : 0;
        _regs.eflags |= even_parity(_regs.eax) ? EFLG_PF : 0;
        break;
    }

    case 0x37: /* aaa */
    case 0x3f: /* aas */
        fail_if(mode_64bit());
        _regs.eflags &= ~EFLG_CF;
        if ( ((uint8_t)_regs.eax > 9) || (_regs.eflags & EFLG_AF) )
        {
            ((uint8_t *)&_regs.eax)[0] += (b == 0x37) ? 6 : -6;
            ((uint8_t *)&_regs.eax)[1] += (b == 0x37) ? 1 : -1;
            _regs.eflags |= EFLG_CF | EFLG_AF;
        }
        ((uint8_t *)&_regs.eax)[0] &= 0x0f;
        break;

    case 0x40 ... 0x4f: /* inc/dec reg */
        dst.type = OP_REG;
        dst.reg = decode_register(b & 7, &_regs, 0);
        dst.bytes = op_bytes;
        dst.val = *dst.reg;
        if ( b & 8 )
            emulate_1op("dec", dst, _regs.eflags);
        else
            emulate_1op("inc", dst, _regs.eflags);
        break;

    case 0x50 ... 0x57: /* push reg */
        src.val = *(unsigned long *)decode_register(
            (b & 7) | ((rex_prefix & 1) << 3), &_regs, 0);
        goto push;

    case 0x58 ... 0x5f: /* pop reg */
        dst.type = OP_REG;
        dst.reg = decode_register(
            (b & 7) | ((rex_prefix & 1) << 3), &_regs, 0);
        dst.bytes = op_bytes;
        if ( mode_64bit() && (dst.bytes == 4) )
            dst.bytes = 8;
        if ( (rc = ops->read(x86_seg_ss, truncate_ea(_regs.esp),
                             &dst.val, dst.bytes, ctxt)) != 0 )
            goto done;
        register_address_increment(_regs.esp, dst.bytes);
        break;

    case 0x68: /* push imm{16,32,64} */
        src.val = ((op_bytes == 2)
                   ? (int32_t)insn_fetch_type(int16_t)
                   : insn_fetch_type(int32_t));
        goto push;

    case 0x6a: /* push imm8 */
        src.val = insn_fetch_type(int8_t);
    push:
        d |= Mov; /* force writeback */
        dst.type = OP_MEM;
        dst.bytes = op_bytes;
        if ( mode_64bit() && (dst.bytes == 4) )
            dst.bytes = 8;
        dst.val = src.val;
        register_address_increment(_regs.esp, -dst.bytes);
        dst.mem.seg = x86_seg_ss;
        dst.mem.off = truncate_ea(_regs.esp);
        break;

    case 0x70 ... 0x7f: /* jcc (short) */ {
        int rel = insn_fetch_type(int8_t);
        if ( test_cc(b, _regs.eflags) )
            jmp_rel(rel);
        break;
    }

    case 0x90: /* nop / xchg %%r8,%%rax */
        if ( !(rex_prefix & 1) )
            break; /* nop */

    case 0x91 ... 0x97: /* xchg reg,%%rax */
        src.type = dst.type = OP_REG;
        src.bytes = dst.bytes = op_bytes;
        src.reg = (unsigned long *)&_regs.eax;
        src.val = *src.reg;
        dst.reg = decode_register(
            (b & 7) | ((rex_prefix & 1) << 3), &_regs, 0);
        dst.val = *dst.reg;
        goto xchg;

    case 0x98: /* cbw/cwde/cdqe */
        switch ( op_bytes )
        {
        case 2: *(int16_t *)&_regs.eax = (int8_t)_regs.eax; break; /* cbw */
        case 4: _regs.eax = (uint32_t)(int16_t)_regs.eax; break; /* cwde */
        case 8: _regs.eax = (int32_t)_regs.eax; break; /* cdqe */
        }
        break;

    case 0x99: /* cwd/cdq/cqo */
        switch ( op_bytes )
        {
        case 2:
            *(int16_t *)&_regs.edx = ((int16_t)_regs.eax < 0) ? -1 : 0;
            break;
        case 4:
            _regs.edx = (uint32_t)(((int32_t)_regs.eax < 0) ? -1 : 0);
            break;
        case 8:
            _regs.edx = ((int64_t)_regs.eax < 0) ? -1 : 0;
            break;
        }
        break;

    case 0x9e: /* sahf */
        *(uint8_t *)&_regs.eflags = (((uint8_t *)&_regs.eax)[1] & 0xd7) | 0x02;
        break;

    case 0x9f: /* lahf */
        ((uint8_t *)&_regs.eax)[1] = (_regs.eflags & 0xd7) | 0x02;
        break;

    case 0xa0 ... 0xa1: /* mov mem.offs,{%al,%ax,%eax,%rax} */
        /* Source EA is not encoded via ModRM. */
        dst.type = OP_REG;
        dst.reg = (unsigned long *)&_regs.eax;
        dst.bytes = (d & ByteOp) ? 1 : op_bytes;
        if ( (rc = ops->read(ea.mem.seg, insn_fetch_bytes(ad_bytes),
                             &dst.val, dst.bytes, ctxt)) != 0 )
            goto done;
        break;

    case 0xa2 ... 0xa3: /* mov {%al,%ax,%eax,%rax},mem.offs */
        /* Destination EA is not encoded via ModRM. */
        dst.type = OP_MEM;
        dst.mem.seg = ea.mem.seg;
        dst.mem.off = insn_fetch_bytes(ad_bytes);
        dst.bytes = (d & ByteOp) ? 1 : op_bytes;
        dst.val = (unsigned long)_regs.eax;
        break;

    case 0xa4 ... 0xa5: /* movs */
        dst.type = OP_MEM;
        dst.bytes = (d & ByteOp) ? 1 : op_bytes;
        dst.mem.seg = x86_seg_es;
        dst.mem.off = truncate_ea(_regs.edi);
        if ( (rc = ops->read(ea.mem.seg, truncate_ea(_regs.esi),
                             &dst.val, dst.bytes, ctxt)) != 0 )
            goto done;
        register_address_increment(
            _regs.esi, (_regs.eflags & EFLG_DF) ? -dst.bytes : dst.bytes);
        register_address_increment(
            _regs.edi, (_regs.eflags & EFLG_DF) ? -dst.bytes : dst.bytes);
        break;

    case 0xaa ... 0xab: /* stos */
        dst.type = OP_MEM;
        dst.bytes = (d & ByteOp) ? 1 : op_bytes;
        dst.mem.seg = x86_seg_es;
        dst.mem.off = truncate_ea(_regs.edi);
        dst.val = _regs.eax;
        register_address_increment(
            _regs.edi, (_regs.eflags & EFLG_DF) ? -dst.bytes : dst.bytes);
        break;

    case 0xac ... 0xad: /* lods */
        dst.type = OP_REG;
        dst.bytes = (d & ByteOp) ? 1 : op_bytes;
        dst.reg = (unsigned long *)&_regs.eax;
        if ( (rc = ops->read(ea.mem.seg, truncate_ea(_regs.esi),
                             &dst.val, dst.bytes, ctxt)) != 0 )
            goto done;
        register_address_increment(
            _regs.esi, (_regs.eflags & EFLG_DF) ? -dst.bytes : dst.bytes);
        break;

    case 0xc2: /* ret imm16 (near) */
    case 0xc3: /* ret (near) */ {
        int offset = (b == 0xc2) ? insn_fetch_type(uint16_t) : 0;
        op_bytes = mode_64bit() ? 8 : op_bytes;
        if ( (rc = ops->read(x86_seg_ss, truncate_ea(_regs.esp),
                             &dst.val, op_bytes, ctxt)) != 0 )
            goto done;
        _regs.eip = dst.val;
        register_address_increment(_regs.esp, op_bytes + offset);
        break;
    }

    case 0xd4: /* aam */ {
        unsigned int base = insn_fetch_type(uint8_t);
        uint8_t al = _regs.eax;
        fail_if(mode_64bit());
        generate_exception_if(base == 0, EXC_DE);
        *(uint16_t *)&_regs.eax = ((al / base) << 8) | (al % base);
        _regs.eflags &= ~(EFLG_SF|EFLG_ZF|EFLG_PF);
        _regs.eflags |= ((uint8_t)_regs.eax == 0) ? EFLG_ZF : 0;
        _regs.eflags |= (( int8_t)_regs.eax < 0) ? EFLG_SF : 0;
        _regs.eflags |= even_parity(_regs.eax) ? EFLG_PF : 0;
        break;
    }

    case 0xd5: /* aad */ {
        unsigned int base = insn_fetch_type(uint8_t);
        uint16_t ax = _regs.eax;
        fail_if(mode_64bit());
        *(uint16_t *)&_regs.eax = (uint8_t)(ax + ((ax >> 8) * base));
        _regs.eflags &= ~(EFLG_SF|EFLG_ZF|EFLG_PF);
        _regs.eflags |= ((uint8_t)_regs.eax == 0) ? EFLG_ZF : 0;
        _regs.eflags |= (( int8_t)_regs.eax < 0) ? EFLG_SF : 0;
        _regs.eflags |= even_parity(_regs.eax) ? EFLG_PF : 0;
        break;
    }

    case 0xd6: /* salc */
        fail_if(mode_64bit());
        *(uint8_t *)&_regs.eax = (_regs.eflags & EFLG_CF) ? 0xff : 0x00;
        break;

    case 0xd7: /* xlat */ {
        unsigned long al = (uint8_t)_regs.eax;
        if ( (rc = ops->read(ea.mem.seg, truncate_ea(_regs.ebx + al),
                             &al, 1, ctxt)) != 0 )
            goto done;
        *(uint8_t *)&_regs.eax = al;
        break;
    }

    case 0xe0 ... 0xe2: /* loop{,z,nz} */ {
        int rel = insn_fetch_type(int8_t);
        int do_jmp = !(_regs.eflags & EFLG_ZF); /* loopnz */
        if ( b == 0xe1 )
            do_jmp = !do_jmp; /* loopz */
        else if ( b == 0xe2 )
            do_jmp = 1; /* loop */
        switch ( ad_bytes )
        {
        case 2:
            do_jmp &= --(*(uint16_t *)&_regs.ecx) != 0;
            break;
        case 4:
            do_jmp &= --(*(uint32_t *)&_regs.ecx) != 0;
            _regs.ecx = (uint32_t)_regs.ecx; /* zero extend in x86/64 mode */
            break;
        default: /* case 8: */
            do_jmp &= --_regs.ecx != 0;
            break;
        }
        if ( do_jmp )
            jmp_rel(rel);
        break;
    }

    case 0xe3: /* jcxz/jecxz (short) */ {
        int rel = insn_fetch_type(int8_t);
        if ( (ad_bytes == 2) ? !(uint16_t)_regs.ecx :
             (ad_bytes == 4) ? !(uint32_t)_regs.ecx : !_regs.ecx )
            jmp_rel(rel);
        break;
    }

    case 0xe8: /* call (near) */ {
        int rel = (((op_bytes == 2) && !mode_64bit())
                   ? (int32_t)insn_fetch_type(int16_t)
                   : insn_fetch_type(int32_t));
        op_bytes = mode_64bit() ? 8 : op_bytes;
        src.val = _regs.eip;
        jmp_rel(rel);
        goto push;
    }

    case 0xe9: /* jmp (near) */ {
        int rel = (((op_bytes == 2) && !mode_64bit())
                   ? (int32_t)insn_fetch_type(int16_t)
                   : insn_fetch_type(int32_t));
        jmp_rel(rel);
        break;
    }

    case 0xeb: /* jmp (short) */
        jmp_rel(insn_fetch_type(int8_t));
        break;

    case 0xf5: /* cmc */
        _regs.eflags ^= EFLG_CF;
        break;

    case 0xf8: /* clc */
        _regs.eflags &= ~EFLG_CF;
        break;

    case 0xf9: /* stc */
        _regs.eflags |= EFLG_CF;
        break;

    case 0xfc: /* cld */
        _regs.eflags &= ~EFLG_DF;
        break;

    case 0xfd: /* std */
        _regs.eflags |= EFLG_DF;
        break;
    }
    goto writeback;

 twobyte_insn:
    switch ( b )
    {
    case 0x40 ... 0x4f: /* cmovcc */
        dst.val = src.val;
        if ( !test_cc(b, _regs.eflags) )
            dst.type = OP_NONE;
        break;

    case 0x90 ... 0x9f: /* setcc */
        dst.val = test_cc(b, _regs.eflags);
        break;

    case 0xb0 ... 0xb1: /* cmpxchg */
        /* Save real source value, then compare EAX against destination. */
        src.orig_val = src.val;
        src.val = _regs.eax;
        emulate_2op_SrcV("cmp", src, dst, _regs.eflags);
        /* Always write back. The question is: where to? */
        d |= Mov;
        if ( _regs.eflags & EFLG_ZF )
        {
            /* Success: write back to memory. */
            dst.val = src.orig_val;
        }
        else
        {
            /* Failure: write the value we saw to EAX. */
            dst.type = OP_REG;
            dst.reg = (unsigned long *)&_regs.eax;
        }
        break;
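
    /*
     * Note: like the hardware instruction, the emulation above performs
     * a write on both outcomes -- the new value to memory on a match,
     * or the observed value to EAX on a mismatch -- which is why Mov is
     * forced into the decode flags.
     */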

    case 0xa3: bt: /* bt */
        emulate_2op_SrcV_nobyte("bt", src, dst, _regs.eflags);
        break;

    case 0xb3: btr: /* btr */
        emulate_2op_SrcV_nobyte("btr", src, dst, _regs.eflags);
        break;

    case 0xab: bts: /* bts */
        emulate_2op_SrcV_nobyte("bts", src, dst, _regs.eflags);
        break;

    case 0xb6: /* movzx rm8,r{16,32,64} */
        /* Recompute DstReg as we may have decoded AH/BH/CH/DH. */
        dst.reg = decode_register(modrm_reg, &_regs, 0);
        dst.bytes = op_bytes;
        dst.val = (uint8_t)src.val;
        break;

    case 0xb7: /* movzx rm16,r{16,32,64} */
        dst.val = (uint16_t)src.val;
        break;

    case 0xbb: btc: /* btc */
        emulate_2op_SrcV_nobyte("btc", src, dst, _regs.eflags);
        break;

    case 0xba: /* Grp8 */
        switch ( modrm_reg & 3 )
        {
        case 0: goto bt;
        case 1: goto bts;
        case 2: goto btr;
        case 3: goto btc;
        }
        break;

    case 0xbe: /* movsx rm8,r{16,32,64} */
        /* Recompute DstReg as we may have decoded AH/BH/CH/DH. */
        dst.reg = decode_register(modrm_reg, &_regs, 0);
        dst.bytes = op_bytes;
        dst.val = (int8_t)src.val;
        break;

    case 0xbf: /* movsx rm16,r{16,32,64} */
        dst.val = (int16_t)src.val;
        break;

    case 0xc0 ... 0xc1: /* xadd */
        /* Write back the register source. */
        switch ( dst.bytes )
        {
        case 1: *(uint8_t  *)src.reg = (uint8_t)dst.val; break;
        case 2: *(uint16_t *)src.reg = (uint16_t)dst.val; break;
        case 4: *src.reg = (uint32_t)dst.val; break; /* 64b reg: zero-extend */
        case 8: *src.reg = dst.val; break;
        }
        goto add;
    }
    goto writeback;

 twobyte_special_insn:
    switch ( b )
    {
    case 0x0d: /* GrpP (prefetch) */
    case 0x18: /* Grp16 (prefetch/nop) */
        break;

    case 0x80 ... 0x8f: /* jcc (near) */ {
        int rel = (((op_bytes == 2) && !mode_64bit())
                   ? (int32_t)insn_fetch_type(int16_t)
                   : insn_fetch_type(int32_t));
        if ( test_cc(b, _regs.eflags) )
            jmp_rel(rel);
        break;
    }

    case 0xc7: /* Grp9 (cmpxchg8b) */
#if defined(__i386__)
    {
        unsigned long old_lo, old_hi;
        if ( (rc = ops->read(ea.mem.seg, ea.mem.off+0, &old_lo, 4, ctxt)) ||
             (rc = ops->read(ea.mem.seg, ea.mem.off+4, &old_hi, 4, ctxt)) )
            goto done;
        if ( (old_lo != _regs.eax) || (old_hi != _regs.edx) )
        {
            _regs.eax = old_lo;
            _regs.edx = old_hi;
            _regs.eflags &= ~EFLG_ZF;
        }
        else if ( ops->cmpxchg8b == NULL )
        {
            rc = X86EMUL_UNHANDLEABLE;
            goto done;
        }
        else
        {
            if ( (rc = ops->cmpxchg8b(ea.mem.seg, ea.mem.off, old_lo, old_hi,
                                      _regs.ebx, _regs.ecx, ctxt)) != 0 )
                goto done;
            _regs.eflags |= EFLG_ZF;
        }
        break;
    }
#elif defined(__x86_64__)
    {
        unsigned long old, new;
        if ( (rc = ops->read(ea.mem.seg, ea.mem.off, &old, 8, ctxt)) != 0 )
            goto done;
        if ( ((uint32_t)(old>>0) != (uint32_t)_regs.eax) ||
             ((uint32_t)(old>>32) != (uint32_t)_regs.edx) )
        {
            _regs.eax = (uint32_t)(old>>0);
            _regs.edx = (uint32_t)(old>>32);
            _regs.eflags &= ~EFLG_ZF;
        }
        else
        {
            new = (_regs.ecx<<32)|(uint32_t)_regs.ebx;
            if ( (rc = ops->cmpxchg(ea.mem.seg, ea.mem.off, old,
                                    new, 8, ctxt)) != 0 )
                goto done;
            _regs.eflags |= EFLG_ZF;
        }
        break;
    }
#endif

    case 0xc8 ... 0xcf: /* bswap */
        dst.type = OP_REG;
        dst.reg = decode_register(b & 7, &_regs, 0);
        dst.val = *dst.reg;
        switch ( dst.bytes = op_bytes )
        {
        case 2:
            dst.val = (((dst.val & 0x00FFUL) << 8) |
                       ((dst.val & 0xFF00UL) >> 8));
            break;
        case 4:
            dst.val = (((dst.val & 0x000000FFUL) << 24) |
                       ((dst.val & 0x0000FF00UL) << 8) |
                       ((dst.val & 0x00FF0000UL) >> 8) |
                       ((dst.val & 0xFF000000UL) >> 24));
            break;
#ifdef __x86_64__
        case 8:
            dst.val = (((dst.val & 0x00000000000000FFUL) << 56) |
                       ((dst.val & 0x000000000000FF00UL) << 40) |
                       ((dst.val & 0x0000000000FF0000UL) << 24) |
                       ((dst.val & 0x00000000FF000000UL) << 8) |
                       ((dst.val & 0x000000FF00000000UL) >> 8) |
                       ((dst.val & 0x0000FF0000000000UL) >> 24) |
                       ((dst.val & 0x00FF000000000000UL) >> 40) |
                       ((dst.val & 0xFF00000000000000UL) >> 56));
            break;
#endif
        }
        break;
    }
    goto writeback;

 cannot_emulate:
#ifdef __XEN__
    gdprintk(XENLOG_DEBUG, "Instr:");
    for ( ea.mem.off = ctxt->regs->eip; ea.mem.off < _regs.eip; ea.mem.off++ )
    {
        unsigned long x;
        ops->insn_fetch(x86_seg_cs, ea.mem.off, &x, 1, ctxt);
        printk(" %02x", (uint8_t)x);
    }
    printk("\n");
#endif
    return -1;
}