ia64/xen-unstable

view xen/arch/x86/x86_emulate.c @ 16427:fd3f6d814f6d

x86: single step after instruction emulation

Inject single step trap after emulating instructions if guest's
EFLAGS.TF is set.

Signed-off-by: Jan Beulich <jbeulich@novell.com>
Signed-off-by: Keir Fraser <keir.fraser@eu.citrix.com>
author Keir Fraser <keir.fraser@citrix.com>
date Thu Nov 22 18:28:47 2007 +0000 (2007-11-22)
parents 3e397fa3a6ad
children 2e7fcea74cb1
line source
1 /******************************************************************************
2 * x86_emulate.c
3 *
4 * Generic x86 (32-bit and 64-bit) instruction decoder and emulator.
5 *
6 * Copyright (c) 2005-2007 Keir Fraser
7 * Copyright (c) 2005-2007 XenSource Inc.
8 *
9 * This program is free software; you can redistribute it and/or modify
10 * it under the terms of the GNU General Public License as published by
11 * the Free Software Foundation; either version 2 of the License, or
12 * (at your option) any later version.
13 *
14 * This program is distributed in the hope that it will be useful,
15 * but WITHOUT ANY WARRANTY; without even the implied warranty of
16 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
17 * GNU General Public License for more details.
18 *
19 * You should have received a copy of the GNU General Public License
20 * along with this program; if not, write to the Free Software
21 * Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
22 */
24 #ifndef __XEN__
25 #include <stddef.h>
26 #include <stdint.h>
27 #include <public/xen.h>
28 #else
29 #include <xen/config.h>
30 #include <xen/types.h>
31 #include <xen/lib.h>
32 #include <asm/regs.h>
33 #undef cmpxchg
34 #endif
35 #include <asm-x86/x86_emulate.h>
/*
 * Per-opcode decode attributes, OR'ed together into the opcode_table[] and
 * twobyte_table[] entries below. Bits 1-2 select the destination operand
 * kind (extract with DstMask), bits 3-5 the source operand kind (SrcMask).
 */
37 /* Operand sizes: 8-bit operands or specified/overridden size. */
38 #define ByteOp (1<<0) /* 8-bit operands. */
39 /* Destination operand type. */
40 #define DstBitBase (0<<1) /* Memory operand, bit string. */
41 #define ImplicitOps (1<<1) /* Implicit in opcode. No generic decode. */
42 #define DstReg (2<<1) /* Register operand. */
43 #define DstMem (3<<1) /* Memory operand. */
44 #define DstMask (3<<1) /* Mask to extract the Dst* field from an entry. */
45 /* Source operand type. */
46 #define SrcNone (0<<3) /* No source operand. */
47 #define SrcImplicit (0<<3) /* Source operand is implicit in the opcode. */
48 #define SrcReg (1<<3) /* Register operand. */
49 #define SrcMem (2<<3) /* Memory operand. */
50 #define SrcMem16 (3<<3) /* Memory operand (16-bit). */
51 #define SrcImm (4<<3) /* Immediate operand. */
52 #define SrcImmByte (5<<3) /* 8-bit sign-extended immediate operand. */
53 #define SrcMask (7<<3) /* Mask to extract the Src* field from an entry. */
54 /* Generic ModRM decode. */
55 #define ModRM (1<<6)
56 /* Destination is only written; never read. */
57 #define Mov (1<<7)
/*
 * Decode attributes for each single-byte opcode 0x00-0xFF.
 * A zero entry means the opcode is not recognised here: it is either the
 * 0x0f escape into twobyte_table[], or simply not emulated (decode fails).
 */
59 static uint8_t opcode_table[256] = {
60 /* 0x00 - 0x07 */
61 ByteOp|DstMem|SrcReg|ModRM, DstMem|SrcReg|ModRM,
62 ByteOp|DstReg|SrcMem|ModRM, DstReg|SrcMem|ModRM,
63 ByteOp|DstReg|SrcImm, DstReg|SrcImm, 0, 0,
64 /* 0x08 - 0x0F */
65 ByteOp|DstMem|SrcReg|ModRM, DstMem|SrcReg|ModRM,
66 ByteOp|DstReg|SrcMem|ModRM, DstReg|SrcMem|ModRM,
67 ByteOp|DstReg|SrcImm, DstReg|SrcImm, 0, 0,
68 /* 0x10 - 0x17 */
69 ByteOp|DstMem|SrcReg|ModRM, DstMem|SrcReg|ModRM,
70 ByteOp|DstReg|SrcMem|ModRM, DstReg|SrcMem|ModRM,
71 ByteOp|DstReg|SrcImm, DstReg|SrcImm, 0, 0,
72 /* 0x18 - 0x1F */
73 ByteOp|DstMem|SrcReg|ModRM, DstMem|SrcReg|ModRM,
74 ByteOp|DstReg|SrcMem|ModRM, DstReg|SrcMem|ModRM,
75 ByteOp|DstReg|SrcImm, DstReg|SrcImm, 0, 0,
76 /* 0x20 - 0x27 */
77 ByteOp|DstMem|SrcReg|ModRM, DstMem|SrcReg|ModRM,
78 ByteOp|DstReg|SrcMem|ModRM, DstReg|SrcMem|ModRM,
79 ByteOp|DstReg|SrcImm, DstReg|SrcImm, 0, ImplicitOps,
80 /* 0x28 - 0x2F */
81 ByteOp|DstMem|SrcReg|ModRM, DstMem|SrcReg|ModRM,
82 ByteOp|DstReg|SrcMem|ModRM, DstReg|SrcMem|ModRM,
83 ByteOp|DstReg|SrcImm, DstReg|SrcImm, 0, ImplicitOps,
84 /* 0x30 - 0x37 */
85 ByteOp|DstMem|SrcReg|ModRM, DstMem|SrcReg|ModRM,
86 ByteOp|DstReg|SrcMem|ModRM, DstReg|SrcMem|ModRM,
87 ByteOp|DstReg|SrcImm, DstReg|SrcImm, 0, ImplicitOps,
88 /* 0x38 - 0x3F */
89 ByteOp|DstMem|SrcReg|ModRM, DstMem|SrcReg|ModRM,
90 ByteOp|DstReg|SrcMem|ModRM, DstReg|SrcMem|ModRM,
91 ByteOp|DstReg|SrcImm, DstReg|SrcImm, 0, ImplicitOps,
92 /* 0x40 - 0x4F */
93 ImplicitOps, ImplicitOps, ImplicitOps, ImplicitOps,
94 ImplicitOps, ImplicitOps, ImplicitOps, ImplicitOps,
95 ImplicitOps, ImplicitOps, ImplicitOps, ImplicitOps,
96 ImplicitOps, ImplicitOps, ImplicitOps, ImplicitOps,
97 /* 0x50 - 0x5F */
98 ImplicitOps|Mov, ImplicitOps|Mov, ImplicitOps|Mov, ImplicitOps|Mov,
99 ImplicitOps|Mov, ImplicitOps|Mov, ImplicitOps|Mov, ImplicitOps|Mov,
100 ImplicitOps|Mov, ImplicitOps|Mov, ImplicitOps|Mov, ImplicitOps|Mov,
101 ImplicitOps|Mov, ImplicitOps|Mov, ImplicitOps|Mov, ImplicitOps|Mov,
102 /* 0x60 - 0x67 */
103 ImplicitOps, ImplicitOps, DstReg|SrcMem|ModRM, DstReg|SrcMem16|ModRM|Mov,
104 0, 0, 0, 0,
105 /* 0x68 - 0x6F */
106 ImplicitOps|Mov, DstMem|SrcImm|ModRM|Mov,
107 ImplicitOps|Mov, DstMem|SrcImmByte|ModRM|Mov,
108 ImplicitOps, ImplicitOps, ImplicitOps, ImplicitOps,
109 /* 0x70 - 0x77 */
110 ImplicitOps, ImplicitOps, ImplicitOps, ImplicitOps,
111 ImplicitOps, ImplicitOps, ImplicitOps, ImplicitOps,
112 /* 0x78 - 0x7F */
113 ImplicitOps, ImplicitOps, ImplicitOps, ImplicitOps,
114 ImplicitOps, ImplicitOps, ImplicitOps, ImplicitOps,
115 /* 0x80 - 0x87 */
116 ByteOp|DstMem|SrcImm|ModRM, DstMem|SrcImm|ModRM,
117 ByteOp|DstMem|SrcImm|ModRM, DstMem|SrcImmByte|ModRM,
118 ByteOp|DstMem|SrcReg|ModRM, DstMem|SrcReg|ModRM,
119 ByteOp|DstMem|SrcReg|ModRM, DstMem|SrcReg|ModRM,
120 /* 0x88 - 0x8F */
121 ByteOp|DstMem|SrcReg|ModRM|Mov, DstMem|SrcReg|ModRM|Mov,
122 ByteOp|DstReg|SrcMem|ModRM|Mov, DstReg|SrcMem|ModRM|Mov,
123 0, DstReg|SrcNone|ModRM, 0, DstMem|SrcNone|ModRM|Mov,
124 /* 0x90 - 0x97 */
125 ImplicitOps, ImplicitOps, ImplicitOps, ImplicitOps,
126 ImplicitOps, ImplicitOps, ImplicitOps, ImplicitOps,
127 /* 0x98 - 0x9F */
128 ImplicitOps, ImplicitOps, 0, 0, 0, 0, ImplicitOps, ImplicitOps,
129 /* 0xA0 - 0xA7 */
130 ByteOp|ImplicitOps|Mov, ImplicitOps|Mov,
131 ByteOp|ImplicitOps|Mov, ImplicitOps|Mov,
132 ByteOp|ImplicitOps|Mov, ImplicitOps|Mov, 0, 0,
133 /* 0xA8 - 0xAF */
134 ByteOp|DstReg|SrcImm, DstReg|SrcImm,
135 ByteOp|ImplicitOps|Mov, ImplicitOps|Mov,
136 ByteOp|ImplicitOps|Mov, ImplicitOps|Mov, 0, 0,
137 /* 0xB0 - 0xB7 */
138 ByteOp|DstReg|SrcImm|Mov, ByteOp|DstReg|SrcImm|Mov,
139 ByteOp|DstReg|SrcImm|Mov, ByteOp|DstReg|SrcImm|Mov,
140 ByteOp|DstReg|SrcImm|Mov, ByteOp|DstReg|SrcImm|Mov,
141 ByteOp|DstReg|SrcImm|Mov, ByteOp|DstReg|SrcImm|Mov,
142 /* 0xB8 - 0xBF */
143 DstReg|SrcImm|Mov, DstReg|SrcImm|Mov, DstReg|SrcImm|Mov, DstReg|SrcImm|Mov,
144 DstReg|SrcImm|Mov, DstReg|SrcImm|Mov, DstReg|SrcImm|Mov, DstReg|SrcImm|Mov,
145 /* 0xC0 - 0xC7 */
146 ByteOp|DstMem|SrcImm|ModRM, DstMem|SrcImmByte|ModRM,
147 ImplicitOps, ImplicitOps,
148 0, 0, ByteOp|DstMem|SrcImm|ModRM|Mov, DstMem|SrcImm|ModRM|Mov,
149 /* 0xC8 - 0xCF */
150 0, 0, 0, 0, 0, 0, 0, 0,
151 /* 0xD0 - 0xD7 */
152 ByteOp|DstMem|SrcImplicit|ModRM, DstMem|SrcImplicit|ModRM,
153 ByteOp|DstMem|SrcImplicit|ModRM, DstMem|SrcImplicit|ModRM,
154 ImplicitOps, ImplicitOps, ImplicitOps, ImplicitOps,
155 /* 0xD8 - 0xDF */
156 0, 0, 0, 0, 0, 0, 0, 0,
157 /* 0xE0 - 0xE7 */
158 ImplicitOps, ImplicitOps, ImplicitOps, ImplicitOps,
159 ImplicitOps, ImplicitOps, ImplicitOps, ImplicitOps,
160 /* 0xE8 - 0xEF */
161 ImplicitOps, ImplicitOps, 0, ImplicitOps,
162 ImplicitOps, ImplicitOps, ImplicitOps, ImplicitOps,
163 /* 0xF0 - 0xF7 */
164 0, 0, 0, 0,
165 0, ImplicitOps, ByteOp|DstMem|SrcNone|ModRM, DstMem|SrcNone|ModRM,
166 /* 0xF8 - 0xFF */
167 ImplicitOps, ImplicitOps, ImplicitOps, ImplicitOps,
168 ImplicitOps, ImplicitOps, ByteOp|DstMem|SrcNone|ModRM, DstMem|SrcNone|ModRM
169 };
/*
 * Decode attributes for each two-byte (0x0f-prefixed) opcode 0x00-0xFF.
 * A zero entry means the opcode is not emulated (decode bails out).
 */
171 static uint8_t twobyte_table[256] = {
172 /* 0x00 - 0x07 */
173 0, 0, 0, 0, 0, ImplicitOps, 0, 0,
174 /* 0x08 - 0x0F */
175 ImplicitOps, ImplicitOps, 0, 0, 0, ImplicitOps|ModRM, 0, 0,
176 /* 0x10 - 0x17 */
177 0, 0, 0, 0, 0, 0, 0, 0,
178 /* 0x18 - 0x1F */
179 ImplicitOps|ModRM, ImplicitOps|ModRM, ImplicitOps|ModRM, ImplicitOps|ModRM,
180 ImplicitOps|ModRM, ImplicitOps|ModRM, ImplicitOps|ModRM, ImplicitOps|ModRM,
181 /* 0x20 - 0x27 */
182 ImplicitOps|ModRM, ImplicitOps|ModRM, ImplicitOps|ModRM, ImplicitOps|ModRM,
183 0, 0, 0, 0,
184 /* 0x28 - 0x2F */
185 0, 0, 0, 0, 0, 0, 0, 0,
186 /* 0x30 - 0x37 */
187 ImplicitOps, 0, ImplicitOps, 0, 0, 0, 0, 0,
188 /* 0x38 - 0x3F */
189 0, 0, 0, 0, 0, 0, 0, 0,
190 /* 0x40 - 0x47 */
191 DstReg|SrcMem|ModRM|Mov, DstReg|SrcMem|ModRM|Mov,
192 DstReg|SrcMem|ModRM|Mov, DstReg|SrcMem|ModRM|Mov,
193 DstReg|SrcMem|ModRM|Mov, DstReg|SrcMem|ModRM|Mov,
194 DstReg|SrcMem|ModRM|Mov, DstReg|SrcMem|ModRM|Mov,
195 /* 0x48 - 0x4F */
196 DstReg|SrcMem|ModRM|Mov, DstReg|SrcMem|ModRM|Mov,
197 DstReg|SrcMem|ModRM|Mov, DstReg|SrcMem|ModRM|Mov,
198 DstReg|SrcMem|ModRM|Mov, DstReg|SrcMem|ModRM|Mov,
199 DstReg|SrcMem|ModRM|Mov, DstReg|SrcMem|ModRM|Mov,
200 /* 0x50 - 0x5F */
201 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
202 /* 0x60 - 0x6F */
203 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
204 /* 0x70 - 0x7F */
205 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
206 /* 0x80 - 0x87 */
207 ImplicitOps, ImplicitOps, ImplicitOps, ImplicitOps,
208 ImplicitOps, ImplicitOps, ImplicitOps, ImplicitOps,
209 /* 0x88 - 0x8F */
210 ImplicitOps, ImplicitOps, ImplicitOps, ImplicitOps,
211 ImplicitOps, ImplicitOps, ImplicitOps, ImplicitOps,
212 /* 0x90 - 0x97 */
213 ByteOp|DstMem|SrcNone|ModRM|Mov, ByteOp|DstMem|SrcNone|ModRM|Mov,
214 ByteOp|DstMem|SrcNone|ModRM|Mov, ByteOp|DstMem|SrcNone|ModRM|Mov,
215 ByteOp|DstMem|SrcNone|ModRM|Mov, ByteOp|DstMem|SrcNone|ModRM|Mov,
216 ByteOp|DstMem|SrcNone|ModRM|Mov, ByteOp|DstMem|SrcNone|ModRM|Mov,
217 /* 0x98 - 0x9F */
218 ByteOp|DstMem|SrcNone|ModRM|Mov, ByteOp|DstMem|SrcNone|ModRM|Mov,
219 ByteOp|DstMem|SrcNone|ModRM|Mov, ByteOp|DstMem|SrcNone|ModRM|Mov,
220 ByteOp|DstMem|SrcNone|ModRM|Mov, ByteOp|DstMem|SrcNone|ModRM|Mov,
221 ByteOp|DstMem|SrcNone|ModRM|Mov, ByteOp|DstMem|SrcNone|ModRM|Mov,
222 /* 0xA0 - 0xA7 */
223 0, 0, 0, DstBitBase|SrcReg|ModRM, 0, 0, 0, 0,
224 /* 0xA8 - 0xAF */
225 0, 0, 0, DstBitBase|SrcReg|ModRM, 0, 0, 0, DstReg|SrcMem|ModRM,
226 /* 0xB0 - 0xB7 */
227 ByteOp|DstMem|SrcReg|ModRM, DstMem|SrcReg|ModRM,
228 0, DstBitBase|SrcReg|ModRM,
229 0, 0, ByteOp|DstReg|SrcMem|ModRM|Mov, DstReg|SrcMem16|ModRM|Mov,
230 /* 0xB8 - 0xBF */
231 0, 0, DstBitBase|SrcImmByte|ModRM, DstBitBase|SrcReg|ModRM,
232 DstReg|SrcMem|ModRM, DstReg|SrcMem|ModRM,
233 ByteOp|DstReg|SrcMem|ModRM|Mov, DstReg|SrcMem16|ModRM|Mov,
234 /* 0xC0 - 0xC7 */
235 ByteOp|DstMem|SrcReg|ModRM, DstMem|SrcReg|ModRM, 0, 0,
236 0, 0, 0, ImplicitOps|ModRM,
237 /* 0xC8 - 0xCF */
238 ImplicitOps, ImplicitOps, ImplicitOps, ImplicitOps,
239 ImplicitOps, ImplicitOps, ImplicitOps, ImplicitOps,
240 /* 0xD0 - 0xDF */
241 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
242 /* 0xE0 - 0xEF */
243 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
244 /* 0xF0 - 0xFF */
245 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0
246 };
248 /* Type, address-of, and value of an instruction's operand. */
249 struct operand {
250 enum { OP_REG, OP_MEM, OP_IMM, OP_NONE } type;
251 unsigned int bytes; /* Operand width in bytes (1, 2, 4 or 8). */
252 unsigned long val, orig_val; /* NOTE(review): orig_val looks like the pre-execution value kept for write-back decisions — confirm against the full file. */
253 union {
254 /* OP_REG: Pointer to register field. */
255 unsigned long *reg;
256 /* OP_MEM: Segment and offset. */
257 struct {
258 enum x86_segment seg;
259 unsigned long off;
260 } mem;
261 };
262 };
264 /* EFLAGS bit definitions. */
265 #define EFLG_RF (1<<16)
266 #define EFLG_OF (1<<11)
267 #define EFLG_DF (1<<10)
268 #define EFLG_IF (1<<9)
269 #define EFLG_SF (1<<7)
270 #define EFLG_ZF (1<<6)
271 #define EFLG_AF (1<<4)
272 #define EFLG_PF (1<<2)
273 #define EFLG_CF (1<<0)
275 /* Exception definitions. */
276 #define EXC_DE 0 /* Divide error */
277 #define EXC_BR 5 /* BOUND range exceeded */
278 #define EXC_UD 6 /* Invalid opcode */
279 #define EXC_GP 13 /* General protection fault */
281 /*
282 * Instruction emulation:
283 * Most instructions are emulated directly via a fragment of inline assembly
284 * code. This allows us to save/restore EFLAGS and thus very easily pick up
285 * any modified flags.
286 */
/* Width-dependent assembler fragments, pasted into the asm templates below. */
288 #if defined(__x86_64__)
289 #define _LO32 "k" /* force 32-bit operand */
290 #define _STK "%%rsp" /* stack pointer */
291 #define _BYTES_PER_LONG "8"
292 #elif defined(__i386__)
293 #define _LO32 "" /* force 32-bit operand */
294 #define _STK "%%esp" /* stack pointer */
295 #define _BYTES_PER_LONG "4"
296 #endif
298 /*
299 * These EFLAGS bits are restored from saved value during emulation, and
300 * any changes are written back to the saved value after emulation.
301 */
302 #define EFLAGS_MASK (EFLG_OF|EFLG_SF|EFLG_ZF|EFLG_AF|EFLG_PF|EFLG_CF)
304 /* Before executing instruction: restore necessary bits in EFLAGS. */
/*
 * _sav, _msk and _tmp are asm operand-number strings (e.g. "0"). The flag
 * merge is staged on the stack: push the saved flags twice, pushf the live
 * flags, mask both in place, OR them together, then popf the merged value.
 */
305 #define _PRE_EFLAGS(_sav, _msk, _tmp) \
306 /* EFLAGS = (_sav & _msk) | (EFLAGS & ~_msk); _sav &= ~_msk; */ \
307 "movl %"_sav",%"_LO32 _tmp"; " \
308 "push %"_tmp"; " \
309 "push %"_tmp"; " \
310 "movl %"_msk",%"_LO32 _tmp"; " \
311 "andl %"_LO32 _tmp",("_STK"); " \
312 "pushf; " \
313 "notl %"_LO32 _tmp"; " \
314 "andl %"_LO32 _tmp",("_STK"); " \
315 "andl %"_LO32 _tmp",2*"_BYTES_PER_LONG"("_STK"); " \
316 "pop %"_tmp"; " \
317 "orl %"_LO32 _tmp",("_STK"); " \
318 "popf; " \
319 "pop %"_sav"; "
321 /* After executing instruction: write-back necessary bits in EFLAGS. */
322 #define _POST_EFLAGS(_sav, _msk, _tmp) \
323 /* _sav |= EFLAGS & _msk; */ \
324 "pushf; " \
325 "pop %"_tmp"; " \
326 "andl %"_msk",%"_LO32 _tmp"; " \
327 "orl %"_LO32 _tmp",%"_sav"; "
329 /* Raw emulation: instruction has two explicit operands. */
/*
 * _wx/_wy, _lx/_ly and _qx/_qy are register-modifier / operand-constraint
 * string pairs for the 16-, 32- and 64-bit destination widths respectively.
 * The instruction is run for real via "asm", with EFLAGS restored before and
 * captured after through _PRE_EFLAGS/_POST_EFLAGS.
 */
330 #define __emulate_2op_nobyte(_op,_src,_dst,_eflags,_wx,_wy,_lx,_ly,_qx,_qy)\
331 do{ unsigned long _tmp; \
332 switch ( (_dst).bytes ) \
333 { \
334 case 2: \
335 asm volatile ( \
336 _PRE_EFLAGS("0","4","2") \
337 _op"w %"_wx"3,%1; " \
338 _POST_EFLAGS("0","4","2") \
339 : "=m" (_eflags), "=m" ((_dst).val), "=&r" (_tmp) \
340 : _wy ((_src).val), "i" (EFLAGS_MASK), \
341 "m" (_eflags), "m" ((_dst).val) ); \
342 break; \
343 case 4: \
344 asm volatile ( \
345 _PRE_EFLAGS("0","4","2") \
346 _op"l %"_lx"3,%1; " \
347 _POST_EFLAGS("0","4","2") \
348 : "=m" (_eflags), "=m" ((_dst).val), "=&r" (_tmp) \
349 : _ly ((_src).val), "i" (EFLAGS_MASK), \
350 "m" (_eflags), "m" ((_dst).val) ); \
351 break; \
352 case 8: \
353 __emulate_2op_8byte(_op, _src, _dst, _eflags, _qx, _qy); \
354 break; \
355 } \
356 } while (0)
/* As __emulate_2op_nobyte, but additionally handles a byte-width (case 1)
 * destination via the _bx/_by constraint pair. */
357 #define __emulate_2op(_op,_src,_dst,_eflags,_bx,_by,_wx,_wy,_lx,_ly,_qx,_qy)\
358 do{ unsigned long _tmp; \
359 switch ( (_dst).bytes ) \
360 { \
361 case 1: \
362 asm volatile ( \
363 _PRE_EFLAGS("0","4","2") \
364 _op"b %"_bx"3,%1; " \
365 _POST_EFLAGS("0","4","2") \
366 : "=m" (_eflags), "=m" ((_dst).val), "=&r" (_tmp) \
367 : _by ((_src).val), "i" (EFLAGS_MASK), \
368 "m" (_eflags), "m" ((_dst).val) ); \
369 break; \
370 default: \
371 __emulate_2op_nobyte(_op,_src,_dst,_eflags,_wx,_wy,_lx,_ly,_qx,_qy);\
372 break; \
373 } \
374 } while (0)
375 /* Source operand is byte-sized and may be restricted to just %cl. */
/* NB. the "c" gcc constraint pins the source to %ecx, so the count is in
 * %cl as shift/rotate instructions require. */
376 #define emulate_2op_SrcB(_op, _src, _dst, _eflags) \
377 __emulate_2op(_op, _src, _dst, _eflags, \
378 "b", "c", "b", "c", "b", "c", "b", "c")
379 /* Source operand is byte, word, long or quad sized. */
380 #define emulate_2op_SrcV(_op, _src, _dst, _eflags) \
381 __emulate_2op(_op, _src, _dst, _eflags, \
382 "b", "q", "w", "r", _LO32, "r", "", "r")
383 /* Source operand is word, long or quad sized. */
384 #define emulate_2op_SrcV_nobyte(_op, _src, _dst, _eflags) \
385 __emulate_2op_nobyte(_op, _src, _dst, _eflags, \
386 "w", "r", _LO32, "r", "", "r")
388 /* Instruction has only one explicit operand (no source operand). */
/* Same flag save/restore scheme as __emulate_2op, minus the source operand
 * (note the mask is asm operand "3" here, not "4"). */
389 #define emulate_1op(_op,_dst,_eflags) \
390 do{ unsigned long _tmp; \
391 switch ( (_dst).bytes ) \
392 { \
393 case 1: \
394 asm volatile ( \
395 _PRE_EFLAGS("0","3","2") \
396 _op"b %1; " \
397 _POST_EFLAGS("0","3","2") \
398 : "=m" (_eflags), "=m" ((_dst).val), "=&r" (_tmp) \
399 : "i" (EFLAGS_MASK), "m" (_eflags), "m" ((_dst).val) ); \
400 break; \
401 case 2: \
402 asm volatile ( \
403 _PRE_EFLAGS("0","3","2") \
404 _op"w %1; " \
405 _POST_EFLAGS("0","3","2") \
406 : "=m" (_eflags), "=m" ((_dst).val), "=&r" (_tmp) \
407 : "i" (EFLAGS_MASK), "m" (_eflags), "m" ((_dst).val) ); \
408 break; \
409 case 4: \
410 asm volatile ( \
411 _PRE_EFLAGS("0","3","2") \
412 _op"l %1; " \
413 _POST_EFLAGS("0","3","2") \
414 : "=m" (_eflags), "=m" ((_dst).val), "=&r" (_tmp) \
415 : "i" (EFLAGS_MASK), "m" (_eflags), "m" ((_dst).val) ); \
416 break; \
417 case 8: \
418 __emulate_1op_8byte(_op, _dst, _eflags); \
419 break; \
420 } \
421 } while (0)
423 /* Emulate an instruction with quadword operands (x86/64 only). */
/* NB. These helpers reference the _tmp local declared by their callers
 * (__emulate_2op_nobyte / emulate_1op). */
424 #if defined(__x86_64__)
425 #define __emulate_2op_8byte(_op, _src, _dst, _eflags, _qx, _qy) \
426 do{ asm volatile ( \
427 _PRE_EFLAGS("0","4","2") \
428 _op"q %"_qx"3,%1; " \
429 _POST_EFLAGS("0","4","2") \
430 : "=m" (_eflags), "=m" ((_dst).val), "=&r" (_tmp) \
431 : _qy ((_src).val), "i" (EFLAGS_MASK), \
432 "m" (_eflags), "m" ((_dst).val) ); \
433 } while (0)
434 #define __emulate_1op_8byte(_op, _dst, _eflags) \
435 do{ asm volatile ( \
436 _PRE_EFLAGS("0","3","2") \
437 _op"q %1; " \
438 _POST_EFLAGS("0","3","2") \
439 : "=m" (_eflags), "=m" ((_dst).val), "=&r" (_tmp) \
440 : "i" (EFLAGS_MASK), "m" (_eflags), "m" ((_dst).val) ); \
441 } while (0)
442 #elif defined(__i386__)
/* On i386 the quadword helpers expand to nothing: x86_emulate() bails out
 * early (X86EMUL_UNHANDLEABLE) before an 8-byte operand can occur. */
443 #define __emulate_2op_8byte(_op, _src, _dst, _eflags, _qx, _qy)
444 #define __emulate_1op_8byte(_op, _dst, _eflags)
445 #endif /* __i386__ */
447 /* Fetch next part of the instruction being emulated. */
/*
 * Relies on locals of x86_emulate(): _regs (shadow registers), rc, ctxt,
 * ops, and the 'done' label. Enforces the architectural 15-byte maximum
 * instruction length by raising #GP once the fetch window is exceeded.
 */
448 #define insn_fetch_bytes(_size) \
449 ({ unsigned long _x, _eip = _regs.eip; \
450 if ( !mode_64bit() ) _eip = (uint32_t)_eip; /* ignore upper dword */ \
451 _regs.eip += (_size); /* real hardware doesn't truncate */ \
452 generate_exception_if((uint8_t)(_regs.eip - ctxt->regs->eip) > 15, \
453 EXC_GP); \
454 rc = ops->insn_fetch(x86_seg_cs, _eip, &_x, (_size), ctxt); \
455 if ( rc ) goto done; \
456 _x; \
457 })
458 #define insn_fetch_type(_type) ((_type)insn_fetch_bytes(sizeof(_type)))
/* Mask an effective address down to the given byte width (no-op at full
 * native width). truncate_ea() uses the current address size (ad_bytes). */
460 #define _truncate_ea(ea, byte_width) \
461 ({ unsigned long __ea = (ea); \
462 unsigned int _width = (byte_width); \
463 ((_width == sizeof(unsigned long)) ? __ea : \
464 (__ea & ((1UL << (_width << 3)) - 1))); \
465 })
466 #define truncate_ea(ea) _truncate_ea((ea), ad_bytes)
468 #define mode_64bit() (def_ad_bytes == 8)
/* Abort emulation (rc = X86EMUL_UNHANDLEABLE) if the predicate holds;
 * relies on the enclosing function's 'rc' and 'done' label. */
470 #define fail_if(p) \
471 do { \
472 rc = (p) ? X86EMUL_UNHANDLEABLE : X86EMUL_OKAY; \
473 if ( rc ) goto done; \
474 } while (0)
476 /* In future we will be able to generate arbitrary exceptions. */
477 #define generate_exception_if(p, e) fail_if(p)
479 /* To be done... */
/* Privilege-check stubs: currently claim neither ring-0 nor IOPL-granted
 * privilege. */
480 #define mode_ring0() (0)
481 #define mode_iopl() (0)
/* Return nonzero iff the given byte contains an even number of 1 bits. */
static int even_parity(uint8_t v)
{
    /* Fold all eight bits into bit 0, which ends up as the XOR of them all. */
    v ^= v >> 4;
    v ^= v >> 2;
    v ^= v >> 1;
    /* XOR of all bits is 0 exactly when the population count is even. */
    return !(v & 1);
}
491 /* Update address held in a register, based on addressing mode. */
/*
 * Three cases: at full native width, a plain add; at sub-native width in
 * 64-bit mode, the result is masked so the upper bits are cleared; in
 * legacy modes the bits above the address width are preserved untouched.
 */
492 #define _register_address_increment(reg, inc, byte_width) \
493 do { \
494 int _inc = (inc); /* signed type ensures sign extension to long */ \
495 unsigned int _width = (byte_width); \
496 if ( _width == sizeof(unsigned long) ) \
497 (reg) += _inc; \
498 else if ( mode_64bit() ) \
499 (reg) = ((reg) + _inc) & ((1UL << (_width << 3)) - 1); \
500 else \
501 (reg) = ((reg) & ~((1UL << (_width << 3)) - 1)) | \
502 (((reg) + _inc) & ((1UL << (_width << 3)) - 1)); \
503 } while (0)
504 #define register_address_increment(reg, inc) \
505 _register_address_increment((reg), (inc), ad_bytes)
/* Decrement the stack pointer at stack width and yield the new, truncated
 * address (push-style access). */
507 #define sp_pre_dec(dec) ({ \
508 _register_address_increment(_regs.esp, -(dec), ctxt->sp_size/8); \
509 _truncate_ea(_regs.esp, ctxt->sp_size/8); \
510 })
/* Yield the current (truncated) stack address, then increment the stack
 * pointer at stack width (pop-style access). */
511 #define sp_post_inc(inc) ({ \
512 unsigned long __esp = _truncate_ea(_regs.esp, ctxt->sp_size/8); \
513 _register_address_increment(_regs.esp, (inc), ctxt->sp_size/8); \
514 __esp; \
515 })
/* Relative branch: add the (sign-extended) displacement and, outside 64-bit
 * mode, truncate EIP to the current operand size. */
517 #define jmp_rel(rel) \
518 do { \
519 _regs.eip += (int)(rel); \
520 if ( !mode_64bit() ) \
521 _regs.eip = ((op_bytes == 2) \
522 ? (uint16_t)_regs.eip : (uint32_t)_regs.eip); \
523 } while (0)
/*
 * One iteration of a REP-prefixed instruction.
 * Reads rCX from the shadow registers at the given address width. If the
 * count is already zero, the committed EIP is advanced past the instruction
 * and 1 is returned (caller stops emulating). Otherwise rCX is decremented,
 * the shadow EIP is rewound so the instruction repeats, and 0 is returned.
 */
525 static int __handle_rep_prefix(
526 struct cpu_user_regs *int_regs,
527 struct cpu_user_regs *ext_regs,
528 int ad_bytes)
529 {
530 unsigned long ecx = ((ad_bytes == 2) ? (uint16_t)int_regs->ecx :
531 (ad_bytes == 4) ? (uint32_t)int_regs->ecx :
532 int_regs->ecx);
534 if ( ecx-- == 0 )
535 {
536 ext_regs->eip = int_regs->eip; /* commit EIP past the instruction */
537 return 1;
538 }
540 if ( ad_bytes == 2 )
541 *(uint16_t *)&int_regs->ecx = ecx; /* little-endian: low 16 bits only */
542 else if ( ad_bytes == 4 )
543 int_regs->ecx = (uint32_t)ecx;
544 else
545 int_regs->ecx = ecx;
546 int_regs->eip = ext_regs->eip; /* rewind shadow EIP: re-execute insn */
547 return 0;
548 }
/* Bail out of emulation (via 'done') once the REP count is exhausted. */
550 #define handle_rep_prefix() \
551 do { \
552 if ( rep_prefix && __handle_rep_prefix(&_regs, ctxt->regs, ad_bytes) ) \
553 goto done; \
554 } while (0)
556 /*
557 * Unsigned multiplication with double-word result.
558 * IN: Multiplicand=m[0], Multiplier=m[1]
559 * OUT: Return CF/OF (overflow status); Result=m[1]:m[0]
560 */
561 static int mul_dbl(unsigned long m[2])
562 {
563 int rc;
/* Widening MUL: rDX:rAX = rAX * m[1]; seto captures OF, which MUL sets
 * iff the high half of the product is nonzero. */
564 asm ( "mul %4; seto %b2"
565 : "=a" (m[0]), "=d" (m[1]), "=q" (rc)
566 : "0" (m[0]), "1" (m[1]), "2" (0) );
567 return rc;
568 }
570 /*
571 * Signed multiplication with double-word result.
572 * IN: Multiplicand=m[0], Multiplier=m[1]
573 * OUT: Return CF/OF (overflow status); Result=m[1]:m[0]
574 */
575 static int imul_dbl(unsigned long m[2])
576 {
577 int rc;
/* Widening IMUL: rDX:rAX = rAX * m[1]; seto captures OF, set when the
 * signed product does not fit in a single word. */
578 asm ( "imul %4; seto %b2"
579 : "=a" (m[0]), "=d" (m[1]), "=q" (rc)
580 : "0" (m[0]), "1" (m[1]), "2" (0) );
581 return rc;
582 }
584 /*
585 * Unsigned division of double-word dividend.
586 * IN: Dividend=u[1]:u[0], Divisor=v
587 * OUT: Return 1: #DE
588 * Return 0: Quotient=u[0], Remainder=u[1]
589 */
590 static int div_dbl(unsigned long u[2], unsigned long v)
591 {
/* u[1] >= v is exactly the condition under which the quotient would not
 * fit in one word, i.e. when real DIV would raise #DE (as would v == 0).
 * Checking up front means the asm below can never fault. */
592 if ( (v == 0) || (u[1] >= v) )
593 return 1;
594 asm ( "div %4"
595 : "=a" (u[0]), "=d" (u[1])
596 : "0" (u[0]), "1" (u[1]), "r" (v) );
597 return 0;
598 }
600 /*
601 * Signed division of double-word dividend.
602 * IN: Dividend=u[1]:u[0], Divisor=v
603 * OUT: Return 1: #DE
604 * Return 0: Quotient=u[0], Remainder=u[1]
605 * NB. We don't use idiv directly as it's moderately hard to work out
606 * ahead of time whether it will #DE, which we cannot allow to happen.
607 */
608 static int idiv_dbl(unsigned long u[2], unsigned long v)
609 {
610 int negu = (long)u[1] < 0, negv = (long)v < 0;
612 /* u = abs(u) */
/* Two's-complement negation of the double word: complement the high word,
 * negate the low word, and propagate the carry when the low word was 0. */
613 if ( negu )
614 {
615 u[1] = ~u[1];
616 if ( (u[0] = -u[0]) == 0 )
617 u[1]++;
618 }
620 /* abs(u) / abs(v) */
621 if ( div_dbl(u, negv ? -v : v) )
622 return 1;
624 /* Remainder has same sign as dividend. It cannot overflow. */
625 if ( negu )
626 u[1] = -u[1];
628 /* Quotient is overflowed if sign bit is set. */
/* A negative quotient may use the full word range down to -2^(N-1):
 * (u[0] << 1) == 0 identifies that one permissible 0x80...0 value. */
629 if ( negu ^ negv )
630 {
631 if ( (long)u[0] >= 0 )
632 u[0] = -u[0];
633 else if ( (u[0] << 1) != 0 ) /* == 0x80...0 is okay */
634 return 1;
635 }
636 else if ( (long)u[0] < 0 )
637 return 1;
639 return 0;
640 }
642 static int
643 test_cc(
644 unsigned int condition, unsigned int flags)
645 {
646 int rc = 0;
648 switch ( (condition & 15) >> 1 )
649 {
650 case 0: /* o */
651 rc |= (flags & EFLG_OF);
652 break;
653 case 1: /* b/c/nae */
654 rc |= (flags & EFLG_CF);
655 break;
656 case 2: /* z/e */
657 rc |= (flags & EFLG_ZF);
658 break;
659 case 3: /* be/na */
660 rc |= (flags & (EFLG_CF|EFLG_ZF));
661 break;
662 case 4: /* s */
663 rc |= (flags & EFLG_SF);
664 break;
665 case 5: /* p/pe */
666 rc |= (flags & EFLG_PF);
667 break;
668 case 7: /* le/ng */
669 rc |= (flags & EFLG_ZF);
670 /* fall through */
671 case 6: /* l/nge */
672 rc |= (!(flags & EFLG_SF) != !(flags & EFLG_OF));
673 break;
674 }
676 /* Odd condition identifiers (lsb == 1) have inverted sense. */
677 return (!!rc ^ (condition & 1));
678 }
680 void *
681 decode_register(
682 uint8_t modrm_reg, struct cpu_user_regs *regs, int highbyte_regs)
683 {
684 void *p;
686 switch ( modrm_reg )
687 {
688 case 0: p = &regs->eax; break;
689 case 1: p = &regs->ecx; break;
690 case 2: p = &regs->edx; break;
691 case 3: p = &regs->ebx; break;
692 case 4: p = (highbyte_regs ?
693 ((unsigned char *)&regs->eax + 1) :
694 (unsigned char *)&regs->esp); break;
695 case 5: p = (highbyte_regs ?
696 ((unsigned char *)&regs->ecx + 1) :
697 (unsigned char *)&regs->ebp); break;
698 case 6: p = (highbyte_regs ?
699 ((unsigned char *)&regs->edx + 1) :
700 (unsigned char *)&regs->esi); break;
701 case 7: p = (highbyte_regs ?
702 ((unsigned char *)&regs->ebx + 1) :
703 (unsigned char *)&regs->edi); break;
704 #if defined(__x86_64__)
705 case 8: p = &regs->r8; break;
706 case 9: p = &regs->r9; break;
707 case 10: p = &regs->r10; break;
708 case 11: p = &regs->r11; break;
709 case 12: p = &regs->r12; break;
710 case 13: p = &regs->r13; break;
711 case 14: p = &regs->r14; break;
712 case 15: p = &regs->r15; break;
713 #endif
714 default: p = NULL; break;
715 }
717 return p;
718 }
720 int
721 x86_emulate(
722 struct x86_emulate_ctxt *ctxt,
723 struct x86_emulate_ops *ops)
724 {
725 /* Shadow copy of register state. Committed on successful emulation. */
726 struct cpu_user_regs _regs = *ctxt->regs;
728 uint8_t b, d, sib, sib_index, sib_base, twobyte = 0, rex_prefix = 0;
729 uint8_t modrm, modrm_mod = 0, modrm_reg = 0, modrm_rm = 0;
730 unsigned int op_bytes, def_op_bytes, ad_bytes, def_ad_bytes;
731 unsigned int lock_prefix = 0, rep_prefix = 0;
732 int override_seg = -1, rc = X86EMUL_OKAY;
733 struct operand src, dst;
735 /* Data operand effective address (usually computed from ModRM). */
736 struct operand ea;
738 /* Default is a memory operand relative to segment DS. */
739 ea.type = OP_MEM;
740 ea.mem.seg = x86_seg_ds;
741 ea.mem.off = 0;
743 op_bytes = def_op_bytes = ad_bytes = def_ad_bytes = ctxt->addr_size/8;
744 if ( op_bytes == 8 )
745 {
746 op_bytes = def_op_bytes = 4;
747 #ifndef __x86_64__
748 return X86EMUL_UNHANDLEABLE;
749 #endif
750 }
752 /* Prefix bytes. */
753 for ( ; ; )
754 {
755 switch ( b = insn_fetch_type(uint8_t) )
756 {
757 case 0x66: /* operand-size override */
758 op_bytes = def_op_bytes ^ 6;
759 break;
760 case 0x67: /* address-size override */
761 ad_bytes = def_ad_bytes ^ (mode_64bit() ? 12 : 6);
762 break;
763 case 0x2e: /* CS override */
764 override_seg = x86_seg_cs;
765 break;
766 case 0x3e: /* DS override */
767 override_seg = x86_seg_ds;
768 break;
769 case 0x26: /* ES override */
770 override_seg = x86_seg_es;
771 break;
772 case 0x64: /* FS override */
773 override_seg = x86_seg_fs;
774 break;
775 case 0x65: /* GS override */
776 override_seg = x86_seg_gs;
777 break;
778 case 0x36: /* SS override */
779 override_seg = x86_seg_ss;
780 break;
781 case 0xf0: /* LOCK */
782 lock_prefix = 1;
783 break;
784 case 0xf2: /* REPNE/REPNZ */
785 case 0xf3: /* REP/REPE/REPZ */
786 rep_prefix = 1;
787 break;
788 case 0x40 ... 0x4f: /* REX */
789 if ( !mode_64bit() )
790 goto done_prefixes;
791 rex_prefix = b;
792 continue;
793 default:
794 goto done_prefixes;
795 }
797 /* Any legacy prefix after a REX prefix nullifies its effect. */
798 rex_prefix = 0;
799 }
800 done_prefixes:
802 if ( rex_prefix & 8 ) /* REX.W */
803 op_bytes = 8;
805 /* Opcode byte(s). */
806 d = opcode_table[b];
807 if ( d == 0 )
808 {
809 /* Two-byte opcode? */
810 if ( b == 0x0f )
811 {
812 twobyte = 1;
813 b = insn_fetch_type(uint8_t);
814 d = twobyte_table[b];
815 }
817 /* Unrecognised? */
818 if ( d == 0 )
819 goto cannot_emulate;
820 }
822 /* Lock prefix is allowed only on RMW instructions. */
823 generate_exception_if((d & Mov) && lock_prefix, EXC_GP);
825 /* ModRM and SIB bytes. */
826 if ( d & ModRM )
827 {
828 modrm = insn_fetch_type(uint8_t);
829 modrm_mod = (modrm & 0xc0) >> 6;
830 modrm_reg = ((rex_prefix & 4) << 1) | ((modrm & 0x38) >> 3);
831 modrm_rm = modrm & 0x07;
833 if ( modrm_mod == 3 )
834 {
835 modrm_rm |= (rex_prefix & 1) << 3;
836 ea.type = OP_REG;
837 ea.reg = decode_register(
838 modrm_rm, &_regs, (d & ByteOp) && (rex_prefix == 0));
839 }
840 else if ( ad_bytes == 2 )
841 {
842 /* 16-bit ModR/M decode. */
843 switch ( modrm_rm )
844 {
845 case 0:
846 ea.mem.off = _regs.ebx + _regs.esi;
847 break;
848 case 1:
849 ea.mem.off = _regs.ebx + _regs.edi;
850 break;
851 case 2:
852 ea.mem.seg = x86_seg_ss;
853 ea.mem.off = _regs.ebp + _regs.esi;
854 break;
855 case 3:
856 ea.mem.seg = x86_seg_ss;
857 ea.mem.off = _regs.ebp + _regs.edi;
858 break;
859 case 4:
860 ea.mem.off = _regs.esi;
861 break;
862 case 5:
863 ea.mem.off = _regs.edi;
864 break;
865 case 6:
866 if ( modrm_mod == 0 )
867 break;
868 ea.mem.seg = x86_seg_ss;
869 ea.mem.off = _regs.ebp;
870 break;
871 case 7:
872 ea.mem.off = _regs.ebx;
873 break;
874 }
875 switch ( modrm_mod )
876 {
877 case 0:
878 if ( modrm_rm == 6 )
879 ea.mem.off = insn_fetch_type(int16_t);
880 break;
881 case 1:
882 ea.mem.off += insn_fetch_type(int8_t);
883 break;
884 case 2:
885 ea.mem.off += insn_fetch_type(int16_t);
886 break;
887 }
888 ea.mem.off = truncate_ea(ea.mem.off);
889 }
890 else
891 {
892 /* 32/64-bit ModR/M decode. */
893 if ( modrm_rm == 4 )
894 {
895 sib = insn_fetch_type(uint8_t);
896 sib_index = ((sib >> 3) & 7) | ((rex_prefix << 2) & 8);
897 sib_base = (sib & 7) | ((rex_prefix << 3) & 8);
898 if ( sib_index != 4 )
899 ea.mem.off = *(long*)decode_register(sib_index, &_regs, 0);
900 ea.mem.off <<= (sib >> 6) & 3;
901 if ( (modrm_mod == 0) && ((sib_base & 7) == 5) )
902 ea.mem.off += insn_fetch_type(int32_t);
903 else if ( sib_base == 4 )
904 {
905 ea.mem.seg = x86_seg_ss;
906 ea.mem.off += _regs.esp;
907 if ( !twobyte && (b == 0x8f) )
908 /* POP <rm> computes its EA post increment. */
909 ea.mem.off += ((mode_64bit() && (op_bytes == 4))
910 ? 8 : op_bytes);
911 }
912 else if ( sib_base == 5 )
913 {
914 ea.mem.seg = x86_seg_ss;
915 ea.mem.off += _regs.ebp;
916 }
917 else
918 ea.mem.off += *(long*)decode_register(sib_base, &_regs, 0);
919 }
920 else
921 {
922 modrm_rm |= (rex_prefix & 1) << 3;
923 ea.mem.off = *(long *)decode_register(modrm_rm, &_regs, 0);
924 if ( (modrm_rm == 5) && (modrm_mod != 0) )
925 ea.mem.seg = x86_seg_ss;
926 }
927 switch ( modrm_mod )
928 {
929 case 0:
930 if ( (modrm_rm & 7) != 5 )
931 break;
932 ea.mem.off = insn_fetch_type(int32_t);
933 if ( !mode_64bit() )
934 break;
935 /* Relative to RIP of next instruction. Argh! */
936 ea.mem.off += _regs.eip;
937 if ( (d & SrcMask) == SrcImm )
938 ea.mem.off += (d & ByteOp) ? 1 :
939 ((op_bytes == 8) ? 4 : op_bytes);
940 else if ( (d & SrcMask) == SrcImmByte )
941 ea.mem.off += 1;
942 else if ( ((b == 0xf6) || (b == 0xf7)) &&
943 ((modrm_reg & 7) <= 1) )
944 /* Special case in Grp3: test has immediate operand. */
945 ea.mem.off += (d & ByteOp) ? 1
946 : ((op_bytes == 8) ? 4 : op_bytes);
947 break;
948 case 1:
949 ea.mem.off += insn_fetch_type(int8_t);
950 break;
951 case 2:
952 ea.mem.off += insn_fetch_type(int32_t);
953 break;
954 }
955 ea.mem.off = truncate_ea(ea.mem.off);
956 }
957 }
959 if ( override_seg != -1 )
960 ea.mem.seg = override_seg;
962 /* Special instructions do their own operand decoding. */
963 if ( (d & DstMask) == ImplicitOps )
964 goto special_insn;
966 /* Decode and fetch the source operand: register, memory or immediate. */
967 switch ( d & SrcMask )
968 {
969 case SrcNone:
970 break;
971 case SrcReg:
972 src.type = OP_REG;
973 if ( d & ByteOp )
974 {
975 src.reg = decode_register(modrm_reg, &_regs, (rex_prefix == 0));
976 src.val = *(uint8_t *)src.reg;
977 src.bytes = 1;
978 }
979 else
980 {
981 src.reg = decode_register(modrm_reg, &_regs, 0);
982 switch ( (src.bytes = op_bytes) )
983 {
984 case 2: src.val = *(uint16_t *)src.reg; break;
985 case 4: src.val = *(uint32_t *)src.reg; break;
986 case 8: src.val = *(uint64_t *)src.reg; break;
987 }
988 }
989 break;
990 case SrcMem16:
991 ea.bytes = 2;
992 goto srcmem_common;
993 case SrcMem:
994 ea.bytes = (d & ByteOp) ? 1 : op_bytes;
995 srcmem_common:
996 src = ea;
997 if ( src.type == OP_REG )
998 {
999 switch ( src.bytes )
1001 case 1: src.val = *(uint8_t *)src.reg; break;
1002 case 2: src.val = *(uint16_t *)src.reg; break;
1003 case 4: src.val = *(uint32_t *)src.reg; break;
1004 case 8: src.val = *(uint64_t *)src.reg; break;
1007 else if ( (rc = ops->read(src.mem.seg, src.mem.off,
1008 &src.val, src.bytes, ctxt)) )
1009 goto done;
1010 break;
1011 case SrcImm:
1012 src.type = OP_IMM;
1013 src.bytes = (d & ByteOp) ? 1 : op_bytes;
1014 if ( src.bytes == 8 ) src.bytes = 4;
1015 /* NB. Immediates are sign-extended as necessary. */
1016 switch ( src.bytes )
1018 case 1: src.val = insn_fetch_type(int8_t); break;
1019 case 2: src.val = insn_fetch_type(int16_t); break;
1020 case 4: src.val = insn_fetch_type(int32_t); break;
1022 break;
1023 case SrcImmByte:
1024 src.type = OP_IMM;
1025 src.bytes = 1;
1026 src.val = insn_fetch_type(int8_t);
1027 break;
1030 /* Decode and fetch the destination operand: register or memory. */
1031 switch ( d & DstMask )
1033 case DstReg:
1034 dst.type = OP_REG;
1035 if ( d & ByteOp )
1037 dst.reg = decode_register(modrm_reg, &_regs, (rex_prefix == 0));
1038 dst.val = *(uint8_t *)dst.reg;
1039 dst.bytes = 1;
1041 else
1043 dst.reg = decode_register(modrm_reg, &_regs, 0);
1044 switch ( (dst.bytes = op_bytes) )
1046 case 2: dst.val = *(uint16_t *)dst.reg; break;
1047 case 4: dst.val = *(uint32_t *)dst.reg; break;
1048 case 8: dst.val = *(uint64_t *)dst.reg; break;
1051 break;
1052 case DstBitBase:
1053 if ( ((d & SrcMask) == SrcImmByte) || (ea.type == OP_REG) )
1055 src.val &= (op_bytes << 3) - 1;
1057 else
1059 /*
1060 * EA += BitOffset DIV op_bytes*8
1061 * BitOffset = BitOffset MOD op_bytes*8
1062 * DIV truncates towards negative infinity.
1063 * MOD always produces a positive result.
1064 */
1065 if ( op_bytes == 2 )
1066 src.val = (int16_t)src.val;
1067 else if ( op_bytes == 4 )
1068 src.val = (int32_t)src.val;
1069 if ( (long)src.val < 0 )
1071 unsigned long byte_offset;
1072 byte_offset = op_bytes + (((-src.val-1) >> 3) & ~(op_bytes-1));
1073 ea.mem.off -= byte_offset;
1074 src.val = (byte_offset << 3) + src.val;
1076 else
1078 ea.mem.off += (src.val >> 3) & ~(op_bytes - 1);
1079 src.val &= (op_bytes << 3) - 1;
1082 /* Becomes a normal DstMem operation from here on. */
1083 d = (d & ~DstMask) | DstMem;
1084 case DstMem:
1085 ea.bytes = (d & ByteOp) ? 1 : op_bytes;
1086 dst = ea;
1087 if ( dst.type == OP_REG )
1089 switch ( dst.bytes )
1091 case 1: dst.val = *(uint8_t *)dst.reg; break;
1092 case 2: dst.val = *(uint16_t *)dst.reg; break;
1093 case 4: dst.val = *(uint32_t *)dst.reg; break;
1094 case 8: dst.val = *(uint64_t *)dst.reg; break;
1097 else if ( !(d & Mov) ) /* optimisation - avoid slow emulated read */
1099 if ( (rc = ops->read(dst.mem.seg, dst.mem.off,
1100 &dst.val, dst.bytes, ctxt)) )
1101 goto done;
1102 dst.orig_val = dst.val;
1104 break;
1107 /* LOCK prefix allowed only on instructions with memory destination. */
1108 generate_exception_if(lock_prefix && (dst.type != OP_MEM), EXC_GP);
1110 if ( twobyte )
1111 goto twobyte_insn;
1113 switch ( b )
1115 case 0x04 ... 0x05: /* add imm,%%eax */
1116 dst.reg = (unsigned long *)&_regs.eax;
1117 dst.val = _regs.eax;
1118 case 0x00 ... 0x03: add: /* add */
1119 emulate_2op_SrcV("add", src, dst, _regs.eflags);
1120 break;
1122 case 0x0c ... 0x0d: /* or imm,%%eax */
1123 dst.reg = (unsigned long *)&_regs.eax;
1124 dst.val = _regs.eax;
1125 case 0x08 ... 0x0b: or: /* or */
1126 emulate_2op_SrcV("or", src, dst, _regs.eflags);
1127 break;
1129 case 0x14 ... 0x15: /* adc imm,%%eax */
1130 dst.reg = (unsigned long *)&_regs.eax;
1131 dst.val = _regs.eax;
1132 case 0x10 ... 0x13: adc: /* adc */
1133 emulate_2op_SrcV("adc", src, dst, _regs.eflags);
1134 break;
1136 case 0x1c ... 0x1d: /* sbb imm,%%eax */
1137 dst.reg = (unsigned long *)&_regs.eax;
1138 dst.val = _regs.eax;
1139 case 0x18 ... 0x1b: sbb: /* sbb */
1140 emulate_2op_SrcV("sbb", src, dst, _regs.eflags);
1141 break;
1143 case 0x24 ... 0x25: /* and imm,%%eax */
1144 dst.reg = (unsigned long *)&_regs.eax;
1145 dst.val = _regs.eax;
1146 case 0x20 ... 0x23: and: /* and */
1147 emulate_2op_SrcV("and", src, dst, _regs.eflags);
1148 break;
1150 case 0x2c ... 0x2d: /* sub imm,%%eax */
1151 dst.reg = (unsigned long *)&_regs.eax;
1152 dst.val = _regs.eax;
1153 case 0x28 ... 0x2b: sub: /* sub */
1154 emulate_2op_SrcV("sub", src, dst, _regs.eflags);
1155 break;
1157 case 0x34 ... 0x35: /* xor imm,%%eax */
1158 dst.reg = (unsigned long *)&_regs.eax;
1159 dst.val = _regs.eax;
1160 case 0x30 ... 0x33: xor: /* xor */
1161 emulate_2op_SrcV("xor", src, dst, _regs.eflags);
1162 break;
1164 case 0x3c ... 0x3d: /* cmp imm,%%eax */
1165 dst.reg = (unsigned long *)&_regs.eax;
1166 dst.val = _regs.eax;
1167 case 0x38 ... 0x3b: cmp: /* cmp */
1168 emulate_2op_SrcV("cmp", src, dst, _regs.eflags);
1169 break;
1171 case 0x62: /* bound */ {
1172 unsigned long src_val2;
1173 int lb, ub, idx;
1174 generate_exception_if(mode_64bit() || (src.type != OP_MEM), EXC_UD);
1175 if ( (rc = ops->read(src.mem.seg, src.mem.off + op_bytes,
1176 &src_val2, op_bytes, ctxt)) )
1177 goto done;
1178 ub = (op_bytes == 2) ? (int16_t)src_val2 : (int32_t)src_val2;
1179 lb = (op_bytes == 2) ? (int16_t)src.val : (int32_t)src.val;
1180 idx = (op_bytes == 2) ? (int16_t)dst.val : (int32_t)dst.val;
1181 generate_exception_if((idx < lb) || (idx > ub), EXC_BR);
1182 dst.type = OP_NONE;
1183 break;
1186 case 0x63: /* movsxd (x86/64) / arpl (x86/32) */
1187 if ( mode_64bit() )
1189 /* movsxd */
1190 if ( src.type == OP_REG )
1191 src.val = *(int32_t *)src.reg;
1192 else if ( (rc = ops->read(src.mem.seg, src.mem.off,
1193 &src.val, 4, ctxt)) )
1194 goto done;
1195 dst.val = (int32_t)src.val;
1197 else
1199 /* arpl */
1200 uint16_t src_val = dst.val;
1201 dst = src;
1202 _regs.eflags &= ~EFLG_ZF;
1203 _regs.eflags |= ((src_val & 3) > (dst.val & 3)) ? EFLG_ZF : 0;
1204 if ( _regs.eflags & EFLG_ZF )
1205 dst.val = (dst.val & ~3) | (src_val & 3);
1206 else
1207 dst.type = OP_NONE;
1209 break;
1211 case 0x69: /* imul imm16/32 */
1212 case 0x6b: /* imul imm8 */ {
1213 unsigned long reg = *(long *)decode_register(modrm_reg, &_regs, 0);
1214 _regs.eflags &= ~(EFLG_OF|EFLG_CF);
1215 switch ( dst.bytes )
1217 case 2:
1218 dst.val = ((uint32_t)(int16_t)src.val *
1219 (uint32_t)(int16_t)reg);
1220 if ( (int16_t)dst.val != (uint32_t)dst.val )
1221 _regs.eflags |= EFLG_OF|EFLG_CF;
1222 break;
1223 #ifdef __x86_64__
1224 case 4:
1225 dst.val = ((uint64_t)(int32_t)src.val *
1226 (uint64_t)(int32_t)reg);
1227 if ( (int32_t)dst.val != dst.val )
1228 _regs.eflags |= EFLG_OF|EFLG_CF;
1229 break;
1230 #endif
1231 default: {
1232 unsigned long m[2] = { src.val, reg };
1233 if ( imul_dbl(m) )
1234 _regs.eflags |= EFLG_OF|EFLG_CF;
1235 dst.val = m[0];
1236 break;
1239 dst.type = OP_REG;
1240 dst.reg = decode_register(modrm_reg, &_regs, 0);
1241 break;
1244 case 0x82: /* Grp1 (x86/32 only) */
1245 generate_exception_if(mode_64bit(), EXC_UD);
1246 case 0x80: case 0x81: case 0x83: /* Grp1 */
1247 switch ( modrm_reg & 7 )
1249 case 0: goto add;
1250 case 1: goto or;
1251 case 2: goto adc;
1252 case 3: goto sbb;
1253 case 4: goto and;
1254 case 5: goto sub;
1255 case 6: goto xor;
1256 case 7: goto cmp;
1258 break;
1260 case 0xa8 ... 0xa9: /* test imm,%%eax */
1261 dst.reg = (unsigned long *)&_regs.eax;
1262 dst.val = _regs.eax;
1263 case 0x84 ... 0x85: test: /* test */
1264 emulate_2op_SrcV("test", src, dst, _regs.eflags);
1265 break;
1267 case 0x86 ... 0x87: xchg: /* xchg */
1268 /* Write back the register source. */
1269 switch ( dst.bytes )
1271 case 1: *(uint8_t *)src.reg = (uint8_t)dst.val; break;
1272 case 2: *(uint16_t *)src.reg = (uint16_t)dst.val; break;
1273 case 4: *src.reg = (uint32_t)dst.val; break; /* 64b reg: zero-extend */
1274 case 8: *src.reg = dst.val; break;
1276 /* Write back the memory destination with implicit LOCK prefix. */
1277 dst.val = src.val;
1278 lock_prefix = 1;
1279 break;
1281 case 0xc6 ... 0xc7: /* mov (sole member of Grp11) */
1282 generate_exception_if((modrm_reg & 7) != 0, EXC_UD);
1283 case 0x88 ... 0x8b: /* mov */
1284 dst.val = src.val;
1285 break;
1287 case 0x8d: /* lea */
1288 dst.val = ea.mem.off;
1289 break;
1291 case 0x8f: /* pop (sole member of Grp1a) */
1292 generate_exception_if((modrm_reg & 7) != 0, EXC_UD);
1293 /* 64-bit mode: POP defaults to a 64-bit operand. */
1294 if ( mode_64bit() && (dst.bytes == 4) )
1295 dst.bytes = 8;
1296 if ( (rc = ops->read(x86_seg_ss, sp_post_inc(dst.bytes),
1297 &dst.val, dst.bytes, ctxt)) != 0 )
1298 goto done;
1299 break;
1301 case 0xb0 ... 0xb7: /* mov imm8,r8 */
1302 dst.reg = decode_register(
1303 (b & 7) | ((rex_prefix & 1) << 3), &_regs, (rex_prefix == 0));
1304 dst.val = src.val;
1305 break;
1307 case 0xb8 ... 0xbf: /* mov imm{16,32,64},r{16,32,64} */
1308 if ( dst.bytes == 8 ) /* Fetch more bytes to obtain imm64 */
1309 src.val = ((uint32_t)src.val |
1310 ((uint64_t)insn_fetch_type(uint32_t) << 32));
1311 dst.reg = decode_register(
1312 (b & 7) | ((rex_prefix & 1) << 3), &_regs, 0);
1313 dst.val = src.val;
1314 break;
1316 case 0xc0 ... 0xc1: grp2: /* Grp2 */
1317 switch ( modrm_reg & 7 )
1319 case 0: /* rol */
1320 emulate_2op_SrcB("rol", src, dst, _regs.eflags);
1321 break;
1322 case 1: /* ror */
1323 emulate_2op_SrcB("ror", src, dst, _regs.eflags);
1324 break;
1325 case 2: /* rcl */
1326 emulate_2op_SrcB("rcl", src, dst, _regs.eflags);
1327 break;
1328 case 3: /* rcr */
1329 emulate_2op_SrcB("rcr", src, dst, _regs.eflags);
1330 break;
1331 case 4: /* sal/shl */
1332 case 6: /* sal/shl */
1333 emulate_2op_SrcB("sal", src, dst, _regs.eflags);
1334 break;
1335 case 5: /* shr */
1336 emulate_2op_SrcB("shr", src, dst, _regs.eflags);
1337 break;
1338 case 7: /* sar */
1339 emulate_2op_SrcB("sar", src, dst, _regs.eflags);
1340 break;
1342 break;
1344 case 0xd0 ... 0xd1: /* Grp2 */
1345 src.val = 1;
1346 goto grp2;
1348 case 0xd2 ... 0xd3: /* Grp2 */
1349 src.val = _regs.ecx;
1350 goto grp2;
1352 case 0xf6 ... 0xf7: /* Grp3 */
1353 switch ( modrm_reg & 7 )
1355 case 0 ... 1: /* test */
1356 /* Special case in Grp3: test has an immediate source operand. */
1357 src.type = OP_IMM;
1358 src.bytes = (d & ByteOp) ? 1 : op_bytes;
1359 if ( src.bytes == 8 ) src.bytes = 4;
1360 switch ( src.bytes )
1362 case 1: src.val = insn_fetch_type(int8_t); break;
1363 case 2: src.val = insn_fetch_type(int16_t); break;
1364 case 4: src.val = insn_fetch_type(int32_t); break;
1366 goto test;
1367 case 2: /* not */
1368 dst.val = ~dst.val;
1369 break;
1370 case 3: /* neg */
1371 emulate_1op("neg", dst, _regs.eflags);
1372 break;
1373 case 4: /* mul */
1374 src = dst;
1375 dst.type = OP_REG;
1376 dst.reg = (unsigned long *)&_regs.eax;
1377 dst.val = *dst.reg;
1378 _regs.eflags &= ~(EFLG_OF|EFLG_CF);
1379 switch ( src.bytes )
1381 case 1:
1382 dst.val *= src.val;
1383 if ( (uint8_t)dst.val != (uint16_t)dst.val )
1384 _regs.eflags |= EFLG_OF|EFLG_CF;
1385 break;
1386 case 2:
1387 dst.val *= src.val;
1388 if ( (uint16_t)dst.val != (uint32_t)dst.val )
1389 _regs.eflags |= EFLG_OF|EFLG_CF;
1390 *(uint16_t *)&_regs.edx = dst.val >> 16;
1391 break;
1392 #ifdef __x86_64__
1393 case 4:
1394 dst.val *= src.val;
1395 if ( (uint32_t)dst.val != dst.val )
1396 _regs.eflags |= EFLG_OF|EFLG_CF;
1397 _regs.edx = (uint32_t)(dst.val >> 32);
1398 break;
1399 #endif
1400 default: {
1401 unsigned long m[2] = { src.val, dst.val };
1402 if ( mul_dbl(m) )
1403 _regs.eflags |= EFLG_OF|EFLG_CF;
1404 _regs.edx = m[1];
1405 dst.val = m[0];
1406 break;
1409 break;
1410 case 5: /* imul */
1411 src = dst;
1412 dst.type = OP_REG;
1413 dst.reg = (unsigned long *)&_regs.eax;
1414 dst.val = *dst.reg;
1415 _regs.eflags &= ~(EFLG_OF|EFLG_CF);
1416 switch ( src.bytes )
1418 case 1:
1419 dst.val = ((uint16_t)(int8_t)src.val *
1420 (uint16_t)(int8_t)dst.val);
1421 if ( (int8_t)dst.val != (uint16_t)dst.val )
1422 _regs.eflags |= EFLG_OF|EFLG_CF;
1423 break;
1424 case 2:
1425 dst.val = ((uint32_t)(int16_t)src.val *
1426 (uint32_t)(int16_t)dst.val);
1427 if ( (int16_t)dst.val != (uint32_t)dst.val )
1428 _regs.eflags |= EFLG_OF|EFLG_CF;
1429 *(uint16_t *)&_regs.edx = dst.val >> 16;
1430 break;
1431 #ifdef __x86_64__
1432 case 4:
1433 dst.val = ((uint64_t)(int32_t)src.val *
1434 (uint64_t)(int32_t)dst.val);
1435 if ( (int32_t)dst.val != dst.val )
1436 _regs.eflags |= EFLG_OF|EFLG_CF;
1437 _regs.edx = (uint32_t)(dst.val >> 32);
1438 break;
1439 #endif
1440 default: {
1441 unsigned long m[2] = { src.val, dst.val };
1442 if ( imul_dbl(m) )
1443 _regs.eflags |= EFLG_OF|EFLG_CF;
1444 _regs.edx = m[1];
1445 dst.val = m[0];
1446 break;
1449 break;
1450 case 6: /* div */ {
1451 unsigned long u[2], v;
1452 src = dst;
1453 dst.type = OP_REG;
1454 dst.reg = (unsigned long *)&_regs.eax;
1455 switch ( src.bytes )
1457 case 1:
1458 u[0] = (uint16_t)_regs.eax;
1459 u[1] = 0;
1460 v = (uint8_t)src.val;
1461 generate_exception_if(
1462 div_dbl(u, v) || ((uint8_t)u[0] != (uint16_t)u[0]),
1463 EXC_DE);
1464 dst.val = (uint8_t)u[0];
1465 ((uint8_t *)&_regs.eax)[1] = u[1];
1466 break;
1467 case 2:
1468 u[0] = ((uint32_t)_regs.edx << 16) | (uint16_t)_regs.eax;
1469 u[1] = 0;
1470 v = (uint16_t)src.val;
1471 generate_exception_if(
1472 div_dbl(u, v) || ((uint16_t)u[0] != (uint32_t)u[0]),
1473 EXC_DE);
1474 dst.val = (uint16_t)u[0];
1475 *(uint16_t *)&_regs.edx = u[1];
1476 break;
1477 #ifdef __x86_64__
1478 case 4:
1479 u[0] = (_regs.edx << 32) | (uint32_t)_regs.eax;
1480 u[1] = 0;
1481 v = (uint32_t)src.val;
1482 generate_exception_if(
1483 div_dbl(u, v) || ((uint32_t)u[0] != u[0]),
1484 EXC_DE);
1485 dst.val = (uint32_t)u[0];
1486 _regs.edx = (uint32_t)u[1];
1487 break;
1488 #endif
1489 default:
1490 u[0] = _regs.eax;
1491 u[1] = _regs.edx;
1492 v = src.val;
1493 generate_exception_if(div_dbl(u, v), EXC_DE);
1494 dst.val = u[0];
1495 _regs.edx = u[1];
1496 break;
1498 break;
1500 case 7: /* idiv */ {
1501 unsigned long u[2], v;
1502 src = dst;
1503 dst.type = OP_REG;
1504 dst.reg = (unsigned long *)&_regs.eax;
1505 switch ( src.bytes )
1507 case 1:
1508 u[0] = (int16_t)_regs.eax;
1509 u[1] = ((long)u[0] < 0) ? ~0UL : 0UL;
1510 v = (int8_t)src.val;
1511 generate_exception_if(
1512 idiv_dbl(u, v) || ((int8_t)u[0] != (int16_t)u[0]),
1513 EXC_DE);
1514 dst.val = (int8_t)u[0];
1515 ((int8_t *)&_regs.eax)[1] = u[1];
1516 break;
1517 case 2:
1518 u[0] = (int32_t)((_regs.edx << 16) | (uint16_t)_regs.eax);
1519 u[1] = ((long)u[0] < 0) ? ~0UL : 0UL;
1520 v = (int16_t)src.val;
1521 generate_exception_if(
1522 idiv_dbl(u, v) || ((int16_t)u[0] != (int32_t)u[0]),
1523 EXC_DE);
1524 dst.val = (int16_t)u[0];
1525 *(int16_t *)&_regs.edx = u[1];
1526 break;
1527 #ifdef __x86_64__
1528 case 4:
1529 u[0] = (_regs.edx << 32) | (uint32_t)_regs.eax;
1530 u[1] = ((long)u[0] < 0) ? ~0UL : 0UL;
1531 v = (int32_t)src.val;
1532 generate_exception_if(
1533 idiv_dbl(u, v) || ((int32_t)u[0] != u[0]),
1534 EXC_DE);
1535 dst.val = (int32_t)u[0];
1536 _regs.edx = (uint32_t)u[1];
1537 break;
1538 #endif
1539 default:
1540 u[0] = _regs.eax;
1541 u[1] = _regs.edx;
1542 v = src.val;
1543 generate_exception_if(idiv_dbl(u, v), EXC_DE);
1544 dst.val = u[0];
1545 _regs.edx = u[1];
1546 break;
1548 break;
1550 default:
1551 goto cannot_emulate;
1553 break;
1555 case 0xfe: /* Grp4 */
1556 generate_exception_if((modrm_reg & 7) >= 2, EXC_UD);
1557 case 0xff: /* Grp5 */
1558 switch ( modrm_reg & 7 )
1560 case 0: /* inc */
1561 emulate_1op("inc", dst, _regs.eflags);
1562 break;
1563 case 1: /* dec */
1564 emulate_1op("dec", dst, _regs.eflags);
1565 break;
1566 case 2: /* call (near) */
1567 case 4: /* jmp (near) */
1568 if ( ((op_bytes = dst.bytes) != 8) && mode_64bit() )
1570 dst.bytes = op_bytes = 8;
1571 if ( dst.type == OP_REG )
1572 dst.val = *dst.reg;
1573 else if ( (rc = ops->read(dst.mem.seg, dst.mem.off,
1574 &dst.val, 8, ctxt)) != 0 )
1575 goto done;
1577 src.val = _regs.eip;
1578 _regs.eip = dst.val;
1579 if ( (modrm_reg & 7) == 2 )
1580 goto push; /* call */
1581 break;
1582 case 6: /* push */
1583 /* 64-bit mode: PUSH defaults to a 64-bit operand. */
1584 if ( mode_64bit() && (dst.bytes == 4) )
1586 dst.bytes = 8;
1587 if ( dst.type == OP_REG )
1588 dst.val = *dst.reg;
1589 else if ( (rc = ops->read(dst.mem.seg, dst.mem.off,
1590 &dst.val, 8, ctxt)) != 0 )
1591 goto done;
1593 if ( (rc = ops->write(x86_seg_ss, sp_pre_dec(dst.bytes),
1594 dst.val, dst.bytes, ctxt)) != 0 )
1595 goto done;
1596 dst.type = OP_NONE;
1597 break;
1598 case 7:
1599 generate_exception_if(1, EXC_UD);
1600 default:
1601 goto cannot_emulate;
1603 break;
1606 writeback:
1607 switch ( dst.type )
1609 case OP_REG:
1610 /* The 4-byte case *is* correct: in 64-bit mode we zero-extend. */
1611 switch ( dst.bytes )
1613 case 1: *(uint8_t *)dst.reg = (uint8_t)dst.val; break;
1614 case 2: *(uint16_t *)dst.reg = (uint16_t)dst.val; break;
1615 case 4: *dst.reg = (uint32_t)dst.val; break; /* 64b: zero-ext */
1616 case 8: *dst.reg = dst.val; break;
1618 break;
1619 case OP_MEM:
1620 if ( !(d & Mov) && (dst.orig_val == dst.val) )
1621 /* nothing to do */;
1622 else if ( lock_prefix )
1623 rc = ops->cmpxchg(
1624 dst.mem.seg, dst.mem.off, dst.orig_val,
1625 dst.val, dst.bytes, ctxt);
1626 else
1627 rc = ops->write(
1628 dst.mem.seg, dst.mem.off, dst.val, dst.bytes, ctxt);
1629 if ( rc != 0 )
1630 goto done;
1631 default:
1632 break;
1635 /* Commit shadow register state. */
1636 _regs.eflags &= ~EFLG_RF;
1637 *ctxt->regs = _regs;
1638 /* FIXME generate_exception_if(_regs.eflags & EFLG_TF, EXC_DB); */
1640 done:
1641 return rc;
1643 special_insn:
1644 dst.type = OP_NONE;
1646 /*
1647 * The only implicit-operands instructions allowed a LOCK prefix are
1648 * CMPXCHG{8,16}B, MOV CRn, MOV DRn.
1649 */
1650 generate_exception_if(lock_prefix &&
1651 ((b < 0x20) || (b > 0x23)) && /* MOV CRn/DRn */
1652 (b != 0xc7), /* CMPXCHG{8,16}B */
1653 EXC_GP);
1655 if ( twobyte )
1656 goto twobyte_special_insn;
1658 switch ( b )
1660 case 0x27: /* daa */ {
1661 uint8_t al = _regs.eax;
1662 unsigned long eflags = _regs.eflags;
1663 generate_exception_if(mode_64bit(), EXC_UD);
1664 _regs.eflags &= ~(EFLG_CF|EFLG_AF);
1665 if ( ((al & 0x0f) > 9) || (eflags & EFLG_AF) )
1667 *(uint8_t *)&_regs.eax += 6;
1668 _regs.eflags |= EFLG_AF;
1670 if ( (al > 0x99) || (eflags & EFLG_CF) )
1672 *(uint8_t *)&_regs.eax += 0x60;
1673 _regs.eflags |= EFLG_CF;
1675 _regs.eflags &= ~(EFLG_SF|EFLG_ZF|EFLG_PF);
1676 _regs.eflags |= ((uint8_t)_regs.eax == 0) ? EFLG_ZF : 0;
1677 _regs.eflags |= (( int8_t)_regs.eax < 0) ? EFLG_SF : 0;
1678 _regs.eflags |= even_parity(_regs.eax) ? EFLG_PF : 0;
1679 break;
1682 case 0x2f: /* das */ {
1683 uint8_t al = _regs.eax;
1684 unsigned long eflags = _regs.eflags;
1685 generate_exception_if(mode_64bit(), EXC_UD);
1686 _regs.eflags &= ~(EFLG_CF|EFLG_AF);
1687 if ( ((al & 0x0f) > 9) || (eflags & EFLG_AF) )
1689 _regs.eflags |= EFLG_AF;
1690 if ( (al < 6) || (eflags & EFLG_CF) )
1691 _regs.eflags |= EFLG_CF;
1692 *(uint8_t *)&_regs.eax -= 6;
1694 if ( (al > 0x99) || (eflags & EFLG_CF) )
1696 *(uint8_t *)&_regs.eax -= 0x60;
1697 _regs.eflags |= EFLG_CF;
1699 _regs.eflags &= ~(EFLG_SF|EFLG_ZF|EFLG_PF);
1700 _regs.eflags |= ((uint8_t)_regs.eax == 0) ? EFLG_ZF : 0;
1701 _regs.eflags |= (( int8_t)_regs.eax < 0) ? EFLG_SF : 0;
1702 _regs.eflags |= even_parity(_regs.eax) ? EFLG_PF : 0;
1703 break;
1706 case 0x37: /* aaa */
1707 case 0x3f: /* aas */
1708 generate_exception_if(mode_64bit(), EXC_UD);
1709 _regs.eflags &= ~EFLG_CF;
1710 if ( ((uint8_t)_regs.eax > 9) || (_regs.eflags & EFLG_AF) )
1712 ((uint8_t *)&_regs.eax)[0] += (b == 0x37) ? 6 : -6;
1713 ((uint8_t *)&_regs.eax)[1] += (b == 0x37) ? 1 : -1;
1714 _regs.eflags |= EFLG_CF | EFLG_AF;
1716 ((uint8_t *)&_regs.eax)[0] &= 0x0f;
1717 break;
1719 case 0x40 ... 0x4f: /* inc/dec reg */
1720 dst.type = OP_REG;
1721 dst.reg = decode_register(b & 7, &_regs, 0);
1722 dst.bytes = op_bytes;
1723 dst.val = *dst.reg;
1724 if ( b & 8 )
1725 emulate_1op("dec", dst, _regs.eflags);
1726 else
1727 emulate_1op("inc", dst, _regs.eflags);
1728 break;
1730 case 0x50 ... 0x57: /* push reg */
1731 src.val = *(unsigned long *)decode_register(
1732 (b & 7) | ((rex_prefix & 1) << 3), &_regs, 0);
1733 goto push;
1735 case 0x58 ... 0x5f: /* pop reg */
1736 dst.type = OP_REG;
1737 dst.reg = decode_register(
1738 (b & 7) | ((rex_prefix & 1) << 3), &_regs, 0);
1739 dst.bytes = op_bytes;
1740 if ( mode_64bit() && (dst.bytes == 4) )
1741 dst.bytes = 8;
1742 if ( (rc = ops->read(x86_seg_ss, sp_post_inc(dst.bytes),
1743 &dst.val, dst.bytes, ctxt)) != 0 )
1744 goto done;
1745 break;
1747 case 0x60: /* pusha */ {
1748 int i;
1749 unsigned long regs[] = {
1750 _regs.eax, _regs.ecx, _regs.edx, _regs.ebx,
1751 _regs.esp, _regs.ebp, _regs.esi, _regs.edi };
1752 generate_exception_if(mode_64bit(), EXC_UD);
1753 for ( i = 0; i < 8; i++ )
1754 if ( (rc = ops->write(x86_seg_ss, sp_pre_dec(op_bytes),
1755 regs[i], op_bytes, ctxt)) != 0 )
1756 goto done;
1757 break;
1760 case 0x61: /* popa */ {
1761 int i;
1762 unsigned long dummy_esp, *regs[] = {
1763 (unsigned long *)&_regs.edi, (unsigned long *)&_regs.esi,
1764 (unsigned long *)&_regs.ebp, (unsigned long *)&dummy_esp,
1765 (unsigned long *)&_regs.ebx, (unsigned long *)&_regs.edx,
1766 (unsigned long *)&_regs.ecx, (unsigned long *)&_regs.eax };
1767 generate_exception_if(mode_64bit(), EXC_UD);
1768 for ( i = 0; i < 8; i++ )
1769 if ( (rc = ops->read(x86_seg_ss, sp_post_inc(op_bytes),
1770 regs[i], op_bytes, ctxt)) != 0 )
1771 goto done;
1772 break;
1775 case 0x68: /* push imm{16,32,64} */
1776 src.val = ((op_bytes == 2)
1777 ? (int32_t)insn_fetch_type(int16_t)
1778 : insn_fetch_type(int32_t));
1779 goto push;
1781 case 0x6a: /* push imm8 */
1782 src.val = insn_fetch_type(int8_t);
1783 push:
1784 d |= Mov; /* force writeback */
1785 dst.type = OP_MEM;
1786 dst.bytes = op_bytes;
1787 if ( mode_64bit() && (dst.bytes == 4) )
1788 dst.bytes = 8;
1789 dst.val = src.val;
1790 dst.mem.seg = x86_seg_ss;
1791 dst.mem.off = sp_pre_dec(dst.bytes);
1792 break;
1794 case 0x6c ... 0x6d: /* ins %dx,%es:%edi */
1795 handle_rep_prefix();
1796 generate_exception_if(!mode_iopl(), EXC_GP);
1797 dst.type = OP_MEM;
1798 dst.bytes = !(b & 1) ? 1 : (op_bytes == 8) ? 4 : op_bytes;
1799 dst.mem.seg = x86_seg_es;
1800 dst.mem.off = truncate_ea(_regs.edi);
1801 fail_if(ops->read_io == NULL);
1802 if ( (rc = ops->read_io((uint16_t)_regs.edx, dst.bytes,
1803 &dst.val, ctxt)) != 0 )
1804 goto done;
1805 register_address_increment(
1806 _regs.edi, (_regs.eflags & EFLG_DF) ? -dst.bytes : dst.bytes);
1807 break;
1809 case 0x6e ... 0x6f: /* outs %esi,%dx */
1810 handle_rep_prefix();
1811 generate_exception_if(!mode_iopl(), EXC_GP);
1812 dst.bytes = !(b & 1) ? 1 : (op_bytes == 8) ? 4 : op_bytes;
1813 if ( (rc = ops->read(ea.mem.seg, truncate_ea(_regs.esi),
1814 &dst.val, dst.bytes, ctxt)) != 0 )
1815 goto done;
1816 fail_if(ops->write_io == NULL);
1817 if ( (rc = ops->write_io((uint16_t)_regs.edx, dst.bytes,
1818 dst.val, ctxt)) != 0 )
1819 goto done;
1820 register_address_increment(
1821 _regs.esi, (_regs.eflags & EFLG_DF) ? -dst.bytes : dst.bytes);
1822 break;
1824 case 0x70 ... 0x7f: /* jcc (short) */ {
1825 int rel = insn_fetch_type(int8_t);
1826 if ( test_cc(b, _regs.eflags) )
1827 jmp_rel(rel);
1828 break;
1831 case 0x90: /* nop / xchg %%r8,%%rax */
1832 if ( !(rex_prefix & 1) )
1833 break; /* nop */
1835 case 0x91 ... 0x97: /* xchg reg,%%rax */
1836 src.type = dst.type = OP_REG;
1837 src.bytes = dst.bytes = op_bytes;
1838 src.reg = (unsigned long *)&_regs.eax;
1839 src.val = *src.reg;
1840 dst.reg = decode_register(
1841 (b & 7) | ((rex_prefix & 1) << 3), &_regs, 0);
1842 dst.val = *dst.reg;
1843 goto xchg;
1845 case 0x98: /* cbw/cwde/cdqe */
1846 switch ( op_bytes )
1848 case 2: *(int16_t *)&_regs.eax = (int8_t)_regs.eax; break; /* cbw */
1849 case 4: _regs.eax = (uint32_t)(int16_t)_regs.eax; break; /* cwde */
1850 case 8: _regs.eax = (int32_t)_regs.eax; break; /* cdqe */
1852 break;
1854 case 0x99: /* cwd/cdq/cqo */
1855 switch ( op_bytes )
1857 case 2:
1858 *(int16_t *)&_regs.edx = ((int16_t)_regs.eax < 0) ? -1 : 0;
1859 break;
1860 case 4:
1861 _regs.edx = (uint32_t)(((int32_t)_regs.eax < 0) ? -1 : 0);
1862 break;
1863 case 8:
1864 _regs.edx = (_regs.eax < 0) ? -1 : 0;
1865 break;
1867 break;
1869 case 0x9e: /* sahf */
1870 *(uint8_t *)_regs.eflags = (((uint8_t *)&_regs.eax)[1] & 0xd7) | 0x02;
1871 break;
1873 case 0x9f: /* lahf */
1874 ((uint8_t *)&_regs.eax)[1] = (_regs.eflags & 0xd7) | 0x02;
1875 break;
1877 case 0xa0 ... 0xa1: /* mov mem.offs,{%al,%ax,%eax,%rax} */
1878 /* Source EA is not encoded via ModRM. */
1879 dst.type = OP_REG;
1880 dst.reg = (unsigned long *)&_regs.eax;
1881 dst.bytes = (d & ByteOp) ? 1 : op_bytes;
1882 if ( (rc = ops->read(ea.mem.seg, insn_fetch_bytes(ad_bytes),
1883 &dst.val, dst.bytes, ctxt)) != 0 )
1884 goto done;
1885 break;
1887 case 0xa2 ... 0xa3: /* mov {%al,%ax,%eax,%rax},mem.offs */
1888 /* Destination EA is not encoded via ModRM. */
1889 dst.type = OP_MEM;
1890 dst.mem.seg = ea.mem.seg;
1891 dst.mem.off = insn_fetch_bytes(ad_bytes);
1892 dst.bytes = (d & ByteOp) ? 1 : op_bytes;
1893 dst.val = (unsigned long)_regs.eax;
1894 break;
1896 case 0xa4 ... 0xa5: /* movs */
1897 handle_rep_prefix();
1898 dst.type = OP_MEM;
1899 dst.bytes = (d & ByteOp) ? 1 : op_bytes;
1900 dst.mem.seg = x86_seg_es;
1901 dst.mem.off = truncate_ea(_regs.edi);
1902 if ( (rc = ops->read(ea.mem.seg, truncate_ea(_regs.esi),
1903 &dst.val, dst.bytes, ctxt)) != 0 )
1904 goto done;
1905 register_address_increment(
1906 _regs.esi, (_regs.eflags & EFLG_DF) ? -dst.bytes : dst.bytes);
1907 register_address_increment(
1908 _regs.edi, (_regs.eflags & EFLG_DF) ? -dst.bytes : dst.bytes);
1909 break;
1911 case 0xaa ... 0xab: /* stos */
1912 handle_rep_prefix();
1913 dst.type = OP_MEM;
1914 dst.bytes = (d & ByteOp) ? 1 : op_bytes;
1915 dst.mem.seg = x86_seg_es;
1916 dst.mem.off = truncate_ea(_regs.edi);
1917 dst.val = _regs.eax;
1918 register_address_increment(
1919 _regs.edi, (_regs.eflags & EFLG_DF) ? -dst.bytes : dst.bytes);
1920 break;
1922 case 0xac ... 0xad: /* lods */
1923 handle_rep_prefix();
1924 dst.type = OP_REG;
1925 dst.bytes = (d & ByteOp) ? 1 : op_bytes;
1926 dst.reg = (unsigned long *)&_regs.eax;
1927 if ( (rc = ops->read(ea.mem.seg, truncate_ea(_regs.esi),
1928 &dst.val, dst.bytes, ctxt)) != 0 )
1929 goto done;
1930 register_address_increment(
1931 _regs.esi, (_regs.eflags & EFLG_DF) ? -dst.bytes : dst.bytes);
1932 break;
1934 case 0xc2: /* ret imm16 (near) */
1935 case 0xc3: /* ret (near) */ {
1936 int offset = (b == 0xc2) ? insn_fetch_type(uint16_t) : 0;
1937 op_bytes = mode_64bit() ? 8 : op_bytes;
1938 if ( (rc = ops->read(x86_seg_ss, sp_post_inc(op_bytes + offset),
1939 &dst.val, op_bytes, ctxt)) != 0 )
1940 goto done;
1941 _regs.eip = dst.val;
1942 break;
1945 case 0xd4: /* aam */ {
1946 unsigned int base = insn_fetch_type(uint8_t);
1947 uint8_t al = _regs.eax;
1948 generate_exception_if(mode_64bit(), EXC_UD);
1949 generate_exception_if(base == 0, EXC_DE);
1950 *(uint16_t *)&_regs.eax = ((al / base) << 8) | (al % base);
1951 _regs.eflags &= ~(EFLG_SF|EFLG_ZF|EFLG_PF);
1952 _regs.eflags |= ((uint8_t)_regs.eax == 0) ? EFLG_ZF : 0;
1953 _regs.eflags |= (( int8_t)_regs.eax < 0) ? EFLG_SF : 0;
1954 _regs.eflags |= even_parity(_regs.eax) ? EFLG_PF : 0;
1955 break;
1958 case 0xd5: /* aad */ {
1959 unsigned int base = insn_fetch_type(uint8_t);
1960 uint16_t ax = _regs.eax;
1961 generate_exception_if(mode_64bit(), EXC_UD);
1962 *(uint16_t *)&_regs.eax = (uint8_t)(ax + ((ax >> 8) * base));
1963 _regs.eflags &= ~(EFLG_SF|EFLG_ZF|EFLG_PF);
1964 _regs.eflags |= ((uint8_t)_regs.eax == 0) ? EFLG_ZF : 0;
1965 _regs.eflags |= (( int8_t)_regs.eax < 0) ? EFLG_SF : 0;
1966 _regs.eflags |= even_parity(_regs.eax) ? EFLG_PF : 0;
1967 break;
1970 case 0xd6: /* salc */
1971 generate_exception_if(mode_64bit(), EXC_UD);
1972 *(uint8_t *)&_regs.eax = (_regs.eflags & EFLG_CF) ? 0xff : 0x00;
1973 break;
1975 case 0xd7: /* xlat */ {
1976 unsigned long al = (uint8_t)_regs.eax;
1977 if ( (rc = ops->read(ea.mem.seg, truncate_ea(_regs.ebx + al),
1978 &al, 1, ctxt)) != 0 )
1979 goto done;
1980 *(uint8_t *)&_regs.eax = al;
1981 break;
1984 case 0xe0 ... 0xe2: /* loop{,z,nz} */ {
1985 int rel = insn_fetch_type(int8_t);
1986 int do_jmp = !(_regs.eflags & EFLG_ZF); /* loopnz */
1987 if ( b == 0xe1 )
1988 do_jmp = !do_jmp; /* loopz */
1989 else if ( b == 0xe2 )
1990 do_jmp = 1; /* loop */
1991 switch ( ad_bytes )
1993 case 2:
1994 do_jmp &= --(*(uint16_t *)&_regs.ecx) != 0;
1995 break;
1996 case 4:
1997 do_jmp &= --(*(uint32_t *)&_regs.ecx) != 0;
1998 _regs.ecx = (uint32_t)_regs.ecx; /* zero extend in x86/64 mode */
1999 break;
2000 default: /* case 8: */
2001 do_jmp &= --_regs.ecx != 0;
2002 break;
2004 if ( do_jmp )
2005 jmp_rel(rel);
2006 break;
2009 case 0xe3: /* jcxz/jecxz (short) */ {
2010 int rel = insn_fetch_type(int8_t);
2011 if ( (ad_bytes == 2) ? !(uint16_t)_regs.ecx :
2012 (ad_bytes == 4) ? !(uint32_t)_regs.ecx : !_regs.ecx )
2013 jmp_rel(rel);
2014 break;
2017 case 0xe4: /* in imm8,%al */
2018 case 0xe5: /* in imm8,%eax */
2019 case 0xe6: /* out %al,imm8 */
2020 case 0xe7: /* out %eax,imm8 */
2021 case 0xec: /* in %dx,%al */
2022 case 0xed: /* in %dx,%eax */
2023 case 0xee: /* out %al,%dx */
2024 case 0xef: /* out %eax,%dx */ {
2025 unsigned int port = ((b < 0xe8)
2026 ? insn_fetch_type(uint8_t)
2027 : (uint16_t)_regs.edx);
2028 generate_exception_if(!mode_iopl(), EXC_GP);
2029 op_bytes = !(b & 1) ? 1 : (op_bytes == 8) ? 4 : op_bytes;
2030 if ( b & 2 )
2032 /* out */
2033 fail_if(ops->write_io == NULL);
2034 rc = ops->write_io(port, op_bytes, _regs.eax, ctxt);
2037 else
2039 /* in */
2040 dst.type = OP_REG;
2041 dst.bytes = op_bytes;
2042 dst.reg = (unsigned long *)&_regs.eax;
2043 fail_if(ops->read_io == NULL);
2044 rc = ops->read_io(port, dst.bytes, &dst.val, ctxt);
2046 if ( rc != 0 )
2047 goto done;
2048 break;
2051 case 0xe8: /* call (near) */ {
2052 int rel = (((op_bytes == 2) && !mode_64bit())
2053 ? (int32_t)insn_fetch_type(int16_t)
2054 : insn_fetch_type(int32_t));
2055 op_bytes = mode_64bit() ? 8 : op_bytes;
2056 src.val = _regs.eip;
2057 jmp_rel(rel);
2058 goto push;
2061 case 0xe9: /* jmp (near) */ {
2062 int rel = (((op_bytes == 2) && !mode_64bit())
2063 ? (int32_t)insn_fetch_type(int16_t)
2064 : insn_fetch_type(int32_t));
2065 jmp_rel(rel);
2066 break;
2069 case 0xeb: /* jmp (short) */
2070 jmp_rel(insn_fetch_type(int8_t));
2071 break;
2073 case 0xf5: /* cmc */
2074 _regs.eflags ^= EFLG_CF;
2075 break;
2077 case 0xf8: /* clc */
2078 _regs.eflags &= ~EFLG_CF;
2079 break;
2081 case 0xf9: /* stc */
2082 _regs.eflags |= EFLG_CF;
2083 break;
2085 case 0xfa: /* cli */
2086 generate_exception_if(!mode_iopl(), EXC_GP);
2087 fail_if(ops->write_rflags == NULL);
2088 if ( (rc = ops->write_rflags(_regs.eflags & ~EFLG_IF, ctxt)) != 0 )
2089 goto done;
2090 break;
2092 case 0xfb: /* sti */
2093 generate_exception_if(!mode_iopl(), EXC_GP);
2094 fail_if(ops->write_rflags == NULL);
2095 if ( (rc = ops->write_rflags(_regs.eflags | EFLG_IF, ctxt)) != 0 )
2096 goto done;
2097 break;
2099 case 0xfc: /* cld */
2100 _regs.eflags &= ~EFLG_DF;
2101 break;
2103 case 0xfd: /* std */
2104 _regs.eflags |= EFLG_DF;
2105 break;
2107 goto writeback;
2109 twobyte_insn:
2110 switch ( b )
2112 case 0x40 ... 0x4f: /* cmovcc */
     /* Condition is encoded in the low opcode nibble; when it fails,
      * squash the write-back so the destination stays untouched. */
2113 dst.val = src.val;
2114 if ( !test_cc(b, _regs.eflags) )
2115 dst.type = OP_NONE;
2116 break;
2118 case 0x90 ... 0x9f: /* setcc */
     /* Destination byte becomes 1 if the condition holds, else 0. */
2119 dst.val = test_cc(b, _regs.eflags);
2120 break;
2122 case 0xb0 ... 0xb1: /* cmpxchg */
2123 /* Save real source value, then compare EAX against destination. */
2124 src.orig_val = src.val;
2125 src.val = _regs.eax;
2126 emulate_2op_SrcV("cmp", src, dst, _regs.eflags);
2127 /* Always write back. The question is: where to? */
2128 d |= Mov;
     /* ZF from the CMP above means accumulator == destination. */
2129 if ( _regs.eflags & EFLG_ZF )
2131 /* Success: write back to memory. */
2132 dst.val = src.orig_val;
2134 else
2136 /* Failure: write the value we saw to EAX. */
2137 dst.type = OP_REG;
2138 dst.reg = (unsigned long *)&_regs.eax;
2140 break;
2142 case 0xa3: bt: /* bt */
     /* Bit-test family: flag effects come from the asm helper macro. */
2143 emulate_2op_SrcV_nobyte("bt", src, dst, _regs.eflags);
2144 break;
2146 case 0xb3: btr: /* btr */
2147 emulate_2op_SrcV_nobyte("btr", src, dst, _regs.eflags);
2148 break;
2150 case 0xab: bts: /* bts */
2151 emulate_2op_SrcV_nobyte("bts", src, dst, _regs.eflags);
2152 break;
2154 case 0xaf: /* imul */
     /* Widening signed multiply. Only CF/OF are modelled here:
      * cleared first, then set iff the signed product does not fit
      * the destination operand width. */
2155 _regs.eflags &= ~(EFLG_OF|EFLG_CF);
2156 switch ( dst.bytes )
2158 case 2:
2159 dst.val = ((uint32_t)(int16_t)src.val *
2160 (uint32_t)(int16_t)dst.val);
2161 if ( (int16_t)dst.val != (uint32_t)dst.val )
2162 _regs.eflags |= EFLG_OF|EFLG_CF;
2163 break;
2164 #ifdef __x86_64__
2165 case 4:
2166 dst.val = ((uint64_t)(int32_t)src.val *
2167 (uint64_t)(int32_t)dst.val);
2168 if ( (int32_t)dst.val != dst.val )
2169 _regs.eflags |= EFLG_OF|EFLG_CF;
2170 break;
2171 #endif
     /* Full-width case (4 bytes on i386, 8 on x86_64): imul_dbl()
      * returns nonzero when the double-width product overflows. */
2172 default: {
2173 unsigned long m[2] = { src.val, dst.val };
2174 if ( imul_dbl(m) )
2175 _regs.eflags |= EFLG_OF|EFLG_CF;
2176 dst.val = m[0];
2177 break;
2180 break;
2182 case 0xb6: /* movzx rm8,r{16,32,64} */
2183 /* Recompute DstReg as we may have decoded AH/BH/CH/DH. */
2184 dst.reg = decode_register(modrm_reg, &_regs, 0);
2185 dst.bytes = op_bytes;
2186 dst.val = (uint8_t)src.val;
2187 break;
2189 case 0xbc: /* bsf */ {
     /* Execute bsf natively; only ZF (source was zero) is reflected
      * back into the guest's flags. */
2190 int zf;
2191 asm ( "bsf %2,%0; setz %b1"
2192 : "=r" (dst.val), "=q" (zf)
2193 : "r" (src.val), "1" (0) );
2194 _regs.eflags &= ~EFLG_ZF;
2195 _regs.eflags |= zf ? EFLG_ZF : 0;
2196 break;
2199 case 0xbd: /* bsr */ {
2200 int zf;
2201 asm ( "bsr %2,%0; setz %b1"
2202 : "=r" (dst.val), "=q" (zf)
2203 : "r" (src.val), "1" (0) );
2204 _regs.eflags &= ~EFLG_ZF;
2205 _regs.eflags |= zf ? EFLG_ZF : 0;
2206 break;
2209 case 0xb7: /* movzx rm16,r{16,32,64} */
2210 dst.val = (uint16_t)src.val;
2211 break;
2213 case 0xbb: btc: /* btc */
2214 emulate_2op_SrcV_nobyte("btc", src, dst, _regs.eflags);
2215 break;
2217 case 0xba: /* Grp8 */
     /* 0F BA: /4../7 select bt/bts/btr/btc (immediate bit offset);
      * /0../3 are undefined and raise #UD. */
2218 switch ( modrm_reg & 7 )
2220 case 4: goto bt;
2221 case 5: goto bts;
2222 case 6: goto btr;
2223 case 7: goto btc;
2224 default: generate_exception_if(1, EXC_UD);
2226 break;
2228 case 0xbe: /* movsx rm8,r{16,32,64} */
2229 /* Recompute DstReg as we may have decoded AH/BH/CH/DH. */
2230 dst.reg = decode_register(modrm_reg, &_regs, 0);
2231 dst.bytes = op_bytes;
2232 dst.val = (int8_t)src.val;
2233 break;
2235 case 0xbf: /* movsx rm16,r{16,32,64} */
2236 dst.val = (int16_t)src.val;
2237 break;
2239 case 0xc0 ... 0xc1: /* xadd */
2240 /* Write back the register source. */
2241 switch ( dst.bytes )
2243 case 1: *(uint8_t *)src.reg = (uint8_t)dst.val; break;
2244 case 2: *(uint16_t *)src.reg = (uint16_t)dst.val; break;
2245 case 4: *src.reg = (uint32_t)dst.val; break; /* 64b reg: zero-extend */
2246 case 8: *src.reg = dst.val; break;
     /* Then jump to the common ADD handler (label outside this
      * excerpt) to compute dst += src and the resulting flags. */
2248 goto add;
2250 goto writeback;
2252 twobyte_special_insn:
2253 switch ( b )
2255 case 0x06: /* clts */
     /* CLTS: read CR0 and write it back with TS (bit 3, mask 8)
      * cleared, via the VMM's CR access hooks. Ring 0 only. */
2256 generate_exception_if(!mode_ring0(), EXC_GP);
2257 fail_if((ops->read_cr == NULL) || (ops->write_cr == NULL));
2258 if ( (rc = ops->read_cr(0, &dst.val, ctxt)) ||
2259 (rc = ops->write_cr(0, dst.val&~8, ctxt)) )
2260 goto done;
2261 break;
2263 case 0x08: /* invd */
     /* Both cache-invalidate opcodes funnel into the single wbinvd
      * hook; privileged, so ring 0 is required first. */
2264 case 0x09: /* wbinvd */
2265 generate_exception_if(!mode_ring0(), EXC_GP);
2266 fail_if(ops->wbinvd == NULL);
2267 if ( (rc = ops->wbinvd(ctxt)) != 0 )
2268 goto done;
2269 break;
2271 case 0x0d: /* GrpP (prefetch) */
     /* Prefetch hints and multi-byte NOPs: nothing to emulate. */
2272 case 0x18: /* Grp16 (prefetch/nop) */
2273 case 0x19 ... 0x1f: /* nop (amd-defined) */
2274 break;
2276 case 0x20: /* mov cr,reg */
2277 case 0x21: /* mov dr,reg */
2278 case 0x22: /* mov reg,cr */
2279 case 0x23: /* mov reg,dr */
2280 generate_exception_if(!mode_ring0(), EXC_GP);
2281 modrm_rm |= (rex_prefix & 1) << 3;
2282 modrm_reg |= lock_prefix << 3;
2283 if ( b & 2 )
2285 /* Write to CR/DR. */
2286 src.val = *(unsigned long *)decode_register(modrm_rm, &_regs, 0);
2287 if ( !mode_64bit() )
2288 src.val = (uint32_t)src.val;
2289 rc = ((b & 1)
2290 ? (ops->write_dr
2291 ? ops->write_dr(modrm_reg, src.val, ctxt)
2292 : X86EMUL_UNHANDLEABLE)
2293 : (ops->write_cr
2294 ? ops->write_dr(modrm_reg, src.val, ctxt)
2295 : X86EMUL_UNHANDLEABLE));
2297 else
2299 /* Read from CR/DR. */
2300 dst.type = OP_REG;
2301 dst.bytes = mode_64bit() ? 8 : 4;
2302 dst.reg = decode_register(modrm_rm, &_regs, 0);
2303 rc = ((b & 1)
2304 ? (ops->read_dr
2305 ? ops->read_dr(modrm_reg, &dst.val, ctxt)
2306 : X86EMUL_UNHANDLEABLE)
2307 : (ops->read_cr
2308 ? ops->read_dr(modrm_reg, &dst.val, ctxt)
2309 : X86EMUL_UNHANDLEABLE));
2311 if ( rc != 0 )
2312 goto done;
2313 break;
2315 case 0x30: /* wrmsr */ {
     /* WRMSR: MSR index in ECX, 64-bit value in EDX:EAX. Ring 0 only. */
2316 uint64_t val = ((uint64_t)_regs.edx << 32) | (uint32_t)_regs.eax;
2317 generate_exception_if(!mode_ring0(), EXC_GP);
2318 fail_if(ops->write_msr == NULL);
2319 if ( (rc = ops->write_msr((uint32_t)_regs.ecx, val, ctxt)) != 0 )
2320 goto done;
2321 break;
2324 case 0x32: /* rdmsr */ {
     /* RDMSR: result is split back into EDX (high) and EAX (low). */
2325 uint64_t val;
2326 generate_exception_if(!mode_ring0(), EXC_GP);
2327 fail_if(ops->read_msr == NULL);
2328 if ( (rc = ops->read_msr((uint32_t)_regs.ecx, &val, ctxt)) != 0 )
2329 goto done;
2330 _regs.edx = (uint32_t)(val >> 32);
2331 _regs.eax = (uint32_t)(val >> 0);
2332 break;
2335 case 0x80 ... 0x8f: /* jcc (near) */ {
     /* Displacement is 16-bit only with a 16-bit operand size outside
      * long mode; otherwise it is always 32 bits. */
2336 int rel = (((op_bytes == 2) && !mode_64bit())
2337 ? (int32_t)insn_fetch_type(int16_t)
2338 : insn_fetch_type(int32_t));
2339 if ( test_cc(b, _regs.eflags) )
2340 jmp_rel(rel);
2341 break;
2344 case 0xc7: /* Grp9 (cmpxchg8b) */
     /* Compare EDX:EAX with the 8-byte memory operand. Mismatch:
      * load the memory value into EDX:EAX and clear ZF. Match: store
      * ECX:EBX to memory and set ZF. Only ModRM /1 is defined. */
2345 #if defined(__i386__)
2347 unsigned long old_lo, old_hi;
2348 generate_exception_if((modrm_reg & 7) != 1, EXC_UD);
2349 if ( (rc = ops->read(ea.mem.seg, ea.mem.off+0, &old_lo, 4, ctxt)) ||
2350 (rc = ops->read(ea.mem.seg, ea.mem.off+4, &old_hi, 4, ctxt)) )
2351 goto done;
2352 if ( (old_lo != _regs.eax) || (old_hi != _regs.edx) )
2354 _regs.eax = old_lo;
2355 _regs.edx = old_hi;
2356 _regs.eflags &= ~EFLG_ZF;
2358 else if ( ops->cmpxchg8b == NULL )
2360 rc = X86EMUL_UNHANDLEABLE;
2361 goto done;
2363 else
     /* Atomic exchange is delegated to the dedicated cmpxchg8b hook,
      * since i386 cannot cmpxchg 8 bytes via the generic hook. */
2365 if ( (rc = ops->cmpxchg8b(ea.mem.seg, ea.mem.off, old_lo, old_hi,
2366 _regs.ebx, _regs.ecx, ctxt)) != 0 )
2367 goto done;
2368 _regs.eflags |= EFLG_ZF;
2370 break;
2372 #elif defined(__x86_64__)
     /* On x86_64 a single 8-byte read + generic cmpxchg suffices. */
2374 unsigned long old, new;
2375 generate_exception_if((modrm_reg & 7) != 1, EXC_UD);
2376 if ( (rc = ops->read(ea.mem.seg, ea.mem.off, &old, 8, ctxt)) != 0 )
2377 goto done;
2378 if ( ((uint32_t)(old>>0) != (uint32_t)_regs.eax) ||
2379 ((uint32_t)(old>>32) != (uint32_t)_regs.edx) )
2381 _regs.eax = (uint32_t)(old>>0);
2382 _regs.edx = (uint32_t)(old>>32);
2383 _regs.eflags &= ~EFLG_ZF;
2385 else
     /* NOTE(review): the unwidened ecx<<32 relies on _regs.ecx being
      * a 64-bit type in this build (x86_64 regs) — confirm. */
2387 new = (_regs.ecx<<32)|(uint32_t)_regs.ebx;
2388 if ( (rc = ops->cmpxchg(ea.mem.seg, ea.mem.off, old,
2389 new, 8, ctxt)) != 0 )
2390 goto done;
2391 _regs.eflags |= EFLG_ZF;
2393 break;
2395 #endif
2397 case 0xc8 ... 0xcf: /* bswap */
     /* Register is selected by the low 3 opcode bits, extended to
      * r8-r15 by REX.B; the byte swap itself runs natively. */
2398 dst.type = OP_REG;
2399 dst.reg = decode_register(
2400 (b & 7) | ((rex_prefix & 1) << 3), &_regs, 0);
2401 switch ( dst.bytes = op_bytes )
2403 default: /* case 2: */
2404 /* Undefined behaviour. Writes zero on all tested CPUs. */
2405 dst.val = 0;
2406 break;
2407 case 4:
2408 #ifdef __x86_64__
     /* %k0 forces the 32-bit form when compiled for 64-bit. */
2409 asm ( "bswap %k0" : "=r" (dst.val) : "0" (*dst.reg) );
2410 break;
2411 case 8:
2412 #endif
2413 asm ( "bswap %0" : "=r" (dst.val) : "0" (*dst.reg) );
2414 break;
2416 break;
2418 goto writeback;
2420 cannot_emulate:
     /* Disabled (#if 0) debug aid: re-fetch and dump the raw bytes of
      * the instruction we failed to emulate, from the original EIP up
      * to however far decode advanced. */
2421 #if 0
2422 gdprintk(XENLOG_DEBUG, "Instr:");
2423 for ( ea.mem.off = ctxt->regs->eip; ea.mem.off < _regs.eip; ea.mem.off++ )
2425 unsigned long x;
2426 ops->insn_fetch(x86_seg_cs, ea.mem.off, &x, 1, ctxt);
2427 printk(" %02x", (uint8_t)x);
2429 printk("\n");
2430 #endif
2431 return X86EMUL_UNHANDLEABLE;