ia64/xen-unstable

view xen/arch/x86/x86_emulate.c @ 16704:56b42d68518e

x86_emulate: Fix near CALL/JMP <reg,mem>. Broken by c/s 16491.
Thanks to AMD for narrowing this one down.
Signed-off-by: Keir Fraser <keir.fraser@citrix.com>
author Keir Fraser <keir.fraser@citrix.com>
date Thu Jan 10 22:53:43 2008 +0000 (2008-01-10)
parents c86025f569cb
children a878752a83f9
line source
1 /******************************************************************************
2 * x86_emulate.c
3 *
4 * Generic x86 (32-bit and 64-bit) instruction decoder and emulator.
5 *
6 * Copyright (c) 2005-2007 Keir Fraser
7 * Copyright (c) 2005-2007 XenSource Inc.
8 *
9 * This program is free software; you can redistribute it and/or modify
10 * it under the terms of the GNU General Public License as published by
11 * the Free Software Foundation; either version 2 of the License, or
12 * (at your option) any later version.
13 *
14 * This program is distributed in the hope that it will be useful,
15 * but WITHOUT ANY WARRANTY; without even the implied warranty of
16 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
17 * GNU General Public License for more details.
18 *
19 * You should have received a copy of the GNU General Public License
20 * along with this program; if not, write to the Free Software
21 * Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
22 */
24 #ifndef __XEN__
25 #include <stddef.h>
26 #include <stdint.h>
27 #include <public/xen.h>
28 #else
29 #include <xen/config.h>
30 #include <xen/types.h>
31 #include <xen/lib.h>
32 #include <asm/regs.h>
33 #undef cmpxchg
34 #endif
35 #include <asm-x86/x86_emulate.h>
37 /* Operand sizes: 8-bit operands or specified/overridden size. */
38 #define ByteOp (1<<0) /* 8-bit operands. */
39 /* Destination operand type. */
40 #define DstBitBase (0<<1) /* Memory operand, bit string. */
41 #define ImplicitOps (1<<1) /* Implicit in opcode. No generic decode. */
42 #define DstReg (2<<1) /* Register operand. */
43 #define DstMem (3<<1) /* Memory operand. */
44 #define DstMask (3<<1)
45 /* Source operand type. */
46 #define SrcNone (0<<3) /* No source operand. */
47 #define SrcImplicit (0<<3) /* Source operand is implicit in the opcode. */
48 #define SrcReg (1<<3) /* Register operand. */
49 #define SrcMem (2<<3) /* Memory operand. */
50 #define SrcMem16 (3<<3) /* Memory operand (16-bit). */
51 #define SrcImm (4<<3) /* Immediate operand. */
52 #define SrcImmByte (5<<3) /* 8-bit sign-extended immediate operand. */
53 #define SrcMask (7<<3)
54 /* Generic ModRM decode. */
55 #define ModRM (1<<6)
56 /* Destination is only written; never read. */
57 #define Mov (1<<7)
59 static uint8_t opcode_table[256] = {
60 /* 0x00 - 0x07 */
61 ByteOp|DstMem|SrcReg|ModRM, DstMem|SrcReg|ModRM,
62 ByteOp|DstReg|SrcMem|ModRM, DstReg|SrcMem|ModRM,
63 ByteOp|DstReg|SrcImm, DstReg|SrcImm, ImplicitOps, ImplicitOps,
64 /* 0x08 - 0x0F */
65 ByteOp|DstMem|SrcReg|ModRM, DstMem|SrcReg|ModRM,
66 ByteOp|DstReg|SrcMem|ModRM, DstReg|SrcMem|ModRM,
67 ByteOp|DstReg|SrcImm, DstReg|SrcImm, ImplicitOps, 0,
68 /* 0x10 - 0x17 */
69 ByteOp|DstMem|SrcReg|ModRM, DstMem|SrcReg|ModRM,
70 ByteOp|DstReg|SrcMem|ModRM, DstReg|SrcMem|ModRM,
71 ByteOp|DstReg|SrcImm, DstReg|SrcImm, ImplicitOps, ImplicitOps,
72 /* 0x18 - 0x1F */
73 ByteOp|DstMem|SrcReg|ModRM, DstMem|SrcReg|ModRM,
74 ByteOp|DstReg|SrcMem|ModRM, DstReg|SrcMem|ModRM,
75 ByteOp|DstReg|SrcImm, DstReg|SrcImm, ImplicitOps, ImplicitOps,
76 /* 0x20 - 0x27 */
77 ByteOp|DstMem|SrcReg|ModRM, DstMem|SrcReg|ModRM,
78 ByteOp|DstReg|SrcMem|ModRM, DstReg|SrcMem|ModRM,
79 ByteOp|DstReg|SrcImm, DstReg|SrcImm, 0, ImplicitOps,
80 /* 0x28 - 0x2F */
81 ByteOp|DstMem|SrcReg|ModRM, DstMem|SrcReg|ModRM,
82 ByteOp|DstReg|SrcMem|ModRM, DstReg|SrcMem|ModRM,
83 ByteOp|DstReg|SrcImm, DstReg|SrcImm, 0, ImplicitOps,
84 /* 0x30 - 0x37 */
85 ByteOp|DstMem|SrcReg|ModRM, DstMem|SrcReg|ModRM,
86 ByteOp|DstReg|SrcMem|ModRM, DstReg|SrcMem|ModRM,
87 ByteOp|DstReg|SrcImm, DstReg|SrcImm, 0, ImplicitOps,
88 /* 0x38 - 0x3F */
89 ByteOp|DstMem|SrcReg|ModRM, DstMem|SrcReg|ModRM,
90 ByteOp|DstReg|SrcMem|ModRM, DstReg|SrcMem|ModRM,
91 ByteOp|DstReg|SrcImm, DstReg|SrcImm, 0, ImplicitOps,
92 /* 0x40 - 0x4F */
93 ImplicitOps, ImplicitOps, ImplicitOps, ImplicitOps,
94 ImplicitOps, ImplicitOps, ImplicitOps, ImplicitOps,
95 ImplicitOps, ImplicitOps, ImplicitOps, ImplicitOps,
96 ImplicitOps, ImplicitOps, ImplicitOps, ImplicitOps,
97 /* 0x50 - 0x5F */
98 ImplicitOps|Mov, ImplicitOps|Mov, ImplicitOps|Mov, ImplicitOps|Mov,
99 ImplicitOps|Mov, ImplicitOps|Mov, ImplicitOps|Mov, ImplicitOps|Mov,
100 ImplicitOps|Mov, ImplicitOps|Mov, ImplicitOps|Mov, ImplicitOps|Mov,
101 ImplicitOps|Mov, ImplicitOps|Mov, ImplicitOps|Mov, ImplicitOps|Mov,
102 /* 0x60 - 0x67 */
103 ImplicitOps, ImplicitOps, DstReg|SrcMem|ModRM, DstReg|SrcMem16|ModRM|Mov,
104 0, 0, 0, 0,
105 /* 0x68 - 0x6F */
106 ImplicitOps|Mov, DstReg|SrcImm|ModRM|Mov,
107 ImplicitOps|Mov, DstReg|SrcImmByte|ModRM|Mov,
108 ImplicitOps, ImplicitOps, ImplicitOps, ImplicitOps,
109 /* 0x70 - 0x77 */
110 ImplicitOps, ImplicitOps, ImplicitOps, ImplicitOps,
111 ImplicitOps, ImplicitOps, ImplicitOps, ImplicitOps,
112 /* 0x78 - 0x7F */
113 ImplicitOps, ImplicitOps, ImplicitOps, ImplicitOps,
114 ImplicitOps, ImplicitOps, ImplicitOps, ImplicitOps,
115 /* 0x80 - 0x87 */
116 ByteOp|DstMem|SrcImm|ModRM, DstMem|SrcImm|ModRM,
117 ByteOp|DstMem|SrcImm|ModRM, DstMem|SrcImmByte|ModRM,
118 ByteOp|DstMem|SrcReg|ModRM, DstMem|SrcReg|ModRM,
119 ByteOp|DstMem|SrcReg|ModRM, DstMem|SrcReg|ModRM,
120 /* 0x88 - 0x8F */
121 ByteOp|DstMem|SrcReg|ModRM|Mov, DstMem|SrcReg|ModRM|Mov,
122 ByteOp|DstReg|SrcMem|ModRM|Mov, DstReg|SrcMem|ModRM|Mov,
123 DstMem|SrcReg|ModRM|Mov, DstReg|SrcNone|ModRM,
124 DstReg|SrcMem|ModRM|Mov, DstMem|SrcNone|ModRM|Mov,
125 /* 0x90 - 0x97 */
126 ImplicitOps, ImplicitOps, ImplicitOps, ImplicitOps,
127 ImplicitOps, ImplicitOps, ImplicitOps, ImplicitOps,
128 /* 0x98 - 0x9F */
129 ImplicitOps, ImplicitOps, ImplicitOps, 0,
130 ImplicitOps, ImplicitOps, ImplicitOps, ImplicitOps,
131 /* 0xA0 - 0xA7 */
132 ByteOp|ImplicitOps|Mov, ImplicitOps|Mov,
133 ByteOp|ImplicitOps|Mov, ImplicitOps|Mov,
134 ByteOp|ImplicitOps|Mov, ImplicitOps|Mov,
135 ByteOp|ImplicitOps, ImplicitOps,
136 /* 0xA8 - 0xAF */
137 ByteOp|DstReg|SrcImm, DstReg|SrcImm,
138 ByteOp|ImplicitOps|Mov, ImplicitOps|Mov,
139 ByteOp|ImplicitOps|Mov, ImplicitOps|Mov,
140 ByteOp|ImplicitOps, ImplicitOps,
141 /* 0xB0 - 0xB7 */
142 ByteOp|DstReg|SrcImm|Mov, ByteOp|DstReg|SrcImm|Mov,
143 ByteOp|DstReg|SrcImm|Mov, ByteOp|DstReg|SrcImm|Mov,
144 ByteOp|DstReg|SrcImm|Mov, ByteOp|DstReg|SrcImm|Mov,
145 ByteOp|DstReg|SrcImm|Mov, ByteOp|DstReg|SrcImm|Mov,
146 /* 0xB8 - 0xBF */
147 DstReg|SrcImm|Mov, DstReg|SrcImm|Mov, DstReg|SrcImm|Mov, DstReg|SrcImm|Mov,
148 DstReg|SrcImm|Mov, DstReg|SrcImm|Mov, DstReg|SrcImm|Mov, DstReg|SrcImm|Mov,
149 /* 0xC0 - 0xC7 */
150 ByteOp|DstMem|SrcImm|ModRM, DstMem|SrcImmByte|ModRM,
151 ImplicitOps, ImplicitOps,
152 DstReg|SrcMem|ModRM|Mov, DstReg|SrcMem|ModRM|Mov,
153 ByteOp|DstMem|SrcImm|ModRM|Mov, DstMem|SrcImm|ModRM|Mov,
154 /* 0xC8 - 0xCF */
155 ImplicitOps, ImplicitOps, ImplicitOps, ImplicitOps,
156 ImplicitOps, ImplicitOps, ImplicitOps, ImplicitOps,
157 /* 0xD0 - 0xD7 */
158 ByteOp|DstMem|SrcImplicit|ModRM, DstMem|SrcImplicit|ModRM,
159 ByteOp|DstMem|SrcImplicit|ModRM, DstMem|SrcImplicit|ModRM,
160 ImplicitOps, ImplicitOps, ImplicitOps, ImplicitOps,
161 /* 0xD8 - 0xDF */
162 0, 0, 0, 0, 0, 0, 0, 0,
163 /* 0xE0 - 0xE7 */
164 ImplicitOps, ImplicitOps, ImplicitOps, ImplicitOps,
165 ImplicitOps, ImplicitOps, ImplicitOps, ImplicitOps,
166 /* 0xE8 - 0xEF */
167 ImplicitOps, ImplicitOps, ImplicitOps, ImplicitOps,
168 ImplicitOps, ImplicitOps, ImplicitOps, ImplicitOps,
169 /* 0xF0 - 0xF7 */
170 0, ImplicitOps, 0, 0,
171 ImplicitOps, ImplicitOps,
172 ByteOp|DstMem|SrcNone|ModRM, DstMem|SrcNone|ModRM,
173 /* 0xF8 - 0xFF */
174 ImplicitOps, ImplicitOps, ImplicitOps, ImplicitOps,
175 ImplicitOps, ImplicitOps, ByteOp|DstMem|SrcNone|ModRM, DstMem|SrcNone|ModRM
176 };
178 static uint8_t twobyte_table[256] = {
179 /* 0x00 - 0x07 */
180 0, ImplicitOps|ModRM, 0, 0, 0, ImplicitOps, 0, 0,
181 /* 0x08 - 0x0F */
182 ImplicitOps, ImplicitOps, 0, 0, 0, ImplicitOps|ModRM, 0, 0,
183 /* 0x10 - 0x17 */
184 0, 0, 0, 0, 0, 0, 0, 0,
185 /* 0x18 - 0x1F */
186 ImplicitOps|ModRM, ImplicitOps|ModRM, ImplicitOps|ModRM, ImplicitOps|ModRM,
187 ImplicitOps|ModRM, ImplicitOps|ModRM, ImplicitOps|ModRM, ImplicitOps|ModRM,
188 /* 0x20 - 0x27 */
189 ImplicitOps|ModRM, ImplicitOps|ModRM, ImplicitOps|ModRM, ImplicitOps|ModRM,
190 0, 0, 0, 0,
191 /* 0x28 - 0x2F */
192 0, 0, 0, 0, 0, 0, 0, 0,
193 /* 0x30 - 0x37 */
194 ImplicitOps, ImplicitOps, ImplicitOps, 0, 0, 0, 0, 0,
195 /* 0x38 - 0x3F */
196 0, 0, 0, 0, 0, 0, 0, 0,
197 /* 0x40 - 0x47 */
198 DstReg|SrcMem|ModRM|Mov, DstReg|SrcMem|ModRM|Mov,
199 DstReg|SrcMem|ModRM|Mov, DstReg|SrcMem|ModRM|Mov,
200 DstReg|SrcMem|ModRM|Mov, DstReg|SrcMem|ModRM|Mov,
201 DstReg|SrcMem|ModRM|Mov, DstReg|SrcMem|ModRM|Mov,
202 /* 0x48 - 0x4F */
203 DstReg|SrcMem|ModRM|Mov, DstReg|SrcMem|ModRM|Mov,
204 DstReg|SrcMem|ModRM|Mov, DstReg|SrcMem|ModRM|Mov,
205 DstReg|SrcMem|ModRM|Mov, DstReg|SrcMem|ModRM|Mov,
206 DstReg|SrcMem|ModRM|Mov, DstReg|SrcMem|ModRM|Mov,
207 /* 0x50 - 0x5F */
208 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
209 /* 0x60 - 0x6F */
210 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
211 /* 0x70 - 0x7F */
212 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
213 /* 0x80 - 0x87 */
214 ImplicitOps, ImplicitOps, ImplicitOps, ImplicitOps,
215 ImplicitOps, ImplicitOps, ImplicitOps, ImplicitOps,
216 /* 0x88 - 0x8F */
217 ImplicitOps, ImplicitOps, ImplicitOps, ImplicitOps,
218 ImplicitOps, ImplicitOps, ImplicitOps, ImplicitOps,
219 /* 0x90 - 0x97 */
220 ByteOp|DstMem|SrcNone|ModRM|Mov, ByteOp|DstMem|SrcNone|ModRM|Mov,
221 ByteOp|DstMem|SrcNone|ModRM|Mov, ByteOp|DstMem|SrcNone|ModRM|Mov,
222 ByteOp|DstMem|SrcNone|ModRM|Mov, ByteOp|DstMem|SrcNone|ModRM|Mov,
223 ByteOp|DstMem|SrcNone|ModRM|Mov, ByteOp|DstMem|SrcNone|ModRM|Mov,
224 /* 0x98 - 0x9F */
225 ByteOp|DstMem|SrcNone|ModRM|Mov, ByteOp|DstMem|SrcNone|ModRM|Mov,
226 ByteOp|DstMem|SrcNone|ModRM|Mov, ByteOp|DstMem|SrcNone|ModRM|Mov,
227 ByteOp|DstMem|SrcNone|ModRM|Mov, ByteOp|DstMem|SrcNone|ModRM|Mov,
228 ByteOp|DstMem|SrcNone|ModRM|Mov, ByteOp|DstMem|SrcNone|ModRM|Mov,
229 /* 0xA0 - 0xA7 */
230 ImplicitOps, ImplicitOps, ImplicitOps, DstBitBase|SrcReg|ModRM,
231 DstMem|SrcReg|ModRM, DstMem|SrcReg|ModRM, 0, 0,
232 /* 0xA8 - 0xAF */
233 ImplicitOps, ImplicitOps, 0, DstBitBase|SrcReg|ModRM,
234 DstMem|SrcReg|ModRM, DstMem|SrcReg|ModRM, 0, DstReg|SrcMem|ModRM,
235 /* 0xB0 - 0xB7 */
236 ByteOp|DstMem|SrcReg|ModRM, DstMem|SrcReg|ModRM,
237 DstReg|SrcMem|ModRM|Mov, DstBitBase|SrcReg|ModRM,
238 DstReg|SrcMem|ModRM|Mov, DstReg|SrcMem|ModRM|Mov,
239 ByteOp|DstReg|SrcMem|ModRM|Mov, DstReg|SrcMem16|ModRM|Mov,
240 /* 0xB8 - 0xBF */
241 0, 0, DstBitBase|SrcImmByte|ModRM, DstBitBase|SrcReg|ModRM,
242 DstReg|SrcMem|ModRM, DstReg|SrcMem|ModRM,
243 ByteOp|DstReg|SrcMem|ModRM|Mov, DstReg|SrcMem16|ModRM|Mov,
244 /* 0xC0 - 0xC7 */
245 ByteOp|DstMem|SrcReg|ModRM, DstMem|SrcReg|ModRM, 0, 0,
246 0, 0, 0, ImplicitOps|ModRM,
247 /* 0xC8 - 0xCF */
248 ImplicitOps, ImplicitOps, ImplicitOps, ImplicitOps,
249 ImplicitOps, ImplicitOps, ImplicitOps, ImplicitOps,
250 /* 0xD0 - 0xDF */
251 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
252 /* 0xE0 - 0xEF */
253 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
254 /* 0xF0 - 0xFF */
255 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0
256 };
/* Type, address-of, and value of an instruction's operand. */
struct operand {
    /* Where the operand lives: register, memory, immediate, or absent. */
    enum { OP_REG, OP_MEM, OP_IMM, OP_NONE } type;
    /* Operand width in bytes (1, 2, 4 or 8). */
    unsigned int bytes;
    /* Current value, and — for read-modify-write operands — the value
     * read before modification. */
    unsigned long val, orig_val;
    union {
        /* OP_REG: Pointer to register field. */
        unsigned long *reg;
        /* OP_MEM: Segment and offset. */
        struct {
            enum x86_segment seg;
            unsigned long off;
        } mem;
    };
};
274 /* MSRs. */
275 #define MSR_TSC 0x10
277 /* Control register flags. */
278 #define CR0_PE (1<<0)
279 #define CR4_TSD (1<<2)
281 /* EFLAGS bit definitions. */
282 #define EFLG_VIP (1<<20)
283 #define EFLG_VIF (1<<19)
284 #define EFLG_AC (1<<18)
285 #define EFLG_VM (1<<17)
286 #define EFLG_RF (1<<16)
287 #define EFLG_NT (1<<14)
288 #define EFLG_IOPL (3<<12)
289 #define EFLG_OF (1<<11)
290 #define EFLG_DF (1<<10)
291 #define EFLG_IF (1<<9)
292 #define EFLG_TF (1<<8)
293 #define EFLG_SF (1<<7)
294 #define EFLG_ZF (1<<6)
295 #define EFLG_AF (1<<4)
296 #define EFLG_PF (1<<2)
297 #define EFLG_CF (1<<0)
299 /* Exception definitions. */
300 #define EXC_DE 0
301 #define EXC_DB 1
302 #define EXC_BP 3
303 #define EXC_OF 4
304 #define EXC_BR 5
305 #define EXC_UD 6
306 #define EXC_GP 13
308 /*
309 * Instruction emulation:
310 * Most instructions are emulated directly via a fragment of inline assembly
311 * code. This allows us to save/restore EFLAGS and thus very easily pick up
312 * any modified flags.
313 */
315 #if defined(__x86_64__)
316 #define _LO32 "k" /* force 32-bit operand */
317 #define _STK "%%rsp" /* stack pointer */
318 #define _BYTES_PER_LONG "8"
319 #elif defined(__i386__)
320 #define _LO32 "" /* force 32-bit operand */
321 #define _STK "%%esp" /* stack pointer */
322 #define _BYTES_PER_LONG "4"
323 #endif
325 /*
326 * These EFLAGS bits are restored from saved value during emulation, and
327 * any changes are written back to the saved value after emulation.
328 */
329 #define EFLAGS_MASK (EFLG_OF|EFLG_SF|EFLG_ZF|EFLG_AF|EFLG_PF|EFLG_CF)
331 /* Before executing instruction: restore necessary bits in EFLAGS. */
332 #define _PRE_EFLAGS(_sav, _msk, _tmp) \
333 /* EFLAGS = (_sav & _msk) | (EFLAGS & ~_msk); _sav &= ~_msk; */ \
334 "movl %"_sav",%"_LO32 _tmp"; " \
335 "push %"_tmp"; " \
336 "push %"_tmp"; " \
337 "movl %"_msk",%"_LO32 _tmp"; " \
338 "andl %"_LO32 _tmp",("_STK"); " \
339 "pushf; " \
340 "notl %"_LO32 _tmp"; " \
341 "andl %"_LO32 _tmp",("_STK"); " \
342 "andl %"_LO32 _tmp",2*"_BYTES_PER_LONG"("_STK"); " \
343 "pop %"_tmp"; " \
344 "orl %"_LO32 _tmp",("_STK"); " \
345 "popf; " \
346 "pop %"_sav"; "
348 /* After executing instruction: write-back necessary bits in EFLAGS. */
349 #define _POST_EFLAGS(_sav, _msk, _tmp) \
350 /* _sav |= EFLAGS & _msk; */ \
351 "pushf; " \
352 "pop %"_tmp"; " \
353 "andl %"_msk",%"_LO32 _tmp"; " \
354 "orl %"_LO32 _tmp",%"_sav"; "
356 /* Raw emulation: instruction has two explicit operands. */
357 #define __emulate_2op_nobyte(_op,_src,_dst,_eflags,_wx,_wy,_lx,_ly,_qx,_qy)\
358 do{ unsigned long _tmp; \
359 switch ( (_dst).bytes ) \
360 { \
361 case 2: \
362 asm volatile ( \
363 _PRE_EFLAGS("0","4","2") \
364 _op"w %"_wx"3,%1; " \
365 _POST_EFLAGS("0","4","2") \
366 : "=m" (_eflags), "=m" ((_dst).val), "=&r" (_tmp) \
367 : _wy ((_src).val), "i" (EFLAGS_MASK), \
368 "m" (_eflags), "m" ((_dst).val) ); \
369 break; \
370 case 4: \
371 asm volatile ( \
372 _PRE_EFLAGS("0","4","2") \
373 _op"l %"_lx"3,%1; " \
374 _POST_EFLAGS("0","4","2") \
375 : "=m" (_eflags), "=m" ((_dst).val), "=&r" (_tmp) \
376 : _ly ((_src).val), "i" (EFLAGS_MASK), \
377 "m" (_eflags), "m" ((_dst).val) ); \
378 break; \
379 case 8: \
380 __emulate_2op_8byte(_op, _src, _dst, _eflags, _qx, _qy); \
381 break; \
382 } \
383 } while (0)
384 #define __emulate_2op(_op,_src,_dst,_eflags,_bx,_by,_wx,_wy,_lx,_ly,_qx,_qy)\
385 do{ unsigned long _tmp; \
386 switch ( (_dst).bytes ) \
387 { \
388 case 1: \
389 asm volatile ( \
390 _PRE_EFLAGS("0","4","2") \
391 _op"b %"_bx"3,%1; " \
392 _POST_EFLAGS("0","4","2") \
393 : "=m" (_eflags), "=m" ((_dst).val), "=&r" (_tmp) \
394 : _by ((_src).val), "i" (EFLAGS_MASK), \
395 "m" (_eflags), "m" ((_dst).val) ); \
396 break; \
397 default: \
398 __emulate_2op_nobyte(_op,_src,_dst,_eflags,_wx,_wy,_lx,_ly,_qx,_qy);\
399 break; \
400 } \
401 } while (0)
402 /* Source operand is byte-sized and may be restricted to just %cl. */
403 #define emulate_2op_SrcB(_op, _src, _dst, _eflags) \
404 __emulate_2op(_op, _src, _dst, _eflags, \
405 "b", "c", "b", "c", "b", "c", "b", "c")
406 /* Source operand is byte, word, long or quad sized. */
407 #define emulate_2op_SrcV(_op, _src, _dst, _eflags) \
408 __emulate_2op(_op, _src, _dst, _eflags, \
409 "b", "q", "w", "r", _LO32, "r", "", "r")
410 /* Source operand is word, long or quad sized. */
411 #define emulate_2op_SrcV_nobyte(_op, _src, _dst, _eflags) \
412 __emulate_2op_nobyte(_op, _src, _dst, _eflags, \
413 "w", "r", _LO32, "r", "", "r")
415 /* Instruction has only one explicit operand (no source operand). */
416 #define emulate_1op(_op,_dst,_eflags) \
417 do{ unsigned long _tmp; \
418 switch ( (_dst).bytes ) \
419 { \
420 case 1: \
421 asm volatile ( \
422 _PRE_EFLAGS("0","3","2") \
423 _op"b %1; " \
424 _POST_EFLAGS("0","3","2") \
425 : "=m" (_eflags), "=m" ((_dst).val), "=&r" (_tmp) \
426 : "i" (EFLAGS_MASK), "m" (_eflags), "m" ((_dst).val) ); \
427 break; \
428 case 2: \
429 asm volatile ( \
430 _PRE_EFLAGS("0","3","2") \
431 _op"w %1; " \
432 _POST_EFLAGS("0","3","2") \
433 : "=m" (_eflags), "=m" ((_dst).val), "=&r" (_tmp) \
434 : "i" (EFLAGS_MASK), "m" (_eflags), "m" ((_dst).val) ); \
435 break; \
436 case 4: \
437 asm volatile ( \
438 _PRE_EFLAGS("0","3","2") \
439 _op"l %1; " \
440 _POST_EFLAGS("0","3","2") \
441 : "=m" (_eflags), "=m" ((_dst).val), "=&r" (_tmp) \
442 : "i" (EFLAGS_MASK), "m" (_eflags), "m" ((_dst).val) ); \
443 break; \
444 case 8: \
445 __emulate_1op_8byte(_op, _dst, _eflags); \
446 break; \
447 } \
448 } while (0)
450 /* Emulate an instruction with quadword operands (x86/64 only). */
451 #if defined(__x86_64__)
452 #define __emulate_2op_8byte(_op, _src, _dst, _eflags, _qx, _qy) \
453 do{ asm volatile ( \
454 _PRE_EFLAGS("0","4","2") \
455 _op"q %"_qx"3,%1; " \
456 _POST_EFLAGS("0","4","2") \
457 : "=m" (_eflags), "=m" ((_dst).val), "=&r" (_tmp) \
458 : _qy ((_src).val), "i" (EFLAGS_MASK), \
459 "m" (_eflags), "m" ((_dst).val) ); \
460 } while (0)
461 #define __emulate_1op_8byte(_op, _dst, _eflags) \
462 do{ asm volatile ( \
463 _PRE_EFLAGS("0","3","2") \
464 _op"q %1; " \
465 _POST_EFLAGS("0","3","2") \
466 : "=m" (_eflags), "=m" ((_dst).val), "=&r" (_tmp) \
467 : "i" (EFLAGS_MASK), "m" (_eflags), "m" ((_dst).val) ); \
468 } while (0)
469 #elif defined(__i386__)
470 #define __emulate_2op_8byte(_op, _src, _dst, _eflags, _qx, _qy)
471 #define __emulate_1op_8byte(_op, _dst, _eflags)
472 #endif /* __i386__ */
474 /* Fetch next part of the instruction being emulated. */
475 #define insn_fetch_bytes(_size) \
476 ({ unsigned long _x, _eip = _regs.eip; \
477 if ( !mode_64bit() ) _eip = (uint32_t)_eip; /* ignore upper dword */ \
478 _regs.eip += (_size); /* real hardware doesn't truncate */ \
479 generate_exception_if((uint8_t)(_regs.eip - ctxt->regs->eip) > 15, \
480 EXC_GP); \
481 rc = ops->insn_fetch(x86_seg_cs, _eip, &_x, (_size), ctxt); \
482 if ( rc ) goto done; \
483 _x; \
484 })
485 #define insn_fetch_type(_type) ((_type)insn_fetch_bytes(sizeof(_type)))
487 #define truncate_word(ea, byte_width) \
488 ({ unsigned long __ea = (ea); \
489 unsigned int _width = (byte_width); \
490 ((_width == sizeof(unsigned long)) ? __ea : \
491 (__ea & ((1UL << (_width << 3)) - 1))); \
492 })
493 #define truncate_ea(ea) truncate_word((ea), ad_bytes)
495 #define mode_64bit() (def_ad_bytes == 8)
497 #define fail_if(p) \
498 do { \
499 rc = (p) ? X86EMUL_UNHANDLEABLE : X86EMUL_OKAY; \
500 if ( rc ) goto done; \
501 } while (0)
503 #define generate_exception_if(p, e) \
504 ({ if ( (p) ) { \
505 fail_if(ops->inject_hw_exception == NULL); \
506 rc = ops->inject_hw_exception(e, ctxt) ? : X86EMUL_EXCEPTION; \
507 goto done; \
508 } \
509 })
/*
 * Given byte has even parity (even number of 1s)? SDM Vol. 1 Sec. 3.4.3.1,
 * "Status Flags": EFLAGS.PF reflects parity of least-sig. byte of result only.
 */
static int even_parity(uint8_t v)
{
    /* Fold the parity of all eight bits down into bit 0. */
    v ^= v >> 4;
    v ^= v >> 2;
    v ^= v >> 1;
    /* PF semantics: set (1) when the number of 1 bits is even. */
    return !(v & 1);
}
521 /* Update address held in a register, based on addressing mode. */
522 #define _register_address_increment(reg, inc, byte_width) \
523 do { \
524 int _inc = (inc); /* signed type ensures sign extension to long */ \
525 unsigned int _width = (byte_width); \
526 if ( _width == sizeof(unsigned long) ) \
527 (reg) += _inc; \
528 else if ( mode_64bit() ) \
529 (reg) = ((reg) + _inc) & ((1UL << (_width << 3)) - 1); \
530 else \
531 (reg) = ((reg) & ~((1UL << (_width << 3)) - 1)) | \
532 (((reg) + _inc) & ((1UL << (_width << 3)) - 1)); \
533 } while (0)
534 #define register_address_increment(reg, inc) \
535 _register_address_increment((reg), (inc), ad_bytes)
537 #define sp_pre_dec(dec) ({ \
538 _register_address_increment(_regs.esp, -(dec), ctxt->sp_size/8); \
539 truncate_word(_regs.esp, ctxt->sp_size/8); \
540 })
541 #define sp_post_inc(inc) ({ \
542 unsigned long __esp = truncate_word(_regs.esp, ctxt->sp_size/8); \
543 _register_address_increment(_regs.esp, (inc), ctxt->sp_size/8); \
544 __esp; \
545 })
547 #define jmp_rel(rel) \
548 do { \
549 _regs.eip += (int)(rel); \
550 if ( !mode_64bit() ) \
551 _regs.eip = ((op_bytes == 2) \
552 ? (uint16_t)_regs.eip : (uint32_t)_regs.eip); \
553 } while (0)
555 static int __handle_rep_prefix(
556 struct cpu_user_regs *int_regs,
557 struct cpu_user_regs *ext_regs,
558 int ad_bytes)
559 {
560 unsigned long ecx = ((ad_bytes == 2) ? (uint16_t)int_regs->ecx :
561 (ad_bytes == 4) ? (uint32_t)int_regs->ecx :
562 int_regs->ecx);
564 if ( ecx-- == 0 )
565 {
566 ext_regs->eip = int_regs->eip;
567 return 1;
568 }
570 if ( ad_bytes == 2 )
571 *(uint16_t *)&int_regs->ecx = ecx;
572 else if ( ad_bytes == 4 )
573 int_regs->ecx = (uint32_t)ecx;
574 else
575 int_regs->ecx = ecx;
576 int_regs->eip = ext_regs->eip;
577 return 0;
578 }
580 #define handle_rep_prefix() \
581 do { \
582 if ( rep_prefix && __handle_rep_prefix(&_regs, ctxt->regs, ad_bytes) ) \
583 goto done; \
584 } while (0)
/*
 * Unsigned multiplication with double-word result.
 * IN:  Multiplicand=m[0], Multiplier=m[1]
 * OUT: Return CF/OF (overflow status); Result=m[1]:m[0]
 */
static int mul_dbl(unsigned long m[2])
{
    int rc;
    /* MUL leaves the low half in rAX (-> m[0]) and the high half in rDX
     * (-> m[1]).  SETO captures OF, which MUL sets iff the high half is
     * non-zero; rc is pre-initialised to 0 via the "2" input constraint
     * because SETO writes only the low byte. */
    asm ( "mul %4; seto %b2"
        : "=a" (m[0]), "=d" (m[1]), "=q" (rc)
        : "0" (m[0]), "1" (m[1]), "2" (0) );
    return rc;
}
/*
 * Signed multiplication with double-word result.
 * IN:  Multiplicand=m[0], Multiplier=m[1]
 * OUT: Return CF/OF (overflow status); Result=m[1]:m[0]
 */
static int imul_dbl(unsigned long m[2])
{
    int rc;
    /* One-operand IMUL: rDX:rAX = rAX * operand.  SETO records OF, set
     * when the result does not fit in a single (sign-extended) word.
     * rc starts at 0 ("2" input constraint) as SETO writes one byte. */
    asm ( "imul %4; seto %b2"
        : "=a" (m[0]), "=d" (m[1]), "=q" (rc)
        : "0" (m[0]), "1" (m[1]), "2" (0) );
    return rc;
}
/*
 * Unsigned division of double-word dividend.
 * IN:  Dividend=u[1]:u[0], Divisor=v
 * OUT: Return 1: #DE
 *      Return 0: Quotient=u[0], Remainder=u[1]
 */
static int div_dbl(unsigned long u[2], unsigned long v)
{
    /* DIV raises #DE on divide-by-zero or when the quotient overflows
     * one word (high half >= divisor).  Those cases must be rejected up
     * front: we cannot allow the fault to occur inside the emulator. */
    if ( (v == 0) || (u[1] >= v) )
        return 1;
    asm ( "div %4"
        : "=a" (u[0]), "=d" (u[1])
        : "0" (u[0]), "1" (u[1]), "r" (v) );
    return 0;
}
/*
 * Signed division of double-word dividend.
 * IN:  Dividend=u[1]:u[0], Divisor=v
 * OUT: Return 1: #DE
 *      Return 0: Quotient=u[0], Remainder=u[1]
 * NB. We don't use idiv directly as it's moderately hard to work out
 * ahead of time whether it will #DE, which we cannot allow to happen.
 */
static int idiv_dbl(unsigned long u[2], unsigned long v)
{
    int negu = (long)u[1] < 0, negv = (long)v < 0;

    /* u = abs(u) */
    if ( negu )
    {
        /* Two's-complement negation of a 2-word value: invert the high
         * word, negate the low word, and propagate a carry into the
         * high word when the low word was zero. */
        u[1] = ~u[1];
        if ( (u[0] = -u[0]) == 0 )
            u[1]++;
    }

    /* abs(u) / abs(v) */
    if ( div_dbl(u, negv ? -v : v) )
        return 1;

    /* Remainder has same sign as dividend. It cannot overflow. */
    if ( negu )
        u[1] = -u[1];

    /* Quotient is overflowed if sign bit is set. */
    if ( negu ^ negv )
    {
        /* A negative quotient is expected: negate it.  The special case
         * 0x80...0 is already the correct (most negative) value. */
        if ( (long)u[0] >= 0 )
            u[0] = -u[0];
        else if ( (u[0] << 1) != 0 ) /* == 0x80...0 is okay */
            return 1;
    }
    else if ( (long)u[0] < 0 )
        return 1;

    return 0;
}
672 static int
673 test_cc(
674 unsigned int condition, unsigned int flags)
675 {
676 int rc = 0;
678 switch ( (condition & 15) >> 1 )
679 {
680 case 0: /* o */
681 rc |= (flags & EFLG_OF);
682 break;
683 case 1: /* b/c/nae */
684 rc |= (flags & EFLG_CF);
685 break;
686 case 2: /* z/e */
687 rc |= (flags & EFLG_ZF);
688 break;
689 case 3: /* be/na */
690 rc |= (flags & (EFLG_CF|EFLG_ZF));
691 break;
692 case 4: /* s */
693 rc |= (flags & EFLG_SF);
694 break;
695 case 5: /* p/pe */
696 rc |= (flags & EFLG_PF);
697 break;
698 case 7: /* le/ng */
699 rc |= (flags & EFLG_ZF);
700 /* fall through */
701 case 6: /* l/nge */
702 rc |= (!(flags & EFLG_SF) != !(flags & EFLG_OF));
703 break;
704 }
706 /* Odd condition identifiers (lsb == 1) have inverted sense. */
707 return (!!rc ^ (condition & 1));
708 }
/*
 * Return the current privilege level (0-3), or -1 if it cannot be
 * determined.  In virtual-8086 mode CPL is architecturally 3; otherwise
 * it is taken from the DPL field of the cached SS segment register.
 */
static int
get_cpl(
    struct x86_emulate_ctxt *ctxt,
    struct x86_emulate_ops *ops)
{
    struct segment_register reg;

    if ( ctxt->regs->eflags & EFLG_VM )
        return 3;

    /* Without a segment-read hook we cannot know the CPL. */
    if ( (ops->read_segment == NULL) ||
         ops->read_segment(x86_seg_ss, &reg, ctxt) )
        return -1;

    return reg.attr.fields.dpl;
}
727 static int
728 _mode_iopl(
729 struct x86_emulate_ctxt *ctxt,
730 struct x86_emulate_ops *ops)
731 {
732 int cpl = get_cpl(ctxt, ops);
733 return ((cpl >= 0) && (cpl <= ((ctxt->regs->eflags >> 12) & 3)));
734 }
736 #define mode_ring0() (get_cpl(ctxt, ops) == 0)
737 #define mode_iopl() _mode_iopl(ctxt, ops)
739 static int
740 in_realmode(
741 struct x86_emulate_ctxt *ctxt,
742 struct x86_emulate_ops *ops)
743 {
744 unsigned long cr0;
745 int rc;
747 if ( ops->read_cr == NULL )
748 return 0;
750 rc = ops->read_cr(0, &cr0, ctxt);
751 return (!rc && !(cr0 & CR0_PE));
752 }
/*
 * Load segment register @seg with selector @sel.  Only real-mode loads
 * are handled here (no descriptor-table lookup): protected-mode loads
 * are refused as unhandleable.
 */
static int
load_seg(
    enum x86_segment seg,
    uint16_t sel,
    struct x86_emulate_ctxt *ctxt,
    struct x86_emulate_ops *ops)
{
    struct segment_register reg;
    int rc;

    /* Require real mode and both segment accessor hooks. */
    if ( !in_realmode(ctxt, ops) ||
         (ops->read_segment == NULL) ||
         (ops->write_segment == NULL) )
        return X86EMUL_UNHANDLEABLE;

    if ( (rc = ops->read_segment(seg, &reg, ctxt)) != 0 )
        return rc;

    /* Real-mode semantics: selector loaded verbatim, base = sel << 4.
     * Cached limit and attributes are left unchanged. */
    reg.sel = sel;
    reg.base = (uint32_t)sel << 4;

    return ops->write_segment(seg, &reg, ctxt);
}
778 void *
779 decode_register(
780 uint8_t modrm_reg, struct cpu_user_regs *regs, int highbyte_regs)
781 {
782 void *p;
784 switch ( modrm_reg )
785 {
786 case 0: p = &regs->eax; break;
787 case 1: p = &regs->ecx; break;
788 case 2: p = &regs->edx; break;
789 case 3: p = &regs->ebx; break;
790 case 4: p = (highbyte_regs ?
791 ((unsigned char *)&regs->eax + 1) :
792 (unsigned char *)&regs->esp); break;
793 case 5: p = (highbyte_regs ?
794 ((unsigned char *)&regs->ecx + 1) :
795 (unsigned char *)&regs->ebp); break;
796 case 6: p = (highbyte_regs ?
797 ((unsigned char *)&regs->edx + 1) :
798 (unsigned char *)&regs->esi); break;
799 case 7: p = (highbyte_regs ?
800 ((unsigned char *)&regs->ebx + 1) :
801 (unsigned char *)&regs->edi); break;
802 #if defined(__x86_64__)
803 case 8: p = &regs->r8; break;
804 case 9: p = &regs->r9; break;
805 case 10: p = &regs->r10; break;
806 case 11: p = &regs->r11; break;
807 case 12: p = &regs->r12; break;
808 case 13: p = &regs->r13; break;
809 case 14: p = &regs->r14; break;
810 case 15: p = &regs->r15; break;
811 #endif
812 default: p = NULL; break;
813 }
815 return p;
816 }
818 #define decode_segment_failed x86_seg_tr
819 enum x86_segment
820 decode_segment(
821 uint8_t modrm_reg)
822 {
823 switch ( modrm_reg )
824 {
825 case 0: return x86_seg_es;
826 case 1: return x86_seg_cs;
827 case 2: return x86_seg_ss;
828 case 3: return x86_seg_ds;
829 case 4: return x86_seg_fs;
830 case 5: return x86_seg_gs;
831 default: break;
832 }
833 return decode_segment_failed;
834 }
836 int
837 x86_emulate(
838 struct x86_emulate_ctxt *ctxt,
839 struct x86_emulate_ops *ops)
840 {
841 /* Shadow copy of register state. Committed on successful emulation. */
842 struct cpu_user_regs _regs = *ctxt->regs;
844 uint8_t b, d, sib, sib_index, sib_base, twobyte = 0, rex_prefix = 0;
845 uint8_t modrm, modrm_mod = 0, modrm_reg = 0, modrm_rm = 0;
846 unsigned int op_bytes, def_op_bytes, ad_bytes, def_ad_bytes;
847 #define REPE_PREFIX 1
848 #define REPNE_PREFIX 2
849 unsigned int lock_prefix = 0, rep_prefix = 0;
850 int override_seg = -1, rc = X86EMUL_OKAY;
851 struct operand src, dst;
853 /* Data operand effective address (usually computed from ModRM). */
854 struct operand ea;
856 /* Default is a memory operand relative to segment DS. */
857 ea.type = OP_MEM;
858 ea.mem.seg = x86_seg_ds;
859 ea.mem.off = 0;
861 op_bytes = def_op_bytes = ad_bytes = def_ad_bytes = ctxt->addr_size/8;
862 if ( op_bytes == 8 )
863 {
864 op_bytes = def_op_bytes = 4;
865 #ifndef __x86_64__
866 return X86EMUL_UNHANDLEABLE;
867 #endif
868 }
870 /* Prefix bytes. */
871 for ( ; ; )
872 {
873 switch ( b = insn_fetch_type(uint8_t) )
874 {
875 case 0x66: /* operand-size override */
876 op_bytes = def_op_bytes ^ 6;
877 break;
878 case 0x67: /* address-size override */
879 ad_bytes = def_ad_bytes ^ (mode_64bit() ? 12 : 6);
880 break;
881 case 0x2e: /* CS override */
882 override_seg = x86_seg_cs;
883 break;
884 case 0x3e: /* DS override */
885 override_seg = x86_seg_ds;
886 break;
887 case 0x26: /* ES override */
888 override_seg = x86_seg_es;
889 break;
890 case 0x64: /* FS override */
891 override_seg = x86_seg_fs;
892 break;
893 case 0x65: /* GS override */
894 override_seg = x86_seg_gs;
895 break;
896 case 0x36: /* SS override */
897 override_seg = x86_seg_ss;
898 break;
899 case 0xf0: /* LOCK */
900 lock_prefix = 1;
901 break;
902 case 0xf2: /* REPNE/REPNZ */
903 rep_prefix = REPNE_PREFIX;
904 break;
905 case 0xf3: /* REP/REPE/REPZ */
906 rep_prefix = REPE_PREFIX;
907 break;
908 case 0x40 ... 0x4f: /* REX */
909 if ( !mode_64bit() )
910 goto done_prefixes;
911 rex_prefix = b;
912 continue;
913 default:
914 goto done_prefixes;
915 }
917 /* Any legacy prefix after a REX prefix nullifies its effect. */
918 rex_prefix = 0;
919 }
920 done_prefixes:
922 if ( rex_prefix & 8 ) /* REX.W */
923 op_bytes = 8;
925 /* Opcode byte(s). */
926 d = opcode_table[b];
927 if ( d == 0 )
928 {
929 /* Two-byte opcode? */
930 if ( b == 0x0f )
931 {
932 twobyte = 1;
933 b = insn_fetch_type(uint8_t);
934 d = twobyte_table[b];
935 }
937 /* Unrecognised? */
938 if ( d == 0 )
939 goto cannot_emulate;
940 }
942 /* Lock prefix is allowed only on RMW instructions. */
943 generate_exception_if((d & Mov) && lock_prefix, EXC_GP);
945 /* ModRM and SIB bytes. */
946 if ( d & ModRM )
947 {
948 modrm = insn_fetch_type(uint8_t);
949 modrm_mod = (modrm & 0xc0) >> 6;
950 modrm_reg = ((rex_prefix & 4) << 1) | ((modrm & 0x38) >> 3);
951 modrm_rm = modrm & 0x07;
953 if ( modrm_mod == 3 )
954 {
955 modrm_rm |= (rex_prefix & 1) << 3;
956 ea.type = OP_REG;
957 ea.reg = decode_register(
958 modrm_rm, &_regs, (d & ByteOp) && (rex_prefix == 0));
959 }
960 else if ( ad_bytes == 2 )
961 {
962 /* 16-bit ModR/M decode. */
963 switch ( modrm_rm )
964 {
965 case 0:
966 ea.mem.off = _regs.ebx + _regs.esi;
967 break;
968 case 1:
969 ea.mem.off = _regs.ebx + _regs.edi;
970 break;
971 case 2:
972 ea.mem.seg = x86_seg_ss;
973 ea.mem.off = _regs.ebp + _regs.esi;
974 break;
975 case 3:
976 ea.mem.seg = x86_seg_ss;
977 ea.mem.off = _regs.ebp + _regs.edi;
978 break;
979 case 4:
980 ea.mem.off = _regs.esi;
981 break;
982 case 5:
983 ea.mem.off = _regs.edi;
984 break;
985 case 6:
986 if ( modrm_mod == 0 )
987 break;
988 ea.mem.seg = x86_seg_ss;
989 ea.mem.off = _regs.ebp;
990 break;
991 case 7:
992 ea.mem.off = _regs.ebx;
993 break;
994 }
995 switch ( modrm_mod )
996 {
997 case 0:
998 if ( modrm_rm == 6 )
999 ea.mem.off = insn_fetch_type(int16_t);
1000 break;
1001 case 1:
1002 ea.mem.off += insn_fetch_type(int8_t);
1003 break;
1004 case 2:
1005 ea.mem.off += insn_fetch_type(int16_t);
1006 break;
1008 ea.mem.off = truncate_ea(ea.mem.off);
1010 else
1012 /* 32/64-bit ModR/M decode. */
1013 if ( modrm_rm == 4 )
1015 sib = insn_fetch_type(uint8_t);
1016 sib_index = ((sib >> 3) & 7) | ((rex_prefix << 2) & 8);
1017 sib_base = (sib & 7) | ((rex_prefix << 3) & 8);
1018 if ( sib_index != 4 )
1019 ea.mem.off = *(long*)decode_register(sib_index, &_regs, 0);
1020 ea.mem.off <<= (sib >> 6) & 3;
1021 if ( (modrm_mod == 0) && ((sib_base & 7) == 5) )
1022 ea.mem.off += insn_fetch_type(int32_t);
1023 else if ( sib_base == 4 )
1025 ea.mem.seg = x86_seg_ss;
1026 ea.mem.off += _regs.esp;
1027 if ( !twobyte && (b == 0x8f) )
1028 /* POP <rm> computes its EA post increment. */
1029 ea.mem.off += ((mode_64bit() && (op_bytes == 4))
1030 ? 8 : op_bytes);
1032 else if ( sib_base == 5 )
1034 ea.mem.seg = x86_seg_ss;
1035 ea.mem.off += _regs.ebp;
1037 else
1038 ea.mem.off += *(long*)decode_register(sib_base, &_regs, 0);
1040 else
1042 modrm_rm |= (rex_prefix & 1) << 3;
1043 ea.mem.off = *(long *)decode_register(modrm_rm, &_regs, 0);
1044 if ( (modrm_rm == 5) && (modrm_mod != 0) )
1045 ea.mem.seg = x86_seg_ss;
1047 switch ( modrm_mod )
1049 case 0:
1050 if ( (modrm_rm & 7) != 5 )
1051 break;
1052 ea.mem.off = insn_fetch_type(int32_t);
1053 if ( !mode_64bit() )
1054 break;
1055 /* Relative to RIP of next instruction. Argh! */
1056 ea.mem.off += _regs.eip;
1057 if ( (d & SrcMask) == SrcImm )
1058 ea.mem.off += (d & ByteOp) ? 1 :
1059 ((op_bytes == 8) ? 4 : op_bytes);
1060 else if ( (d & SrcMask) == SrcImmByte )
1061 ea.mem.off += 1;
1062 else if ( !twobyte && ((b & 0xfe) == 0xf6) &&
1063 ((modrm_reg & 7) <= 1) )
1064 /* Special case in Grp3: test has immediate operand. */
1065 ea.mem.off += (d & ByteOp) ? 1
1066 : ((op_bytes == 8) ? 4 : op_bytes);
1067 else if ( twobyte && ((b & 0xf7) == 0xa4) )
1068 /* SHLD/SHRD with immediate byte third operand. */
1069 ea.mem.off++;
1070 break;
1071 case 1:
1072 ea.mem.off += insn_fetch_type(int8_t);
1073 break;
1074 case 2:
1075 ea.mem.off += insn_fetch_type(int32_t);
1076 break;
1078 ea.mem.off = truncate_ea(ea.mem.off);
1082 if ( override_seg != -1 )
1083 ea.mem.seg = override_seg;
1085 /* Special instructions do their own operand decoding. */
1086 if ( (d & DstMask) == ImplicitOps )
1087 goto special_insn;
1089 /* Decode and fetch the source operand: register, memory or immediate. */
1090 switch ( d & SrcMask )
1092 case SrcNone:
1093 break;
1094 case SrcReg:
1095 src.type = OP_REG;
1096 if ( d & ByteOp )
1098 src.reg = decode_register(modrm_reg, &_regs, (rex_prefix == 0));
1099 src.val = *(uint8_t *)src.reg;
1100 src.bytes = 1;
1102 else
1104 src.reg = decode_register(modrm_reg, &_regs, 0);
1105 switch ( (src.bytes = op_bytes) )
1107 case 2: src.val = *(uint16_t *)src.reg; break;
1108 case 4: src.val = *(uint32_t *)src.reg; break;
1109 case 8: src.val = *(uint64_t *)src.reg; break;
1112 break;
1113 case SrcMem16:
1114 ea.bytes = 2;
1115 goto srcmem_common;
1116 case SrcMem:
1117 ea.bytes = (d & ByteOp) ? 1 : op_bytes;
1118 srcmem_common:
1119 src = ea;
1120 if ( src.type == OP_REG )
1122 switch ( src.bytes )
1124 case 1: src.val = *(uint8_t *)src.reg; break;
1125 case 2: src.val = *(uint16_t *)src.reg; break;
1126 case 4: src.val = *(uint32_t *)src.reg; break;
1127 case 8: src.val = *(uint64_t *)src.reg; break;
1130 else if ( (rc = ops->read(src.mem.seg, src.mem.off,
1131 &src.val, src.bytes, ctxt)) )
1132 goto done;
1133 break;
1134 case SrcImm:
1135 src.type = OP_IMM;
1136 src.bytes = (d & ByteOp) ? 1 : op_bytes;
1137 if ( src.bytes == 8 ) src.bytes = 4;
1138 /* NB. Immediates are sign-extended as necessary. */
1139 switch ( src.bytes )
1141 case 1: src.val = insn_fetch_type(int8_t); break;
1142 case 2: src.val = insn_fetch_type(int16_t); break;
1143 case 4: src.val = insn_fetch_type(int32_t); break;
1145 break;
1146 case SrcImmByte:
1147 src.type = OP_IMM;
1148 src.bytes = 1;
1149 src.val = insn_fetch_type(int8_t);
1150 break;
1153 /* Decode and fetch the destination operand: register or memory. */
1154 switch ( d & DstMask )
1156 case DstReg:
1157 dst.type = OP_REG;
1158 if ( d & ByteOp )
1160 dst.reg = decode_register(modrm_reg, &_regs, (rex_prefix == 0));
1161 dst.val = *(uint8_t *)dst.reg;
1162 dst.bytes = 1;
1164 else
1166 dst.reg = decode_register(modrm_reg, &_regs, 0);
1167 switch ( (dst.bytes = op_bytes) )
1169 case 2: dst.val = *(uint16_t *)dst.reg; break;
1170 case 4: dst.val = *(uint32_t *)dst.reg; break;
1171 case 8: dst.val = *(uint64_t *)dst.reg; break;
1174 break;
1175 case DstBitBase:
1176 if ( ((d & SrcMask) == SrcImmByte) || (ea.type == OP_REG) )
1178 src.val &= (op_bytes << 3) - 1;
1180 else
1182 /*
1183 * EA += BitOffset DIV op_bytes*8
1184 * BitOffset = BitOffset MOD op_bytes*8
1185 * DIV truncates towards negative infinity.
1186 * MOD always produces a positive result.
1187 */
1188 if ( op_bytes == 2 )
1189 src.val = (int16_t)src.val;
1190 else if ( op_bytes == 4 )
1191 src.val = (int32_t)src.val;
1192 if ( (long)src.val < 0 )
1194 unsigned long byte_offset;
1195 byte_offset = op_bytes + (((-src.val-1) >> 3) & ~(op_bytes-1));
1196 ea.mem.off -= byte_offset;
1197 src.val = (byte_offset << 3) + src.val;
1199 else
1201 ea.mem.off += (src.val >> 3) & ~(op_bytes - 1);
1202 src.val &= (op_bytes << 3) - 1;
1205 /* Becomes a normal DstMem operation from here on. */
1206 d = (d & ~DstMask) | DstMem;
1207 case DstMem:
1208 ea.bytes = (d & ByteOp) ? 1 : op_bytes;
1209 dst = ea;
1210 if ( dst.type == OP_REG )
1212 switch ( dst.bytes )
1214 case 1: dst.val = *(uint8_t *)dst.reg; break;
1215 case 2: dst.val = *(uint16_t *)dst.reg; break;
1216 case 4: dst.val = *(uint32_t *)dst.reg; break;
1217 case 8: dst.val = *(uint64_t *)dst.reg; break;
1220 else if ( !(d & Mov) ) /* optimisation - avoid slow emulated read */
1222 if ( (rc = ops->read(dst.mem.seg, dst.mem.off,
1223 &dst.val, dst.bytes, ctxt)) )
1224 goto done;
1225 dst.orig_val = dst.val;
1227 break;
1230 /* LOCK prefix allowed only on instructions with memory destination. */
1231 generate_exception_if(lock_prefix && (dst.type != OP_MEM), EXC_GP);
1233 if ( twobyte )
1234 goto twobyte_insn;
1236 switch ( b )
1238 case 0x04 ... 0x05: /* add imm,%%eax */
1239 dst.reg = (unsigned long *)&_regs.eax;
1240 dst.val = _regs.eax;
1241 case 0x00 ... 0x03: add: /* add */
1242 emulate_2op_SrcV("add", src, dst, _regs.eflags);
1243 break;
1245 case 0x0c ... 0x0d: /* or imm,%%eax */
1246 dst.reg = (unsigned long *)&_regs.eax;
1247 dst.val = _regs.eax;
1248 case 0x08 ... 0x0b: or: /* or */
1249 emulate_2op_SrcV("or", src, dst, _regs.eflags);
1250 break;
1252 case 0x14 ... 0x15: /* adc imm,%%eax */
1253 dst.reg = (unsigned long *)&_regs.eax;
1254 dst.val = _regs.eax;
1255 case 0x10 ... 0x13: adc: /* adc */
1256 emulate_2op_SrcV("adc", src, dst, _regs.eflags);
1257 break;
1259 case 0x1c ... 0x1d: /* sbb imm,%%eax */
1260 dst.reg = (unsigned long *)&_regs.eax;
1261 dst.val = _regs.eax;
1262 case 0x18 ... 0x1b: sbb: /* sbb */
1263 emulate_2op_SrcV("sbb", src, dst, _regs.eflags);
1264 break;
1266 case 0x24 ... 0x25: /* and imm,%%eax */
1267 dst.reg = (unsigned long *)&_regs.eax;
1268 dst.val = _regs.eax;
1269 case 0x20 ... 0x23: and: /* and */
1270 emulate_2op_SrcV("and", src, dst, _regs.eflags);
1271 break;
1273 case 0x2c ... 0x2d: /* sub imm,%%eax */
1274 dst.reg = (unsigned long *)&_regs.eax;
1275 dst.val = _regs.eax;
1276 case 0x28 ... 0x2b: sub: /* sub */
1277 emulate_2op_SrcV("sub", src, dst, _regs.eflags);
1278 break;
1280 case 0x34 ... 0x35: /* xor imm,%%eax */
1281 dst.reg = (unsigned long *)&_regs.eax;
1282 dst.val = _regs.eax;
1283 case 0x30 ... 0x33: xor: /* xor */
1284 emulate_2op_SrcV("xor", src, dst, _regs.eflags);
1285 break;
1287 case 0x3c ... 0x3d: /* cmp imm,%%eax */
1288 dst.reg = (unsigned long *)&_regs.eax;
1289 dst.val = _regs.eax;
1290 case 0x38 ... 0x3b: cmp: /* cmp */
1291 emulate_2op_SrcV("cmp", src, dst, _regs.eflags);
1292 break;
1294 case 0x62: /* bound */ {
1295 unsigned long src_val2;
1296 int lb, ub, idx;
1297 generate_exception_if(mode_64bit() || (src.type != OP_MEM), EXC_UD);
1298 if ( (rc = ops->read(src.mem.seg, src.mem.off + op_bytes,
1299 &src_val2, op_bytes, ctxt)) )
1300 goto done;
1301 ub = (op_bytes == 2) ? (int16_t)src_val2 : (int32_t)src_val2;
1302 lb = (op_bytes == 2) ? (int16_t)src.val : (int32_t)src.val;
1303 idx = (op_bytes == 2) ? (int16_t)dst.val : (int32_t)dst.val;
1304 generate_exception_if((idx < lb) || (idx > ub), EXC_BR);
1305 dst.type = OP_NONE;
1306 break;
1309 case 0x63: /* movsxd (x86/64) / arpl (x86/32) */
1310 if ( mode_64bit() )
1312 /* movsxd */
1313 if ( src.type == OP_REG )
1314 src.val = *(int32_t *)src.reg;
1315 else if ( (rc = ops->read(src.mem.seg, src.mem.off,
1316 &src.val, 4, ctxt)) )
1317 goto done;
1318 dst.val = (int32_t)src.val;
1320 else
1322 /* arpl */
1323 uint16_t src_val = dst.val;
1324 dst = src;
1325 _regs.eflags &= ~EFLG_ZF;
1326 _regs.eflags |= ((src_val & 3) > (dst.val & 3)) ? EFLG_ZF : 0;
1327 if ( _regs.eflags & EFLG_ZF )
1328 dst.val = (dst.val & ~3) | (src_val & 3);
1329 else
1330 dst.type = OP_NONE;
1331 generate_exception_if(in_realmode(ctxt, ops), EXC_UD);
1333 break;
1335 case 0x69: /* imul imm16/32 */
1336 case 0x6b: /* imul imm8 */ {
1337 unsigned long src1; /* ModR/M source operand */
1338 if ( ea.type == OP_REG )
1339 src1 = *ea.reg;
1340 else if ( (rc = ops->read(ea.mem.seg, ea.mem.off,
1341 &src1, op_bytes, ctxt)) )
1342 goto done;
1343 _regs.eflags &= ~(EFLG_OF|EFLG_CF);
1344 switch ( dst.bytes )
1346 case 2:
1347 dst.val = ((uint32_t)(int16_t)src.val *
1348 (uint32_t)(int16_t)src1);
1349 if ( (int16_t)dst.val != (uint32_t)dst.val )
1350 _regs.eflags |= EFLG_OF|EFLG_CF;
1351 break;
1352 #ifdef __x86_64__
1353 case 4:
1354 dst.val = ((uint64_t)(int32_t)src.val *
1355 (uint64_t)(int32_t)src1);
1356 if ( (int32_t)dst.val != dst.val )
1357 _regs.eflags |= EFLG_OF|EFLG_CF;
1358 break;
1359 #endif
1360 default: {
1361 unsigned long m[2] = { src.val, src1 };
1362 if ( imul_dbl(m) )
1363 _regs.eflags |= EFLG_OF|EFLG_CF;
1364 dst.val = m[0];
1365 break;
1368 break;
1371 case 0x82: /* Grp1 (x86/32 only) */
1372 generate_exception_if(mode_64bit(), EXC_UD);
1373 case 0x80: case 0x81: case 0x83: /* Grp1 */
1374 switch ( modrm_reg & 7 )
1376 case 0: goto add;
1377 case 1: goto or;
1378 case 2: goto adc;
1379 case 3: goto sbb;
1380 case 4: goto and;
1381 case 5: goto sub;
1382 case 6: goto xor;
1383 case 7: goto cmp;
1385 break;
1387 case 0xa8 ... 0xa9: /* test imm,%%eax */
1388 dst.reg = (unsigned long *)&_regs.eax;
1389 dst.val = _regs.eax;
1390 case 0x84 ... 0x85: test: /* test */
1391 emulate_2op_SrcV("test", src, dst, _regs.eflags);
1392 break;
1394 case 0x86 ... 0x87: xchg: /* xchg */
1395 /* Write back the register source. */
1396 switch ( dst.bytes )
1398 case 1: *(uint8_t *)src.reg = (uint8_t)dst.val; break;
1399 case 2: *(uint16_t *)src.reg = (uint16_t)dst.val; break;
1400 case 4: *src.reg = (uint32_t)dst.val; break; /* 64b reg: zero-extend */
1401 case 8: *src.reg = dst.val; break;
1403 /* Write back the memory destination with implicit LOCK prefix. */
1404 dst.val = src.val;
1405 lock_prefix = 1;
1406 break;
1408 case 0xc6 ... 0xc7: /* mov (sole member of Grp11) */
1409 generate_exception_if((modrm_reg & 7) != 0, EXC_UD);
1410 case 0x88 ... 0x8b: /* mov */
1411 dst.val = src.val;
1412 break;
1414 case 0x8c: /* mov Sreg,r/m */ {
1415 struct segment_register reg;
1416 enum x86_segment seg = decode_segment(modrm_reg);
1417 generate_exception_if(seg == decode_segment_failed, EXC_UD);
1418 fail_if(ops->read_segment == NULL);
1419 if ( (rc = ops->read_segment(seg, &reg, ctxt)) != 0 )
1420 goto done;
1421 dst.val = reg.sel;
1422 if ( dst.type == OP_MEM )
1423 dst.bytes = 2;
1424 break;
1427 case 0x8e: /* mov r/m,Sreg */ {
1428 enum x86_segment seg = decode_segment(modrm_reg);
1429 generate_exception_if(seg == decode_segment_failed, EXC_UD);
1430 if ( (rc = load_seg(seg, (uint16_t)src.val, ctxt, ops)) != 0 )
1431 goto done;
1432 dst.type = OP_NONE;
1433 break;
1436 case 0x8d: /* lea */
1437 dst.val = ea.mem.off;
1438 break;
1440 case 0x8f: /* pop (sole member of Grp1a) */
1441 generate_exception_if((modrm_reg & 7) != 0, EXC_UD);
1442 /* 64-bit mode: POP defaults to a 64-bit operand. */
1443 if ( mode_64bit() && (dst.bytes == 4) )
1444 dst.bytes = 8;
1445 if ( (rc = ops->read(x86_seg_ss, sp_post_inc(dst.bytes),
1446 &dst.val, dst.bytes, ctxt)) != 0 )
1447 goto done;
1448 break;
1450 case 0xb0 ... 0xb7: /* mov imm8,r8 */
1451 dst.reg = decode_register(
1452 (b & 7) | ((rex_prefix & 1) << 3), &_regs, (rex_prefix == 0));
1453 dst.val = src.val;
1454 break;
1456 case 0xb8 ... 0xbf: /* mov imm{16,32,64},r{16,32,64} */
1457 if ( dst.bytes == 8 ) /* Fetch more bytes to obtain imm64 */
1458 src.val = ((uint32_t)src.val |
1459 ((uint64_t)insn_fetch_type(uint32_t) << 32));
1460 dst.reg = decode_register(
1461 (b & 7) | ((rex_prefix & 1) << 3), &_regs, 0);
1462 dst.val = src.val;
1463 break;
1465 case 0xc0 ... 0xc1: grp2: /* Grp2 */
1466 switch ( modrm_reg & 7 )
1468 case 0: /* rol */
1469 emulate_2op_SrcB("rol", src, dst, _regs.eflags);
1470 break;
1471 case 1: /* ror */
1472 emulate_2op_SrcB("ror", src, dst, _regs.eflags);
1473 break;
1474 case 2: /* rcl */
1475 emulate_2op_SrcB("rcl", src, dst, _regs.eflags);
1476 break;
1477 case 3: /* rcr */
1478 emulate_2op_SrcB("rcr", src, dst, _regs.eflags);
1479 break;
1480 case 4: /* sal/shl */
1481 case 6: /* sal/shl */
1482 emulate_2op_SrcB("sal", src, dst, _regs.eflags);
1483 break;
1484 case 5: /* shr */
1485 emulate_2op_SrcB("shr", src, dst, _regs.eflags);
1486 break;
1487 case 7: /* sar */
1488 emulate_2op_SrcB("sar", src, dst, _regs.eflags);
1489 break;
1491 break;
1493 case 0xc4: /* les */ {
1494 unsigned long sel;
1495 dst.val = x86_seg_es;
1496 les: /* dst.val identifies the segment */
1497 generate_exception_if(src.type != OP_MEM, EXC_UD);
1498 if ( (rc = ops->read(src.mem.seg, src.mem.off + src.bytes,
1499 &sel, 2, ctxt)) != 0 )
1500 goto done;
1501 if ( (rc = load_seg(dst.val, (uint16_t)sel, ctxt, ops)) != 0 )
1502 goto done;
1503 dst.val = src.val;
1504 break;
1507 case 0xc5: /* lds */
1508 dst.val = x86_seg_ds;
1509 goto les;
1511 case 0xd0 ... 0xd1: /* Grp2 */
1512 src.val = 1;
1513 goto grp2;
1515 case 0xd2 ... 0xd3: /* Grp2 */
1516 src.val = _regs.ecx;
1517 goto grp2;
1519 case 0xf6 ... 0xf7: /* Grp3 */
1520 switch ( modrm_reg & 7 )
1522 case 0 ... 1: /* test */
1523 /* Special case in Grp3: test has an immediate source operand. */
1524 src.type = OP_IMM;
1525 src.bytes = (d & ByteOp) ? 1 : op_bytes;
1526 if ( src.bytes == 8 ) src.bytes = 4;
1527 switch ( src.bytes )
1529 case 1: src.val = insn_fetch_type(int8_t); break;
1530 case 2: src.val = insn_fetch_type(int16_t); break;
1531 case 4: src.val = insn_fetch_type(int32_t); break;
1533 goto test;
1534 case 2: /* not */
1535 dst.val = ~dst.val;
1536 break;
1537 case 3: /* neg */
1538 emulate_1op("neg", dst, _regs.eflags);
1539 break;
1540 case 4: /* mul */
1541 src = dst;
1542 dst.type = OP_REG;
1543 dst.reg = (unsigned long *)&_regs.eax;
1544 dst.val = *dst.reg;
1545 _regs.eflags &= ~(EFLG_OF|EFLG_CF);
1546 switch ( src.bytes )
1548 case 1:
1549 dst.val *= src.val;
1550 if ( (uint8_t)dst.val != (uint16_t)dst.val )
1551 _regs.eflags |= EFLG_OF|EFLG_CF;
1552 break;
1553 case 2:
1554 dst.val *= src.val;
1555 if ( (uint16_t)dst.val != (uint32_t)dst.val )
1556 _regs.eflags |= EFLG_OF|EFLG_CF;
1557 *(uint16_t *)&_regs.edx = dst.val >> 16;
1558 break;
1559 #ifdef __x86_64__
1560 case 4:
1561 dst.val *= src.val;
1562 if ( (uint32_t)dst.val != dst.val )
1563 _regs.eflags |= EFLG_OF|EFLG_CF;
1564 _regs.edx = (uint32_t)(dst.val >> 32);
1565 break;
1566 #endif
1567 default: {
1568 unsigned long m[2] = { src.val, dst.val };
1569 if ( mul_dbl(m) )
1570 _regs.eflags |= EFLG_OF|EFLG_CF;
1571 _regs.edx = m[1];
1572 dst.val = m[0];
1573 break;
1576 break;
1577 case 5: /* imul */
1578 src = dst;
1579 dst.type = OP_REG;
1580 dst.reg = (unsigned long *)&_regs.eax;
1581 dst.val = *dst.reg;
1582 _regs.eflags &= ~(EFLG_OF|EFLG_CF);
1583 switch ( src.bytes )
1585 case 1:
1586 dst.val = ((uint16_t)(int8_t)src.val *
1587 (uint16_t)(int8_t)dst.val);
1588 if ( (int8_t)dst.val != (uint16_t)dst.val )
1589 _regs.eflags |= EFLG_OF|EFLG_CF;
1590 break;
1591 case 2:
1592 dst.val = ((uint32_t)(int16_t)src.val *
1593 (uint32_t)(int16_t)dst.val);
1594 if ( (int16_t)dst.val != (uint32_t)dst.val )
1595 _regs.eflags |= EFLG_OF|EFLG_CF;
1596 *(uint16_t *)&_regs.edx = dst.val >> 16;
1597 break;
1598 #ifdef __x86_64__
1599 case 4:
1600 dst.val = ((uint64_t)(int32_t)src.val *
1601 (uint64_t)(int32_t)dst.val);
1602 if ( (int32_t)dst.val != dst.val )
1603 _regs.eflags |= EFLG_OF|EFLG_CF;
1604 _regs.edx = (uint32_t)(dst.val >> 32);
1605 break;
1606 #endif
1607 default: {
1608 unsigned long m[2] = { src.val, dst.val };
1609 if ( imul_dbl(m) )
1610 _regs.eflags |= EFLG_OF|EFLG_CF;
1611 _regs.edx = m[1];
1612 dst.val = m[0];
1613 break;
1616 break;
1617 case 6: /* div */ {
1618 unsigned long u[2], v;
1619 src = dst;
1620 dst.type = OP_REG;
1621 dst.reg = (unsigned long *)&_regs.eax;
1622 switch ( src.bytes )
1624 case 1:
1625 u[0] = (uint16_t)_regs.eax;
1626 u[1] = 0;
1627 v = (uint8_t)src.val;
1628 generate_exception_if(
1629 div_dbl(u, v) || ((uint8_t)u[0] != (uint16_t)u[0]),
1630 EXC_DE);
1631 dst.val = (uint8_t)u[0];
1632 ((uint8_t *)&_regs.eax)[1] = u[1];
1633 break;
1634 case 2:
1635 u[0] = ((uint32_t)_regs.edx << 16) | (uint16_t)_regs.eax;
1636 u[1] = 0;
1637 v = (uint16_t)src.val;
1638 generate_exception_if(
1639 div_dbl(u, v) || ((uint16_t)u[0] != (uint32_t)u[0]),
1640 EXC_DE);
1641 dst.val = (uint16_t)u[0];
1642 *(uint16_t *)&_regs.edx = u[1];
1643 break;
1644 #ifdef __x86_64__
1645 case 4:
1646 u[0] = (_regs.edx << 32) | (uint32_t)_regs.eax;
1647 u[1] = 0;
1648 v = (uint32_t)src.val;
1649 generate_exception_if(
1650 div_dbl(u, v) || ((uint32_t)u[0] != u[0]),
1651 EXC_DE);
1652 dst.val = (uint32_t)u[0];
1653 _regs.edx = (uint32_t)u[1];
1654 break;
1655 #endif
1656 default:
1657 u[0] = _regs.eax;
1658 u[1] = _regs.edx;
1659 v = src.val;
1660 generate_exception_if(div_dbl(u, v), EXC_DE);
1661 dst.val = u[0];
1662 _regs.edx = u[1];
1663 break;
1665 break;
1667 case 7: /* idiv */ {
1668 unsigned long u[2], v;
1669 src = dst;
1670 dst.type = OP_REG;
1671 dst.reg = (unsigned long *)&_regs.eax;
1672 switch ( src.bytes )
1674 case 1:
1675 u[0] = (int16_t)_regs.eax;
1676 u[1] = ((long)u[0] < 0) ? ~0UL : 0UL;
1677 v = (int8_t)src.val;
1678 generate_exception_if(
1679 idiv_dbl(u, v) || ((int8_t)u[0] != (int16_t)u[0]),
1680 EXC_DE);
1681 dst.val = (int8_t)u[0];
1682 ((int8_t *)&_regs.eax)[1] = u[1];
1683 break;
1684 case 2:
1685 u[0] = (int32_t)((_regs.edx << 16) | (uint16_t)_regs.eax);
1686 u[1] = ((long)u[0] < 0) ? ~0UL : 0UL;
1687 v = (int16_t)src.val;
1688 generate_exception_if(
1689 idiv_dbl(u, v) || ((int16_t)u[0] != (int32_t)u[0]),
1690 EXC_DE);
1691 dst.val = (int16_t)u[0];
1692 *(int16_t *)&_regs.edx = u[1];
1693 break;
1694 #ifdef __x86_64__
1695 case 4:
1696 u[0] = (_regs.edx << 32) | (uint32_t)_regs.eax;
1697 u[1] = ((long)u[0] < 0) ? ~0UL : 0UL;
1698 v = (int32_t)src.val;
1699 generate_exception_if(
1700 idiv_dbl(u, v) || ((int32_t)u[0] != u[0]),
1701 EXC_DE);
1702 dst.val = (int32_t)u[0];
1703 _regs.edx = (uint32_t)u[1];
1704 break;
1705 #endif
1706 default:
1707 u[0] = _regs.eax;
1708 u[1] = _regs.edx;
1709 v = src.val;
1710 generate_exception_if(idiv_dbl(u, v), EXC_DE);
1711 dst.val = u[0];
1712 _regs.edx = u[1];
1713 break;
1715 break;
1717 default:
1718 goto cannot_emulate;
1720 break;
1722 case 0xfe: /* Grp4 */
1723 generate_exception_if((modrm_reg & 7) >= 2, EXC_UD);
1724 case 0xff: /* Grp5 */
1725 switch ( modrm_reg & 7 )
1727 case 0: /* inc */
1728 emulate_1op("inc", dst, _regs.eflags);
1729 break;
1730 case 1: /* dec */
1731 emulate_1op("dec", dst, _regs.eflags);
1732 break;
1733 case 2: /* call (near) */
1734 case 4: /* jmp (near) */
1735 if ( (dst.bytes != 8) && mode_64bit() )
1737 dst.bytes = op_bytes = 8;
1738 if ( dst.type == OP_REG )
1739 dst.val = *dst.reg;
1740 else if ( (rc = ops->read(dst.mem.seg, dst.mem.off,
1741 &dst.val, 8, ctxt)) != 0 )
1742 goto done;
1744 src.val = _regs.eip;
1745 _regs.eip = dst.val;
1746 if ( (modrm_reg & 7) == 2 )
1747 goto push; /* call */
1748 dst.type = OP_NONE;
1749 break;
1750 case 3: /* call (far, absolute indirect) */
1751 case 5: /* jmp (far, absolute indirect) */ {
1752 unsigned long sel;
1754 generate_exception_if(dst.type != OP_MEM, EXC_UD);
1756 if ( (rc = ops->read(dst.mem.seg, dst.mem.off+dst.bytes,
1757 &sel, 2, ctxt)) )
1758 goto done;
1760 if ( (modrm_reg & 7) == 3 ) /* call */
1762 struct segment_register reg;
1763 fail_if(ops->read_segment == NULL);
1764 if ( (rc = ops->read_segment(x86_seg_cs, &reg, ctxt)) ||
1765 (rc = ops->write(x86_seg_ss, sp_pre_dec(op_bytes),
1766 reg.sel, op_bytes, ctxt)) ||
1767 (rc = ops->write(x86_seg_ss, sp_pre_dec(op_bytes),
1768 _regs.eip, op_bytes, ctxt)) )
1769 goto done;
1772 if ( (rc = load_seg(x86_seg_cs, sel, ctxt, ops)) != 0 )
1773 goto done;
1774 _regs.eip = dst.val;
1776 dst.type = OP_NONE;
1777 break;
1779 case 6: /* push */
1780 /* 64-bit mode: PUSH defaults to a 64-bit operand. */
1781 if ( mode_64bit() && (dst.bytes == 4) )
1783 dst.bytes = 8;
1784 if ( dst.type == OP_REG )
1785 dst.val = *dst.reg;
1786 else if ( (rc = ops->read(dst.mem.seg, dst.mem.off,
1787 &dst.val, 8, ctxt)) != 0 )
1788 goto done;
1790 if ( (rc = ops->write(x86_seg_ss, sp_pre_dec(dst.bytes),
1791 dst.val, dst.bytes, ctxt)) != 0 )
1792 goto done;
1793 dst.type = OP_NONE;
1794 break;
1795 case 7:
1796 generate_exception_if(1, EXC_UD);
1797 default:
1798 goto cannot_emulate;
1800 break;
1803 writeback:
1804 switch ( dst.type )
1806 case OP_REG:
1807 /* The 4-byte case *is* correct: in 64-bit mode we zero-extend. */
1808 switch ( dst.bytes )
1810 case 1: *(uint8_t *)dst.reg = (uint8_t)dst.val; break;
1811 case 2: *(uint16_t *)dst.reg = (uint16_t)dst.val; break;
1812 case 4: *dst.reg = (uint32_t)dst.val; break; /* 64b: zero-ext */
1813 case 8: *dst.reg = dst.val; break;
1815 break;
1816 case OP_MEM:
1817 if ( !(d & Mov) && (dst.orig_val == dst.val) )
1818 /* nothing to do */;
1819 else if ( lock_prefix )
1820 rc = ops->cmpxchg(
1821 dst.mem.seg, dst.mem.off, dst.orig_val,
1822 dst.val, dst.bytes, ctxt);
1823 else
1824 rc = ops->write(
1825 dst.mem.seg, dst.mem.off, dst.val, dst.bytes, ctxt);
1826 if ( rc != 0 )
1827 goto done;
1828 default:
1829 break;
1832 /* Commit shadow register state. */
1833 _regs.eflags &= ~EFLG_RF;
1834 *ctxt->regs = _regs;
1836 if ( (_regs.eflags & EFLG_TF) &&
1837 (rc == X86EMUL_OKAY) &&
1838 (ops->inject_hw_exception != NULL) )
1839 rc = ops->inject_hw_exception(EXC_DB, ctxt) ? : X86EMUL_EXCEPTION;
1841 done:
1842 return rc;
1844 special_insn:
1845 dst.type = OP_NONE;
1847 /*
1848 * The only implicit-operands instructions allowed a LOCK prefix are
1849 * CMPXCHG{8,16}B, MOV CRn, MOV DRn.
1850 */
1851 generate_exception_if(lock_prefix &&
1852 ((b < 0x20) || (b > 0x23)) && /* MOV CRn/DRn */
1853 (b != 0xc7), /* CMPXCHG{8,16}B */
1854 EXC_GP);
1856 if ( twobyte )
1857 goto twobyte_special_insn;
1859 switch ( b )
1861 case 0x06: /* push %%es */ {
1862 struct segment_register reg;
1863 src.val = x86_seg_es;
1864 push_seg:
1865 fail_if(ops->read_segment == NULL);
1866 if ( (rc = ops->read_segment(src.val, &reg, ctxt)) != 0 )
1867 return rc;
1868 /* 64-bit mode: PUSH defaults to a 64-bit operand. */
1869 if ( mode_64bit() && (op_bytes == 4) )
1870 op_bytes = 8;
1871 if ( (rc = ops->write(x86_seg_ss, sp_pre_dec(op_bytes),
1872 reg.sel, op_bytes, ctxt)) != 0 )
1873 goto done;
1874 break;
1877 case 0x07: /* pop %%es */
1878 src.val = x86_seg_es;
1879 pop_seg:
1880 fail_if(ops->write_segment == NULL);
1881 /* 64-bit mode: POP defaults to a 64-bit operand. */
1882 if ( mode_64bit() && (op_bytes == 4) )
1883 op_bytes = 8;
1884 if ( (rc = ops->read(x86_seg_ss, sp_post_inc(op_bytes),
1885 &dst.val, op_bytes, ctxt)) != 0 )
1886 goto done;
1887 if ( (rc = load_seg(src.val, (uint16_t)dst.val, ctxt, ops)) != 0 )
1888 return rc;
1889 break;
1891 case 0x0e: /* push %%cs */
1892 src.val = x86_seg_cs;
1893 goto push_seg;
1895 case 0x16: /* push %%ss */
1896 src.val = x86_seg_ss;
1897 goto push_seg;
1899 case 0x17: /* pop %%ss */
1900 src.val = x86_seg_ss;
1901 goto pop_seg;
1903 case 0x1e: /* push %%ds */
1904 src.val = x86_seg_ds;
1905 goto push_seg;
1907 case 0x1f: /* pop %%ds */
1908 src.val = x86_seg_ds;
1909 goto pop_seg;
1911 case 0x27: /* daa */ {
1912 uint8_t al = _regs.eax;
1913 unsigned long eflags = _regs.eflags;
1914 generate_exception_if(mode_64bit(), EXC_UD);
1915 _regs.eflags &= ~(EFLG_CF|EFLG_AF);
1916 if ( ((al & 0x0f) > 9) || (eflags & EFLG_AF) )
1918 *(uint8_t *)&_regs.eax += 6;
1919 _regs.eflags |= EFLG_AF;
1921 if ( (al > 0x99) || (eflags & EFLG_CF) )
1923 *(uint8_t *)&_regs.eax += 0x60;
1924 _regs.eflags |= EFLG_CF;
1926 _regs.eflags &= ~(EFLG_SF|EFLG_ZF|EFLG_PF);
1927 _regs.eflags |= ((uint8_t)_regs.eax == 0) ? EFLG_ZF : 0;
1928 _regs.eflags |= (( int8_t)_regs.eax < 0) ? EFLG_SF : 0;
1929 _regs.eflags |= even_parity(_regs.eax) ? EFLG_PF : 0;
1930 break;
1933 case 0x2f: /* das */ {
1934 uint8_t al = _regs.eax;
1935 unsigned long eflags = _regs.eflags;
1936 generate_exception_if(mode_64bit(), EXC_UD);
1937 _regs.eflags &= ~(EFLG_CF|EFLG_AF);
1938 if ( ((al & 0x0f) > 9) || (eflags & EFLG_AF) )
1940 _regs.eflags |= EFLG_AF;
1941 if ( (al < 6) || (eflags & EFLG_CF) )
1942 _regs.eflags |= EFLG_CF;
1943 *(uint8_t *)&_regs.eax -= 6;
1945 if ( (al > 0x99) || (eflags & EFLG_CF) )
1947 *(uint8_t *)&_regs.eax -= 0x60;
1948 _regs.eflags |= EFLG_CF;
1950 _regs.eflags &= ~(EFLG_SF|EFLG_ZF|EFLG_PF);
1951 _regs.eflags |= ((uint8_t)_regs.eax == 0) ? EFLG_ZF : 0;
1952 _regs.eflags |= (( int8_t)_regs.eax < 0) ? EFLG_SF : 0;
1953 _regs.eflags |= even_parity(_regs.eax) ? EFLG_PF : 0;
1954 break;
1957 case 0x37: /* aaa */
1958 case 0x3f: /* aas */
1959 generate_exception_if(mode_64bit(), EXC_UD);
1960 _regs.eflags &= ~EFLG_CF;
1961 if ( ((uint8_t)_regs.eax > 9) || (_regs.eflags & EFLG_AF) )
1963 ((uint8_t *)&_regs.eax)[0] += (b == 0x37) ? 6 : -6;
1964 ((uint8_t *)&_regs.eax)[1] += (b == 0x37) ? 1 : -1;
1965 _regs.eflags |= EFLG_CF | EFLG_AF;
1967 ((uint8_t *)&_regs.eax)[0] &= 0x0f;
1968 break;
1970 case 0x40 ... 0x4f: /* inc/dec reg */
1971 dst.type = OP_REG;
1972 dst.reg = decode_register(b & 7, &_regs, 0);
1973 dst.bytes = op_bytes;
1974 dst.val = *dst.reg;
1975 if ( b & 8 )
1976 emulate_1op("dec", dst, _regs.eflags);
1977 else
1978 emulate_1op("inc", dst, _regs.eflags);
1979 break;
1981 case 0x50 ... 0x57: /* push reg */
1982 src.val = *(unsigned long *)decode_register(
1983 (b & 7) | ((rex_prefix & 1) << 3), &_regs, 0);
1984 goto push;
1986 case 0x58 ... 0x5f: /* pop reg */
1987 dst.type = OP_REG;
1988 dst.reg = decode_register(
1989 (b & 7) | ((rex_prefix & 1) << 3), &_regs, 0);
1990 dst.bytes = op_bytes;
1991 if ( mode_64bit() && (dst.bytes == 4) )
1992 dst.bytes = 8;
1993 if ( (rc = ops->read(x86_seg_ss, sp_post_inc(dst.bytes),
1994 &dst.val, dst.bytes, ctxt)) != 0 )
1995 goto done;
1996 break;
1998 case 0x60: /* pusha */ {
1999 int i;
2000 unsigned long regs[] = {
2001 _regs.eax, _regs.ecx, _regs.edx, _regs.ebx,
2002 _regs.esp, _regs.ebp, _regs.esi, _regs.edi };
2003 generate_exception_if(mode_64bit(), EXC_UD);
2004 for ( i = 0; i < 8; i++ )
2005 if ( (rc = ops->write(x86_seg_ss, sp_pre_dec(op_bytes),
2006 regs[i], op_bytes, ctxt)) != 0 )
2007 goto done;
2008 break;
2011 case 0x61: /* popa */ {
2012 int i;
2013 unsigned long dummy_esp, *regs[] = {
2014 (unsigned long *)&_regs.edi, (unsigned long *)&_regs.esi,
2015 (unsigned long *)&_regs.ebp, (unsigned long *)&dummy_esp,
2016 (unsigned long *)&_regs.ebx, (unsigned long *)&_regs.edx,
2017 (unsigned long *)&_regs.ecx, (unsigned long *)&_regs.eax };
2018 generate_exception_if(mode_64bit(), EXC_UD);
2019 for ( i = 0; i < 8; i++ )
2021 if ( (rc = ops->read(x86_seg_ss, sp_post_inc(op_bytes),
2022 &dst.val, op_bytes, ctxt)) != 0 )
2023 goto done;
2024 switch ( op_bytes )
2026 case 1: *(uint8_t *)regs[i] = (uint8_t)dst.val; break;
2027 case 2: *(uint16_t *)regs[i] = (uint16_t)dst.val; break;
2028 case 4: *regs[i] = (uint32_t)dst.val; break; /* 64b: zero-ext */
2029 case 8: *regs[i] = dst.val; break;
2032 break;
2035 case 0x68: /* push imm{16,32,64} */
2036 src.val = ((op_bytes == 2)
2037 ? (int32_t)insn_fetch_type(int16_t)
2038 : insn_fetch_type(int32_t));
2039 goto push;
2041 case 0x6a: /* push imm8 */
2042 src.val = insn_fetch_type(int8_t);
2043 push:
2044 d |= Mov; /* force writeback */
2045 dst.type = OP_MEM;
2046 dst.bytes = op_bytes;
2047 if ( mode_64bit() && (dst.bytes == 4) )
2048 dst.bytes = 8;
2049 dst.val = src.val;
2050 dst.mem.seg = x86_seg_ss;
2051 dst.mem.off = sp_pre_dec(dst.bytes);
2052 break;
2054 case 0x6c ... 0x6d: /* ins %dx,%es:%edi */
2055 handle_rep_prefix();
2056 generate_exception_if(!mode_iopl(), EXC_GP);
2057 dst.type = OP_MEM;
2058 dst.bytes = !(b & 1) ? 1 : (op_bytes == 8) ? 4 : op_bytes;
2059 dst.mem.seg = x86_seg_es;
2060 dst.mem.off = truncate_ea(_regs.edi);
2061 fail_if(ops->read_io == NULL);
2062 if ( (rc = ops->read_io((uint16_t)_regs.edx, dst.bytes,
2063 &dst.val, ctxt)) != 0 )
2064 goto done;
2065 register_address_increment(
2066 _regs.edi, (_regs.eflags & EFLG_DF) ? -dst.bytes : dst.bytes);
2067 break;
2069 case 0x6e ... 0x6f: /* outs %esi,%dx */
2070 handle_rep_prefix();
2071 generate_exception_if(!mode_iopl(), EXC_GP);
2072 dst.bytes = !(b & 1) ? 1 : (op_bytes == 8) ? 4 : op_bytes;
2073 if ( (rc = ops->read(ea.mem.seg, truncate_ea(_regs.esi),
2074 &dst.val, dst.bytes, ctxt)) != 0 )
2075 goto done;
2076 fail_if(ops->write_io == NULL);
2077 if ( (rc = ops->write_io((uint16_t)_regs.edx, dst.bytes,
2078 dst.val, ctxt)) != 0 )
2079 goto done;
2080 register_address_increment(
2081 _regs.esi, (_regs.eflags & EFLG_DF) ? -dst.bytes : dst.bytes);
2082 break;
2084 case 0x70 ... 0x7f: /* jcc (short) */ {
2085 int rel = insn_fetch_type(int8_t);
2086 if ( test_cc(b, _regs.eflags) )
2087 jmp_rel(rel);
2088 break;
2091 case 0x90: /* nop / xchg %%r8,%%rax */
2092 if ( !(rex_prefix & 1) )
2093 break; /* nop */
2095 case 0x91 ... 0x97: /* xchg reg,%%rax */
2096 src.type = dst.type = OP_REG;
2097 src.bytes = dst.bytes = op_bytes;
2098 src.reg = (unsigned long *)&_regs.eax;
2099 src.val = *src.reg;
2100 dst.reg = decode_register(
2101 (b & 7) | ((rex_prefix & 1) << 3), &_regs, 0);
2102 dst.val = *dst.reg;
2103 goto xchg;
2105 case 0x98: /* cbw/cwde/cdqe */
2106 switch ( op_bytes )
2108 case 2: *(int16_t *)&_regs.eax = (int8_t)_regs.eax; break; /* cbw */
2109 case 4: _regs.eax = (uint32_t)(int16_t)_regs.eax; break; /* cwde */
2110 case 8: _regs.eax = (int32_t)_regs.eax; break; /* cdqe */
2112 break;
2114 case 0x99: /* cwd/cdq/cqo */
2115 switch ( op_bytes )
2117 case 2:
2118 *(int16_t *)&_regs.edx = ((int16_t)_regs.eax < 0) ? -1 : 0;
2119 break;
2120 case 4:
2121 _regs.edx = (uint32_t)(((int32_t)_regs.eax < 0) ? -1 : 0);
2122 break;
2123 case 8:
2124 _regs.edx = (_regs.eax < 0) ? -1 : 0;
2125 break;
2127 break;
2129 case 0x9a: /* call (far, absolute) */ {
2130 struct segment_register reg;
2131 uint16_t sel;
2132 uint32_t eip;
2134 fail_if(ops->read_segment == NULL);
2135 generate_exception_if(mode_64bit(), EXC_UD);
2137 eip = insn_fetch_bytes(op_bytes);
2138 sel = insn_fetch_type(uint16_t);
2140 if ( (rc = ops->read_segment(x86_seg_cs, &reg, ctxt)) ||
2141 (rc = ops->write(x86_seg_ss, sp_pre_dec(op_bytes),
2142 reg.sel, op_bytes, ctxt)) ||
2143 (rc = ops->write(x86_seg_ss, sp_pre_dec(op_bytes),
2144 _regs.eip, op_bytes, ctxt)) )
2145 goto done;
2147 if ( (rc = load_seg(x86_seg_cs, sel, ctxt, ops)) != 0 )
2148 goto done;
2149 _regs.eip = eip;
2150 break;
2153 case 0x9c: /* pushf */
2154 src.val = _regs.eflags;
2155 goto push;
2157 case 0x9d: /* popf */ {
2158 uint32_t mask = EFLG_VIP | EFLG_VIF | EFLG_VM;
2159 if ( !mode_iopl() )
2160 mask |= EFLG_IOPL;
2161 fail_if(ops->write_rflags == NULL);
2162 /* 64-bit mode: POP defaults to a 64-bit operand. */
2163 if ( mode_64bit() && (op_bytes == 4) )
2164 op_bytes = 8;
2165 if ( (rc = ops->read(x86_seg_ss, sp_post_inc(op_bytes),
2166 &dst.val, op_bytes, ctxt)) != 0 )
2167 goto done;
2168 if ( op_bytes == 2 )
2169 dst.val = (uint16_t)dst.val | (_regs.eflags & 0xffff0000u);
2170 dst.val &= 0x257fd5;
2171 _regs.eflags &= mask;
2172 _regs.eflags |= (uint32_t)(dst.val & ~mask) | 0x02;
2173 if ( (rc = ops->write_rflags(_regs.eflags, ctxt)) != 0 )
2174 goto done;
2175 break;
2178 case 0x9e: /* sahf */
2179 *(uint8_t *)_regs.eflags = (((uint8_t *)&_regs.eax)[1] & 0xd7) | 0x02;
2180 break;
2182 case 0x9f: /* lahf */
2183 ((uint8_t *)&_regs.eax)[1] = (_regs.eflags & 0xd7) | 0x02;
2184 break;
2186 case 0xa0 ... 0xa1: /* mov mem.offs,{%al,%ax,%eax,%rax} */
2187 /* Source EA is not encoded via ModRM. */
2188 dst.type = OP_REG;
2189 dst.reg = (unsigned long *)&_regs.eax;
2190 dst.bytes = (d & ByteOp) ? 1 : op_bytes;
2191 if ( (rc = ops->read(ea.mem.seg, insn_fetch_bytes(ad_bytes),
2192 &dst.val, dst.bytes, ctxt)) != 0 )
2193 goto done;
2194 break;
2196 case 0xa2 ... 0xa3: /* mov {%al,%ax,%eax,%rax},mem.offs */
2197 /* Destination EA is not encoded via ModRM. */
2198 dst.type = OP_MEM;
2199 dst.mem.seg = ea.mem.seg;
2200 dst.mem.off = insn_fetch_bytes(ad_bytes);
2201 dst.bytes = (d & ByteOp) ? 1 : op_bytes;
2202 dst.val = (unsigned long)_regs.eax;
2203 break;
2205 case 0xa4 ... 0xa5: /* movs */
2206 handle_rep_prefix();
2207 dst.type = OP_MEM;
2208 dst.bytes = (d & ByteOp) ? 1 : op_bytes;
2209 dst.mem.seg = x86_seg_es;
2210 dst.mem.off = truncate_ea(_regs.edi);
2211 if ( (rc = ops->read(ea.mem.seg, truncate_ea(_regs.esi),
2212 &dst.val, dst.bytes, ctxt)) != 0 )
2213 goto done;
2214 register_address_increment(
2215 _regs.esi, (_regs.eflags & EFLG_DF) ? -dst.bytes : dst.bytes);
2216 register_address_increment(
2217 _regs.edi, (_regs.eflags & EFLG_DF) ? -dst.bytes : dst.bytes);
2218 break;
2220 case 0xa6 ... 0xa7: /* cmps */ {
2221 unsigned long next_eip = _regs.eip;
2222 handle_rep_prefix();
2223 src.bytes = dst.bytes = (d & ByteOp) ? 1 : op_bytes;
2224 if ( (rc = ops->read(ea.mem.seg, truncate_ea(_regs.esi),
2225 &dst.val, dst.bytes, ctxt)) ||
2226 (rc = ops->read(x86_seg_es, truncate_ea(_regs.edi),
2227 &src.val, src.bytes, ctxt)) )
2228 goto done;
2229 register_address_increment(
2230 _regs.esi, (_regs.eflags & EFLG_DF) ? -dst.bytes : dst.bytes);
2231 register_address_increment(
2232 _regs.edi, (_regs.eflags & EFLG_DF) ? -src.bytes : src.bytes);
2233 /* cmp: dst - src ==> src=*%%edi,dst=*%%esi ==> *%%esi - *%%edi */
2234 emulate_2op_SrcV("cmp", src, dst, _regs.eflags);
2235 if ( ((rep_prefix == REPE_PREFIX) && !(_regs.eflags & EFLG_ZF)) ||
2236 ((rep_prefix == REPNE_PREFIX) && (_regs.eflags & EFLG_ZF)) )
2237 _regs.eip = next_eip;
2238 break;
2241 case 0xaa ... 0xab: /* stos */
2242 handle_rep_prefix();
2243 dst.type = OP_MEM;
2244 dst.bytes = (d & ByteOp) ? 1 : op_bytes;
2245 dst.mem.seg = x86_seg_es;
2246 dst.mem.off = truncate_ea(_regs.edi);
2247 dst.val = _regs.eax;
2248 register_address_increment(
2249 _regs.edi, (_regs.eflags & EFLG_DF) ? -dst.bytes : dst.bytes);
2250 break;
2252 case 0xac ... 0xad: /* lods */
2253 handle_rep_prefix();
2254 dst.type = OP_REG;
2255 dst.bytes = (d & ByteOp) ? 1 : op_bytes;
2256 dst.reg = (unsigned long *)&_regs.eax;
2257 if ( (rc = ops->read(ea.mem.seg, truncate_ea(_regs.esi),
2258 &dst.val, dst.bytes, ctxt)) != 0 )
2259 goto done;
2260 register_address_increment(
2261 _regs.esi, (_regs.eflags & EFLG_DF) ? -dst.bytes : dst.bytes);
2262 break;
2264 case 0xae ... 0xaf: /* scas */ {
2265 unsigned long next_eip = _regs.eip;
2266 handle_rep_prefix();
2267 src.bytes = dst.bytes = (d & ByteOp) ? 1 : op_bytes;
2268 dst.val = _regs.eax;
2269 if ( (rc = ops->read(x86_seg_es, truncate_ea(_regs.edi),
2270 &src.val, src.bytes, ctxt)) != 0 )
2271 goto done;
2272 register_address_increment(
2273 _regs.edi, (_regs.eflags & EFLG_DF) ? -src.bytes : src.bytes);
2274 /* cmp: dst - src ==> src=*%%edi,dst=%%eax ==> %%eax - *%%edi */
2275 emulate_2op_SrcV("cmp", src, dst, _regs.eflags);
2276 if ( ((rep_prefix == REPE_PREFIX) && !(_regs.eflags & EFLG_ZF)) ||
2277 ((rep_prefix == REPNE_PREFIX) && (_regs.eflags & EFLG_ZF)) )
2278 _regs.eip = next_eip;
2279 break;
2282 case 0xc2: /* ret imm16 (near) */
2283 case 0xc3: /* ret (near) */ {
2284 int offset = (b == 0xc2) ? insn_fetch_type(uint16_t) : 0;
2285 op_bytes = mode_64bit() ? 8 : op_bytes;
2286 if ( (rc = ops->read(x86_seg_ss, sp_post_inc(op_bytes + offset),
2287 &dst.val, op_bytes, ctxt)) != 0 )
2288 goto done;
2289 _regs.eip = dst.val;
2290 break;
2293 case 0xc8: /* enter imm16,imm8 */ {
2294 uint16_t size = insn_fetch_type(uint16_t);
2295 uint8_t depth = insn_fetch_type(uint8_t) & 31;
2296 int i;
2298 dst.type = OP_REG;
2299 dst.bytes = (mode_64bit() && (op_bytes == 4)) ? 8 : op_bytes;
2300 dst.reg = (unsigned long *)&_regs.ebp;
2301 if ( (rc = ops->write(x86_seg_ss, sp_pre_dec(dst.bytes),
2302 _regs.ebp, dst.bytes, ctxt)) )
2303 goto done;
2304 dst.val = _regs.esp;
2306 if ( depth > 0 )
2308 for ( i = 1; i < depth; i++ )
2310 unsigned long ebp, temp_data;
2311 ebp = truncate_word(_regs.ebp - i*dst.bytes, ctxt->sp_size/8);
2312 if ( (rc = ops->read(x86_seg_ss, ebp,
2313 &temp_data, dst.bytes, ctxt)) ||
2314 (rc = ops->write(x86_seg_ss, sp_pre_dec(dst.bytes),
2315 temp_data, dst.bytes, ctxt)) )
2316 goto done;
2318 if ( (rc = ops->write(x86_seg_ss, sp_pre_dec(dst.bytes),
2319 dst.val, dst.bytes, ctxt)) )
2320 goto done;
2323 sp_pre_dec(size);
2324 break;
2327 case 0xc9: /* leave */
2328 /* First writeback, to %%esp. */
2329 dst.type = OP_REG;
2330 dst.bytes = (mode_64bit() && (op_bytes == 4)) ? 8 : op_bytes;
2331 dst.reg = (unsigned long *)&_regs.esp;
2332 dst.val = _regs.ebp;
2334 /* Flush first writeback, since there is a second. */
2335 switch ( dst.bytes )
2337 case 1: *(uint8_t *)dst.reg = (uint8_t)dst.val; break;
2338 case 2: *(uint16_t *)dst.reg = (uint16_t)dst.val; break;
2339 case 4: *dst.reg = (uint32_t)dst.val; break; /* 64b: zero-ext */
2340 case 8: *dst.reg = dst.val; break;
2343 /* Second writeback, to %%ebp. */
2344 dst.reg = (unsigned long *)&_regs.ebp;
2345 if ( (rc = ops->read(x86_seg_ss, sp_post_inc(dst.bytes),
2346 &dst.val, dst.bytes, ctxt)) )
2347 goto done;
2348 break;
2350 case 0xca: /* ret imm16 (far) */
2351 case 0xcb: /* ret (far) */ {
2352 int offset = (b == 0xca) ? insn_fetch_type(uint16_t) : 0;
2353 op_bytes = mode_64bit() ? 8 : op_bytes;
2354 if ( (rc = ops->read(x86_seg_ss, sp_post_inc(op_bytes),
2355 &dst.val, op_bytes, ctxt)) ||
2356 (rc = ops->read(x86_seg_ss, sp_post_inc(op_bytes + offset),
2357 &src.val, op_bytes, ctxt)) ||
2358 (rc = load_seg(x86_seg_cs, (uint16_t)src.val, ctxt, ops)) )
2359 goto done;
2360 _regs.eip = dst.val;
2361 break;
2364 case 0xcc: /* int3 */
2365 src.val = EXC_BP;
2366 goto swint;
2368 case 0xcd: /* int imm8 */
2369 src.val = insn_fetch_type(uint8_t);
2370 swint:
2371 fail_if(ops->inject_sw_interrupt == NULL);
2372 rc = ops->inject_sw_interrupt(src.val, _regs.eip - ctxt->regs->eip,
2373 ctxt) ? : X86EMUL_EXCEPTION;
2374 goto done;
2376 case 0xce: /* into */
2377 generate_exception_if(mode_64bit(), EXC_UD);
2378 if ( !(_regs.eflags & EFLG_OF) )
2379 break;
2380 src.val = EXC_OF;
2381 goto swint;
2383 case 0xcf: /* iret */ {
2384 unsigned long cs, eip, eflags;
2385 uint32_t mask = EFLG_VIP | EFLG_VIF | EFLG_VM;
2386 if ( !mode_iopl() )
2387 mask |= EFLG_IOPL;
2388 fail_if(!in_realmode(ctxt, ops));
2389 fail_if(ops->write_rflags == NULL);
2390 if ( (rc = ops->read(x86_seg_ss, sp_post_inc(op_bytes),
2391 &eip, op_bytes, ctxt)) ||
2392 (rc = ops->read(x86_seg_ss, sp_post_inc(op_bytes),
2393 &cs, op_bytes, ctxt)) ||
2394 (rc = ops->read(x86_seg_ss, sp_post_inc(op_bytes),
2395 &eflags, op_bytes, ctxt)) )
2396 goto done;
2397 if ( op_bytes == 2 )
2398 eflags = (uint16_t)eflags | (_regs.eflags & 0xffff0000u);
2399 eflags &= 0x257fd5;
2400 _regs.eflags &= mask;
2401 _regs.eflags |= (uint32_t)(eflags & ~mask) | 0x02;
2402 if ( (rc = ops->write_rflags(_regs.eflags, ctxt)) != 0 )
2403 goto done;
2404 _regs.eip = eip;
2405 if ( (rc = load_seg(x86_seg_cs, (uint16_t)cs, ctxt, ops)) != 0 )
2406 goto done;
2407 break;
2410 case 0xd4: /* aam */ {
2411 unsigned int base = insn_fetch_type(uint8_t);
2412 uint8_t al = _regs.eax;
2413 generate_exception_if(mode_64bit(), EXC_UD);
2414 generate_exception_if(base == 0, EXC_DE);
2415 *(uint16_t *)&_regs.eax = ((al / base) << 8) | (al % base);
2416 _regs.eflags &= ~(EFLG_SF|EFLG_ZF|EFLG_PF);
2417 _regs.eflags |= ((uint8_t)_regs.eax == 0) ? EFLG_ZF : 0;
2418 _regs.eflags |= (( int8_t)_regs.eax < 0) ? EFLG_SF : 0;
2419 _regs.eflags |= even_parity(_regs.eax) ? EFLG_PF : 0;
2420 break;
2423 case 0xd5: /* aad */ {
2424 unsigned int base = insn_fetch_type(uint8_t);
2425 uint16_t ax = _regs.eax;
2426 generate_exception_if(mode_64bit(), EXC_UD);
2427 *(uint16_t *)&_regs.eax = (uint8_t)(ax + ((ax >> 8) * base));
2428 _regs.eflags &= ~(EFLG_SF|EFLG_ZF|EFLG_PF);
2429 _regs.eflags |= ((uint8_t)_regs.eax == 0) ? EFLG_ZF : 0;
2430 _regs.eflags |= (( int8_t)_regs.eax < 0) ? EFLG_SF : 0;
2431 _regs.eflags |= even_parity(_regs.eax) ? EFLG_PF : 0;
2432 break;
2435 case 0xd6: /* salc */
2436 generate_exception_if(mode_64bit(), EXC_UD);
2437 *(uint8_t *)&_regs.eax = (_regs.eflags & EFLG_CF) ? 0xff : 0x00;
2438 break;
2440 case 0xd7: /* xlat */ {
2441 unsigned long al = (uint8_t)_regs.eax;
2442 if ( (rc = ops->read(ea.mem.seg, truncate_ea(_regs.ebx + al),
2443 &al, 1, ctxt)) != 0 )
2444 goto done;
2445 *(uint8_t *)&_regs.eax = al;
2446 break;
2449 case 0xe0 ... 0xe2: /* loop{,z,nz} */ {
2450 int rel = insn_fetch_type(int8_t);
2451 int do_jmp = !(_regs.eflags & EFLG_ZF); /* loopnz */
2452 if ( b == 0xe1 )
2453 do_jmp = !do_jmp; /* loopz */
2454 else if ( b == 0xe2 )
2455 do_jmp = 1; /* loop */
2456 switch ( ad_bytes )
2458 case 2:
2459 do_jmp &= --(*(uint16_t *)&_regs.ecx) != 0;
2460 break;
2461 case 4:
2462 do_jmp &= --(*(uint32_t *)&_regs.ecx) != 0;
2463 _regs.ecx = (uint32_t)_regs.ecx; /* zero extend in x86/64 mode */
2464 break;
2465 default: /* case 8: */
2466 do_jmp &= --_regs.ecx != 0;
2467 break;
2469 if ( do_jmp )
2470 jmp_rel(rel);
2471 break;
2474 case 0xe3: /* jcxz/jecxz (short) */ {
2475 int rel = insn_fetch_type(int8_t);
2476 if ( (ad_bytes == 2) ? !(uint16_t)_regs.ecx :
2477 (ad_bytes == 4) ? !(uint32_t)_regs.ecx : !_regs.ecx )
2478 jmp_rel(rel);
2479 break;
2482 case 0xe4: /* in imm8,%al */
2483 case 0xe5: /* in imm8,%eax */
2484 case 0xe6: /* out %al,imm8 */
2485 case 0xe7: /* out %eax,imm8 */
2486 case 0xec: /* in %dx,%al */
2487 case 0xed: /* in %dx,%eax */
2488 case 0xee: /* out %al,%dx */
2489 case 0xef: /* out %eax,%dx */ {
2490 unsigned int port = ((b < 0xe8)
2491 ? insn_fetch_type(uint8_t)
2492 : (uint16_t)_regs.edx);
2493 generate_exception_if(!mode_iopl(), EXC_GP);
2494 op_bytes = !(b & 1) ? 1 : (op_bytes == 8) ? 4 : op_bytes;
2495 if ( b & 2 )
2497 /* out */
2498 fail_if(ops->write_io == NULL);
2499 rc = ops->write_io(port, op_bytes, _regs.eax, ctxt);
2502 else
2504 /* in */
2505 dst.type = OP_REG;
2506 dst.bytes = op_bytes;
2507 dst.reg = (unsigned long *)&_regs.eax;
2508 fail_if(ops->read_io == NULL);
2509 rc = ops->read_io(port, dst.bytes, &dst.val, ctxt);
2511 if ( rc != 0 )
2512 goto done;
2513 break;
2516 case 0xe8: /* call (near) */ {
2517 int rel = (((op_bytes == 2) && !mode_64bit())
2518 ? (int32_t)insn_fetch_type(int16_t)
2519 : insn_fetch_type(int32_t));
2520 op_bytes = mode_64bit() ? 8 : op_bytes;
2521 src.val = _regs.eip;
2522 jmp_rel(rel);
2523 goto push;
2526 case 0xe9: /* jmp (near) */ {
2527 int rel = (((op_bytes == 2) && !mode_64bit())
2528 ? (int32_t)insn_fetch_type(int16_t)
2529 : insn_fetch_type(int32_t));
2530 jmp_rel(rel);
2531 break;
2534 case 0xea: /* jmp (far, absolute) */ {
2535 uint16_t sel;
2536 uint32_t eip;
2537 generate_exception_if(mode_64bit(), EXC_UD);
2538 eip = insn_fetch_bytes(op_bytes);
2539 sel = insn_fetch_type(uint16_t);
2540 if ( (rc = load_seg(x86_seg_cs, sel, ctxt, ops)) != 0 )
2541 goto done;
2542 _regs.eip = eip;
2543 break;
2546 case 0xeb: /* jmp (short) */
2547 jmp_rel(insn_fetch_type(int8_t));
2548 break;
2550 case 0xf1: /* int1 (icebp) */
2551 src.val = EXC_DB;
2552 goto swint;
2554 case 0xf4: /* hlt */
2555 fail_if(ops->hlt == NULL);
2556 if ( (rc = ops->hlt(ctxt)) != 0 )
2557 goto done;
2558 break;
2560 case 0xf5: /* cmc */
2561 _regs.eflags ^= EFLG_CF;
2562 break;
2564 case 0xf8: /* clc */
2565 _regs.eflags &= ~EFLG_CF;
2566 break;
2568 case 0xf9: /* stc */
2569 _regs.eflags |= EFLG_CF;
2570 break;
2572 case 0xfa: /* cli */
2573 case 0xfb: /* sti */
2574 generate_exception_if(!mode_iopl(), EXC_GP);
2575 fail_if(ops->write_rflags == NULL);
2576 _regs.eflags &= ~EFLG_IF;
2577 if ( b == 0xfb ) /* sti */
2578 _regs.eflags |= EFLG_IF;
2579 if ( (rc = ops->write_rflags(_regs.eflags, ctxt)) != 0 )
2580 goto done;
2581 break;
2583 case 0xfc: /* cld */
2584 _regs.eflags &= ~EFLG_DF;
2585 break;
2587 case 0xfd: /* std */
2588 _regs.eflags |= EFLG_DF;
2589 break;
2591 goto writeback;
2593 twobyte_insn:
2594 switch ( b )
2596 case 0x40 ... 0x4f: /* cmovcc */
2597 dst.val = src.val;
2598 if ( !test_cc(b, _regs.eflags) )
2599 dst.type = OP_NONE;
2600 break;
2602 case 0x90 ... 0x9f: /* setcc */
2603 dst.val = test_cc(b, _regs.eflags);
2604 break;
2606 case 0xb0 ... 0xb1: /* cmpxchg */
2607 /* Save real source value, then compare EAX against destination. */
2608 src.orig_val = src.val;
2609 src.val = _regs.eax;
2610 emulate_2op_SrcV("cmp", src, dst, _regs.eflags);
2611 /* Always write back. The question is: where to? */
2612 d |= Mov;
2613 if ( _regs.eflags & EFLG_ZF )
2615 /* Success: write back to memory. */
2616 dst.val = src.orig_val;
2618 else
2620 /* Failure: write the value we saw to EAX. */
2621 dst.type = OP_REG;
2622 dst.reg = (unsigned long *)&_regs.eax;
2624 break;
2626 case 0xa3: bt: /* bt */
2627 emulate_2op_SrcV_nobyte("bt", src, dst, _regs.eflags);
2628 break;
2630 case 0xa4: /* shld imm8,r,r/m */
2631 case 0xa5: /* shld %%cl,r,r/m */
2632 case 0xac: /* shrd imm8,r,r/m */
2633 case 0xad: /* shrd %%cl,r,r/m */ {
2634 uint8_t shift, width = dst.bytes << 3;
2635 shift = (b & 1) ? (uint8_t)_regs.ecx : insn_fetch_type(uint8_t);
2636 if ( (shift &= width - 1) == 0 )
2637 break;
2638 dst.orig_val = truncate_word(dst.val, dst.bytes);
2639 dst.val = ((shift == width) ? src.val :
2640 (b & 8) ?
2641 /* shrd */
2642 ((dst.orig_val >> shift) |
2643 truncate_word(src.val << (width - shift), dst.bytes)) :
2644 /* shld */
2645 ((dst.orig_val << shift) |
2646 ((src.val >> (width - shift)) & ((1ull << shift) - 1))));
2647 dst.val = truncate_word(dst.val, dst.bytes);
2648 _regs.eflags &= ~(EFLG_OF|EFLG_SF|EFLG_ZF|EFLG_PF|EFLG_CF);
2649 if ( (dst.val >> ((b & 8) ? (shift - 1) : (width - shift))) & 1 )
2650 _regs.eflags |= EFLG_CF;
2651 if ( ((dst.val ^ dst.orig_val) >> (width - 1)) & 1 )
2652 _regs.eflags |= EFLG_OF;
2653 _regs.eflags |= ((dst.val >> (width - 1)) & 1) ? EFLG_SF : 0;
2654 _regs.eflags |= (dst.val == 0) ? EFLG_ZF : 0;
2655 _regs.eflags |= even_parity(dst.val) ? EFLG_PF : 0;
2656 break;
2659 case 0xb3: btr: /* btr */
2660 emulate_2op_SrcV_nobyte("btr", src, dst, _regs.eflags);
2661 break;
2663 case 0xab: bts: /* bts */
2664 emulate_2op_SrcV_nobyte("bts", src, dst, _regs.eflags);
2665 break;
2667 case 0xaf: /* imul */
2668 _regs.eflags &= ~(EFLG_OF|EFLG_CF);
2669 switch ( dst.bytes )
2671 case 2:
2672 dst.val = ((uint32_t)(int16_t)src.val *
2673 (uint32_t)(int16_t)dst.val);
2674 if ( (int16_t)dst.val != (uint32_t)dst.val )
2675 _regs.eflags |= EFLG_OF|EFLG_CF;
2676 break;
2677 #ifdef __x86_64__
2678 case 4:
2679 dst.val = ((uint64_t)(int32_t)src.val *
2680 (uint64_t)(int32_t)dst.val);
2681 if ( (int32_t)dst.val != dst.val )
2682 _regs.eflags |= EFLG_OF|EFLG_CF;
2683 break;
2684 #endif
2685 default: {
2686 unsigned long m[2] = { src.val, dst.val };
2687 if ( imul_dbl(m) )
2688 _regs.eflags |= EFLG_OF|EFLG_CF;
2689 dst.val = m[0];
2690 break;
2693 break;
2695 case 0xb2: /* lss */
2696 dst.val = x86_seg_ss;
2697 goto les;
2699 case 0xb4: /* lfs */
2700 dst.val = x86_seg_fs;
2701 goto les;
2703 case 0xb5: /* lgs */
2704 dst.val = x86_seg_gs;
2705 goto les;
2707 case 0xb6: /* movzx rm8,r{16,32,64} */
2708 /* Recompute DstReg as we may have decoded AH/BH/CH/DH. */
2709 dst.reg = decode_register(modrm_reg, &_regs, 0);
2710 dst.bytes = op_bytes;
2711 dst.val = (uint8_t)src.val;
2712 break;
2714 case 0xbc: /* bsf */ {
2715 int zf;
2716 asm ( "bsf %2,%0; setz %b1"
2717 : "=r" (dst.val), "=q" (zf)
2718 : "r" (src.val), "1" (0) );
2719 _regs.eflags &= ~EFLG_ZF;
2720 _regs.eflags |= zf ? EFLG_ZF : 0;
2721 break;
2724 case 0xbd: /* bsr */ {
2725 int zf;
2726 asm ( "bsr %2,%0; setz %b1"
2727 : "=r" (dst.val), "=q" (zf)
2728 : "r" (src.val), "1" (0) );
2729 _regs.eflags &= ~EFLG_ZF;
2730 _regs.eflags |= zf ? EFLG_ZF : 0;
2731 break;
2734 case 0xb7: /* movzx rm16,r{16,32,64} */
2735 dst.val = (uint16_t)src.val;
2736 break;
2738 case 0xbb: btc: /* btc */
2739 emulate_2op_SrcV_nobyte("btc", src, dst, _regs.eflags);
2740 break;
2742 case 0xba: /* Grp8 */
2743 switch ( modrm_reg & 7 )
2745 case 4: goto bt;
2746 case 5: goto bts;
2747 case 6: goto btr;
2748 case 7: goto btc;
2749 default: generate_exception_if(1, EXC_UD);
2751 break;
2753 case 0xbe: /* movsx rm8,r{16,32,64} */
2754 /* Recompute DstReg as we may have decoded AH/BH/CH/DH. */
2755 dst.reg = decode_register(modrm_reg, &_regs, 0);
2756 dst.bytes = op_bytes;
2757 dst.val = (int8_t)src.val;
2758 break;
2760 case 0xbf: /* movsx rm16,r{16,32,64} */
2761 dst.val = (int16_t)src.val;
2762 break;
2764 case 0xc0 ... 0xc1: /* xadd */
2765 /* Write back the register source. */
2766 switch ( dst.bytes )
2768 case 1: *(uint8_t *)src.reg = (uint8_t)dst.val; break;
2769 case 2: *(uint16_t *)src.reg = (uint16_t)dst.val; break;
2770 case 4: *src.reg = (uint32_t)dst.val; break; /* 64b reg: zero-extend */
2771 case 8: *src.reg = dst.val; break;
2773 goto add;
2775 goto writeback;
2777 twobyte_special_insn:
2778 switch ( b )
2780 case 0x01: /* Grp7 */ {
2781 struct segment_register reg;
2782 unsigned long base, limit, cr0, cr0w;
2784 switch ( modrm_reg & 7 )
2786 case 0: /* sgdt */
2787 case 1: /* sidt */
2788 generate_exception_if(ea.type != OP_MEM, EXC_UD);
2789 fail_if(ops->read_segment == NULL);
2790 if ( (rc = ops->read_segment((modrm_reg & 1) ?
2791 x86_seg_idtr : x86_seg_gdtr,
2792 &reg, ctxt)) )
2793 goto done;
2794 if ( op_bytes == 2 )
2795 reg.base &= 0xffffff;
2796 if ( (rc = ops->write(ea.mem.seg, ea.mem.off+0,
2797 reg.limit, 2, ctxt)) ||
2798 (rc = ops->write(ea.mem.seg, ea.mem.off+2,
2799 reg.base, mode_64bit() ? 8 : 4, ctxt)) )
2800 goto done;
2801 break;
2802 case 2: /* lgdt */
2803 case 3: /* lidt */
2804 generate_exception_if(ea.type != OP_MEM, EXC_UD);
2805 fail_if(ops->write_segment == NULL);
2806 memset(&reg, 0, sizeof(reg));
2807 if ( (rc = ops->read(ea.mem.seg, ea.mem.off+0,
2808 &limit, 2, ctxt)) ||
2809 (rc = ops->read(ea.mem.seg, ea.mem.off+2,
2810 &base, mode_64bit() ? 8 : 4, ctxt)) )
2811 goto done;
2812 reg.base = base;
2813 reg.limit = limit;
2814 if ( op_bytes == 2 )
2815 reg.base &= 0xffffff;
2816 if ( (rc = ops->write_segment((modrm_reg & 1) ?
2817 x86_seg_idtr : x86_seg_gdtr,
2818 &reg, ctxt)) )
2819 goto done;
2820 break;
2821 case 4: /* smsw */
2822 ea.bytes = 2;
2823 dst = ea;
2824 fail_if(ops->read_cr == NULL);
2825 if ( (rc = ops->read_cr(0, &dst.val, ctxt)) )
2826 goto done;
2827 d |= Mov; /* force writeback */
2828 break;
2829 case 6: /* lmsw */
2830 fail_if(ops->read_cr == NULL);
2831 fail_if(ops->write_cr == NULL);
2832 if ( (rc = ops->read_cr(0, &cr0, ctxt)) )
2833 goto done;
2834 if ( ea.type == OP_REG )
2835 cr0w = *ea.reg;
2836 else if ( (rc = ops->read(ea.mem.seg, ea.mem.off,
2837 &cr0w, 2, ctxt)) )
2838 goto done;
2839 cr0 &= 0xffff0000;
2840 cr0 |= (uint16_t)cr0w;
2841 if ( (rc = ops->write_cr(0, cr0, ctxt)) )
2842 goto done;
2843 break;
2844 default:
2845 goto cannot_emulate;
2847 break;
2850 case 0x06: /* clts */
2851 generate_exception_if(!mode_ring0(), EXC_GP);
2852 fail_if((ops->read_cr == NULL) || (ops->write_cr == NULL));
2853 if ( (rc = ops->read_cr(0, &dst.val, ctxt)) ||
2854 (rc = ops->write_cr(0, dst.val&~8, ctxt)) )
2855 goto done;
2856 break;
2858 case 0x08: /* invd */
2859 case 0x09: /* wbinvd */
2860 generate_exception_if(!mode_ring0(), EXC_GP);
2861 fail_if(ops->wbinvd == NULL);
2862 if ( (rc = ops->wbinvd(ctxt)) != 0 )
2863 goto done;
2864 break;
2866 case 0x0d: /* GrpP (prefetch) */
2867 case 0x18: /* Grp16 (prefetch/nop) */
2868 case 0x19 ... 0x1f: /* nop (amd-defined) */
2869 break;
2871 case 0x20: /* mov cr,reg */
2872 case 0x21: /* mov dr,reg */
2873 case 0x22: /* mov reg,cr */
2874 case 0x23: /* mov reg,dr */
2875 generate_exception_if(!mode_ring0(), EXC_GP);
2876 modrm_rm |= (rex_prefix & 1) << 3;
2877 modrm_reg |= lock_prefix << 3;
2878 if ( b & 2 )
2880 /* Write to CR/DR. */
2881 src.val = *(unsigned long *)decode_register(modrm_rm, &_regs, 0);
2882 if ( !mode_64bit() )
2883 src.val = (uint32_t)src.val;
2884 rc = ((b & 1)
2885 ? (ops->write_dr
2886 ? ops->write_dr(modrm_reg, src.val, ctxt)
2887 : X86EMUL_UNHANDLEABLE)
2888 : (ops->write_cr
2889 ? ops->write_cr(modrm_reg, src.val, ctxt)
2890 : X86EMUL_UNHANDLEABLE));
2892 else
2894 /* Read from CR/DR. */
2895 dst.type = OP_REG;
2896 dst.bytes = mode_64bit() ? 8 : 4;
2897 dst.reg = decode_register(modrm_rm, &_regs, 0);
2898 rc = ((b & 1)
2899 ? (ops->read_dr
2900 ? ops->read_dr(modrm_reg, &dst.val, ctxt)
2901 : X86EMUL_UNHANDLEABLE)
2902 : (ops->read_cr
2903 ? ops->read_cr(modrm_reg, &dst.val, ctxt)
2904 : X86EMUL_UNHANDLEABLE));
2906 if ( rc != 0 )
2907 goto done;
2908 break;
2910 case 0x30: /* wrmsr */ {
2911 uint64_t val = ((uint64_t)_regs.edx << 32) | (uint32_t)_regs.eax;
2912 generate_exception_if(!mode_ring0(), EXC_GP);
2913 fail_if(ops->write_msr == NULL);
2914 if ( (rc = ops->write_msr((uint32_t)_regs.ecx, val, ctxt)) != 0 )
2915 goto done;
2916 break;
2919 case 0x31: /* rdtsc */ {
2920 unsigned long cr4;
2921 uint64_t val;
2922 fail_if(ops->read_cr == NULL);
2923 if ( (rc = ops->read_cr(4, &cr4, ctxt)) )
2924 goto done;
2925 generate_exception_if((cr4 & CR4_TSD) && !mode_ring0(), EXC_GP);
2926 fail_if(ops->read_msr == NULL);
2927 if ( (rc = ops->read_msr(MSR_TSC, &val, ctxt)) != 0 )
2928 goto done;
2929 _regs.edx = (uint32_t)(val >> 32);
2930 _regs.eax = (uint32_t)(val >> 0);
2931 break;
2934 case 0x32: /* rdmsr */ {
2935 uint64_t val;
2936 generate_exception_if(!mode_ring0(), EXC_GP);
2937 fail_if(ops->read_msr == NULL);
2938 if ( (rc = ops->read_msr((uint32_t)_regs.ecx, &val, ctxt)) != 0 )
2939 goto done;
2940 _regs.edx = (uint32_t)(val >> 32);
2941 _regs.eax = (uint32_t)(val >> 0);
2942 break;
2945 case 0x80 ... 0x8f: /* jcc (near) */ {
2946 int rel = (((op_bytes == 2) && !mode_64bit())
2947 ? (int32_t)insn_fetch_type(int16_t)
2948 : insn_fetch_type(int32_t));
2949 if ( test_cc(b, _regs.eflags) )
2950 jmp_rel(rel);
2951 break;
2954 case 0xa0: /* push %%fs */
2955 src.val = x86_seg_fs;
2956 goto push_seg;
2958 case 0xa1: /* pop %%fs */
2959 src.val = x86_seg_fs;
2960 goto pop_seg;
2962 case 0xa2: /* cpuid */ {
2963 unsigned int eax = _regs.eax, ebx = _regs.ebx;
2964 unsigned int ecx = _regs.ecx, edx = _regs.edx;
2965 fail_if(ops->cpuid == NULL);
2966 if ( (rc = ops->cpuid(&eax, &ebx, &ecx, &edx, ctxt)) != 0 )
2967 goto done;
2968 _regs.eax = eax; _regs.ebx = ebx;
2969 _regs.ecx = ecx; _regs.edx = edx;
2970 break;
2973 case 0xa8: /* push %%gs */
2974 src.val = x86_seg_gs;
2975 goto push_seg;
2977 case 0xa9: /* pop %%gs */
2978 src.val = x86_seg_gs;
2979 goto pop_seg;
2981 case 0xc7: /* Grp9 (cmpxchg8b) */
2982 #if defined(__i386__)
2984 unsigned long old_lo, old_hi;
2985 generate_exception_if((modrm_reg & 7) != 1, EXC_UD);
2986 generate_exception_if(ea.type != OP_MEM, EXC_UD);
2987 if ( (rc = ops->read(ea.mem.seg, ea.mem.off+0, &old_lo, 4, ctxt)) ||
2988 (rc = ops->read(ea.mem.seg, ea.mem.off+4, &old_hi, 4, ctxt)) )
2989 goto done;
2990 if ( (old_lo != _regs.eax) || (old_hi != _regs.edx) )
2992 _regs.eax = old_lo;
2993 _regs.edx = old_hi;
2994 _regs.eflags &= ~EFLG_ZF;
2996 else if ( ops->cmpxchg8b == NULL )
2998 rc = X86EMUL_UNHANDLEABLE;
2999 goto done;
3001 else
3003 if ( (rc = ops->cmpxchg8b(ea.mem.seg, ea.mem.off, old_lo, old_hi,
3004 _regs.ebx, _regs.ecx, ctxt)) != 0 )
3005 goto done;
3006 _regs.eflags |= EFLG_ZF;
3008 break;
3010 #elif defined(__x86_64__)
3012 unsigned long old, new;
3013 generate_exception_if((modrm_reg & 7) != 1, EXC_UD);
3014 generate_exception_if(ea.type != OP_MEM, EXC_UD);
3015 if ( (rc = ops->read(ea.mem.seg, ea.mem.off, &old, 8, ctxt)) != 0 )
3016 goto done;
3017 if ( ((uint32_t)(old>>0) != (uint32_t)_regs.eax) ||
3018 ((uint32_t)(old>>32) != (uint32_t)_regs.edx) )
3020 _regs.eax = (uint32_t)(old>>0);
3021 _regs.edx = (uint32_t)(old>>32);
3022 _regs.eflags &= ~EFLG_ZF;
3024 else
3026 new = (_regs.ecx<<32)|(uint32_t)_regs.ebx;
3027 if ( (rc = ops->cmpxchg(ea.mem.seg, ea.mem.off, old,
3028 new, 8, ctxt)) != 0 )
3029 goto done;
3030 _regs.eflags |= EFLG_ZF;
3032 break;
3034 #endif
3036 case 0xc8 ... 0xcf: /* bswap */
3037 dst.type = OP_REG;
3038 dst.reg = decode_register(
3039 (b & 7) | ((rex_prefix & 1) << 3), &_regs, 0);
3040 switch ( dst.bytes = op_bytes )
3042 default: /* case 2: */
3043 /* Undefined behaviour. Writes zero on all tested CPUs. */
3044 dst.val = 0;
3045 break;
3046 case 4:
3047 #ifdef __x86_64__
3048 asm ( "bswap %k0" : "=r" (dst.val) : "0" (*dst.reg) );
3049 break;
3050 case 8:
3051 #endif
3052 asm ( "bswap %0" : "=r" (dst.val) : "0" (*dst.reg) );
3053 break;
3055 break;
3057 goto writeback;
3059 cannot_emulate:
3060 #if 0
3061 gdprintk(XENLOG_DEBUG, "Instr:");
3062 for ( ea.mem.off = ctxt->regs->eip; ea.mem.off < _regs.eip; ea.mem.off++ )
3064 unsigned long x;
3065 ops->insn_fetch(x86_seg_cs, ea.mem.off, &x, 1, ctxt);
3066 printk(" %02x", (uint8_t)x);
3068 printk("\n");
3069 #endif
3070 return X86EMUL_UNHANDLEABLE;