ia64/xen-unstable

view xen/arch/x86/x86_emulate.c @ 16696:b5b3e27f1af3

x86_emulate: Correct RIP-relative addressing offset for SHLD/SHRD with
immediate byte third operand.
Signed-off-by: Keir Fraser <keir.fraser@citrix.com>
author Keir Fraser <keir.fraser@citrix.com>
date Wed Jan 09 10:11:31 2008 +0000 (2008-01-09)
parents 4c1a0d2a318d
children 7e400607cdd8
line source
1 /******************************************************************************
2 * x86_emulate.c
3 *
4 * Generic x86 (32-bit and 64-bit) instruction decoder and emulator.
5 *
6 * Copyright (c) 2005-2007 Keir Fraser
7 * Copyright (c) 2005-2007 XenSource Inc.
8 *
9 * This program is free software; you can redistribute it and/or modify
10 * it under the terms of the GNU General Public License as published by
11 * the Free Software Foundation; either version 2 of the License, or
12 * (at your option) any later version.
13 *
14 * This program is distributed in the hope that it will be useful,
15 * but WITHOUT ANY WARRANTY; without even the implied warranty of
16 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
17 * GNU General Public License for more details.
18 *
19 * You should have received a copy of the GNU General Public License
20 * along with this program; if not, write to the Free Software
21 * Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
22 */
24 #ifndef __XEN__
25 #include <stddef.h>
26 #include <stdint.h>
27 #include <public/xen.h>
28 #else
29 #include <xen/config.h>
30 #include <xen/types.h>
31 #include <xen/lib.h>
32 #include <asm/regs.h>
33 #undef cmpxchg
34 #endif
35 #include <asm-x86/x86_emulate.h>
/*
 * Instruction-decode descriptor flags, one byte per opcode in the tables
 * below. Bit layout: bit 0 = operand size, bits 2:1 = destination type,
 * bits 5:3 = source type, bit 6 = ModRM present, bit 7 = write-only dest.
 */
/* Operand sizes: 8-bit operands or specified/overridden size. */
#define ByteOp      (1<<0) /* 8-bit operands. */
/* Destination operand type. */
#define DstBitBase  (0<<1) /* Memory operand, bit string. */
#define ImplicitOps (1<<1) /* Implicit in opcode. No generic decode. */
#define DstReg      (2<<1) /* Register operand. */
#define DstMem      (3<<1) /* Memory operand. */
#define DstMask     (3<<1) /* Mask extracting the destination type. */
/* Source operand type. */
#define SrcNone     (0<<3) /* No source operand. */
#define SrcImplicit (0<<3) /* Source operand is implicit in the opcode. */
#define SrcReg      (1<<3) /* Register operand. */
#define SrcMem      (2<<3) /* Memory operand. */
#define SrcMem16    (3<<3) /* Memory operand (16-bit). */
#define SrcImm      (4<<3) /* Immediate operand. */
#define SrcImmByte  (5<<3) /* 8-bit sign-extended immediate operand. */
#define SrcMask     (7<<3) /* Mask extracting the source type. */
/* Generic ModRM decode. */
#define ModRM       (1<<6)
/* Destination is only written; never read. */
#define Mov         (1<<7)
59 static uint8_t opcode_table[256] = {
60 /* 0x00 - 0x07 */
61 ByteOp|DstMem|SrcReg|ModRM, DstMem|SrcReg|ModRM,
62 ByteOp|DstReg|SrcMem|ModRM, DstReg|SrcMem|ModRM,
63 ByteOp|DstReg|SrcImm, DstReg|SrcImm, ImplicitOps, ImplicitOps,
64 /* 0x08 - 0x0F */
65 ByteOp|DstMem|SrcReg|ModRM, DstMem|SrcReg|ModRM,
66 ByteOp|DstReg|SrcMem|ModRM, DstReg|SrcMem|ModRM,
67 ByteOp|DstReg|SrcImm, DstReg|SrcImm, ImplicitOps, 0,
68 /* 0x10 - 0x17 */
69 ByteOp|DstMem|SrcReg|ModRM, DstMem|SrcReg|ModRM,
70 ByteOp|DstReg|SrcMem|ModRM, DstReg|SrcMem|ModRM,
71 ByteOp|DstReg|SrcImm, DstReg|SrcImm, ImplicitOps, ImplicitOps,
72 /* 0x18 - 0x1F */
73 ByteOp|DstMem|SrcReg|ModRM, DstMem|SrcReg|ModRM,
74 ByteOp|DstReg|SrcMem|ModRM, DstReg|SrcMem|ModRM,
75 ByteOp|DstReg|SrcImm, DstReg|SrcImm, ImplicitOps, ImplicitOps,
76 /* 0x20 - 0x27 */
77 ByteOp|DstMem|SrcReg|ModRM, DstMem|SrcReg|ModRM,
78 ByteOp|DstReg|SrcMem|ModRM, DstReg|SrcMem|ModRM,
79 ByteOp|DstReg|SrcImm, DstReg|SrcImm, 0, ImplicitOps,
80 /* 0x28 - 0x2F */
81 ByteOp|DstMem|SrcReg|ModRM, DstMem|SrcReg|ModRM,
82 ByteOp|DstReg|SrcMem|ModRM, DstReg|SrcMem|ModRM,
83 ByteOp|DstReg|SrcImm, DstReg|SrcImm, 0, ImplicitOps,
84 /* 0x30 - 0x37 */
85 ByteOp|DstMem|SrcReg|ModRM, DstMem|SrcReg|ModRM,
86 ByteOp|DstReg|SrcMem|ModRM, DstReg|SrcMem|ModRM,
87 ByteOp|DstReg|SrcImm, DstReg|SrcImm, 0, ImplicitOps,
88 /* 0x38 - 0x3F */
89 ByteOp|DstMem|SrcReg|ModRM, DstMem|SrcReg|ModRM,
90 ByteOp|DstReg|SrcMem|ModRM, DstReg|SrcMem|ModRM,
91 ByteOp|DstReg|SrcImm, DstReg|SrcImm, 0, ImplicitOps,
92 /* 0x40 - 0x4F */
93 ImplicitOps, ImplicitOps, ImplicitOps, ImplicitOps,
94 ImplicitOps, ImplicitOps, ImplicitOps, ImplicitOps,
95 ImplicitOps, ImplicitOps, ImplicitOps, ImplicitOps,
96 ImplicitOps, ImplicitOps, ImplicitOps, ImplicitOps,
97 /* 0x50 - 0x5F */
98 ImplicitOps|Mov, ImplicitOps|Mov, ImplicitOps|Mov, ImplicitOps|Mov,
99 ImplicitOps|Mov, ImplicitOps|Mov, ImplicitOps|Mov, ImplicitOps|Mov,
100 ImplicitOps|Mov, ImplicitOps|Mov, ImplicitOps|Mov, ImplicitOps|Mov,
101 ImplicitOps|Mov, ImplicitOps|Mov, ImplicitOps|Mov, ImplicitOps|Mov,
102 /* 0x60 - 0x67 */
103 ImplicitOps, ImplicitOps, DstReg|SrcMem|ModRM, DstReg|SrcMem16|ModRM|Mov,
104 0, 0, 0, 0,
105 /* 0x68 - 0x6F */
106 ImplicitOps|Mov, DstReg|SrcImm|ModRM|Mov,
107 ImplicitOps|Mov, DstReg|SrcImmByte|ModRM|Mov,
108 ImplicitOps, ImplicitOps, ImplicitOps, ImplicitOps,
109 /* 0x70 - 0x77 */
110 ImplicitOps, ImplicitOps, ImplicitOps, ImplicitOps,
111 ImplicitOps, ImplicitOps, ImplicitOps, ImplicitOps,
112 /* 0x78 - 0x7F */
113 ImplicitOps, ImplicitOps, ImplicitOps, ImplicitOps,
114 ImplicitOps, ImplicitOps, ImplicitOps, ImplicitOps,
115 /* 0x80 - 0x87 */
116 ByteOp|DstMem|SrcImm|ModRM, DstMem|SrcImm|ModRM,
117 ByteOp|DstMem|SrcImm|ModRM, DstMem|SrcImmByte|ModRM,
118 ByteOp|DstMem|SrcReg|ModRM, DstMem|SrcReg|ModRM,
119 ByteOp|DstMem|SrcReg|ModRM, DstMem|SrcReg|ModRM,
120 /* 0x88 - 0x8F */
121 ByteOp|DstMem|SrcReg|ModRM|Mov, DstMem|SrcReg|ModRM|Mov,
122 ByteOp|DstReg|SrcMem|ModRM|Mov, DstReg|SrcMem|ModRM|Mov,
123 DstMem|SrcReg|ModRM|Mov, DstReg|SrcNone|ModRM,
124 DstReg|SrcMem|ModRM|Mov, DstMem|SrcNone|ModRM|Mov,
125 /* 0x90 - 0x97 */
126 ImplicitOps, ImplicitOps, ImplicitOps, ImplicitOps,
127 ImplicitOps, ImplicitOps, ImplicitOps, ImplicitOps,
128 /* 0x98 - 0x9F */
129 ImplicitOps, ImplicitOps, ImplicitOps, 0,
130 ImplicitOps, ImplicitOps, ImplicitOps, ImplicitOps,
131 /* 0xA0 - 0xA7 */
132 ByteOp|ImplicitOps|Mov, ImplicitOps|Mov,
133 ByteOp|ImplicitOps|Mov, ImplicitOps|Mov,
134 ByteOp|ImplicitOps|Mov, ImplicitOps|Mov,
135 ByteOp|ImplicitOps, ImplicitOps,
136 /* 0xA8 - 0xAF */
137 ByteOp|DstReg|SrcImm, DstReg|SrcImm,
138 ByteOp|ImplicitOps|Mov, ImplicitOps|Mov,
139 ByteOp|ImplicitOps|Mov, ImplicitOps|Mov,
140 ByteOp|ImplicitOps, ImplicitOps,
141 /* 0xB0 - 0xB7 */
142 ByteOp|DstReg|SrcImm|Mov, ByteOp|DstReg|SrcImm|Mov,
143 ByteOp|DstReg|SrcImm|Mov, ByteOp|DstReg|SrcImm|Mov,
144 ByteOp|DstReg|SrcImm|Mov, ByteOp|DstReg|SrcImm|Mov,
145 ByteOp|DstReg|SrcImm|Mov, ByteOp|DstReg|SrcImm|Mov,
146 /* 0xB8 - 0xBF */
147 DstReg|SrcImm|Mov, DstReg|SrcImm|Mov, DstReg|SrcImm|Mov, DstReg|SrcImm|Mov,
148 DstReg|SrcImm|Mov, DstReg|SrcImm|Mov, DstReg|SrcImm|Mov, DstReg|SrcImm|Mov,
149 /* 0xC0 - 0xC7 */
150 ByteOp|DstMem|SrcImm|ModRM, DstMem|SrcImmByte|ModRM,
151 ImplicitOps, ImplicitOps,
152 DstReg|SrcMem|ModRM|Mov, DstReg|SrcMem|ModRM|Mov,
153 ByteOp|DstMem|SrcImm|ModRM|Mov, DstMem|SrcImm|ModRM|Mov,
154 /* 0xC8 - 0xCF */
155 ImplicitOps, ImplicitOps, ImplicitOps, ImplicitOps,
156 ImplicitOps, ImplicitOps, ImplicitOps, ImplicitOps,
157 /* 0xD0 - 0xD7 */
158 ByteOp|DstMem|SrcImplicit|ModRM, DstMem|SrcImplicit|ModRM,
159 ByteOp|DstMem|SrcImplicit|ModRM, DstMem|SrcImplicit|ModRM,
160 ImplicitOps, ImplicitOps, ImplicitOps, ImplicitOps,
161 /* 0xD8 - 0xDF */
162 0, 0, 0, 0, 0, 0, 0, 0,
163 /* 0xE0 - 0xE7 */
164 ImplicitOps, ImplicitOps, ImplicitOps, ImplicitOps,
165 ImplicitOps, ImplicitOps, ImplicitOps, ImplicitOps,
166 /* 0xE8 - 0xEF */
167 ImplicitOps, ImplicitOps, ImplicitOps, ImplicitOps,
168 ImplicitOps, ImplicitOps, ImplicitOps, ImplicitOps,
169 /* 0xF0 - 0xF7 */
170 0, ImplicitOps, 0, 0,
171 ImplicitOps, ImplicitOps,
172 ByteOp|DstMem|SrcNone|ModRM, DstMem|SrcNone|ModRM,
173 /* 0xF8 - 0xFF */
174 ImplicitOps, ImplicitOps, ImplicitOps, ImplicitOps,
175 ImplicitOps, ImplicitOps, ByteOp|DstMem|SrcNone|ModRM, DstMem|SrcNone|ModRM
176 };
178 static uint8_t twobyte_table[256] = {
179 /* 0x00 - 0x07 */
180 0, ImplicitOps|ModRM, 0, 0, 0, ImplicitOps, 0, 0,
181 /* 0x08 - 0x0F */
182 ImplicitOps, ImplicitOps, 0, 0, 0, ImplicitOps|ModRM, 0, 0,
183 /* 0x10 - 0x17 */
184 0, 0, 0, 0, 0, 0, 0, 0,
185 /* 0x18 - 0x1F */
186 ImplicitOps|ModRM, ImplicitOps|ModRM, ImplicitOps|ModRM, ImplicitOps|ModRM,
187 ImplicitOps|ModRM, ImplicitOps|ModRM, ImplicitOps|ModRM, ImplicitOps|ModRM,
188 /* 0x20 - 0x27 */
189 ImplicitOps|ModRM, ImplicitOps|ModRM, ImplicitOps|ModRM, ImplicitOps|ModRM,
190 0, 0, 0, 0,
191 /* 0x28 - 0x2F */
192 0, 0, 0, 0, 0, 0, 0, 0,
193 /* 0x30 - 0x37 */
194 ImplicitOps, ImplicitOps, ImplicitOps, 0, 0, 0, 0, 0,
195 /* 0x38 - 0x3F */
196 0, 0, 0, 0, 0, 0, 0, 0,
197 /* 0x40 - 0x47 */
198 DstReg|SrcMem|ModRM|Mov, DstReg|SrcMem|ModRM|Mov,
199 DstReg|SrcMem|ModRM|Mov, DstReg|SrcMem|ModRM|Mov,
200 DstReg|SrcMem|ModRM|Mov, DstReg|SrcMem|ModRM|Mov,
201 DstReg|SrcMem|ModRM|Mov, DstReg|SrcMem|ModRM|Mov,
202 /* 0x48 - 0x4F */
203 DstReg|SrcMem|ModRM|Mov, DstReg|SrcMem|ModRM|Mov,
204 DstReg|SrcMem|ModRM|Mov, DstReg|SrcMem|ModRM|Mov,
205 DstReg|SrcMem|ModRM|Mov, DstReg|SrcMem|ModRM|Mov,
206 DstReg|SrcMem|ModRM|Mov, DstReg|SrcMem|ModRM|Mov,
207 /* 0x50 - 0x5F */
208 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
209 /* 0x60 - 0x6F */
210 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
211 /* 0x70 - 0x7F */
212 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
213 /* 0x80 - 0x87 */
214 ImplicitOps, ImplicitOps, ImplicitOps, ImplicitOps,
215 ImplicitOps, ImplicitOps, ImplicitOps, ImplicitOps,
216 /* 0x88 - 0x8F */
217 ImplicitOps, ImplicitOps, ImplicitOps, ImplicitOps,
218 ImplicitOps, ImplicitOps, ImplicitOps, ImplicitOps,
219 /* 0x90 - 0x97 */
220 ByteOp|DstMem|SrcNone|ModRM|Mov, ByteOp|DstMem|SrcNone|ModRM|Mov,
221 ByteOp|DstMem|SrcNone|ModRM|Mov, ByteOp|DstMem|SrcNone|ModRM|Mov,
222 ByteOp|DstMem|SrcNone|ModRM|Mov, ByteOp|DstMem|SrcNone|ModRM|Mov,
223 ByteOp|DstMem|SrcNone|ModRM|Mov, ByteOp|DstMem|SrcNone|ModRM|Mov,
224 /* 0x98 - 0x9F */
225 ByteOp|DstMem|SrcNone|ModRM|Mov, ByteOp|DstMem|SrcNone|ModRM|Mov,
226 ByteOp|DstMem|SrcNone|ModRM|Mov, ByteOp|DstMem|SrcNone|ModRM|Mov,
227 ByteOp|DstMem|SrcNone|ModRM|Mov, ByteOp|DstMem|SrcNone|ModRM|Mov,
228 ByteOp|DstMem|SrcNone|ModRM|Mov, ByteOp|DstMem|SrcNone|ModRM|Mov,
229 /* 0xA0 - 0xA7 */
230 ImplicitOps, ImplicitOps, ImplicitOps, DstBitBase|SrcReg|ModRM,
231 DstMem|SrcReg|ModRM, DstMem|SrcReg|ModRM, 0, 0,
232 /* 0xA8 - 0xAF */
233 ImplicitOps, ImplicitOps, 0, DstBitBase|SrcReg|ModRM,
234 DstMem|SrcReg|ModRM, DstMem|SrcReg|ModRM, 0, DstReg|SrcMem|ModRM,
235 /* 0xB0 - 0xB7 */
236 ByteOp|DstMem|SrcReg|ModRM, DstMem|SrcReg|ModRM,
237 DstReg|SrcMem|ModRM|Mov, DstBitBase|SrcReg|ModRM,
238 DstReg|SrcMem|ModRM|Mov, DstReg|SrcMem|ModRM|Mov,
239 ByteOp|DstReg|SrcMem|ModRM|Mov, DstReg|SrcMem16|ModRM|Mov,
240 /* 0xB8 - 0xBF */
241 0, 0, DstBitBase|SrcImmByte|ModRM, DstBitBase|SrcReg|ModRM,
242 DstReg|SrcMem|ModRM, DstReg|SrcMem|ModRM,
243 ByteOp|DstReg|SrcMem|ModRM|Mov, DstReg|SrcMem16|ModRM|Mov,
244 /* 0xC0 - 0xC7 */
245 ByteOp|DstMem|SrcReg|ModRM, DstMem|SrcReg|ModRM, 0, 0,
246 0, 0, 0, ImplicitOps|ModRM,
247 /* 0xC8 - 0xCF */
248 ImplicitOps, ImplicitOps, ImplicitOps, ImplicitOps,
249 ImplicitOps, ImplicitOps, ImplicitOps, ImplicitOps,
250 /* 0xD0 - 0xDF */
251 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
252 /* 0xE0 - 0xEF */
253 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
254 /* 0xF0 - 0xFF */
255 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0
256 };
258 /* Type, address-of, and value of an instruction's operand. */
259 struct operand {
260 enum { OP_REG, OP_MEM, OP_IMM, OP_NONE } type;
261 unsigned int bytes;
262 unsigned long val, orig_val;
263 union {
264 /* OP_REG: Pointer to register field. */
265 unsigned long *reg;
266 /* OP_MEM: Segment and offset. */
267 struct {
268 enum x86_segment seg;
269 unsigned long off;
270 } mem;
271 };
272 };
274 /* MSRs. */
275 #define MSR_TSC 0x10
277 /* Control register flags. */
278 #define CR0_PE (1<<0)
279 #define CR4_TSD (1<<2)
281 /* EFLAGS bit definitions. */
282 #define EFLG_VIP (1<<20)
283 #define EFLG_VIF (1<<19)
284 #define EFLG_AC (1<<18)
285 #define EFLG_VM (1<<17)
286 #define EFLG_RF (1<<16)
287 #define EFLG_NT (1<<14)
288 #define EFLG_IOPL (3<<12)
289 #define EFLG_OF (1<<11)
290 #define EFLG_DF (1<<10)
291 #define EFLG_IF (1<<9)
292 #define EFLG_TF (1<<8)
293 #define EFLG_SF (1<<7)
294 #define EFLG_ZF (1<<6)
295 #define EFLG_AF (1<<4)
296 #define EFLG_PF (1<<2)
297 #define EFLG_CF (1<<0)
299 /* Exception definitions. */
300 #define EXC_DE 0
301 #define EXC_DB 1
302 #define EXC_BP 3
303 #define EXC_OF 4
304 #define EXC_BR 5
305 #define EXC_UD 6
306 #define EXC_GP 13
308 /*
309 * Instruction emulation:
310 * Most instructions are emulated directly via a fragment of inline assembly
311 * code. This allows us to save/restore EFLAGS and thus very easily pick up
312 * any modified flags.
313 */
315 #if defined(__x86_64__)
316 #define _LO32 "k" /* force 32-bit operand */
317 #define _STK "%%rsp" /* stack pointer */
318 #define _BYTES_PER_LONG "8"
319 #elif defined(__i386__)
320 #define _LO32 "" /* force 32-bit operand */
321 #define _STK "%%esp" /* stack pointer */
322 #define _BYTES_PER_LONG "4"
323 #endif
325 /*
326 * These EFLAGS bits are restored from saved value during emulation, and
327 * any changes are written back to the saved value after emulation.
328 */
329 #define EFLAGS_MASK (EFLG_OF|EFLG_SF|EFLG_ZF|EFLG_AF|EFLG_PF|EFLG_CF)
331 /* Before executing instruction: restore necessary bits in EFLAGS. */
332 #define _PRE_EFLAGS(_sav, _msk, _tmp) \
333 /* EFLAGS = (_sav & _msk) | (EFLAGS & ~_msk); _sav &= ~_msk; */ \
334 "movl %"_sav",%"_LO32 _tmp"; " \
335 "push %"_tmp"; " \
336 "push %"_tmp"; " \
337 "movl %"_msk",%"_LO32 _tmp"; " \
338 "andl %"_LO32 _tmp",("_STK"); " \
339 "pushf; " \
340 "notl %"_LO32 _tmp"; " \
341 "andl %"_LO32 _tmp",("_STK"); " \
342 "andl %"_LO32 _tmp",2*"_BYTES_PER_LONG"("_STK"); " \
343 "pop %"_tmp"; " \
344 "orl %"_LO32 _tmp",("_STK"); " \
345 "popf; " \
346 "pop %"_sav"; "
348 /* After executing instruction: write-back necessary bits in EFLAGS. */
349 #define _POST_EFLAGS(_sav, _msk, _tmp) \
350 /* _sav |= EFLAGS & _msk; */ \
351 "pushf; " \
352 "pop %"_tmp"; " \
353 "andl %"_msk",%"_LO32 _tmp"; " \
354 "orl %"_LO32 _tmp",%"_sav"; "
356 /* Raw emulation: instruction has two explicit operands. */
357 #define __emulate_2op_nobyte(_op,_src,_dst,_eflags,_wx,_wy,_lx,_ly,_qx,_qy)\
358 do{ unsigned long _tmp; \
359 switch ( (_dst).bytes ) \
360 { \
361 case 2: \
362 asm volatile ( \
363 _PRE_EFLAGS("0","4","2") \
364 _op"w %"_wx"3,%1; " \
365 _POST_EFLAGS("0","4","2") \
366 : "=m" (_eflags), "=m" ((_dst).val), "=&r" (_tmp) \
367 : _wy ((_src).val), "i" (EFLAGS_MASK), \
368 "m" (_eflags), "m" ((_dst).val) ); \
369 break; \
370 case 4: \
371 asm volatile ( \
372 _PRE_EFLAGS("0","4","2") \
373 _op"l %"_lx"3,%1; " \
374 _POST_EFLAGS("0","4","2") \
375 : "=m" (_eflags), "=m" ((_dst).val), "=&r" (_tmp) \
376 : _ly ((_src).val), "i" (EFLAGS_MASK), \
377 "m" (_eflags), "m" ((_dst).val) ); \
378 break; \
379 case 8: \
380 __emulate_2op_8byte(_op, _src, _dst, _eflags, _qx, _qy); \
381 break; \
382 } \
383 } while (0)
384 #define __emulate_2op(_op,_src,_dst,_eflags,_bx,_by,_wx,_wy,_lx,_ly,_qx,_qy)\
385 do{ unsigned long _tmp; \
386 switch ( (_dst).bytes ) \
387 { \
388 case 1: \
389 asm volatile ( \
390 _PRE_EFLAGS("0","4","2") \
391 _op"b %"_bx"3,%1; " \
392 _POST_EFLAGS("0","4","2") \
393 : "=m" (_eflags), "=m" ((_dst).val), "=&r" (_tmp) \
394 : _by ((_src).val), "i" (EFLAGS_MASK), \
395 "m" (_eflags), "m" ((_dst).val) ); \
396 break; \
397 default: \
398 __emulate_2op_nobyte(_op,_src,_dst,_eflags,_wx,_wy,_lx,_ly,_qx,_qy);\
399 break; \
400 } \
401 } while (0)
402 /* Source operand is byte-sized and may be restricted to just %cl. */
403 #define emulate_2op_SrcB(_op, _src, _dst, _eflags) \
404 __emulate_2op(_op, _src, _dst, _eflags, \
405 "b", "c", "b", "c", "b", "c", "b", "c")
406 /* Source operand is byte, word, long or quad sized. */
407 #define emulate_2op_SrcV(_op, _src, _dst, _eflags) \
408 __emulate_2op(_op, _src, _dst, _eflags, \
409 "b", "q", "w", "r", _LO32, "r", "", "r")
410 /* Source operand is word, long or quad sized. */
411 #define emulate_2op_SrcV_nobyte(_op, _src, _dst, _eflags) \
412 __emulate_2op_nobyte(_op, _src, _dst, _eflags, \
413 "w", "r", _LO32, "r", "", "r")
415 /* Instruction has only one explicit operand (no source operand). */
416 #define emulate_1op(_op,_dst,_eflags) \
417 do{ unsigned long _tmp; \
418 switch ( (_dst).bytes ) \
419 { \
420 case 1: \
421 asm volatile ( \
422 _PRE_EFLAGS("0","3","2") \
423 _op"b %1; " \
424 _POST_EFLAGS("0","3","2") \
425 : "=m" (_eflags), "=m" ((_dst).val), "=&r" (_tmp) \
426 : "i" (EFLAGS_MASK), "m" (_eflags), "m" ((_dst).val) ); \
427 break; \
428 case 2: \
429 asm volatile ( \
430 _PRE_EFLAGS("0","3","2") \
431 _op"w %1; " \
432 _POST_EFLAGS("0","3","2") \
433 : "=m" (_eflags), "=m" ((_dst).val), "=&r" (_tmp) \
434 : "i" (EFLAGS_MASK), "m" (_eflags), "m" ((_dst).val) ); \
435 break; \
436 case 4: \
437 asm volatile ( \
438 _PRE_EFLAGS("0","3","2") \
439 _op"l %1; " \
440 _POST_EFLAGS("0","3","2") \
441 : "=m" (_eflags), "=m" ((_dst).val), "=&r" (_tmp) \
442 : "i" (EFLAGS_MASK), "m" (_eflags), "m" ((_dst).val) ); \
443 break; \
444 case 8: \
445 __emulate_1op_8byte(_op, _dst, _eflags); \
446 break; \
447 } \
448 } while (0)
450 /* Emulate an instruction with quadword operands (x86/64 only). */
451 #if defined(__x86_64__)
452 #define __emulate_2op_8byte(_op, _src, _dst, _eflags, _qx, _qy) \
453 do{ asm volatile ( \
454 _PRE_EFLAGS("0","4","2") \
455 _op"q %"_qx"3,%1; " \
456 _POST_EFLAGS("0","4","2") \
457 : "=m" (_eflags), "=m" ((_dst).val), "=&r" (_tmp) \
458 : _qy ((_src).val), "i" (EFLAGS_MASK), \
459 "m" (_eflags), "m" ((_dst).val) ); \
460 } while (0)
461 #define __emulate_1op_8byte(_op, _dst, _eflags) \
462 do{ asm volatile ( \
463 _PRE_EFLAGS("0","3","2") \
464 _op"q %1; " \
465 _POST_EFLAGS("0","3","2") \
466 : "=m" (_eflags), "=m" ((_dst).val), "=&r" (_tmp) \
467 : "i" (EFLAGS_MASK), "m" (_eflags), "m" ((_dst).val) ); \
468 } while (0)
469 #elif defined(__i386__)
470 #define __emulate_2op_8byte(_op, _src, _dst, _eflags, _qx, _qy)
471 #define __emulate_1op_8byte(_op, _dst, _eflags)
472 #endif /* __i386__ */
474 /* Fetch next part of the instruction being emulated. */
475 #define insn_fetch_bytes(_size) \
476 ({ unsigned long _x, _eip = _regs.eip; \
477 if ( !mode_64bit() ) _eip = (uint32_t)_eip; /* ignore upper dword */ \
478 _regs.eip += (_size); /* real hardware doesn't truncate */ \
479 generate_exception_if((uint8_t)(_regs.eip - ctxt->regs->eip) > 15, \
480 EXC_GP); \
481 rc = ops->insn_fetch(x86_seg_cs, _eip, &_x, (_size), ctxt); \
482 if ( rc ) goto done; \
483 _x; \
484 })
485 #define insn_fetch_type(_type) ((_type)insn_fetch_bytes(sizeof(_type)))
487 #define truncate_word(ea, byte_width) \
488 ({ unsigned long __ea = (ea); \
489 unsigned int _width = (byte_width); \
490 ((_width == sizeof(unsigned long)) ? __ea : \
491 (__ea & ((1UL << (_width << 3)) - 1))); \
492 })
493 #define truncate_ea(ea) truncate_word((ea), ad_bytes)
495 #define mode_64bit() (def_ad_bytes == 8)
497 #define fail_if(p) \
498 do { \
499 rc = (p) ? X86EMUL_UNHANDLEABLE : X86EMUL_OKAY; \
500 if ( rc ) goto done; \
501 } while (0)
503 #define generate_exception_if(p, e) \
504 ({ if ( (p) ) { \
505 fail_if(ops->inject_hw_exception == NULL); \
506 rc = ops->inject_hw_exception(e, ctxt) ? : X86EMUL_EXCEPTION; \
507 goto done; \
508 } \
509 })
/*
 * Given byte has even parity (even number of 1s)? SDM Vol. 1 Sec. 3.4.3.1,
 * "Status Flags": EFLAGS.PF reflects parity of least-sig. byte of result only.
 */
static int even_parity(uint8_t v)
{
    /* TEST sets PF from the parity of v; SETP then materialises it as 0/1. */
    asm ( "test %b0,%b0; setp %b0" : "=a" (v) : "0" (v) );
    return v;
}
521 /* Update address held in a register, based on addressing mode. */
522 #define _register_address_increment(reg, inc, byte_width) \
523 do { \
524 int _inc = (inc); /* signed type ensures sign extension to long */ \
525 unsigned int _width = (byte_width); \
526 if ( _width == sizeof(unsigned long) ) \
527 (reg) += _inc; \
528 else if ( mode_64bit() ) \
529 (reg) = ((reg) + _inc) & ((1UL << (_width << 3)) - 1); \
530 else \
531 (reg) = ((reg) & ~((1UL << (_width << 3)) - 1)) | \
532 (((reg) + _inc) & ((1UL << (_width << 3)) - 1)); \
533 } while (0)
534 #define register_address_increment(reg, inc) \
535 _register_address_increment((reg), (inc), ad_bytes)
537 #define sp_pre_dec(dec) ({ \
538 _register_address_increment(_regs.esp, -(dec), ctxt->sp_size/8); \
539 truncate_word(_regs.esp, ctxt->sp_size/8); \
540 })
541 #define sp_post_inc(inc) ({ \
542 unsigned long __esp = truncate_word(_regs.esp, ctxt->sp_size/8); \
543 _register_address_increment(_regs.esp, (inc), ctxt->sp_size/8); \
544 __esp; \
545 })
547 #define jmp_rel(rel) \
548 do { \
549 _regs.eip += (int)(rel); \
550 if ( !mode_64bit() ) \
551 _regs.eip = ((op_bytes == 2) \
552 ? (uint16_t)_regs.eip : (uint32_t)_regs.eip); \
553 } while (0)
555 static int __handle_rep_prefix(
556 struct cpu_user_regs *int_regs,
557 struct cpu_user_regs *ext_regs,
558 int ad_bytes)
559 {
560 unsigned long ecx = ((ad_bytes == 2) ? (uint16_t)int_regs->ecx :
561 (ad_bytes == 4) ? (uint32_t)int_regs->ecx :
562 int_regs->ecx);
564 if ( ecx-- == 0 )
565 {
566 ext_regs->eip = int_regs->eip;
567 return 1;
568 }
570 if ( ad_bytes == 2 )
571 *(uint16_t *)&int_regs->ecx = ecx;
572 else if ( ad_bytes == 4 )
573 int_regs->ecx = (uint32_t)ecx;
574 else
575 int_regs->ecx = ecx;
576 int_regs->eip = ext_regs->eip;
577 return 0;
578 }
580 #define handle_rep_prefix() \
581 do { \
582 if ( rep_prefix && __handle_rep_prefix(&_regs, ctxt->regs, ad_bytes) ) \
583 goto done; \
584 } while (0)
/*
 * Unsigned multiplication with double-word result.
 * IN:  Multiplicand=m[0], Multiplier=m[1]
 * OUT: Return CF/OF (overflow status); Result=m[1]:m[0]
 */
static int mul_dbl(unsigned long m[2])
{
    int rc;
    /* One-operand MUL: low half lands in rAX (m[0]), high half in rDX
     * (m[1]); SETO captures OF, which MUL sets iff the high half != 0. */
    asm ( "mul %4; seto %b2"
          : "=a" (m[0]), "=d" (m[1]), "=q" (rc)
          : "0" (m[0]), "1" (m[1]), "2" (0) );
    return rc;
}
/*
 * Signed multiplication with double-word result.
 * IN:  Multiplicand=m[0], Multiplier=m[1]
 * OUT: Return CF/OF (overflow status); Result=m[1]:m[0]
 */
static int imul_dbl(unsigned long m[2])
{
    int rc;
    /* One-operand IMUL: rDX:rAX = rAX * m[1]; SETO captures OF, which is
     * set iff the result does not fit (sign-extended) in a single word. */
    asm ( "imul %4; seto %b2"
          : "=a" (m[0]), "=d" (m[1]), "=q" (rc)
          : "0" (m[0]), "1" (m[1]), "2" (0) );
    return rc;
}
/*
 * Unsigned division of double-word dividend.
 * IN:  Dividend=u[1]:u[0], Divisor=v
 * OUT: Return 1: #DE
 *      Return 0: Quotient=u[0], Remainder=u[1]
 */
static int div_dbl(unsigned long u[2], unsigned long v)
{
    /* DIV raises #DE on divide-by-zero or quotient overflow; overflow
     * occurs exactly when the high word of the dividend >= the divisor.
     * Check up front so the real fault can never be taken here. */
    if ( (v == 0) || (u[1] >= v) )
        return 1;
    asm ( "div %4"
          : "=a" (u[0]), "=d" (u[1])
          : "0" (u[0]), "1" (u[1]), "r" (v) );
    return 0;
}
/*
 * Signed division of double-word dividend.
 * IN:  Dividend=u[1]:u[0], Divisor=v
 * OUT: Return 1: #DE
 *      Return 0: Quotient=u[0], Remainder=u[1]
 * NB. We don't use idiv directly as it's moderately hard to work out
 *     ahead of time whether it will #DE, which we cannot allow to happen.
 */
static int idiv_dbl(unsigned long u[2], unsigned long v)
{
    int negu = (long)u[1] < 0, negv = (long)v < 0;

    /* u = abs(u): two's-complement negate the double-word u[1]:u[0]. */
    if ( negu )
    {
        u[1] = ~u[1];
        if ( (u[0] = -u[0]) == 0 ) /* low word was 0: carry into high word */
            u[1]++;
    }

    /* abs(u) / abs(v) */
    if ( div_dbl(u, negv ? -v : v) )
        return 1;

    /* Remainder has same sign as dividend. It cannot overflow. */
    if ( negu )
        u[1] = -u[1];

    /* Quotient is overflowed if sign bit is set. */
    if ( negu ^ negv )
    {
        /* Result must be negative: negate, unless it is already the most
         * negative value (0x80...0), which negates to itself and is valid. */
        if ( (long)u[0] >= 0 )
            u[0] = -u[0];
        else if ( (u[0] << 1) != 0 ) /* == 0x80...0 is okay */
            return 1;
    }
    else if ( (long)u[0] < 0 )
        return 1;

    return 0;
}
672 static int
673 test_cc(
674 unsigned int condition, unsigned int flags)
675 {
676 int rc = 0;
678 switch ( (condition & 15) >> 1 )
679 {
680 case 0: /* o */
681 rc |= (flags & EFLG_OF);
682 break;
683 case 1: /* b/c/nae */
684 rc |= (flags & EFLG_CF);
685 break;
686 case 2: /* z/e */
687 rc |= (flags & EFLG_ZF);
688 break;
689 case 3: /* be/na */
690 rc |= (flags & (EFLG_CF|EFLG_ZF));
691 break;
692 case 4: /* s */
693 rc |= (flags & EFLG_SF);
694 break;
695 case 5: /* p/pe */
696 rc |= (flags & EFLG_PF);
697 break;
698 case 7: /* le/ng */
699 rc |= (flags & EFLG_ZF);
700 /* fall through */
701 case 6: /* l/nge */
702 rc |= (!(flags & EFLG_SF) != !(flags & EFLG_OF));
703 break;
704 }
706 /* Odd condition identifiers (lsb == 1) have inverted sense. */
707 return (!!rc ^ (condition & 1));
708 }
710 static int
711 get_cpl(
712 struct x86_emulate_ctxt *ctxt,
713 struct x86_emulate_ops *ops)
714 {
715 struct segment_register reg;
717 if ( ctxt->regs->eflags & EFLG_VM )
718 return 3;
720 if ( (ops->read_segment == NULL) ||
721 ops->read_segment(x86_seg_ss, &reg, ctxt) )
722 return -1;
724 return reg.attr.fields.dpl;
725 }
727 static int
728 _mode_iopl(
729 struct x86_emulate_ctxt *ctxt,
730 struct x86_emulate_ops *ops)
731 {
732 int cpl = get_cpl(ctxt, ops);
733 return ((cpl >= 0) && (cpl <= ((ctxt->regs->eflags >> 12) & 3)));
734 }
736 #define mode_ring0() (get_cpl(ctxt, ops) == 0)
737 #define mode_iopl() _mode_iopl(ctxt, ops)
739 static int
740 in_realmode(
741 struct x86_emulate_ctxt *ctxt,
742 struct x86_emulate_ops *ops)
743 {
744 unsigned long cr0;
745 int rc;
747 if ( ops->read_cr == NULL )
748 return 0;
750 rc = ops->read_cr(0, &cr0, ctxt);
751 return (!rc && !(cr0 & CR0_PE));
752 }
754 static int
755 load_seg(
756 enum x86_segment seg,
757 uint16_t sel,
758 struct x86_emulate_ctxt *ctxt,
759 struct x86_emulate_ops *ops)
760 {
761 struct segment_register reg;
762 int rc;
764 if ( !in_realmode(ctxt, ops) ||
765 (ops->read_segment == NULL) ||
766 (ops->write_segment == NULL) )
767 return X86EMUL_UNHANDLEABLE;
769 if ( (rc = ops->read_segment(seg, &reg, ctxt)) != 0 )
770 return rc;
772 reg.sel = sel;
773 reg.base = (uint32_t)sel << 4;
775 return ops->write_segment(seg, &reg, ctxt);
776 }
778 void *
779 decode_register(
780 uint8_t modrm_reg, struct cpu_user_regs *regs, int highbyte_regs)
781 {
782 void *p;
784 switch ( modrm_reg )
785 {
786 case 0: p = &regs->eax; break;
787 case 1: p = &regs->ecx; break;
788 case 2: p = &regs->edx; break;
789 case 3: p = &regs->ebx; break;
790 case 4: p = (highbyte_regs ?
791 ((unsigned char *)&regs->eax + 1) :
792 (unsigned char *)&regs->esp); break;
793 case 5: p = (highbyte_regs ?
794 ((unsigned char *)&regs->ecx + 1) :
795 (unsigned char *)&regs->ebp); break;
796 case 6: p = (highbyte_regs ?
797 ((unsigned char *)&regs->edx + 1) :
798 (unsigned char *)&regs->esi); break;
799 case 7: p = (highbyte_regs ?
800 ((unsigned char *)&regs->ebx + 1) :
801 (unsigned char *)&regs->edi); break;
802 #if defined(__x86_64__)
803 case 8: p = &regs->r8; break;
804 case 9: p = &regs->r9; break;
805 case 10: p = &regs->r10; break;
806 case 11: p = &regs->r11; break;
807 case 12: p = &regs->r12; break;
808 case 13: p = &regs->r13; break;
809 case 14: p = &regs->r14; break;
810 case 15: p = &regs->r15; break;
811 #endif
812 default: p = NULL; break;
813 }
815 return p;
816 }
818 #define decode_segment_failed x86_seg_tr
819 enum x86_segment
820 decode_segment(
821 uint8_t modrm_reg)
822 {
823 switch ( modrm_reg )
824 {
825 case 0: return x86_seg_es;
826 case 1: return x86_seg_cs;
827 case 2: return x86_seg_ss;
828 case 3: return x86_seg_ds;
829 case 4: return x86_seg_fs;
830 case 5: return x86_seg_gs;
831 default: break;
832 }
833 return decode_segment_failed;
834 }
836 int
837 x86_emulate(
838 struct x86_emulate_ctxt *ctxt,
839 struct x86_emulate_ops *ops)
840 {
841 /* Shadow copy of register state. Committed on successful emulation. */
842 struct cpu_user_regs _regs = *ctxt->regs;
844 uint8_t b, d, sib, sib_index, sib_base, twobyte = 0, rex_prefix = 0;
845 uint8_t modrm, modrm_mod = 0, modrm_reg = 0, modrm_rm = 0;
846 unsigned int op_bytes, def_op_bytes, ad_bytes, def_ad_bytes;
847 #define REPE_PREFIX 1
848 #define REPNE_PREFIX 2
849 unsigned int lock_prefix = 0, rep_prefix = 0;
850 int override_seg = -1, rc = X86EMUL_OKAY;
851 struct operand src, dst;
853 /* Data operand effective address (usually computed from ModRM). */
854 struct operand ea;
856 /* Default is a memory operand relative to segment DS. */
857 ea.type = OP_MEM;
858 ea.mem.seg = x86_seg_ds;
859 ea.mem.off = 0;
861 op_bytes = def_op_bytes = ad_bytes = def_ad_bytes = ctxt->addr_size/8;
862 if ( op_bytes == 8 )
863 {
864 op_bytes = def_op_bytes = 4;
865 #ifndef __x86_64__
866 return X86EMUL_UNHANDLEABLE;
867 #endif
868 }
870 /* Prefix bytes. */
871 for ( ; ; )
872 {
873 switch ( b = insn_fetch_type(uint8_t) )
874 {
875 case 0x66: /* operand-size override */
876 op_bytes = def_op_bytes ^ 6;
877 break;
878 case 0x67: /* address-size override */
879 ad_bytes = def_ad_bytes ^ (mode_64bit() ? 12 : 6);
880 break;
881 case 0x2e: /* CS override */
882 override_seg = x86_seg_cs;
883 break;
884 case 0x3e: /* DS override */
885 override_seg = x86_seg_ds;
886 break;
887 case 0x26: /* ES override */
888 override_seg = x86_seg_es;
889 break;
890 case 0x64: /* FS override */
891 override_seg = x86_seg_fs;
892 break;
893 case 0x65: /* GS override */
894 override_seg = x86_seg_gs;
895 break;
896 case 0x36: /* SS override */
897 override_seg = x86_seg_ss;
898 break;
899 case 0xf0: /* LOCK */
900 lock_prefix = 1;
901 break;
902 case 0xf2: /* REPNE/REPNZ */
903 rep_prefix = REPNE_PREFIX;
904 break;
905 case 0xf3: /* REP/REPE/REPZ */
906 rep_prefix = REPE_PREFIX;
907 break;
908 case 0x40 ... 0x4f: /* REX */
909 if ( !mode_64bit() )
910 goto done_prefixes;
911 rex_prefix = b;
912 continue;
913 default:
914 goto done_prefixes;
915 }
917 /* Any legacy prefix after a REX prefix nullifies its effect. */
918 rex_prefix = 0;
919 }
920 done_prefixes:
922 if ( rex_prefix & 8 ) /* REX.W */
923 op_bytes = 8;
925 /* Opcode byte(s). */
926 d = opcode_table[b];
927 if ( d == 0 )
928 {
929 /* Two-byte opcode? */
930 if ( b == 0x0f )
931 {
932 twobyte = 1;
933 b = insn_fetch_type(uint8_t);
934 d = twobyte_table[b];
935 }
937 /* Unrecognised? */
938 if ( d == 0 )
939 goto cannot_emulate;
940 }
942 /* Lock prefix is allowed only on RMW instructions. */
943 generate_exception_if((d & Mov) && lock_prefix, EXC_GP);
945 /* ModRM and SIB bytes. */
946 if ( d & ModRM )
947 {
948 modrm = insn_fetch_type(uint8_t);
949 modrm_mod = (modrm & 0xc0) >> 6;
950 modrm_reg = ((rex_prefix & 4) << 1) | ((modrm & 0x38) >> 3);
951 modrm_rm = modrm & 0x07;
953 if ( modrm_mod == 3 )
954 {
955 modrm_rm |= (rex_prefix & 1) << 3;
956 ea.type = OP_REG;
957 ea.reg = decode_register(
958 modrm_rm, &_regs, (d & ByteOp) && (rex_prefix == 0));
959 }
960 else if ( ad_bytes == 2 )
961 {
962 /* 16-bit ModR/M decode. */
963 switch ( modrm_rm )
964 {
965 case 0:
966 ea.mem.off = _regs.ebx + _regs.esi;
967 break;
968 case 1:
969 ea.mem.off = _regs.ebx + _regs.edi;
970 break;
971 case 2:
972 ea.mem.seg = x86_seg_ss;
973 ea.mem.off = _regs.ebp + _regs.esi;
974 break;
975 case 3:
976 ea.mem.seg = x86_seg_ss;
977 ea.mem.off = _regs.ebp + _regs.edi;
978 break;
979 case 4:
980 ea.mem.off = _regs.esi;
981 break;
982 case 5:
983 ea.mem.off = _regs.edi;
984 break;
985 case 6:
986 if ( modrm_mod == 0 )
987 break;
988 ea.mem.seg = x86_seg_ss;
989 ea.mem.off = _regs.ebp;
990 break;
991 case 7:
992 ea.mem.off = _regs.ebx;
993 break;
994 }
995 switch ( modrm_mod )
996 {
997 case 0:
998 if ( modrm_rm == 6 )
999 ea.mem.off = insn_fetch_type(int16_t);
1000 break;
1001 case 1:
1002 ea.mem.off += insn_fetch_type(int8_t);
1003 break;
1004 case 2:
1005 ea.mem.off += insn_fetch_type(int16_t);
1006 break;
1008 ea.mem.off = truncate_ea(ea.mem.off);
1010 else
1012 /* 32/64-bit ModR/M decode. */
1013 if ( modrm_rm == 4 )
1015 sib = insn_fetch_type(uint8_t);
1016 sib_index = ((sib >> 3) & 7) | ((rex_prefix << 2) & 8);
1017 sib_base = (sib & 7) | ((rex_prefix << 3) & 8);
1018 if ( sib_index != 4 )
1019 ea.mem.off = *(long*)decode_register(sib_index, &_regs, 0);
1020 ea.mem.off <<= (sib >> 6) & 3;
1021 if ( (modrm_mod == 0) && ((sib_base & 7) == 5) )
1022 ea.mem.off += insn_fetch_type(int32_t);
1023 else if ( sib_base == 4 )
1025 ea.mem.seg = x86_seg_ss;
1026 ea.mem.off += _regs.esp;
1027 if ( !twobyte && (b == 0x8f) )
1028 /* POP <rm> computes its EA post increment. */
1029 ea.mem.off += ((mode_64bit() && (op_bytes == 4))
1030 ? 8 : op_bytes);
1032 else if ( sib_base == 5 )
1034 ea.mem.seg = x86_seg_ss;
1035 ea.mem.off += _regs.ebp;
1037 else
1038 ea.mem.off += *(long*)decode_register(sib_base, &_regs, 0);
1040 else
1042 modrm_rm |= (rex_prefix & 1) << 3;
1043 ea.mem.off = *(long *)decode_register(modrm_rm, &_regs, 0);
1044 if ( (modrm_rm == 5) && (modrm_mod != 0) )
1045 ea.mem.seg = x86_seg_ss;
1047 switch ( modrm_mod )
1049 case 0:
1050 if ( (modrm_rm & 7) != 5 )
1051 break;
1052 ea.mem.off = insn_fetch_type(int32_t);
1053 if ( !mode_64bit() )
1054 break;
1055 /* Relative to RIP of next instruction. Argh! */
1056 ea.mem.off += _regs.eip;
1057 if ( (d & SrcMask) == SrcImm )
1058 ea.mem.off += (d & ByteOp) ? 1 :
1059 ((op_bytes == 8) ? 4 : op_bytes);
1060 else if ( (d & SrcMask) == SrcImmByte )
1061 ea.mem.off += 1;
1062 else if ( ((b == 0xf6) || (b == 0xf7)) &&
1063 ((modrm_reg & 7) <= 1) )
1064 /* Special case in Grp3: test has immediate operand. */
1065 ea.mem.off += (d & ByteOp) ? 1
1066 : ((op_bytes == 8) ? 4 : op_bytes);
1067 else if ( (b == 0xf7) == 0xa4 )
1068 /* SHLD/SHRD with immediate byte third operand. */
1069 ea.mem.off++;
1070 break;
1071 case 1:
1072 ea.mem.off += insn_fetch_type(int8_t);
1073 break;
1074 case 2:
1075 ea.mem.off += insn_fetch_type(int32_t);
1076 break;
1078 ea.mem.off = truncate_ea(ea.mem.off);
1082 if ( override_seg != -1 )
1083 ea.mem.seg = override_seg;
1085 /* Special instructions do their own operand decoding. */
1086 if ( (d & DstMask) == ImplicitOps )
1087 goto special_insn;
1089 /* Decode and fetch the source operand: register, memory or immediate. */
1090 switch ( d & SrcMask )
1092 case SrcNone:
1093 break;
1094 case SrcReg:
1095 src.type = OP_REG;
1096 if ( d & ByteOp )
1098 src.reg = decode_register(modrm_reg, &_regs, (rex_prefix == 0));
1099 src.val = *(uint8_t *)src.reg;
1100 src.bytes = 1;
1102 else
1104 src.reg = decode_register(modrm_reg, &_regs, 0);
1105 switch ( (src.bytes = op_bytes) )
1107 case 2: src.val = *(uint16_t *)src.reg; break;
1108 case 4: src.val = *(uint32_t *)src.reg; break;
1109 case 8: src.val = *(uint64_t *)src.reg; break;
1112 break;
1113 case SrcMem16:
1114 ea.bytes = 2;
1115 goto srcmem_common;
1116 case SrcMem:
1117 ea.bytes = (d & ByteOp) ? 1 : op_bytes;
1118 srcmem_common:
1119 src = ea;
1120 if ( src.type == OP_REG )
1122 switch ( src.bytes )
1124 case 1: src.val = *(uint8_t *)src.reg; break;
1125 case 2: src.val = *(uint16_t *)src.reg; break;
1126 case 4: src.val = *(uint32_t *)src.reg; break;
1127 case 8: src.val = *(uint64_t *)src.reg; break;
1130 else if ( (rc = ops->read(src.mem.seg, src.mem.off,
1131 &src.val, src.bytes, ctxt)) )
1132 goto done;
1133 break;
1134 case SrcImm:
1135 src.type = OP_IMM;
1136 src.bytes = (d & ByteOp) ? 1 : op_bytes;
1137 if ( src.bytes == 8 ) src.bytes = 4;
1138 /* NB. Immediates are sign-extended as necessary. */
1139 switch ( src.bytes )
1141 case 1: src.val = insn_fetch_type(int8_t); break;
1142 case 2: src.val = insn_fetch_type(int16_t); break;
1143 case 4: src.val = insn_fetch_type(int32_t); break;
1145 break;
1146 case SrcImmByte:
1147 src.type = OP_IMM;
1148 src.bytes = 1;
1149 src.val = insn_fetch_type(int8_t);
1150 break;
1153 /* Decode and fetch the destination operand: register or memory. */
1154 switch ( d & DstMask )
1156 case DstReg:
1157 dst.type = OP_REG;
1158 if ( d & ByteOp )
1160 dst.reg = decode_register(modrm_reg, &_regs, (rex_prefix == 0));
1161 dst.val = *(uint8_t *)dst.reg;
1162 dst.bytes = 1;
1164 else
1166 dst.reg = decode_register(modrm_reg, &_regs, 0);
1167 switch ( (dst.bytes = op_bytes) )
1169 case 2: dst.val = *(uint16_t *)dst.reg; break;
1170 case 4: dst.val = *(uint32_t *)dst.reg; break;
1171 case 8: dst.val = *(uint64_t *)dst.reg; break;
1174 break;
1175 case DstBitBase:
1176 if ( ((d & SrcMask) == SrcImmByte) || (ea.type == OP_REG) )
1178 src.val &= (op_bytes << 3) - 1;
1180 else
1182 /*
1183 * EA += BitOffset DIV op_bytes*8
1184 * BitOffset = BitOffset MOD op_bytes*8
1185 * DIV truncates towards negative infinity.
1186 * MOD always produces a positive result.
1187 */
1188 if ( op_bytes == 2 )
1189 src.val = (int16_t)src.val;
1190 else if ( op_bytes == 4 )
1191 src.val = (int32_t)src.val;
1192 if ( (long)src.val < 0 )
1194 unsigned long byte_offset;
1195 byte_offset = op_bytes + (((-src.val-1) >> 3) & ~(op_bytes-1));
1196 ea.mem.off -= byte_offset;
1197 src.val = (byte_offset << 3) + src.val;
1199 else
1201 ea.mem.off += (src.val >> 3) & ~(op_bytes - 1);
1202 src.val &= (op_bytes << 3) - 1;
1205 /* Becomes a normal DstMem operation from here on. */
1206 d = (d & ~DstMask) | DstMem;
1207 case DstMem:
1208 ea.bytes = (d & ByteOp) ? 1 : op_bytes;
1209 dst = ea;
1210 if ( dst.type == OP_REG )
1212 switch ( dst.bytes )
1214 case 1: dst.val = *(uint8_t *)dst.reg; break;
1215 case 2: dst.val = *(uint16_t *)dst.reg; break;
1216 case 4: dst.val = *(uint32_t *)dst.reg; break;
1217 case 8: dst.val = *(uint64_t *)dst.reg; break;
1220 else if ( !(d & Mov) ) /* optimisation - avoid slow emulated read */
1222 if ( (rc = ops->read(dst.mem.seg, dst.mem.off,
1223 &dst.val, dst.bytes, ctxt)) )
1224 goto done;
1225 dst.orig_val = dst.val;
1227 break;
1230 /* LOCK prefix allowed only on instructions with memory destination. */
1231 generate_exception_if(lock_prefix && (dst.type != OP_MEM), EXC_GP);
1233 if ( twobyte )
1234 goto twobyte_insn;
1236 switch ( b )
1238 case 0x04 ... 0x05: /* add imm,%%eax */
1239 dst.reg = (unsigned long *)&_regs.eax;
1240 dst.val = _regs.eax;
1241 case 0x00 ... 0x03: add: /* add */
1242 emulate_2op_SrcV("add", src, dst, _regs.eflags);
1243 break;
1245 case 0x0c ... 0x0d: /* or imm,%%eax */
1246 dst.reg = (unsigned long *)&_regs.eax;
1247 dst.val = _regs.eax;
1248 case 0x08 ... 0x0b: or: /* or */
1249 emulate_2op_SrcV("or", src, dst, _regs.eflags);
1250 break;
1252 case 0x14 ... 0x15: /* adc imm,%%eax */
1253 dst.reg = (unsigned long *)&_regs.eax;
1254 dst.val = _regs.eax;
1255 case 0x10 ... 0x13: adc: /* adc */
1256 emulate_2op_SrcV("adc", src, dst, _regs.eflags);
1257 break;
1259 case 0x1c ... 0x1d: /* sbb imm,%%eax */
1260 dst.reg = (unsigned long *)&_regs.eax;
1261 dst.val = _regs.eax;
1262 case 0x18 ... 0x1b: sbb: /* sbb */
1263 emulate_2op_SrcV("sbb", src, dst, _regs.eflags);
1264 break;
1266 case 0x24 ... 0x25: /* and imm,%%eax */
1267 dst.reg = (unsigned long *)&_regs.eax;
1268 dst.val = _regs.eax;
1269 case 0x20 ... 0x23: and: /* and */
1270 emulate_2op_SrcV("and", src, dst, _regs.eflags);
1271 break;
1273 case 0x2c ... 0x2d: /* sub imm,%%eax */
1274 dst.reg = (unsigned long *)&_regs.eax;
1275 dst.val = _regs.eax;
1276 case 0x28 ... 0x2b: sub: /* sub */
1277 emulate_2op_SrcV("sub", src, dst, _regs.eflags);
1278 break;
1280 case 0x34 ... 0x35: /* xor imm,%%eax */
1281 dst.reg = (unsigned long *)&_regs.eax;
1282 dst.val = _regs.eax;
1283 case 0x30 ... 0x33: xor: /* xor */
1284 emulate_2op_SrcV("xor", src, dst, _regs.eflags);
1285 break;
1287 case 0x3c ... 0x3d: /* cmp imm,%%eax */
1288 dst.reg = (unsigned long *)&_regs.eax;
1289 dst.val = _regs.eax;
1290 case 0x38 ... 0x3b: cmp: /* cmp */
1291 emulate_2op_SrcV("cmp", src, dst, _regs.eflags);
1292 break;
1294 case 0x62: /* bound */ {
1295 unsigned long src_val2;
1296 int lb, ub, idx;
1297 generate_exception_if(mode_64bit() || (src.type != OP_MEM), EXC_UD);
1298 if ( (rc = ops->read(src.mem.seg, src.mem.off + op_bytes,
1299 &src_val2, op_bytes, ctxt)) )
1300 goto done;
1301 ub = (op_bytes == 2) ? (int16_t)src_val2 : (int32_t)src_val2;
1302 lb = (op_bytes == 2) ? (int16_t)src.val : (int32_t)src.val;
1303 idx = (op_bytes == 2) ? (int16_t)dst.val : (int32_t)dst.val;
1304 generate_exception_if((idx < lb) || (idx > ub), EXC_BR);
1305 dst.type = OP_NONE;
1306 break;
1309 case 0x63: /* movsxd (x86/64) / arpl (x86/32) */
1310 if ( mode_64bit() )
1312 /* movsxd */
1313 if ( src.type == OP_REG )
1314 src.val = *(int32_t *)src.reg;
1315 else if ( (rc = ops->read(src.mem.seg, src.mem.off,
1316 &src.val, 4, ctxt)) )
1317 goto done;
1318 dst.val = (int32_t)src.val;
1320 else
1322 /* arpl */
1323 uint16_t src_val = dst.val;
1324 dst = src;
1325 _regs.eflags &= ~EFLG_ZF;
1326 _regs.eflags |= ((src_val & 3) > (dst.val & 3)) ? EFLG_ZF : 0;
1327 if ( _regs.eflags & EFLG_ZF )
1328 dst.val = (dst.val & ~3) | (src_val & 3);
1329 else
1330 dst.type = OP_NONE;
1331 generate_exception_if(in_realmode(ctxt, ops), EXC_UD);
1333 break;
1335 case 0x69: /* imul imm16/32 */
1336 case 0x6b: /* imul imm8 */ {
1337 unsigned long src1; /* ModR/M source operand */
1338 if ( ea.type == OP_REG )
1339 src1 = *ea.reg;
1340 else if ( (rc = ops->read(ea.mem.seg, ea.mem.off,
1341 &src1, op_bytes, ctxt)) )
1342 goto done;
1343 _regs.eflags &= ~(EFLG_OF|EFLG_CF);
1344 switch ( dst.bytes )
1346 case 2:
1347 dst.val = ((uint32_t)(int16_t)src.val *
1348 (uint32_t)(int16_t)src1);
1349 if ( (int16_t)dst.val != (uint32_t)dst.val )
1350 _regs.eflags |= EFLG_OF|EFLG_CF;
1351 break;
1352 #ifdef __x86_64__
1353 case 4:
1354 dst.val = ((uint64_t)(int32_t)src.val *
1355 (uint64_t)(int32_t)src1);
1356 if ( (int32_t)dst.val != dst.val )
1357 _regs.eflags |= EFLG_OF|EFLG_CF;
1358 break;
1359 #endif
1360 default: {
1361 unsigned long m[2] = { src.val, src1 };
1362 if ( imul_dbl(m) )
1363 _regs.eflags |= EFLG_OF|EFLG_CF;
1364 dst.val = m[0];
1365 break;
1368 break;
1371 case 0x82: /* Grp1 (x86/32 only) */
1372 generate_exception_if(mode_64bit(), EXC_UD);
1373 case 0x80: case 0x81: case 0x83: /* Grp1 */
1374 switch ( modrm_reg & 7 )
1376 case 0: goto add;
1377 case 1: goto or;
1378 case 2: goto adc;
1379 case 3: goto sbb;
1380 case 4: goto and;
1381 case 5: goto sub;
1382 case 6: goto xor;
1383 case 7: goto cmp;
1385 break;
1387 case 0xa8 ... 0xa9: /* test imm,%%eax */
1388 dst.reg = (unsigned long *)&_regs.eax;
1389 dst.val = _regs.eax;
1390 case 0x84 ... 0x85: test: /* test */
1391 emulate_2op_SrcV("test", src, dst, _regs.eflags);
1392 break;
1394 case 0x86 ... 0x87: xchg: /* xchg */
1395 /* Write back the register source. */
1396 switch ( dst.bytes )
1398 case 1: *(uint8_t *)src.reg = (uint8_t)dst.val; break;
1399 case 2: *(uint16_t *)src.reg = (uint16_t)dst.val; break;
1400 case 4: *src.reg = (uint32_t)dst.val; break; /* 64b reg: zero-extend */
1401 case 8: *src.reg = dst.val; break;
1403 /* Write back the memory destination with implicit LOCK prefix. */
1404 dst.val = src.val;
1405 lock_prefix = 1;
1406 break;
1408 case 0xc6 ... 0xc7: /* mov (sole member of Grp11) */
1409 generate_exception_if((modrm_reg & 7) != 0, EXC_UD);
1410 case 0x88 ... 0x8b: /* mov */
1411 dst.val = src.val;
1412 break;
1414 case 0x8c: /* mov Sreg,r/m */ {
1415 struct segment_register reg;
1416 enum x86_segment seg = decode_segment(modrm_reg);
1417 generate_exception_if(seg == decode_segment_failed, EXC_UD);
1418 fail_if(ops->read_segment == NULL);
1419 if ( (rc = ops->read_segment(seg, &reg, ctxt)) != 0 )
1420 goto done;
1421 dst.val = reg.sel;
1422 if ( dst.type == OP_MEM )
1423 dst.bytes = 2;
1424 break;
1427 case 0x8e: /* mov r/m,Sreg */ {
1428 enum x86_segment seg = decode_segment(modrm_reg);
1429 generate_exception_if(seg == decode_segment_failed, EXC_UD);
1430 if ( (rc = load_seg(seg, (uint16_t)src.val, ctxt, ops)) != 0 )
1431 goto done;
1432 dst.type = OP_NONE;
1433 break;
1436 case 0x8d: /* lea */
1437 dst.val = ea.mem.off;
1438 break;
1440 case 0x8f: /* pop (sole member of Grp1a) */
1441 generate_exception_if((modrm_reg & 7) != 0, EXC_UD);
1442 /* 64-bit mode: POP defaults to a 64-bit operand. */
1443 if ( mode_64bit() && (dst.bytes == 4) )
1444 dst.bytes = 8;
1445 if ( (rc = ops->read(x86_seg_ss, sp_post_inc(dst.bytes),
1446 &dst.val, dst.bytes, ctxt)) != 0 )
1447 goto done;
1448 break;
1450 case 0xb0 ... 0xb7: /* mov imm8,r8 */
1451 dst.reg = decode_register(
1452 (b & 7) | ((rex_prefix & 1) << 3), &_regs, (rex_prefix == 0));
1453 dst.val = src.val;
1454 break;
1456 case 0xb8 ... 0xbf: /* mov imm{16,32,64},r{16,32,64} */
1457 if ( dst.bytes == 8 ) /* Fetch more bytes to obtain imm64 */
1458 src.val = ((uint32_t)src.val |
1459 ((uint64_t)insn_fetch_type(uint32_t) << 32));
1460 dst.reg = decode_register(
1461 (b & 7) | ((rex_prefix & 1) << 3), &_regs, 0);
1462 dst.val = src.val;
1463 break;
1465 case 0xc0 ... 0xc1: grp2: /* Grp2 */
1466 switch ( modrm_reg & 7 )
1468 case 0: /* rol */
1469 emulate_2op_SrcB("rol", src, dst, _regs.eflags);
1470 break;
1471 case 1: /* ror */
1472 emulate_2op_SrcB("ror", src, dst, _regs.eflags);
1473 break;
1474 case 2: /* rcl */
1475 emulate_2op_SrcB("rcl", src, dst, _regs.eflags);
1476 break;
1477 case 3: /* rcr */
1478 emulate_2op_SrcB("rcr", src, dst, _regs.eflags);
1479 break;
1480 case 4: /* sal/shl */
1481 case 6: /* sal/shl */
1482 emulate_2op_SrcB("sal", src, dst, _regs.eflags);
1483 break;
1484 case 5: /* shr */
1485 emulate_2op_SrcB("shr", src, dst, _regs.eflags);
1486 break;
1487 case 7: /* sar */
1488 emulate_2op_SrcB("sar", src, dst, _regs.eflags);
1489 break;
1491 break;
1493 case 0xc4: /* les */ {
1494 unsigned long sel;
1495 dst.val = x86_seg_es;
1496 les: /* dst.val identifies the segment */
1497 generate_exception_if(src.type != OP_MEM, EXC_UD);
1498 if ( (rc = ops->read(src.mem.seg, src.mem.off + src.bytes,
1499 &sel, 2, ctxt)) != 0 )
1500 goto done;
1501 if ( (rc = load_seg(dst.val, (uint16_t)sel, ctxt, ops)) != 0 )
1502 goto done;
1503 dst.val = src.val;
1504 break;
1507 case 0xc5: /* lds */
1508 dst.val = x86_seg_ds;
1509 goto les;
1511 case 0xd0 ... 0xd1: /* Grp2 */
1512 src.val = 1;
1513 goto grp2;
1515 case 0xd2 ... 0xd3: /* Grp2 */
1516 src.val = _regs.ecx;
1517 goto grp2;
1519 case 0xf6 ... 0xf7: /* Grp3 */
1520 switch ( modrm_reg & 7 )
1522 case 0 ... 1: /* test */
1523 /* Special case in Grp3: test has an immediate source operand. */
1524 src.type = OP_IMM;
1525 src.bytes = (d & ByteOp) ? 1 : op_bytes;
1526 if ( src.bytes == 8 ) src.bytes = 4;
1527 switch ( src.bytes )
1529 case 1: src.val = insn_fetch_type(int8_t); break;
1530 case 2: src.val = insn_fetch_type(int16_t); break;
1531 case 4: src.val = insn_fetch_type(int32_t); break;
1533 goto test;
1534 case 2: /* not */
1535 dst.val = ~dst.val;
1536 break;
1537 case 3: /* neg */
1538 emulate_1op("neg", dst, _regs.eflags);
1539 break;
1540 case 4: /* mul */
1541 src = dst;
1542 dst.type = OP_REG;
1543 dst.reg = (unsigned long *)&_regs.eax;
1544 dst.val = *dst.reg;
1545 _regs.eflags &= ~(EFLG_OF|EFLG_CF);
1546 switch ( src.bytes )
1548 case 1:
1549 dst.val *= src.val;
1550 if ( (uint8_t)dst.val != (uint16_t)dst.val )
1551 _regs.eflags |= EFLG_OF|EFLG_CF;
1552 break;
1553 case 2:
1554 dst.val *= src.val;
1555 if ( (uint16_t)dst.val != (uint32_t)dst.val )
1556 _regs.eflags |= EFLG_OF|EFLG_CF;
1557 *(uint16_t *)&_regs.edx = dst.val >> 16;
1558 break;
1559 #ifdef __x86_64__
1560 case 4:
1561 dst.val *= src.val;
1562 if ( (uint32_t)dst.val != dst.val )
1563 _regs.eflags |= EFLG_OF|EFLG_CF;
1564 _regs.edx = (uint32_t)(dst.val >> 32);
1565 break;
1566 #endif
1567 default: {
1568 unsigned long m[2] = { src.val, dst.val };
1569 if ( mul_dbl(m) )
1570 _regs.eflags |= EFLG_OF|EFLG_CF;
1571 _regs.edx = m[1];
1572 dst.val = m[0];
1573 break;
1576 break;
1577 case 5: /* imul */
1578 src = dst;
1579 dst.type = OP_REG;
1580 dst.reg = (unsigned long *)&_regs.eax;
1581 dst.val = *dst.reg;
1582 _regs.eflags &= ~(EFLG_OF|EFLG_CF);
1583 switch ( src.bytes )
1585 case 1:
1586 dst.val = ((uint16_t)(int8_t)src.val *
1587 (uint16_t)(int8_t)dst.val);
1588 if ( (int8_t)dst.val != (uint16_t)dst.val )
1589 _regs.eflags |= EFLG_OF|EFLG_CF;
1590 break;
1591 case 2:
1592 dst.val = ((uint32_t)(int16_t)src.val *
1593 (uint32_t)(int16_t)dst.val);
1594 if ( (int16_t)dst.val != (uint32_t)dst.val )
1595 _regs.eflags |= EFLG_OF|EFLG_CF;
1596 *(uint16_t *)&_regs.edx = dst.val >> 16;
1597 break;
1598 #ifdef __x86_64__
1599 case 4:
1600 dst.val = ((uint64_t)(int32_t)src.val *
1601 (uint64_t)(int32_t)dst.val);
1602 if ( (int32_t)dst.val != dst.val )
1603 _regs.eflags |= EFLG_OF|EFLG_CF;
1604 _regs.edx = (uint32_t)(dst.val >> 32);
1605 break;
1606 #endif
1607 default: {
1608 unsigned long m[2] = { src.val, dst.val };
1609 if ( imul_dbl(m) )
1610 _regs.eflags |= EFLG_OF|EFLG_CF;
1611 _regs.edx = m[1];
1612 dst.val = m[0];
1613 break;
1616 break;
1617 case 6: /* div */ {
1618 unsigned long u[2], v;
1619 src = dst;
1620 dst.type = OP_REG;
1621 dst.reg = (unsigned long *)&_regs.eax;
1622 switch ( src.bytes )
1624 case 1:
1625 u[0] = (uint16_t)_regs.eax;
1626 u[1] = 0;
1627 v = (uint8_t)src.val;
1628 generate_exception_if(
1629 div_dbl(u, v) || ((uint8_t)u[0] != (uint16_t)u[0]),
1630 EXC_DE);
1631 dst.val = (uint8_t)u[0];
1632 ((uint8_t *)&_regs.eax)[1] = u[1];
1633 break;
1634 case 2:
1635 u[0] = ((uint32_t)_regs.edx << 16) | (uint16_t)_regs.eax;
1636 u[1] = 0;
1637 v = (uint16_t)src.val;
1638 generate_exception_if(
1639 div_dbl(u, v) || ((uint16_t)u[0] != (uint32_t)u[0]),
1640 EXC_DE);
1641 dst.val = (uint16_t)u[0];
1642 *(uint16_t *)&_regs.edx = u[1];
1643 break;
1644 #ifdef __x86_64__
1645 case 4:
1646 u[0] = (_regs.edx << 32) | (uint32_t)_regs.eax;
1647 u[1] = 0;
1648 v = (uint32_t)src.val;
1649 generate_exception_if(
1650 div_dbl(u, v) || ((uint32_t)u[0] != u[0]),
1651 EXC_DE);
1652 dst.val = (uint32_t)u[0];
1653 _regs.edx = (uint32_t)u[1];
1654 break;
1655 #endif
1656 default:
1657 u[0] = _regs.eax;
1658 u[1] = _regs.edx;
1659 v = src.val;
1660 generate_exception_if(div_dbl(u, v), EXC_DE);
1661 dst.val = u[0];
1662 _regs.edx = u[1];
1663 break;
1665 break;
1667 case 7: /* idiv */ {
1668 unsigned long u[2], v;
1669 src = dst;
1670 dst.type = OP_REG;
1671 dst.reg = (unsigned long *)&_regs.eax;
1672 switch ( src.bytes )
1674 case 1:
1675 u[0] = (int16_t)_regs.eax;
1676 u[1] = ((long)u[0] < 0) ? ~0UL : 0UL;
1677 v = (int8_t)src.val;
1678 generate_exception_if(
1679 idiv_dbl(u, v) || ((int8_t)u[0] != (int16_t)u[0]),
1680 EXC_DE);
1681 dst.val = (int8_t)u[0];
1682 ((int8_t *)&_regs.eax)[1] = u[1];
1683 break;
1684 case 2:
1685 u[0] = (int32_t)((_regs.edx << 16) | (uint16_t)_regs.eax);
1686 u[1] = ((long)u[0] < 0) ? ~0UL : 0UL;
1687 v = (int16_t)src.val;
1688 generate_exception_if(
1689 idiv_dbl(u, v) || ((int16_t)u[0] != (int32_t)u[0]),
1690 EXC_DE);
1691 dst.val = (int16_t)u[0];
1692 *(int16_t *)&_regs.edx = u[1];
1693 break;
1694 #ifdef __x86_64__
1695 case 4:
1696 u[0] = (_regs.edx << 32) | (uint32_t)_regs.eax;
1697 u[1] = ((long)u[0] < 0) ? ~0UL : 0UL;
1698 v = (int32_t)src.val;
1699 generate_exception_if(
1700 idiv_dbl(u, v) || ((int32_t)u[0] != u[0]),
1701 EXC_DE);
1702 dst.val = (int32_t)u[0];
1703 _regs.edx = (uint32_t)u[1];
1704 break;
1705 #endif
1706 default:
1707 u[0] = _regs.eax;
1708 u[1] = _regs.edx;
1709 v = src.val;
1710 generate_exception_if(idiv_dbl(u, v), EXC_DE);
1711 dst.val = u[0];
1712 _regs.edx = u[1];
1713 break;
1715 break;
1717 default:
1718 goto cannot_emulate;
1720 break;
1722 case 0xfe: /* Grp4 */
1723 generate_exception_if((modrm_reg & 7) >= 2, EXC_UD);
1724 case 0xff: /* Grp5 */
1725 switch ( modrm_reg & 7 )
1727 case 0: /* inc */
1728 emulate_1op("inc", dst, _regs.eflags);
1729 break;
1730 case 1: /* dec */
1731 emulate_1op("dec", dst, _regs.eflags);
1732 break;
1733 case 2: /* call (near) */
1734 case 4: /* jmp (near) */
1735 dst.type = OP_NONE;
1736 if ( (dst.bytes != 8) && mode_64bit() )
1738 dst.bytes = op_bytes = 8;
1739 if ( dst.type == OP_REG )
1740 dst.val = *dst.reg;
1741 else if ( (rc = ops->read(dst.mem.seg, dst.mem.off,
1742 &dst.val, 8, ctxt)) != 0 )
1743 goto done;
1745 src.val = _regs.eip;
1746 _regs.eip = dst.val;
1747 if ( (modrm_reg & 7) == 2 )
1748 goto push; /* call */
1749 break;
1750 case 3: /* call (far, absolute indirect) */
1751 case 5: /* jmp (far, absolute indirect) */ {
1752 unsigned long sel;
1754 if ( (rc = ops->read(dst.mem.seg, dst.mem.off+dst.bytes,
1755 &sel, 2, ctxt)) )
1756 goto done;
1758 if ( (modrm_reg & 7) == 3 ) /* call */
1760 struct segment_register reg;
1761 fail_if(ops->read_segment == NULL);
1762 if ( (rc = ops->read_segment(x86_seg_cs, &reg, ctxt)) ||
1763 (rc = ops->write(x86_seg_ss, sp_pre_dec(op_bytes),
1764 reg.sel, op_bytes, ctxt)) ||
1765 (rc = ops->write(x86_seg_ss, sp_pre_dec(op_bytes),
1766 _regs.eip, op_bytes, ctxt)) )
1767 goto done;
1770 if ( (rc = load_seg(x86_seg_cs, sel, ctxt, ops)) != 0 )
1771 goto done;
1772 _regs.eip = dst.val;
1774 dst.type = OP_NONE;
1775 break;
1777 case 6: /* push */
1778 /* 64-bit mode: PUSH defaults to a 64-bit operand. */
1779 if ( mode_64bit() && (dst.bytes == 4) )
1781 dst.bytes = 8;
1782 if ( dst.type == OP_REG )
1783 dst.val = *dst.reg;
1784 else if ( (rc = ops->read(dst.mem.seg, dst.mem.off,
1785 &dst.val, 8, ctxt)) != 0 )
1786 goto done;
1788 if ( (rc = ops->write(x86_seg_ss, sp_pre_dec(dst.bytes),
1789 dst.val, dst.bytes, ctxt)) != 0 )
1790 goto done;
1791 dst.type = OP_NONE;
1792 break;
1793 case 7:
1794 generate_exception_if(1, EXC_UD);
1795 default:
1796 goto cannot_emulate;
1798 break;
1801 writeback:
1802 switch ( dst.type )
1804 case OP_REG:
1805 /* The 4-byte case *is* correct: in 64-bit mode we zero-extend. */
1806 switch ( dst.bytes )
1808 case 1: *(uint8_t *)dst.reg = (uint8_t)dst.val; break;
1809 case 2: *(uint16_t *)dst.reg = (uint16_t)dst.val; break;
1810 case 4: *dst.reg = (uint32_t)dst.val; break; /* 64b: zero-ext */
1811 case 8: *dst.reg = dst.val; break;
1813 break;
1814 case OP_MEM:
1815 if ( !(d & Mov) && (dst.orig_val == dst.val) )
1816 /* nothing to do */;
1817 else if ( lock_prefix )
1818 rc = ops->cmpxchg(
1819 dst.mem.seg, dst.mem.off, dst.orig_val,
1820 dst.val, dst.bytes, ctxt);
1821 else
1822 rc = ops->write(
1823 dst.mem.seg, dst.mem.off, dst.val, dst.bytes, ctxt);
1824 if ( rc != 0 )
1825 goto done;
1826 default:
1827 break;
1830 /* Commit shadow register state. */
1831 _regs.eflags &= ~EFLG_RF;
1832 *ctxt->regs = _regs;
1834 if ( (_regs.eflags & EFLG_TF) &&
1835 (rc == X86EMUL_OKAY) &&
1836 (ops->inject_hw_exception != NULL) )
1837 rc = ops->inject_hw_exception(EXC_DB, ctxt) ? : X86EMUL_EXCEPTION;
1839 done:
1840 return rc;
1842 special_insn:
1843 dst.type = OP_NONE;
1845 /*
1846 * The only implicit-operands instructions allowed a LOCK prefix are
1847 * CMPXCHG{8,16}B, MOV CRn, MOV DRn.
1848 */
1849 generate_exception_if(lock_prefix &&
1850 ((b < 0x20) || (b > 0x23)) && /* MOV CRn/DRn */
1851 (b != 0xc7), /* CMPXCHG{8,16}B */
1852 EXC_GP);
1854 if ( twobyte )
1855 goto twobyte_special_insn;
1857 switch ( b )
1859 case 0x06: /* push %%es */ {
1860 struct segment_register reg;
1861 src.val = x86_seg_es;
1862 push_seg:
1863 fail_if(ops->read_segment == NULL);
1864 if ( (rc = ops->read_segment(src.val, &reg, ctxt)) != 0 )
1865 return rc;
1866 /* 64-bit mode: PUSH defaults to a 64-bit operand. */
1867 if ( mode_64bit() && (op_bytes == 4) )
1868 op_bytes = 8;
1869 if ( (rc = ops->write(x86_seg_ss, sp_pre_dec(op_bytes),
1870 reg.sel, op_bytes, ctxt)) != 0 )
1871 goto done;
1872 break;
1875 case 0x07: /* pop %%es */
1876 src.val = x86_seg_es;
1877 pop_seg:
1878 fail_if(ops->write_segment == NULL);
1879 /* 64-bit mode: POP defaults to a 64-bit operand. */
1880 if ( mode_64bit() && (op_bytes == 4) )
1881 op_bytes = 8;
1882 if ( (rc = ops->read(x86_seg_ss, sp_post_inc(op_bytes),
1883 &dst.val, op_bytes, ctxt)) != 0 )
1884 goto done;
1885 if ( (rc = load_seg(src.val, (uint16_t)dst.val, ctxt, ops)) != 0 )
1886 return rc;
1887 break;
1889 case 0x0e: /* push %%cs */
1890 src.val = x86_seg_cs;
1891 goto push_seg;
1893 case 0x16: /* push %%ss */
1894 src.val = x86_seg_ss;
1895 goto push_seg;
1897 case 0x17: /* pop %%ss */
1898 src.val = x86_seg_ss;
1899 goto pop_seg;
1901 case 0x1e: /* push %%ds */
1902 src.val = x86_seg_ds;
1903 goto push_seg;
1905 case 0x1f: /* pop %%ds */
1906 src.val = x86_seg_ds;
1907 goto pop_seg;
1909 case 0x27: /* daa */ {
1910 uint8_t al = _regs.eax;
1911 unsigned long eflags = _regs.eflags;
1912 generate_exception_if(mode_64bit(), EXC_UD);
1913 _regs.eflags &= ~(EFLG_CF|EFLG_AF);
1914 if ( ((al & 0x0f) > 9) || (eflags & EFLG_AF) )
1916 *(uint8_t *)&_regs.eax += 6;
1917 _regs.eflags |= EFLG_AF;
1919 if ( (al > 0x99) || (eflags & EFLG_CF) )
1921 *(uint8_t *)&_regs.eax += 0x60;
1922 _regs.eflags |= EFLG_CF;
1924 _regs.eflags &= ~(EFLG_SF|EFLG_ZF|EFLG_PF);
1925 _regs.eflags |= ((uint8_t)_regs.eax == 0) ? EFLG_ZF : 0;
1926 _regs.eflags |= (( int8_t)_regs.eax < 0) ? EFLG_SF : 0;
1927 _regs.eflags |= even_parity(_regs.eax) ? EFLG_PF : 0;
1928 break;
1931 case 0x2f: /* das */ {
1932 uint8_t al = _regs.eax;
1933 unsigned long eflags = _regs.eflags;
1934 generate_exception_if(mode_64bit(), EXC_UD);
1935 _regs.eflags &= ~(EFLG_CF|EFLG_AF);
1936 if ( ((al & 0x0f) > 9) || (eflags & EFLG_AF) )
1938 _regs.eflags |= EFLG_AF;
1939 if ( (al < 6) || (eflags & EFLG_CF) )
1940 _regs.eflags |= EFLG_CF;
1941 *(uint8_t *)&_regs.eax -= 6;
1943 if ( (al > 0x99) || (eflags & EFLG_CF) )
1945 *(uint8_t *)&_regs.eax -= 0x60;
1946 _regs.eflags |= EFLG_CF;
1948 _regs.eflags &= ~(EFLG_SF|EFLG_ZF|EFLG_PF);
1949 _regs.eflags |= ((uint8_t)_regs.eax == 0) ? EFLG_ZF : 0;
1950 _regs.eflags |= (( int8_t)_regs.eax < 0) ? EFLG_SF : 0;
1951 _regs.eflags |= even_parity(_regs.eax) ? EFLG_PF : 0;
1952 break;
1955 case 0x37: /* aaa */
1956 case 0x3f: /* aas */
1957 generate_exception_if(mode_64bit(), EXC_UD);
1958 _regs.eflags &= ~EFLG_CF;
1959 if ( ((uint8_t)_regs.eax > 9) || (_regs.eflags & EFLG_AF) )
1961 ((uint8_t *)&_regs.eax)[0] += (b == 0x37) ? 6 : -6;
1962 ((uint8_t *)&_regs.eax)[1] += (b == 0x37) ? 1 : -1;
1963 _regs.eflags |= EFLG_CF | EFLG_AF;
1965 ((uint8_t *)&_regs.eax)[0] &= 0x0f;
1966 break;
1968 case 0x40 ... 0x4f: /* inc/dec reg */
1969 dst.type = OP_REG;
1970 dst.reg = decode_register(b & 7, &_regs, 0);
1971 dst.bytes = op_bytes;
1972 dst.val = *dst.reg;
1973 if ( b & 8 )
1974 emulate_1op("dec", dst, _regs.eflags);
1975 else
1976 emulate_1op("inc", dst, _regs.eflags);
1977 break;
1979 case 0x50 ... 0x57: /* push reg */
1980 src.val = *(unsigned long *)decode_register(
1981 (b & 7) | ((rex_prefix & 1) << 3), &_regs, 0);
1982 goto push;
1984 case 0x58 ... 0x5f: /* pop reg */
1985 dst.type = OP_REG;
1986 dst.reg = decode_register(
1987 (b & 7) | ((rex_prefix & 1) << 3), &_regs, 0);
1988 dst.bytes = op_bytes;
1989 if ( mode_64bit() && (dst.bytes == 4) )
1990 dst.bytes = 8;
1991 if ( (rc = ops->read(x86_seg_ss, sp_post_inc(dst.bytes),
1992 &dst.val, dst.bytes, ctxt)) != 0 )
1993 goto done;
1994 break;
1996 case 0x60: /* pusha */ {
1997 int i;
1998 unsigned long regs[] = {
1999 _regs.eax, _regs.ecx, _regs.edx, _regs.ebx,
2000 _regs.esp, _regs.ebp, _regs.esi, _regs.edi };
2001 generate_exception_if(mode_64bit(), EXC_UD);
2002 for ( i = 0; i < 8; i++ )
2003 if ( (rc = ops->write(x86_seg_ss, sp_pre_dec(op_bytes),
2004 regs[i], op_bytes, ctxt)) != 0 )
2005 goto done;
2006 break;
2009 case 0x61: /* popa */ {
2010 int i;
2011 unsigned long dummy_esp, *regs[] = {
2012 (unsigned long *)&_regs.edi, (unsigned long *)&_regs.esi,
2013 (unsigned long *)&_regs.ebp, (unsigned long *)&dummy_esp,
2014 (unsigned long *)&_regs.ebx, (unsigned long *)&_regs.edx,
2015 (unsigned long *)&_regs.ecx, (unsigned long *)&_regs.eax };
2016 generate_exception_if(mode_64bit(), EXC_UD);
2017 for ( i = 0; i < 8; i++ )
2019 if ( (rc = ops->read(x86_seg_ss, sp_post_inc(op_bytes),
2020 &dst.val, op_bytes, ctxt)) != 0 )
2021 goto done;
2022 switch ( op_bytes )
2024 case 1: *(uint8_t *)regs[i] = (uint8_t)dst.val; break;
2025 case 2: *(uint16_t *)regs[i] = (uint16_t)dst.val; break;
2026 case 4: *regs[i] = (uint32_t)dst.val; break; /* 64b: zero-ext */
2027 case 8: *regs[i] = dst.val; break;
2030 break;
2033 case 0x68: /* push imm{16,32,64} */
2034 src.val = ((op_bytes == 2)
2035 ? (int32_t)insn_fetch_type(int16_t)
2036 : insn_fetch_type(int32_t));
2037 goto push;
2039 case 0x6a: /* push imm8 */
2040 src.val = insn_fetch_type(int8_t);
2041 push:
2042 d |= Mov; /* force writeback */
2043 dst.type = OP_MEM;
2044 dst.bytes = op_bytes;
2045 if ( mode_64bit() && (dst.bytes == 4) )
2046 dst.bytes = 8;
2047 dst.val = src.val;
2048 dst.mem.seg = x86_seg_ss;
2049 dst.mem.off = sp_pre_dec(dst.bytes);
2050 break;
2052 case 0x6c ... 0x6d: /* ins %dx,%es:%edi */
2053 handle_rep_prefix();
2054 generate_exception_if(!mode_iopl(), EXC_GP);
2055 dst.type = OP_MEM;
2056 dst.bytes = !(b & 1) ? 1 : (op_bytes == 8) ? 4 : op_bytes;
2057 dst.mem.seg = x86_seg_es;
2058 dst.mem.off = truncate_ea(_regs.edi);
2059 fail_if(ops->read_io == NULL);
2060 if ( (rc = ops->read_io((uint16_t)_regs.edx, dst.bytes,
2061 &dst.val, ctxt)) != 0 )
2062 goto done;
2063 register_address_increment(
2064 _regs.edi, (_regs.eflags & EFLG_DF) ? -dst.bytes : dst.bytes);
2065 break;
2067 case 0x6e ... 0x6f: /* outs %esi,%dx */
2068 handle_rep_prefix();
2069 generate_exception_if(!mode_iopl(), EXC_GP);
2070 dst.bytes = !(b & 1) ? 1 : (op_bytes == 8) ? 4 : op_bytes;
2071 if ( (rc = ops->read(ea.mem.seg, truncate_ea(_regs.esi),
2072 &dst.val, dst.bytes, ctxt)) != 0 )
2073 goto done;
2074 fail_if(ops->write_io == NULL);
2075 if ( (rc = ops->write_io((uint16_t)_regs.edx, dst.bytes,
2076 dst.val, ctxt)) != 0 )
2077 goto done;
2078 register_address_increment(
2079 _regs.esi, (_regs.eflags & EFLG_DF) ? -dst.bytes : dst.bytes);
2080 break;
2082 case 0x70 ... 0x7f: /* jcc (short) */ {
2083 int rel = insn_fetch_type(int8_t);
2084 if ( test_cc(b, _regs.eflags) )
2085 jmp_rel(rel);
2086 break;
2089 case 0x90: /* nop / xchg %%r8,%%rax */
2090 if ( !(rex_prefix & 1) )
2091 break; /* nop */
2093 case 0x91 ... 0x97: /* xchg reg,%%rax */
2094 src.type = dst.type = OP_REG;
2095 src.bytes = dst.bytes = op_bytes;
2096 src.reg = (unsigned long *)&_regs.eax;
2097 src.val = *src.reg;
2098 dst.reg = decode_register(
2099 (b & 7) | ((rex_prefix & 1) << 3), &_regs, 0);
2100 dst.val = *dst.reg;
2101 goto xchg;
2103 case 0x98: /* cbw/cwde/cdqe */
2104 switch ( op_bytes )
2106 case 2: *(int16_t *)&_regs.eax = (int8_t)_regs.eax; break; /* cbw */
2107 case 4: _regs.eax = (uint32_t)(int16_t)_regs.eax; break; /* cwde */
2108 case 8: _regs.eax = (int32_t)_regs.eax; break; /* cdqe */
2110 break;
2112 case 0x99: /* cwd/cdq/cqo */
2113 switch ( op_bytes )
2115 case 2:
2116 *(int16_t *)&_regs.edx = ((int16_t)_regs.eax < 0) ? -1 : 0;
2117 break;
2118 case 4:
2119 _regs.edx = (uint32_t)(((int32_t)_regs.eax < 0) ? -1 : 0);
2120 break;
2121 case 8:
2122 _regs.edx = (_regs.eax < 0) ? -1 : 0;
2123 break;
2125 break;
2127 case 0x9a: /* call (far, absolute) */ {
2128 struct segment_register reg;
2129 uint16_t sel;
2130 uint32_t eip;
2132 fail_if(ops->read_segment == NULL);
2133 generate_exception_if(mode_64bit(), EXC_UD);
2135 eip = insn_fetch_bytes(op_bytes);
2136 sel = insn_fetch_type(uint16_t);
2138 if ( (rc = ops->read_segment(x86_seg_cs, &reg, ctxt)) ||
2139 (rc = ops->write(x86_seg_ss, sp_pre_dec(op_bytes),
2140 reg.sel, op_bytes, ctxt)) ||
2141 (rc = ops->write(x86_seg_ss, sp_pre_dec(op_bytes),
2142 _regs.eip, op_bytes, ctxt)) )
2143 goto done;
2145 if ( (rc = load_seg(x86_seg_cs, sel, ctxt, ops)) != 0 )
2146 goto done;
2147 _regs.eip = eip;
2148 break;
2151 case 0x9c: /* pushf */
2152 src.val = _regs.eflags;
2153 goto push;
2155 case 0x9d: /* popf */ {
2156 uint32_t mask = EFLG_VIP | EFLG_VIF | EFLG_VM;
2157 if ( !mode_iopl() )
2158 mask |= EFLG_IOPL;
2159 fail_if(ops->write_rflags == NULL);
2160 /* 64-bit mode: POP defaults to a 64-bit operand. */
2161 if ( mode_64bit() && (op_bytes == 4) )
2162 op_bytes = 8;
2163 if ( (rc = ops->read(x86_seg_ss, sp_post_inc(op_bytes),
2164 &dst.val, op_bytes, ctxt)) != 0 )
2165 goto done;
2166 if ( op_bytes == 2 )
2167 dst.val = (uint16_t)dst.val | (_regs.eflags & 0xffff0000u);
2168 dst.val &= 0x257fd5;
2169 _regs.eflags &= mask;
2170 _regs.eflags |= (uint32_t)(dst.val & ~mask) | 0x02;
2171 if ( (rc = ops->write_rflags(_regs.eflags, ctxt)) != 0 )
2172 goto done;
2173 break;
2176 case 0x9e: /* sahf */
2177 *(uint8_t *)_regs.eflags = (((uint8_t *)&_regs.eax)[1] & 0xd7) | 0x02;
2178 break;
2180 case 0x9f: /* lahf */
2181 ((uint8_t *)&_regs.eax)[1] = (_regs.eflags & 0xd7) | 0x02;
2182 break;
2184 case 0xa0 ... 0xa1: /* mov mem.offs,{%al,%ax,%eax,%rax} */
2185 /* Source EA is not encoded via ModRM. */
2186 dst.type = OP_REG;
2187 dst.reg = (unsigned long *)&_regs.eax;
2188 dst.bytes = (d & ByteOp) ? 1 : op_bytes;
2189 if ( (rc = ops->read(ea.mem.seg, insn_fetch_bytes(ad_bytes),
2190 &dst.val, dst.bytes, ctxt)) != 0 )
2191 goto done;
2192 break;
2194 case 0xa2 ... 0xa3: /* mov {%al,%ax,%eax,%rax},mem.offs */
2195 /* Destination EA is not encoded via ModRM. */
2196 dst.type = OP_MEM;
2197 dst.mem.seg = ea.mem.seg;
2198 dst.mem.off = insn_fetch_bytes(ad_bytes);
2199 dst.bytes = (d & ByteOp) ? 1 : op_bytes;
2200 dst.val = (unsigned long)_regs.eax;
2201 break;
2203 case 0xa4 ... 0xa5: /* movs */
2204 handle_rep_prefix();
2205 dst.type = OP_MEM;
2206 dst.bytes = (d & ByteOp) ? 1 : op_bytes;
2207 dst.mem.seg = x86_seg_es;
2208 dst.mem.off = truncate_ea(_regs.edi);
2209 if ( (rc = ops->read(ea.mem.seg, truncate_ea(_regs.esi),
2210 &dst.val, dst.bytes, ctxt)) != 0 )
2211 goto done;
2212 register_address_increment(
2213 _regs.esi, (_regs.eflags & EFLG_DF) ? -dst.bytes : dst.bytes);
2214 register_address_increment(
2215 _regs.edi, (_regs.eflags & EFLG_DF) ? -dst.bytes : dst.bytes);
2216 break;
2218 case 0xa6 ... 0xa7: /* cmps */ {
2219 unsigned long next_eip = _regs.eip;
2220 handle_rep_prefix();
2221 src.bytes = dst.bytes = (d & ByteOp) ? 1 : op_bytes;
2222 if ( (rc = ops->read(ea.mem.seg, truncate_ea(_regs.esi),
2223 &dst.val, dst.bytes, ctxt)) ||
2224 (rc = ops->read(x86_seg_es, truncate_ea(_regs.edi),
2225 &src.val, src.bytes, ctxt)) )
2226 goto done;
2227 register_address_increment(
2228 _regs.esi, (_regs.eflags & EFLG_DF) ? -dst.bytes : dst.bytes);
2229 register_address_increment(
2230 _regs.edi, (_regs.eflags & EFLG_DF) ? -src.bytes : src.bytes);
2231 /* cmp: dst - src ==> src=*%%edi,dst=*%%esi ==> *%%esi - *%%edi */
2232 emulate_2op_SrcV("cmp", src, dst, _regs.eflags);
2233 if ( ((rep_prefix == REPE_PREFIX) && !(_regs.eflags & EFLG_ZF)) ||
2234 ((rep_prefix == REPNE_PREFIX) && (_regs.eflags & EFLG_ZF)) )
2235 _regs.eip = next_eip;
2236 break;
2239 case 0xaa ... 0xab: /* stos */
2240 handle_rep_prefix();
2241 dst.type = OP_MEM;
2242 dst.bytes = (d & ByteOp) ? 1 : op_bytes;
2243 dst.mem.seg = x86_seg_es;
2244 dst.mem.off = truncate_ea(_regs.edi);
2245 dst.val = _regs.eax;
2246 register_address_increment(
2247 _regs.edi, (_regs.eflags & EFLG_DF) ? -dst.bytes : dst.bytes);
2248 break;
2250 case 0xac ... 0xad: /* lods */
2251 handle_rep_prefix();
2252 dst.type = OP_REG;
2253 dst.bytes = (d & ByteOp) ? 1 : op_bytes;
2254 dst.reg = (unsigned long *)&_regs.eax;
2255 if ( (rc = ops->read(ea.mem.seg, truncate_ea(_regs.esi),
2256 &dst.val, dst.bytes, ctxt)) != 0 )
2257 goto done;
2258 register_address_increment(
2259 _regs.esi, (_regs.eflags & EFLG_DF) ? -dst.bytes : dst.bytes);
2260 break;
2262 case 0xae ... 0xaf: /* scas */ {
2263 unsigned long next_eip = _regs.eip;
2264 handle_rep_prefix();
2265 src.bytes = dst.bytes = (d & ByteOp) ? 1 : op_bytes;
2266 dst.val = _regs.eax;
2267 if ( (rc = ops->read(x86_seg_es, truncate_ea(_regs.edi),
2268 &src.val, src.bytes, ctxt)) != 0 )
2269 goto done;
2270 register_address_increment(
2271 _regs.edi, (_regs.eflags & EFLG_DF) ? -src.bytes : src.bytes);
2272 /* cmp: dst - src ==> src=*%%edi,dst=%%eax ==> %%eax - *%%edi */
2273 emulate_2op_SrcV("cmp", src, dst, _regs.eflags);
2274 if ( ((rep_prefix == REPE_PREFIX) && !(_regs.eflags & EFLG_ZF)) ||
2275 ((rep_prefix == REPNE_PREFIX) && (_regs.eflags & EFLG_ZF)) )
2276 _regs.eip = next_eip;
2277 break;
2280 case 0xc2: /* ret imm16 (near) */
2281 case 0xc3: /* ret (near) */ {
2282 int offset = (b == 0xc2) ? insn_fetch_type(uint16_t) : 0;
2283 op_bytes = mode_64bit() ? 8 : op_bytes;
2284 if ( (rc = ops->read(x86_seg_ss, sp_post_inc(op_bytes + offset),
2285 &dst.val, op_bytes, ctxt)) != 0 )
2286 goto done;
2287 _regs.eip = dst.val;
2288 break;
2291 case 0xc8: /* enter imm16,imm8 */ {
2292 uint16_t size = insn_fetch_type(uint16_t);
2293 uint8_t depth = insn_fetch_type(uint8_t) & 31;
2294 int i;
2296 dst.type = OP_REG;
2297 dst.bytes = (mode_64bit() && (op_bytes == 4)) ? 8 : op_bytes;
2298 dst.reg = (unsigned long *)&_regs.ebp;
2299 if ( (rc = ops->write(x86_seg_ss, sp_pre_dec(dst.bytes),
2300 _regs.ebp, dst.bytes, ctxt)) )
2301 goto done;
2302 dst.val = _regs.esp;
2304 if ( depth > 0 )
2306 for ( i = 1; i < depth; i++ )
2308 unsigned long ebp, temp_data;
2309 ebp = truncate_word(_regs.ebp - i*dst.bytes, ctxt->sp_size/8);
2310 if ( (rc = ops->read(x86_seg_ss, ebp,
2311 &temp_data, dst.bytes, ctxt)) ||
2312 (rc = ops->write(x86_seg_ss, sp_pre_dec(dst.bytes),
2313 temp_data, dst.bytes, ctxt)) )
2314 goto done;
2316 if ( (rc = ops->write(x86_seg_ss, sp_pre_dec(dst.bytes),
2317 dst.val, dst.bytes, ctxt)) )
2318 goto done;
2321 sp_pre_dec(size);
2322 break;
2325 case 0xc9: /* leave */
2326 /* First writeback, to %%esp. */
2327 dst.type = OP_REG;
2328 dst.bytes = (mode_64bit() && (op_bytes == 4)) ? 8 : op_bytes;
2329 dst.reg = (unsigned long *)&_regs.esp;
2330 dst.val = _regs.ebp;
2332 /* Flush first writeback, since there is a second. */
2333 switch ( dst.bytes )
2335 case 1: *(uint8_t *)dst.reg = (uint8_t)dst.val; break;
2336 case 2: *(uint16_t *)dst.reg = (uint16_t)dst.val; break;
2337 case 4: *dst.reg = (uint32_t)dst.val; break; /* 64b: zero-ext */
2338 case 8: *dst.reg = dst.val; break;
2341 /* Second writeback, to %%ebp. */
2342 dst.reg = (unsigned long *)&_regs.ebp;
2343 if ( (rc = ops->read(x86_seg_ss, sp_post_inc(dst.bytes),
2344 &dst.val, dst.bytes, ctxt)) )
2345 goto done;
2346 break;
2348 case 0xca: /* ret imm16 (far) */
2349 case 0xcb: /* ret (far) */ {
2350 int offset = (b == 0xca) ? insn_fetch_type(uint16_t) : 0;
2351 op_bytes = mode_64bit() ? 8 : op_bytes;
2352 if ( (rc = ops->read(x86_seg_ss, sp_post_inc(op_bytes),
2353 &dst.val, op_bytes, ctxt)) ||
2354 (rc = ops->read(x86_seg_ss, sp_post_inc(op_bytes + offset),
2355 &src.val, op_bytes, ctxt)) ||
2356 (rc = load_seg(x86_seg_cs, (uint16_t)src.val, ctxt, ops)) )
2357 goto done;
2358 _regs.eip = dst.val;
2359 break;
2362 case 0xcc: /* int3 */
2363 src.val = EXC_BP;
2364 goto swint;
2366 case 0xcd: /* int imm8 */
2367 src.val = insn_fetch_type(uint8_t);
2368 swint:
2369 fail_if(ops->inject_sw_interrupt == NULL);
2370 rc = ops->inject_sw_interrupt(src.val, _regs.eip - ctxt->regs->eip,
2371 ctxt) ? : X86EMUL_EXCEPTION;
2372 goto done;
2374 case 0xce: /* into */
2375 generate_exception_if(mode_64bit(), EXC_UD);
2376 if ( !(_regs.eflags & EFLG_OF) )
2377 break;
2378 src.val = EXC_OF;
2379 goto swint;
2381 case 0xcf: /* iret */ {
2382 unsigned long cs, eip, eflags;
2383 uint32_t mask = EFLG_VIP | EFLG_VIF | EFLG_VM;
2384 if ( !mode_iopl() )
2385 mask |= EFLG_IOPL;
2386 fail_if(!in_realmode(ctxt, ops));
2387 fail_if(ops->write_rflags == NULL);
2388 if ( (rc = ops->read(x86_seg_ss, sp_post_inc(op_bytes),
2389 &eip, op_bytes, ctxt)) ||
2390 (rc = ops->read(x86_seg_ss, sp_post_inc(op_bytes),
2391 &cs, op_bytes, ctxt)) ||
2392 (rc = ops->read(x86_seg_ss, sp_post_inc(op_bytes),
2393 &eflags, op_bytes, ctxt)) )
2394 goto done;
2395 if ( op_bytes == 2 )
2396 eflags = (uint16_t)eflags | (_regs.eflags & 0xffff0000u);
2397 eflags &= 0x257fd5;
2398 _regs.eflags &= mask;
2399 _regs.eflags |= (uint32_t)(eflags & ~mask) | 0x02;
2400 if ( (rc = ops->write_rflags(_regs.eflags, ctxt)) != 0 )
2401 goto done;
2402 _regs.eip = eip;
2403 if ( (rc = load_seg(x86_seg_cs, (uint16_t)cs, ctxt, ops)) != 0 )
2404 goto done;
2405 break;
2408 case 0xd4: /* aam */ {
2409 unsigned int base = insn_fetch_type(uint8_t);
2410 uint8_t al = _regs.eax;
2411 generate_exception_if(mode_64bit(), EXC_UD);
2412 generate_exception_if(base == 0, EXC_DE);
2413 *(uint16_t *)&_regs.eax = ((al / base) << 8) | (al % base);
2414 _regs.eflags &= ~(EFLG_SF|EFLG_ZF|EFLG_PF);
2415 _regs.eflags |= ((uint8_t)_regs.eax == 0) ? EFLG_ZF : 0;
2416 _regs.eflags |= (( int8_t)_regs.eax < 0) ? EFLG_SF : 0;
2417 _regs.eflags |= even_parity(_regs.eax) ? EFLG_PF : 0;
2418 break;
2421 case 0xd5: /* aad */ {
2422 unsigned int base = insn_fetch_type(uint8_t);
2423 uint16_t ax = _regs.eax;
2424 generate_exception_if(mode_64bit(), EXC_UD);
2425 *(uint16_t *)&_regs.eax = (uint8_t)(ax + ((ax >> 8) * base));
2426 _regs.eflags &= ~(EFLG_SF|EFLG_ZF|EFLG_PF);
2427 _regs.eflags |= ((uint8_t)_regs.eax == 0) ? EFLG_ZF : 0;
2428 _regs.eflags |= (( int8_t)_regs.eax < 0) ? EFLG_SF : 0;
2429 _regs.eflags |= even_parity(_regs.eax) ? EFLG_PF : 0;
2430 break;
2433 case 0xd6: /* salc */
2434 generate_exception_if(mode_64bit(), EXC_UD);
2435 *(uint8_t *)&_regs.eax = (_regs.eflags & EFLG_CF) ? 0xff : 0x00;
2436 break;
2438 case 0xd7: /* xlat */ {
2439 unsigned long al = (uint8_t)_regs.eax;
2440 if ( (rc = ops->read(ea.mem.seg, truncate_ea(_regs.ebx + al),
2441 &al, 1, ctxt)) != 0 )
2442 goto done;
2443 *(uint8_t *)&_regs.eax = al;
2444 break;
2447 case 0xe0 ... 0xe2: /* loop{,z,nz} */ {
2448 int rel = insn_fetch_type(int8_t);
2449 int do_jmp = !(_regs.eflags & EFLG_ZF); /* loopnz */
2450 if ( b == 0xe1 )
2451 do_jmp = !do_jmp; /* loopz */
2452 else if ( b == 0xe2 )
2453 do_jmp = 1; /* loop */
2454 switch ( ad_bytes )
2456 case 2:
2457 do_jmp &= --(*(uint16_t *)&_regs.ecx) != 0;
2458 break;
2459 case 4:
2460 do_jmp &= --(*(uint32_t *)&_regs.ecx) != 0;
2461 _regs.ecx = (uint32_t)_regs.ecx; /* zero extend in x86/64 mode */
2462 break;
2463 default: /* case 8: */
2464 do_jmp &= --_regs.ecx != 0;
2465 break;
2467 if ( do_jmp )
2468 jmp_rel(rel);
2469 break;
2472 case 0xe3: /* jcxz/jecxz (short) */ {
2473 int rel = insn_fetch_type(int8_t);
2474 if ( (ad_bytes == 2) ? !(uint16_t)_regs.ecx :
2475 (ad_bytes == 4) ? !(uint32_t)_regs.ecx : !_regs.ecx )
2476 jmp_rel(rel);
2477 break;
2480 case 0xe4: /* in imm8,%al */
2481 case 0xe5: /* in imm8,%eax */
2482 case 0xe6: /* out %al,imm8 */
2483 case 0xe7: /* out %eax,imm8 */
2484 case 0xec: /* in %dx,%al */
2485 case 0xed: /* in %dx,%eax */
2486 case 0xee: /* out %al,%dx */
2487 case 0xef: /* out %eax,%dx */ {
2488 unsigned int port = ((b < 0xe8)
2489 ? insn_fetch_type(uint8_t)
2490 : (uint16_t)_regs.edx);
2491 generate_exception_if(!mode_iopl(), EXC_GP);
2492 op_bytes = !(b & 1) ? 1 : (op_bytes == 8) ? 4 : op_bytes;
2493 if ( b & 2 )
2495 /* out */
2496 fail_if(ops->write_io == NULL);
2497 rc = ops->write_io(port, op_bytes, _regs.eax, ctxt);
2500 else
2502 /* in */
2503 dst.type = OP_REG;
2504 dst.bytes = op_bytes;
2505 dst.reg = (unsigned long *)&_regs.eax;
2506 fail_if(ops->read_io == NULL);
2507 rc = ops->read_io(port, dst.bytes, &dst.val, ctxt);
2509 if ( rc != 0 )
2510 goto done;
2511 break;
2514 case 0xe8: /* call (near) */ {
2515 int rel = (((op_bytes == 2) && !mode_64bit())
2516 ? (int32_t)insn_fetch_type(int16_t)
2517 : insn_fetch_type(int32_t));
2518 op_bytes = mode_64bit() ? 8 : op_bytes;
2519 src.val = _regs.eip;
2520 jmp_rel(rel);
2521 goto push;
2524 case 0xe9: /* jmp (near) */ {
2525 int rel = (((op_bytes == 2) && !mode_64bit())
2526 ? (int32_t)insn_fetch_type(int16_t)
2527 : insn_fetch_type(int32_t));
2528 jmp_rel(rel);
2529 break;
2532 case 0xea: /* jmp (far, absolute) */ {
2533 uint16_t sel;
2534 uint32_t eip;
2535 generate_exception_if(mode_64bit(), EXC_UD);
2536 eip = insn_fetch_bytes(op_bytes);
2537 sel = insn_fetch_type(uint16_t);
2538 if ( (rc = load_seg(x86_seg_cs, sel, ctxt, ops)) != 0 )
2539 goto done;
2540 _regs.eip = eip;
2541 break;
2544 case 0xeb: /* jmp (short) */
2545 jmp_rel(insn_fetch_type(int8_t));
2546 break;
2548 case 0xf1: /* int1 (icebp) */
2549 src.val = EXC_DB;
2550 goto swint;
2552 case 0xf4: /* hlt */
2553 fail_if(ops->hlt == NULL);
2554 if ( (rc = ops->hlt(ctxt)) != 0 )
2555 goto done;
2556 break;
2558 case 0xf5: /* cmc */
2559 _regs.eflags ^= EFLG_CF;
2560 break;
2562 case 0xf8: /* clc */
2563 _regs.eflags &= ~EFLG_CF;
2564 break;
2566 case 0xf9: /* stc */
2567 _regs.eflags |= EFLG_CF;
2568 break;
2570 case 0xfa: /* cli */
2571 case 0xfb: /* sti */
2572 generate_exception_if(!mode_iopl(), EXC_GP);
2573 fail_if(ops->write_rflags == NULL);
2574 _regs.eflags &= ~EFLG_IF;
2575 if ( b == 0xfb ) /* sti */
2576 _regs.eflags |= EFLG_IF;
2577 if ( (rc = ops->write_rflags(_regs.eflags, ctxt)) != 0 )
2578 goto done;
2579 break;
2581 case 0xfc: /* cld */
2582 _regs.eflags &= ~EFLG_DF;
2583 break;
2585 case 0xfd: /* std */
2586 _regs.eflags |= EFLG_DF;
2587 break;
2589 goto writeback;
2591 twobyte_insn:
2592 switch ( b )
2594 case 0x40 ... 0x4f: /* cmovcc */
2595 dst.val = src.val;
2596 if ( !test_cc(b, _regs.eflags) )
2597 dst.type = OP_NONE;
2598 break;
2600 case 0x90 ... 0x9f: /* setcc */
2601 dst.val = test_cc(b, _regs.eflags);
2602 break;
2604 case 0xb0 ... 0xb1: /* cmpxchg */
2605 /* Save real source value, then compare EAX against destination. */
2606 src.orig_val = src.val;
2607 src.val = _regs.eax;
2608 emulate_2op_SrcV("cmp", src, dst, _regs.eflags);
2609 /* Always write back. The question is: where to? */
2610 d |= Mov;
2611 if ( _regs.eflags & EFLG_ZF )
2613 /* Success: write back to memory. */
2614 dst.val = src.orig_val;
2616 else
2618 /* Failure: write the value we saw to EAX. */
2619 dst.type = OP_REG;
2620 dst.reg = (unsigned long *)&_regs.eax;
2622 break;
2624 case 0xa3: bt: /* bt */
2625 emulate_2op_SrcV_nobyte("bt", src, dst, _regs.eflags);
2626 break;
2628 case 0xa4: /* shld imm8,r,r/m */
2629 case 0xa5: /* shld %%cl,r,r/m */
2630 case 0xac: /* shrd imm8,r,r/m */
2631 case 0xad: /* shrd %%cl,r,r/m */ {
2632 uint8_t shift, width = dst.bytes << 3;
2633 shift = (b & 1) ? (uint8_t)_regs.ecx : insn_fetch_type(uint8_t);
2634 if ( (shift &= width - 1) == 0 )
2635 break;
2636 dst.orig_val = truncate_word(dst.val, dst.bytes);
2637 dst.val = ((shift == width) ? src.val :
2638 (b & 8) ?
2639 /* shrd */
2640 ((dst.orig_val >> shift) |
2641 truncate_word(src.val << (width - shift), dst.bytes)) :
2642 /* shld */
2643 ((dst.orig_val << shift) |
2644 ((src.val >> (width - shift)) & ((1ull << shift) - 1))));
2645 dst.val = truncate_word(dst.val, dst.bytes);
2646 _regs.eflags &= ~(EFLG_OF|EFLG_SF|EFLG_ZF|EFLG_PF|EFLG_CF);
2647 if ( (dst.val >> ((b & 8) ? (shift - 1) : (width - shift))) & 1 )
2648 _regs.eflags |= EFLG_CF;
2649 if ( ((dst.val ^ dst.orig_val) >> (width - 1)) & 1 )
2650 _regs.eflags |= EFLG_OF;
2651 _regs.eflags |= ((dst.val >> (width - 1)) & 1) ? EFLG_SF : 0;
2652 _regs.eflags |= (dst.val == 0) ? EFLG_ZF : 0;
2653 _regs.eflags |= even_parity(dst.val) ? EFLG_PF : 0;
2654 break;
2657 case 0xb3: btr: /* btr */
2658 emulate_2op_SrcV_nobyte("btr", src, dst, _regs.eflags);
2659 break;
2661 case 0xab: bts: /* bts */
2662 emulate_2op_SrcV_nobyte("bts", src, dst, _regs.eflags);
2663 break;
2665 case 0xaf: /* imul */
2666 _regs.eflags &= ~(EFLG_OF|EFLG_CF);
2667 switch ( dst.bytes )
2669 case 2:
2670 dst.val = ((uint32_t)(int16_t)src.val *
2671 (uint32_t)(int16_t)dst.val);
2672 if ( (int16_t)dst.val != (uint32_t)dst.val )
2673 _regs.eflags |= EFLG_OF|EFLG_CF;
2674 break;
2675 #ifdef __x86_64__
2676 case 4:
2677 dst.val = ((uint64_t)(int32_t)src.val *
2678 (uint64_t)(int32_t)dst.val);
2679 if ( (int32_t)dst.val != dst.val )
2680 _regs.eflags |= EFLG_OF|EFLG_CF;
2681 break;
2682 #endif
2683 default: {
2684 unsigned long m[2] = { src.val, dst.val };
2685 if ( imul_dbl(m) )
2686 _regs.eflags |= EFLG_OF|EFLG_CF;
2687 dst.val = m[0];
2688 break;
2691 break;
2693 case 0xb2: /* lss */
2694 dst.val = x86_seg_ss;
2695 goto les;
2697 case 0xb4: /* lfs */
2698 dst.val = x86_seg_fs;
2699 goto les;
2701 case 0xb5: /* lgs */
2702 dst.val = x86_seg_gs;
2703 goto les;
2705 case 0xb6: /* movzx rm8,r{16,32,64} */
2706 /* Recompute DstReg as we may have decoded AH/BH/CH/DH. */
2707 dst.reg = decode_register(modrm_reg, &_regs, 0);
2708 dst.bytes = op_bytes;
2709 dst.val = (uint8_t)src.val;
2710 break;
2712 case 0xbc: /* bsf */ {
2713 int zf;
2714 asm ( "bsf %2,%0; setz %b1"
2715 : "=r" (dst.val), "=q" (zf)
2716 : "r" (src.val), "1" (0) );
2717 _regs.eflags &= ~EFLG_ZF;
2718 _regs.eflags |= zf ? EFLG_ZF : 0;
2719 break;
2722 case 0xbd: /* bsr */ {
2723 int zf;
2724 asm ( "bsr %2,%0; setz %b1"
2725 : "=r" (dst.val), "=q" (zf)
2726 : "r" (src.val), "1" (0) );
2727 _regs.eflags &= ~EFLG_ZF;
2728 _regs.eflags |= zf ? EFLG_ZF : 0;
2729 break;
2732 case 0xb7: /* movzx rm16,r{16,32,64} */
2733 dst.val = (uint16_t)src.val;
2734 break;
2736 case 0xbb: btc: /* btc */
2737 emulate_2op_SrcV_nobyte("btc", src, dst, _regs.eflags);
2738 break;
2740 case 0xba: /* Grp8 */
2741 switch ( modrm_reg & 7 )
2743 case 4: goto bt;
2744 case 5: goto bts;
2745 case 6: goto btr;
2746 case 7: goto btc;
2747 default: generate_exception_if(1, EXC_UD);
2749 break;
2751 case 0xbe: /* movsx rm8,r{16,32,64} */
2752 /* Recompute DstReg as we may have decoded AH/BH/CH/DH. */
2753 dst.reg = decode_register(modrm_reg, &_regs, 0);
2754 dst.bytes = op_bytes;
2755 dst.val = (int8_t)src.val;
2756 break;
2758 case 0xbf: /* movsx rm16,r{16,32,64} */
2759 dst.val = (int16_t)src.val;
2760 break;
2762 case 0xc0 ... 0xc1: /* xadd */
2763 /* Write back the register source. */
2764 switch ( dst.bytes )
2766 case 1: *(uint8_t *)src.reg = (uint8_t)dst.val; break;
2767 case 2: *(uint16_t *)src.reg = (uint16_t)dst.val; break;
2768 case 4: *src.reg = (uint32_t)dst.val; break; /* 64b reg: zero-extend */
2769 case 8: *src.reg = dst.val; break;
2771 goto add;
2773 goto writeback;
2775 twobyte_special_insn:
2776 switch ( b )
2778 case 0x01: /* Grp7 */ {
2779 struct segment_register reg;
2780 unsigned long base, limit, cr0, cr0w;
2782 switch ( modrm_reg & 7 )
2784 case 0: /* sgdt */
2785 case 1: /* sidt */
2786 generate_exception_if(ea.type != OP_MEM, EXC_UD);
2787 fail_if(ops->read_segment == NULL);
2788 if ( (rc = ops->read_segment((modrm_reg & 1) ?
2789 x86_seg_idtr : x86_seg_gdtr,
2790 &reg, ctxt)) )
2791 goto done;
2792 if ( op_bytes == 2 )
2793 reg.base &= 0xffffff;
2794 if ( (rc = ops->write(ea.mem.seg, ea.mem.off+0,
2795 reg.limit, 2, ctxt)) ||
2796 (rc = ops->write(ea.mem.seg, ea.mem.off+2,
2797 reg.base, mode_64bit() ? 8 : 4, ctxt)) )
2798 goto done;
2799 break;
2800 case 2: /* lgdt */
2801 case 3: /* lidt */
2802 generate_exception_if(ea.type != OP_MEM, EXC_UD);
2803 fail_if(ops->write_segment == NULL);
2804 memset(&reg, 0, sizeof(reg));
2805 if ( (rc = ops->read(ea.mem.seg, ea.mem.off+0,
2806 &limit, 2, ctxt)) ||
2807 (rc = ops->read(ea.mem.seg, ea.mem.off+2,
2808 &base, mode_64bit() ? 8 : 4, ctxt)) )
2809 goto done;
2810 reg.base = base;
2811 reg.limit = limit;
2812 if ( op_bytes == 2 )
2813 reg.base &= 0xffffff;
2814 if ( (rc = ops->write_segment((modrm_reg & 1) ?
2815 x86_seg_idtr : x86_seg_gdtr,
2816 &reg, ctxt)) )
2817 goto done;
2818 break;
2819 case 4: /* smsw */
2820 ea.bytes = 2;
2821 dst = ea;
2822 fail_if(ops->read_cr == NULL);
2823 if ( (rc = ops->read_cr(0, &dst.val, ctxt)) )
2824 goto done;
2825 d |= Mov; /* force writeback */
2826 break;
2827 case 6: /* lmsw */
2828 fail_if(ops->read_cr == NULL);
2829 fail_if(ops->write_cr == NULL);
2830 if ( (rc = ops->read_cr(0, &cr0, ctxt)) )
2831 goto done;
2832 if ( ea.type == OP_REG )
2833 cr0w = *ea.reg;
2834 else if ( (rc = ops->read(ea.mem.seg, ea.mem.off,
2835 &cr0w, 2, ctxt)) )
2836 goto done;
2837 cr0 &= 0xffff0000;
2838 cr0 |= (uint16_t)cr0w;
2839 if ( (rc = ops->write_cr(0, cr0, ctxt)) )
2840 goto done;
2841 break;
2842 default:
2843 goto cannot_emulate;
2845 break;
2848 case 0x06: /* clts */
2849 generate_exception_if(!mode_ring0(), EXC_GP);
2850 fail_if((ops->read_cr == NULL) || (ops->write_cr == NULL));
2851 if ( (rc = ops->read_cr(0, &dst.val, ctxt)) ||
2852 (rc = ops->write_cr(0, dst.val&~8, ctxt)) )
2853 goto done;
2854 break;
2856 case 0x08: /* invd */
2857 case 0x09: /* wbinvd */
2858 generate_exception_if(!mode_ring0(), EXC_GP);
2859 fail_if(ops->wbinvd == NULL);
2860 if ( (rc = ops->wbinvd(ctxt)) != 0 )
2861 goto done;
2862 break;
2864 case 0x0d: /* GrpP (prefetch) */
2865 case 0x18: /* Grp16 (prefetch/nop) */
2866 case 0x19 ... 0x1f: /* nop (amd-defined) */
2867 break;
2869 case 0x20: /* mov cr,reg */
2870 case 0x21: /* mov dr,reg */
2871 case 0x22: /* mov reg,cr */
2872 case 0x23: /* mov reg,dr */
2873 generate_exception_if(!mode_ring0(), EXC_GP);
2874 modrm_rm |= (rex_prefix & 1) << 3;
2875 modrm_reg |= lock_prefix << 3;
2876 if ( b & 2 )
2878 /* Write to CR/DR. */
2879 src.val = *(unsigned long *)decode_register(modrm_rm, &_regs, 0);
2880 if ( !mode_64bit() )
2881 src.val = (uint32_t)src.val;
2882 rc = ((b & 1)
2883 ? (ops->write_dr
2884 ? ops->write_dr(modrm_reg, src.val, ctxt)
2885 : X86EMUL_UNHANDLEABLE)
2886 : (ops->write_cr
2887 ? ops->write_cr(modrm_reg, src.val, ctxt)
2888 : X86EMUL_UNHANDLEABLE));
2890 else
2892 /* Read from CR/DR. */
2893 dst.type = OP_REG;
2894 dst.bytes = mode_64bit() ? 8 : 4;
2895 dst.reg = decode_register(modrm_rm, &_regs, 0);
2896 rc = ((b & 1)
2897 ? (ops->read_dr
2898 ? ops->read_dr(modrm_reg, &dst.val, ctxt)
2899 : X86EMUL_UNHANDLEABLE)
2900 : (ops->read_cr
2901 ? ops->read_cr(modrm_reg, &dst.val, ctxt)
2902 : X86EMUL_UNHANDLEABLE));
2904 if ( rc != 0 )
2905 goto done;
2906 break;
2908 case 0x30: /* wrmsr */ {
2909 uint64_t val = ((uint64_t)_regs.edx << 32) | (uint32_t)_regs.eax;
2910 generate_exception_if(!mode_ring0(), EXC_GP);
2911 fail_if(ops->write_msr == NULL);
2912 if ( (rc = ops->write_msr((uint32_t)_regs.ecx, val, ctxt)) != 0 )
2913 goto done;
2914 break;
2917 case 0x31: /* rdtsc */ {
2918 unsigned long cr4;
2919 uint64_t val;
2920 fail_if(ops->read_cr == NULL);
2921 if ( (rc = ops->read_cr(4, &cr4, ctxt)) )
2922 goto done;
2923 generate_exception_if((cr4 & CR4_TSD) && !mode_ring0(), EXC_GP);
2924 fail_if(ops->read_msr == NULL);
2925 if ( (rc = ops->read_msr(MSR_TSC, &val, ctxt)) != 0 )
2926 goto done;
2927 _regs.edx = (uint32_t)(val >> 32);
2928 _regs.eax = (uint32_t)(val >> 0);
2929 break;
2932 case 0x32: /* rdmsr */ {
2933 uint64_t val;
2934 generate_exception_if(!mode_ring0(), EXC_GP);
2935 fail_if(ops->read_msr == NULL);
2936 if ( (rc = ops->read_msr((uint32_t)_regs.ecx, &val, ctxt)) != 0 )
2937 goto done;
2938 _regs.edx = (uint32_t)(val >> 32);
2939 _regs.eax = (uint32_t)(val >> 0);
2940 break;
2943 case 0x80 ... 0x8f: /* jcc (near) */ {
2944 int rel = (((op_bytes == 2) && !mode_64bit())
2945 ? (int32_t)insn_fetch_type(int16_t)
2946 : insn_fetch_type(int32_t));
2947 if ( test_cc(b, _regs.eflags) )
2948 jmp_rel(rel);
2949 break;
2952 case 0xa0: /* push %%fs */
2953 src.val = x86_seg_fs;
2954 goto push_seg;
2956 case 0xa1: /* pop %%fs */
2957 src.val = x86_seg_fs;
2958 goto pop_seg;
2960 case 0xa2: /* cpuid */ {
2961 unsigned int eax = _regs.eax, ebx = _regs.ebx;
2962 unsigned int ecx = _regs.ecx, edx = _regs.edx;
2963 fail_if(ops->cpuid == NULL);
2964 if ( (rc = ops->cpuid(&eax, &ebx, &ecx, &edx, ctxt)) != 0 )
2965 goto done;
2966 _regs.eax = eax; _regs.ebx = ebx;
2967 _regs.ecx = ecx; _regs.edx = edx;
2968 break;
2971 case 0xa8: /* push %%gs */
2972 src.val = x86_seg_gs;
2973 goto push_seg;
2975 case 0xa9: /* pop %%gs */
2976 src.val = x86_seg_gs;
2977 goto pop_seg;
2979 case 0xc7: /* Grp9 (cmpxchg8b) */
2980 #if defined(__i386__)
2982 unsigned long old_lo, old_hi;
2983 generate_exception_if((modrm_reg & 7) != 1, EXC_UD);
2984 if ( (rc = ops->read(ea.mem.seg, ea.mem.off+0, &old_lo, 4, ctxt)) ||
2985 (rc = ops->read(ea.mem.seg, ea.mem.off+4, &old_hi, 4, ctxt)) )
2986 goto done;
2987 if ( (old_lo != _regs.eax) || (old_hi != _regs.edx) )
2989 _regs.eax = old_lo;
2990 _regs.edx = old_hi;
2991 _regs.eflags &= ~EFLG_ZF;
2993 else if ( ops->cmpxchg8b == NULL )
2995 rc = X86EMUL_UNHANDLEABLE;
2996 goto done;
2998 else
3000 if ( (rc = ops->cmpxchg8b(ea.mem.seg, ea.mem.off, old_lo, old_hi,
3001 _regs.ebx, _regs.ecx, ctxt)) != 0 )
3002 goto done;
3003 _regs.eflags |= EFLG_ZF;
3005 break;
3007 #elif defined(__x86_64__)
3009 unsigned long old, new;
3010 generate_exception_if((modrm_reg & 7) != 1, EXC_UD);
3011 if ( (rc = ops->read(ea.mem.seg, ea.mem.off, &old, 8, ctxt)) != 0 )
3012 goto done;
3013 if ( ((uint32_t)(old>>0) != (uint32_t)_regs.eax) ||
3014 ((uint32_t)(old>>32) != (uint32_t)_regs.edx) )
3016 _regs.eax = (uint32_t)(old>>0);
3017 _regs.edx = (uint32_t)(old>>32);
3018 _regs.eflags &= ~EFLG_ZF;
3020 else
3022 new = (_regs.ecx<<32)|(uint32_t)_regs.ebx;
3023 if ( (rc = ops->cmpxchg(ea.mem.seg, ea.mem.off, old,
3024 new, 8, ctxt)) != 0 )
3025 goto done;
3026 _regs.eflags |= EFLG_ZF;
3028 break;
3030 #endif
3032 case 0xc8 ... 0xcf: /* bswap */
3033 dst.type = OP_REG;
3034 dst.reg = decode_register(
3035 (b & 7) | ((rex_prefix & 1) << 3), &_regs, 0);
3036 switch ( dst.bytes = op_bytes )
3038 default: /* case 2: */
3039 /* Undefined behaviour. Writes zero on all tested CPUs. */
3040 dst.val = 0;
3041 break;
3042 case 4:
3043 #ifdef __x86_64__
3044 asm ( "bswap %k0" : "=r" (dst.val) : "0" (*dst.reg) );
3045 break;
3046 case 8:
3047 #endif
3048 asm ( "bswap %0" : "=r" (dst.val) : "0" (*dst.reg) );
3049 break;
3051 break;
3053 goto writeback;
3055 cannot_emulate:
3056 #if 0
3057 gdprintk(XENLOG_DEBUG, "Instr:");
3058 for ( ea.mem.off = ctxt->regs->eip; ea.mem.off < _regs.eip; ea.mem.off++ )
3060 unsigned long x;
3061 ops->insn_fetch(x86_seg_cs, ea.mem.off, &x, 1, ctxt);
3062 printk(" %02x", (uint8_t)x);
3064 printk("\n");
3065 #endif
3066 return X86EMUL_UNHANDLEABLE;