xen/arch/x86/x86_emulate/x86_emulate.c @ 18629:9b227eb09263 (ia64/xen-unstable)

x86_emulate: Fix after decode changes. Valid opcode decode values must
be non-zero.

Signed-off-by: Keir Fraser <keir.fraser@citrix.com>
Date: Tue Oct 14 19:19:48 2008 +0100
1 /******************************************************************************
2 * x86_emulate.c
3 *
4 * Generic x86 (32-bit and 64-bit) instruction decoder and emulator.
5 *
6 * Copyright (c) 2005-2007 Keir Fraser
7 * Copyright (c) 2005-2007 XenSource Inc.
8 *
9 * This program is free software; you can redistribute it and/or modify
10 * it under the terms of the GNU General Public License as published by
11 * the Free Software Foundation; either version 2 of the License, or
12 * (at your option) any later version.
13 *
14 * This program is distributed in the hope that it will be useful,
15 * but WITHOUT ANY WARRANTY; without even the implied warranty of
16 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
17 * GNU General Public License for more details.
18 *
19 * You should have received a copy of the GNU General Public License
20 * along with this program; if not, write to the Free Software
21 * Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
22 */
24 /* Operand sizes: 8-bit operands or specified/overridden size. */
25 #define ByteOp (1<<0) /* 8-bit operands. */
26 /* Destination operand type. */
27 #define DstNone (0<<1) /* No destination operand. */
28 #define DstImplicit (0<<1) /* Destination operand is implicit in the opcode. */
29 #define DstBitBase (1<<1) /* Memory operand, bit string. */
30 #define DstReg (2<<1) /* Register operand. */
31 #define DstMem (3<<1) /* Memory operand. */
32 #define DstMask (3<<1)
33 /* Source operand type. */
34 #define SrcInvalid (0<<3) /* Unimplemented opcode. */
35 #define SrcNone (1<<3) /* No source operand. */
36 #define SrcImplicit (1<<3) /* Source operand is implicit in the opcode. */
37 #define SrcReg (2<<3) /* Register operand. */
38 #define SrcMem (3<<3) /* Memory operand. */
39 #define SrcMem16 (4<<3) /* Memory operand (16-bit). */
40 #define SrcImm (5<<3) /* Immediate operand. */
41 #define SrcImmByte (6<<3) /* 8-bit sign-extended immediate operand. */
42 #define SrcMask (7<<3)
43 /* Generic ModRM decode. */
44 #define ModRM (1<<6)
45 /* Destination is only written; never read. */
46 #define Mov (1<<7)
47 /* All operands are implicit in the opcode. */
48 #define ImplicitOps (DstImplicit|SrcImplicit)
50 static uint8_t opcode_table[256] = {
51 /* 0x00 - 0x07 */
52 ByteOp|DstMem|SrcReg|ModRM, DstMem|SrcReg|ModRM,
53 ByteOp|DstReg|SrcMem|ModRM, DstReg|SrcMem|ModRM,
54 ByteOp|DstReg|SrcImm, DstReg|SrcImm, ImplicitOps, ImplicitOps,
55 /* 0x08 - 0x0F */
56 ByteOp|DstMem|SrcReg|ModRM, DstMem|SrcReg|ModRM,
57 ByteOp|DstReg|SrcMem|ModRM, DstReg|SrcMem|ModRM,
58 ByteOp|DstReg|SrcImm, DstReg|SrcImm, ImplicitOps, 0,
59 /* 0x10 - 0x17 */
60 ByteOp|DstMem|SrcReg|ModRM, DstMem|SrcReg|ModRM,
61 ByteOp|DstReg|SrcMem|ModRM, DstReg|SrcMem|ModRM,
62 ByteOp|DstReg|SrcImm, DstReg|SrcImm, ImplicitOps, ImplicitOps,
63 /* 0x18 - 0x1F */
64 ByteOp|DstMem|SrcReg|ModRM, DstMem|SrcReg|ModRM,
65 ByteOp|DstReg|SrcMem|ModRM, DstReg|SrcMem|ModRM,
66 ByteOp|DstReg|SrcImm, DstReg|SrcImm, ImplicitOps, ImplicitOps,
67 /* 0x20 - 0x27 */
68 ByteOp|DstMem|SrcReg|ModRM, DstMem|SrcReg|ModRM,
69 ByteOp|DstReg|SrcMem|ModRM, DstReg|SrcMem|ModRM,
70 ByteOp|DstReg|SrcImm, DstReg|SrcImm, 0, ImplicitOps,
71 /* 0x28 - 0x2F */
72 ByteOp|DstMem|SrcReg|ModRM, DstMem|SrcReg|ModRM,
73 ByteOp|DstReg|SrcMem|ModRM, DstReg|SrcMem|ModRM,
74 ByteOp|DstReg|SrcImm, DstReg|SrcImm, 0, ImplicitOps,
75 /* 0x30 - 0x37 */
76 ByteOp|DstMem|SrcReg|ModRM, DstMem|SrcReg|ModRM,
77 ByteOp|DstReg|SrcMem|ModRM, DstReg|SrcMem|ModRM,
78 ByteOp|DstReg|SrcImm, DstReg|SrcImm, 0, ImplicitOps,
79 /* 0x38 - 0x3F */
80 ByteOp|DstMem|SrcReg|ModRM, DstMem|SrcReg|ModRM,
81 ByteOp|DstReg|SrcMem|ModRM, DstReg|SrcMem|ModRM,
82 ByteOp|DstReg|SrcImm, DstReg|SrcImm, 0, ImplicitOps,
83 /* 0x40 - 0x4F */
84 ImplicitOps, ImplicitOps, ImplicitOps, ImplicitOps,
85 ImplicitOps, ImplicitOps, ImplicitOps, ImplicitOps,
86 ImplicitOps, ImplicitOps, ImplicitOps, ImplicitOps,
87 ImplicitOps, ImplicitOps, ImplicitOps, ImplicitOps,
88 /* 0x50 - 0x5F */
89 ImplicitOps|Mov, ImplicitOps|Mov, ImplicitOps|Mov, ImplicitOps|Mov,
90 ImplicitOps|Mov, ImplicitOps|Mov, ImplicitOps|Mov, ImplicitOps|Mov,
91 ImplicitOps|Mov, ImplicitOps|Mov, ImplicitOps|Mov, ImplicitOps|Mov,
92 ImplicitOps|Mov, ImplicitOps|Mov, ImplicitOps|Mov, ImplicitOps|Mov,
93 /* 0x60 - 0x67 */
94 ImplicitOps, ImplicitOps, DstReg|SrcMem|ModRM, DstReg|SrcMem16|ModRM|Mov,
95 0, 0, 0, 0,
96 /* 0x68 - 0x6F */
97 ImplicitOps|Mov, DstReg|SrcImm|ModRM|Mov,
98 ImplicitOps|Mov, DstReg|SrcImmByte|ModRM|Mov,
99 ImplicitOps|Mov, ImplicitOps|Mov, ImplicitOps|Mov, ImplicitOps|Mov,
100 /* 0x70 - 0x77 */
101 ImplicitOps, ImplicitOps, ImplicitOps, ImplicitOps,
102 ImplicitOps, ImplicitOps, ImplicitOps, ImplicitOps,
103 /* 0x78 - 0x7F */
104 ImplicitOps, ImplicitOps, ImplicitOps, ImplicitOps,
105 ImplicitOps, ImplicitOps, ImplicitOps, ImplicitOps,
106 /* 0x80 - 0x87 */
107 ByteOp|DstMem|SrcImm|ModRM, DstMem|SrcImm|ModRM,
108 ByteOp|DstMem|SrcImm|ModRM, DstMem|SrcImmByte|ModRM,
109 ByteOp|DstMem|SrcReg|ModRM, DstMem|SrcReg|ModRM,
110 ByteOp|DstMem|SrcReg|ModRM, DstMem|SrcReg|ModRM,
111 /* 0x88 - 0x8F */
112 ByteOp|DstMem|SrcReg|ModRM|Mov, DstMem|SrcReg|ModRM|Mov,
113 ByteOp|DstReg|SrcMem|ModRM|Mov, DstReg|SrcMem|ModRM|Mov,
114 DstMem|SrcReg|ModRM|Mov, DstReg|SrcNone|ModRM,
115 DstReg|SrcMem|ModRM|Mov, DstMem|SrcNone|ModRM|Mov,
116 /* 0x90 - 0x97 */
117 ImplicitOps, ImplicitOps, ImplicitOps, ImplicitOps,
118 ImplicitOps, ImplicitOps, ImplicitOps, ImplicitOps,
119 /* 0x98 - 0x9F */
120 ImplicitOps, ImplicitOps, ImplicitOps, ImplicitOps,
121 ImplicitOps, ImplicitOps, ImplicitOps, ImplicitOps,
122 /* 0xA0 - 0xA7 */
123 ByteOp|ImplicitOps|Mov, ImplicitOps|Mov,
124 ByteOp|ImplicitOps|Mov, ImplicitOps|Mov,
125 ByteOp|ImplicitOps|Mov, ImplicitOps|Mov,
126 ByteOp|ImplicitOps, ImplicitOps,
127 /* 0xA8 - 0xAF */
128 ByteOp|DstReg|SrcImm, DstReg|SrcImm,
129 ByteOp|ImplicitOps|Mov, ImplicitOps|Mov,
130 ByteOp|ImplicitOps|Mov, ImplicitOps|Mov,
131 ByteOp|ImplicitOps, ImplicitOps,
132 /* 0xB0 - 0xB7 */
133 ByteOp|DstReg|SrcImm|Mov, ByteOp|DstReg|SrcImm|Mov,
134 ByteOp|DstReg|SrcImm|Mov, ByteOp|DstReg|SrcImm|Mov,
135 ByteOp|DstReg|SrcImm|Mov, ByteOp|DstReg|SrcImm|Mov,
136 ByteOp|DstReg|SrcImm|Mov, ByteOp|DstReg|SrcImm|Mov,
137 /* 0xB8 - 0xBF */
138 DstReg|SrcImm|Mov, DstReg|SrcImm|Mov, DstReg|SrcImm|Mov, DstReg|SrcImm|Mov,
139 DstReg|SrcImm|Mov, DstReg|SrcImm|Mov, DstReg|SrcImm|Mov, DstReg|SrcImm|Mov,
140 /* 0xC0 - 0xC7 */
141 ByteOp|DstMem|SrcImm|ModRM, DstMem|SrcImmByte|ModRM,
142 ImplicitOps, ImplicitOps,
143 DstReg|SrcMem|ModRM|Mov, DstReg|SrcMem|ModRM|Mov,
144 ByteOp|DstMem|SrcImm|ModRM|Mov, DstMem|SrcImm|ModRM|Mov,
145 /* 0xC8 - 0xCF */
146 ImplicitOps, ImplicitOps, ImplicitOps, ImplicitOps,
147 ImplicitOps, ImplicitOps, ImplicitOps, ImplicitOps,
148 /* 0xD0 - 0xD7 */
149 ByteOp|DstMem|SrcImplicit|ModRM, DstMem|SrcImplicit|ModRM,
150 ByteOp|DstMem|SrcImplicit|ModRM, DstMem|SrcImplicit|ModRM,
151 ImplicitOps, ImplicitOps, ImplicitOps, ImplicitOps,
152 /* 0xD8 - 0xDF */
153 ImplicitOps|ModRM|Mov, ImplicitOps|ModRM|Mov,
154 ImplicitOps|ModRM|Mov, ImplicitOps|ModRM|Mov,
155 ImplicitOps|ModRM|Mov, ImplicitOps|ModRM|Mov,
156 ImplicitOps|ModRM|Mov, ImplicitOps|ModRM|Mov,
157 /* 0xE0 - 0xE7 */
158 ImplicitOps, ImplicitOps, ImplicitOps, ImplicitOps,
159 ImplicitOps, ImplicitOps, ImplicitOps, ImplicitOps,
160 /* 0xE8 - 0xEF */
161 ImplicitOps, ImplicitOps, ImplicitOps, ImplicitOps,
162 ImplicitOps, ImplicitOps, ImplicitOps, ImplicitOps,
163 /* 0xF0 - 0xF7 */
164 0, ImplicitOps, 0, 0,
165 ImplicitOps, ImplicitOps,
166 ByteOp|DstMem|SrcNone|ModRM, DstMem|SrcNone|ModRM,
167 /* 0xF8 - 0xFF */
168 ImplicitOps, ImplicitOps, ImplicitOps, ImplicitOps,
169 ImplicitOps, ImplicitOps, ByteOp|DstMem|SrcNone|ModRM, DstMem|SrcNone|ModRM
170 };
172 static uint8_t twobyte_table[256] = {
173 /* 0x00 - 0x07 */
174 0, ImplicitOps|ModRM, 0, 0, 0, 0, ImplicitOps, 0,
175 /* 0x08 - 0x0F */
176 ImplicitOps, ImplicitOps, 0, 0, 0, ImplicitOps|ModRM, 0, 0,
177 /* 0x10 - 0x17 */
178 0, 0, 0, 0, 0, 0, 0, 0,
179 /* 0x18 - 0x1F */
180 ImplicitOps|ModRM, ImplicitOps|ModRM, ImplicitOps|ModRM, ImplicitOps|ModRM,
181 ImplicitOps|ModRM, ImplicitOps|ModRM, ImplicitOps|ModRM, ImplicitOps|ModRM,
182 /* 0x20 - 0x27 */
183 ImplicitOps|ModRM, ImplicitOps|ModRM, ImplicitOps|ModRM, ImplicitOps|ModRM,
184 0, 0, 0, 0,
185 /* 0x28 - 0x2F */
186 0, 0, 0, 0, 0, 0, 0, 0,
187 /* 0x30 - 0x37 */
188 ImplicitOps, ImplicitOps, ImplicitOps, 0, 0, 0, 0, 0,
189 /* 0x38 - 0x3F */
190 0, 0, 0, 0, 0, 0, 0, 0,
191 /* 0x40 - 0x47 */
192 DstReg|SrcMem|ModRM|Mov, DstReg|SrcMem|ModRM|Mov,
193 DstReg|SrcMem|ModRM|Mov, DstReg|SrcMem|ModRM|Mov,
194 DstReg|SrcMem|ModRM|Mov, DstReg|SrcMem|ModRM|Mov,
195 DstReg|SrcMem|ModRM|Mov, DstReg|SrcMem|ModRM|Mov,
196 /* 0x48 - 0x4F */
197 DstReg|SrcMem|ModRM|Mov, DstReg|SrcMem|ModRM|Mov,
198 DstReg|SrcMem|ModRM|Mov, DstReg|SrcMem|ModRM|Mov,
199 DstReg|SrcMem|ModRM|Mov, DstReg|SrcMem|ModRM|Mov,
200 DstReg|SrcMem|ModRM|Mov, DstReg|SrcMem|ModRM|Mov,
201 /* 0x50 - 0x5F */
202 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
203 /* 0x60 - 0x6F */
204 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, ImplicitOps|ModRM,
205 /* 0x70 - 0x7F */
206 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, ImplicitOps|ModRM,
207 /* 0x80 - 0x87 */
208 ImplicitOps, ImplicitOps, ImplicitOps, ImplicitOps,
209 ImplicitOps, ImplicitOps, ImplicitOps, ImplicitOps,
210 /* 0x88 - 0x8F */
211 ImplicitOps, ImplicitOps, ImplicitOps, ImplicitOps,
212 ImplicitOps, ImplicitOps, ImplicitOps, ImplicitOps,
213 /* 0x90 - 0x97 */
214 ByteOp|DstMem|SrcNone|ModRM|Mov, ByteOp|DstMem|SrcNone|ModRM|Mov,
215 ByteOp|DstMem|SrcNone|ModRM|Mov, ByteOp|DstMem|SrcNone|ModRM|Mov,
216 ByteOp|DstMem|SrcNone|ModRM|Mov, ByteOp|DstMem|SrcNone|ModRM|Mov,
217 ByteOp|DstMem|SrcNone|ModRM|Mov, ByteOp|DstMem|SrcNone|ModRM|Mov,
218 /* 0x98 - 0x9F */
219 ByteOp|DstMem|SrcNone|ModRM|Mov, ByteOp|DstMem|SrcNone|ModRM|Mov,
220 ByteOp|DstMem|SrcNone|ModRM|Mov, ByteOp|DstMem|SrcNone|ModRM|Mov,
221 ByteOp|DstMem|SrcNone|ModRM|Mov, ByteOp|DstMem|SrcNone|ModRM|Mov,
222 ByteOp|DstMem|SrcNone|ModRM|Mov, ByteOp|DstMem|SrcNone|ModRM|Mov,
223 /* 0xA0 - 0xA7 */
224 ImplicitOps, ImplicitOps, ImplicitOps, DstBitBase|SrcReg|ModRM,
225 DstMem|SrcReg|ModRM, DstMem|SrcReg|ModRM, 0, 0,
226 /* 0xA8 - 0xAF */
227 ImplicitOps, ImplicitOps, 0, DstBitBase|SrcReg|ModRM,
228 DstMem|SrcReg|ModRM, DstMem|SrcReg|ModRM, 0, DstReg|SrcMem|ModRM,
229 /* 0xB0 - 0xB7 */
230 ByteOp|DstMem|SrcReg|ModRM, DstMem|SrcReg|ModRM,
231 DstReg|SrcMem|ModRM|Mov, DstBitBase|SrcReg|ModRM,
232 DstReg|SrcMem|ModRM|Mov, DstReg|SrcMem|ModRM|Mov,
233 ByteOp|DstReg|SrcMem|ModRM|Mov, DstReg|SrcMem16|ModRM|Mov,
234 /* 0xB8 - 0xBF */
235 0, 0, DstBitBase|SrcImmByte|ModRM, DstBitBase|SrcReg|ModRM,
236 DstReg|SrcMem|ModRM, DstReg|SrcMem|ModRM,
237 ByteOp|DstReg|SrcMem|ModRM|Mov, DstReg|SrcMem16|ModRM|Mov,
238 /* 0xC0 - 0xC7 */
239 ByteOp|DstMem|SrcReg|ModRM, DstMem|SrcReg|ModRM, 0, 0,
240 0, 0, 0, ImplicitOps|ModRM,
241 /* 0xC8 - 0xCF */
242 ImplicitOps, ImplicitOps, ImplicitOps, ImplicitOps,
243 ImplicitOps, ImplicitOps, ImplicitOps, ImplicitOps,
244 /* 0xD0 - 0xDF */
245 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
246 /* 0xE0 - 0xEF */
247 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
248 /* 0xF0 - 0xFF */
249 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0
250 };
252 /* Type, address-of, and value of an instruction's operand. */
253 struct operand {
254 enum { OP_REG, OP_MEM, OP_IMM, OP_NONE } type;
255 unsigned int bytes;
257 /* Up to 128-bit operand value, addressable as ulong or uint32_t[]. */
258 union {
259 unsigned long val;
260 uint32_t bigval[4];
261 };
263 /* Up to 128-bit operand value, addressable as ulong or uint32_t[]. */
264 union {
265 unsigned long orig_val;
266 uint32_t orig_bigval[4];
267 };
269 union {
270 /* OP_REG: Pointer to register field. */
271 unsigned long *reg;
272 /* OP_MEM: Segment and offset. */
273 struct {
274 enum x86_segment seg;
275 unsigned long off;
276 } mem;
277 };
278 };
280 /* MSRs. */
281 #define MSR_TSC 0x10
283 /* Control register flags. */
284 #define CR0_PE (1<<0)
285 #define CR4_TSD (1<<2)
287 /* EFLAGS bit definitions. */
288 #define EFLG_VIP (1<<20)
289 #define EFLG_VIF (1<<19)
290 #define EFLG_AC (1<<18)
291 #define EFLG_VM (1<<17)
292 #define EFLG_RF (1<<16)
293 #define EFLG_NT (1<<14)
294 #define EFLG_IOPL (3<<12)
295 #define EFLG_OF (1<<11)
296 #define EFLG_DF (1<<10)
297 #define EFLG_IF (1<<9)
298 #define EFLG_TF (1<<8)
299 #define EFLG_SF (1<<7)
300 #define EFLG_ZF (1<<6)
301 #define EFLG_AF (1<<4)
302 #define EFLG_PF (1<<2)
303 #define EFLG_CF (1<<0)
305 /* Exception definitions. */
306 #define EXC_DE 0
307 #define EXC_DB 1
308 #define EXC_BP 3
309 #define EXC_OF 4
310 #define EXC_BR 5
311 #define EXC_UD 6
312 #define EXC_TS 10
313 #define EXC_NP 11
314 #define EXC_SS 12
315 #define EXC_GP 13
316 #define EXC_PF 14
317 #define EXC_MF 16
319 /*
320 * Instruction emulation:
321 * Most instructions are emulated directly via a fragment of inline assembly
322 * code. This allows us to save/restore EFLAGS and thus very easily pick up
323 * any modified flags.
324 */
326 #if defined(__x86_64__)
327 #define _LO32 "k" /* force 32-bit operand */
328 #define _STK "%%rsp" /* stack pointer */
329 #define _BYTES_PER_LONG "8"
330 #elif defined(__i386__)
331 #define _LO32 "" /* force 32-bit operand */
332 #define _STK "%%esp" /* stack pointer */
333 #define _BYTES_PER_LONG "4"
334 #endif
336 /*
337 * These EFLAGS bits are restored from saved value during emulation, and
338 * any changes are written back to the saved value after emulation.
339 */
340 #define EFLAGS_MASK (EFLG_OF|EFLG_SF|EFLG_ZF|EFLG_AF|EFLG_PF|EFLG_CF)
342 /* Before executing instruction: restore necessary bits in EFLAGS. */
343 #define _PRE_EFLAGS(_sav, _msk, _tmp) \
344 /* EFLAGS = (_sav & _msk) | (EFLAGS & ~_msk); _sav &= ~_msk; */ \
345 "movl %"_sav",%"_LO32 _tmp"; " \
346 "push %"_tmp"; " \
347 "push %"_tmp"; " \
348 "movl %"_msk",%"_LO32 _tmp"; " \
349 "andl %"_LO32 _tmp",("_STK"); " \
350 "pushf; " \
351 "notl %"_LO32 _tmp"; " \
352 "andl %"_LO32 _tmp",("_STK"); " \
353 "andl %"_LO32 _tmp",2*"_BYTES_PER_LONG"("_STK"); " \
354 "pop %"_tmp"; " \
355 "orl %"_LO32 _tmp",("_STK"); " \
356 "popf; " \
357 "pop %"_sav"; "
359 /* After executing instruction: write-back necessary bits in EFLAGS. */
360 #define _POST_EFLAGS(_sav, _msk, _tmp) \
361 /* _sav |= EFLAGS & _msk; */ \
362 "pushf; " \
363 "pop %"_tmp"; " \
364 "andl %"_msk",%"_LO32 _tmp"; " \
365 "orl %"_LO32 _tmp",%"_sav"; "
367 /* Raw emulation: instruction has two explicit operands. */
368 #define __emulate_2op_nobyte(_op,_src,_dst,_eflags,_wx,_wy,_lx,_ly,_qx,_qy)\
369 do{ unsigned long _tmp; \
370 switch ( (_dst).bytes ) \
371 { \
372 case 2: \
373 asm volatile ( \
374 _PRE_EFLAGS("0","4","2") \
375 _op"w %"_wx"3,%1; " \
376 _POST_EFLAGS("0","4","2") \
377 : "=m" (_eflags), "=m" ((_dst).val), "=&r" (_tmp) \
378 : _wy ((_src).val), "i" (EFLAGS_MASK), \
379 "m" (_eflags), "m" ((_dst).val) ); \
380 break; \
381 case 4: \
382 asm volatile ( \
383 _PRE_EFLAGS("0","4","2") \
384 _op"l %"_lx"3,%1; " \
385 _POST_EFLAGS("0","4","2") \
386 : "=m" (_eflags), "=m" ((_dst).val), "=&r" (_tmp) \
387 : _ly ((_src).val), "i" (EFLAGS_MASK), \
388 "m" (_eflags), "m" ((_dst).val) ); \
389 break; \
390 case 8: \
391 __emulate_2op_8byte(_op, _src, _dst, _eflags, _qx, _qy); \
392 break; \
393 } \
394 } while (0)
395 #define __emulate_2op(_op,_src,_dst,_eflags,_bx,_by,_wx,_wy,_lx,_ly,_qx,_qy)\
396 do{ unsigned long _tmp; \
397 switch ( (_dst).bytes ) \
398 { \
399 case 1: \
400 asm volatile ( \
401 _PRE_EFLAGS("0","4","2") \
402 _op"b %"_bx"3,%1; " \
403 _POST_EFLAGS("0","4","2") \
404 : "=m" (_eflags), "=m" ((_dst).val), "=&r" (_tmp) \
405 : _by ((_src).val), "i" (EFLAGS_MASK), \
406 "m" (_eflags), "m" ((_dst).val) ); \
407 break; \
408 default: \
409 __emulate_2op_nobyte(_op,_src,_dst,_eflags,_wx,_wy,_lx,_ly,_qx,_qy);\
410 break; \
411 } \
412 } while (0)
413 /* Source operand is byte-sized and may be restricted to just %cl. */
414 #define emulate_2op_SrcB(_op, _src, _dst, _eflags) \
415 __emulate_2op(_op, _src, _dst, _eflags, \
416 "b", "c", "b", "c", "b", "c", "b", "c")
417 /* Source operand is byte, word, long or quad sized. */
418 #define emulate_2op_SrcV(_op, _src, _dst, _eflags) \
419 __emulate_2op(_op, _src, _dst, _eflags, \
420 "b", "q", "w", "r", _LO32, "r", "", "r")
421 /* Source operand is word, long or quad sized. */
422 #define emulate_2op_SrcV_nobyte(_op, _src, _dst, _eflags) \
423 __emulate_2op_nobyte(_op, _src, _dst, _eflags, \
424 "w", "r", _LO32, "r", "", "r")
426 /* Instruction has only one explicit operand (no source operand). */
427 #define emulate_1op(_op,_dst,_eflags) \
428 do{ unsigned long _tmp; \
429 switch ( (_dst).bytes ) \
430 { \
431 case 1: \
432 asm volatile ( \
433 _PRE_EFLAGS("0","3","2") \
434 _op"b %1; " \
435 _POST_EFLAGS("0","3","2") \
436 : "=m" (_eflags), "=m" ((_dst).val), "=&r" (_tmp) \
437 : "i" (EFLAGS_MASK), "m" (_eflags), "m" ((_dst).val) ); \
438 break; \
439 case 2: \
440 asm volatile ( \
441 _PRE_EFLAGS("0","3","2") \
442 _op"w %1; " \
443 _POST_EFLAGS("0","3","2") \
444 : "=m" (_eflags), "=m" ((_dst).val), "=&r" (_tmp) \
445 : "i" (EFLAGS_MASK), "m" (_eflags), "m" ((_dst).val) ); \
446 break; \
447 case 4: \
448 asm volatile ( \
449 _PRE_EFLAGS("0","3","2") \
450 _op"l %1; " \
451 _POST_EFLAGS("0","3","2") \
452 : "=m" (_eflags), "=m" ((_dst).val), "=&r" (_tmp) \
453 : "i" (EFLAGS_MASK), "m" (_eflags), "m" ((_dst).val) ); \
454 break; \
455 case 8: \
456 __emulate_1op_8byte(_op, _dst, _eflags); \
457 break; \
458 } \
459 } while (0)
461 /* Emulate an instruction with quadword operands (x86/64 only). */
462 #if defined(__x86_64__)
463 #define __emulate_2op_8byte(_op, _src, _dst, _eflags, _qx, _qy) \
464 do{ asm volatile ( \
465 _PRE_EFLAGS("0","4","2") \
466 _op"q %"_qx"3,%1; " \
467 _POST_EFLAGS("0","4","2") \
468 : "=m" (_eflags), "=m" ((_dst).val), "=&r" (_tmp) \
469 : _qy ((_src).val), "i" (EFLAGS_MASK), \
470 "m" (_eflags), "m" ((_dst).val) ); \
471 } while (0)
472 #define __emulate_1op_8byte(_op, _dst, _eflags) \
473 do{ asm volatile ( \
474 _PRE_EFLAGS("0","3","2") \
475 _op"q %1; " \
476 _POST_EFLAGS("0","3","2") \
477 : "=m" (_eflags), "=m" ((_dst).val), "=&r" (_tmp) \
478 : "i" (EFLAGS_MASK), "m" (_eflags), "m" ((_dst).val) ); \
479 } while (0)
480 #elif defined(__i386__)
481 #define __emulate_2op_8byte(_op, _src, _dst, _eflags, _qx, _qy)
482 #define __emulate_1op_8byte(_op, _dst, _eflags)
483 #endif /* __i386__ */
485 /* Fetch next part of the instruction being emulated. */
486 #define insn_fetch_bytes(_size) \
487 ({ unsigned long _x = 0, _eip = _regs.eip; \
488 if ( !mode_64bit() ) _eip = (uint32_t)_eip; /* ignore upper dword */ \
489 _regs.eip += (_size); /* real hardware doesn't truncate */ \
490 generate_exception_if((uint8_t)(_regs.eip - ctxt->regs->eip) > 15, \
491 EXC_GP, 0); \
492 rc = ops->insn_fetch(x86_seg_cs, _eip, &_x, (_size), ctxt); \
493 if ( rc ) goto done; \
494 _x; \
495 })
496 #define insn_fetch_type(_type) ((_type)insn_fetch_bytes(sizeof(_type)))
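/*
 * Example (illustrative): x86 caps instruction length at 15 bytes, so if
 * prefixes, opcodes and immediates ever push the fetch window to a 16th
 * byte, (uint8_t)(_regs.eip - ctxt->regs->eip) exceeds 15 and the macro
 * above raises #GP(0) instead of fetching further.
 */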
498 #define truncate_word(ea, byte_width) \
499 ({ unsigned long __ea = (ea); \
500 unsigned int _width = (byte_width); \
501 ((_width == sizeof(unsigned long)) ? __ea : \
502 (__ea & ((1UL << (_width << 3)) - 1))); \
503 })
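/*
 * Example (illustrative): truncate_word(0x12345678UL, 2) == 0x5678UL;
 * when byte_width == sizeof(unsigned long) the value passes through
 * unchanged, which also avoids the undefined full-width 1UL << shift.
 */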
504 #define truncate_ea(ea) truncate_word((ea), ad_bytes)
506 #define mode_64bit() (def_ad_bytes == 8)
508 #define fail_if(p) \
509 do { \
510 rc = (p) ? X86EMUL_UNHANDLEABLE : X86EMUL_OKAY; \
511 if ( rc ) goto done; \
512 } while (0)
514 #define generate_exception_if(p, e, ec) \
515 ({ if ( (p) ) { \
516 fail_if(ops->inject_hw_exception == NULL); \
517 rc = ops->inject_hw_exception(e, ec, ctxt) ? : X86EMUL_EXCEPTION; \
518 goto done; \
519 } \
520 })
522 /*
523 * Given byte has even parity (even number of 1s)? SDM Vol. 1 Sec. 3.4.3.1,
524 * "Status Flags": EFLAGS.PF reflects parity of least-sig. byte of result only.
525 */
526 static int even_parity(uint8_t v)
527 {
528 asm ( "test %b0,%b0; setp %b0" : "=a" (v) : "0" (v) );
529 return v;
530 }
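/*
 * Example (illustrative): even_parity(0x03) == 1 (two set bits) while
 * even_parity(0x07) == 0 (three set bits), matching how the "test"
 * instruction above sets EFLAGS.PF for those values.
 */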
532 /* Update address held in a register, based on addressing mode. */
533 #define _register_address_increment(reg, inc, byte_width) \
534 do { \
535 int _inc = (inc); /* signed type ensures sign extension to long */ \
536 unsigned int _width = (byte_width); \
537 if ( _width == sizeof(unsigned long) ) \
538 (reg) += _inc; \
539 else if ( mode_64bit() ) \
540 (reg) = ((reg) + _inc) & ((1UL << (_width << 3)) - 1); \
541 else \
542 (reg) = ((reg) & ~((1UL << (_width << 3)) - 1)) | \
543 (((reg) + _inc) & ((1UL << (_width << 3)) - 1)); \
544 } while (0)
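/*
 * Example (illustrative): with 16-bit addressing (byte_width == 2) and
 * reg == 0x1234ffff, an increment of +1 yields 0x12340000: only the low
 * word wraps, and the bits outside the address width are preserved.
 */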
545 #define register_address_increment(reg, inc) \
546 _register_address_increment((reg), (inc), ad_bytes)
548 #define sp_pre_dec(dec) ({ \
549 _register_address_increment(_regs.esp, -(dec), ctxt->sp_size/8); \
550 truncate_word(_regs.esp, ctxt->sp_size/8); \
551 })
552 #define sp_post_inc(inc) ({ \
553 unsigned long __esp = truncate_word(_regs.esp, ctxt->sp_size/8); \
554 _register_address_increment(_regs.esp, (inc), ctxt->sp_size/8); \
555 __esp; \
556 })
558 #define jmp_rel(rel) \
559 do { \
560 int _rel = (int)(rel); \
561 _regs.eip += _rel; \
562 if ( !mode_64bit() ) \
563 _regs.eip = ((op_bytes == 2) \
564 ? (uint16_t)_regs.eip : (uint32_t)_regs.eip); \
565 } while (0)
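/*
 * Example (illustrative): with op_bytes == 2 outside 64-bit mode,
 * jmp_rel(-4) from _regs.eip == 0x10002 gives 0xfffe -- the displacement
 * is applied first, then the result is truncated to the operand size.
 */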
567 struct fpu_insn_ctxt {
568 uint8_t insn_bytes;
569 uint8_t exn_raised;
570 };
572 static void fpu_handle_exception(void *_fic, struct cpu_user_regs *regs)
573 {
574 struct fpu_insn_ctxt *fic = _fic;
575 fic->exn_raised = 1;
576 regs->eip += fic->insn_bytes;
577 }
579 #define get_fpu(_type, _fic) \
580 do{ (_fic)->exn_raised = 0; \
581 fail_if(ops->get_fpu == NULL); \
582 rc = ops->get_fpu(fpu_handle_exception, _fic, _type, ctxt); \
583 if ( rc ) goto done; \
584 } while (0)
585 #define put_fpu(_fic) \
586 do{ \
587 if ( ops->put_fpu != NULL ) \
588 ops->put_fpu(ctxt); \
589 generate_exception_if((_fic)->exn_raised, EXC_MF, -1); \
590 } while (0)
592 #define emulate_fpu_insn(_op) \
593 do{ struct fpu_insn_ctxt fic; \
594 get_fpu(X86EMUL_FPU_fpu, &fic); \
595 asm volatile ( \
596 "movb $2f-1f,%0 \n" \
597 "1: " _op " \n" \
598 "2: \n" \
599 : "=m" (fic.insn_bytes) : : "memory" ); \
600 put_fpu(&fic); \
601 } while (0)
603 #define emulate_fpu_insn_memdst(_op, _arg) \
604 do{ struct fpu_insn_ctxt fic; \
605 get_fpu(X86EMUL_FPU_fpu, &fic); \
606 asm volatile ( \
607 "movb $2f-1f,%0 \n" \
608 "1: " _op " %1 \n" \
609 "2: \n" \
610 : "=m" (fic.insn_bytes), "=m" (_arg) \
611 : : "memory" ); \
612 put_fpu(&fic); \
613 } while (0)
615 #define emulate_fpu_insn_memsrc(_op, _arg) \
616 do{ struct fpu_insn_ctxt fic; \
617 get_fpu(X86EMUL_FPU_fpu, &fic); \
618 asm volatile ( \
619 "movb $2f-1f,%0 \n" \
620 "1: " _op " %1 \n" \
621 "2: \n" \
622 : "=m" (fic.insn_bytes) \
623 : "m" (_arg) : "memory" ); \
624 put_fpu(&fic); \
625 } while (0)
627 #define emulate_fpu_insn_stub(_bytes...) \
628 do{ uint8_t stub[] = { _bytes, 0xc3 }; \
629 struct fpu_insn_ctxt fic = { .insn_bytes = sizeof(stub)-1 }; \
630 get_fpu(X86EMUL_FPU_fpu, &fic); \
631 (*(void(*)(void))stub)(); \
632 put_fpu(&fic); \
633 } while (0)
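/*
 * Example (illustrative): emulate_fpu_insn_stub(0xd9, 0xc9) builds the
 * stub { 0xd9, 0xc9, 0xc3 } -- "fxch %st(1)" followed by "ret" -- and
 * calls it, letting the host FPU execute the instruction directly.
 */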
635 static unsigned long __get_rep_prefix(
636 struct cpu_user_regs *int_regs,
637 struct cpu_user_regs *ext_regs,
638 int ad_bytes)
639 {
640 unsigned long ecx = ((ad_bytes == 2) ? (uint16_t)int_regs->ecx :
641 (ad_bytes == 4) ? (uint32_t)int_regs->ecx :
642 int_regs->ecx);
644 /* Skip the instruction if no repetitions are required. */
645 if ( ecx == 0 )
646 ext_regs->eip = int_regs->eip;
648 return ecx;
649 }
651 #define get_rep_prefix() ({ \
652 unsigned long max_reps = 1; \
653 if ( rep_prefix ) \
654 max_reps = __get_rep_prefix(&_regs, ctxt->regs, ad_bytes); \
655 if ( max_reps == 0 ) \
656 goto done; \
657 max_reps; \
658 })
660 static void __put_rep_prefix(
661 struct cpu_user_regs *int_regs,
662 struct cpu_user_regs *ext_regs,
663 int ad_bytes,
664 unsigned long reps_completed)
665 {
666 unsigned long ecx = ((ad_bytes == 2) ? (uint16_t)int_regs->ecx :
667 (ad_bytes == 4) ? (uint32_t)int_regs->ecx :
668 int_regs->ecx);
670 /* Reduce counter appropriately, and repeat instruction if non-zero. */
671 ecx -= reps_completed;
672 if ( ecx != 0 )
673 int_regs->eip = ext_regs->eip;
675 if ( ad_bytes == 2 )
676 *(uint16_t *)&int_regs->ecx = ecx;
677 else if ( ad_bytes == 4 )
678 int_regs->ecx = (uint32_t)ecx;
679 else
680 int_regs->ecx = ecx;
681 }
683 #define put_rep_prefix(reps_completed) ({ \
684 if ( rep_prefix ) \
685 __put_rep_prefix(&_regs, ctxt->regs, ad_bytes, reps_completed); \
686 })
688 /* Clip maximum repetitions so that the index register only just wraps. */
689 #define truncate_ea_and_reps(ea, reps, bytes_per_rep) ({ \
690 unsigned long __todo = (ctxt->regs->eflags & EF_DF) ? (ea) : ~(ea); \
691 __todo = truncate_word(__todo, ad_bytes); \
692 __todo = (__todo / (bytes_per_rep)) + 1; \
693 (reps) = (__todo < (reps)) ? __todo : (reps); \
694 truncate_word((ea), ad_bytes); \
695 })
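/*
 * Example (illustrative): ad_bytes == 2, EFLAGS.DF clear, ea == 0xfffe,
 * bytes_per_rep == 2: __todo = truncate_word(~0xfffe, 2) == 1, so reps
 * is clipped to 1/2 + 1 == 1 -- one copy fits before %di would wrap.
 */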
697 /* Compatibility function: read guest memory, zero-extend result to a ulong. */
698 static int read_ulong(
699 enum x86_segment seg,
700 unsigned long offset,
701 unsigned long *val,
702 unsigned int bytes,
703 struct x86_emulate_ctxt *ctxt,
704 struct x86_emulate_ops *ops)
705 {
706 *val = 0;
707 return ops->read(seg, offset, val, bytes, ctxt);
708 }
710 /*
711 * Unsigned multiplication with double-word result.
712 * IN: Multiplicand=m[0], Multiplier=m[1]
713 * OUT: Return CF/OF (overflow status); Result=m[1]:m[0]
714 */
715 static int mul_dbl(unsigned long m[2])
716 {
717 int rc;
718 asm ( "mul %4; seto %b2"
719 : "=a" (m[0]), "=d" (m[1]), "=q" (rc)
720 : "0" (m[0]), "1" (m[1]), "2" (0) );
721 return rc;
722 }
724 /*
725 * Signed multiplication with double-word result.
726 * IN: Multiplicand=m[0], Multiplier=m[1]
727 * OUT: Return CF/OF (overflow status); Result=m[1]:m[0]
728 */
729 static int imul_dbl(unsigned long m[2])
730 {
731 int rc;
732 asm ( "imul %4; seto %b2"
733 : "=a" (m[0]), "=d" (m[1]), "=q" (rc)
734 : "0" (m[0]), "1" (m[1]), "2" (0) );
735 return rc;
736 }
738 /*
739 * Unsigned division of double-word dividend.
740 * IN: Dividend=u[1]:u[0], Divisor=v
741 * OUT: Return 1: #DE
742 * Return 0: Quotient=u[0], Remainder=u[1]
743 */
744 static int div_dbl(unsigned long u[2], unsigned long v)
745 {
746 if ( (v == 0) || (u[1] >= v) )
747 return 1;
748 asm ( "div %4"
749 : "=a" (u[0]), "=d" (u[1])
750 : "0" (u[0]), "1" (u[1]), "r" (v) );
751 return 0;
752 }
754 /*
755 * Signed division of double-word dividend.
756 * IN: Dividend=u[1]:u[0], Divisor=v
757 * OUT: Return 1: #DE
758 * Return 0: Quotient=u[0], Remainder=u[1]
759 * NB. We don't use idiv directly as it's moderately hard to work out
760 * ahead of time whether it will #DE, which we cannot allow to happen.
761 */
762 static int idiv_dbl(unsigned long u[2], unsigned long v)
763 {
764 int negu = (long)u[1] < 0, negv = (long)v < 0;
766 /* u = abs(u) */
767 if ( negu )
768 {
769 u[1] = ~u[1];
770 if ( (u[0] = -u[0]) == 0 )
771 u[1]++;
772 }
774 /* abs(u) / abs(v) */
775 if ( div_dbl(u, negv ? -v : v) )
776 return 1;
778 /* Remainder has same sign as dividend. It cannot overflow. */
779 if ( negu )
780 u[1] = -u[1];
782 /* Quotient is overflowed if sign bit is set. */
783 if ( negu ^ negv )
784 {
785 if ( (long)u[0] >= 0 )
786 u[0] = -u[0];
787 else if ( (u[0] << 1) != 0 ) /* == 0x80...0 is okay */
788 return 1;
789 }
790 else if ( (long)u[0] < 0 )
791 return 1;
793 return 0;
794 }
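/*
 * Example (illustrative, 32-bit build): u = { 0x80000000, 0xffffffff }
 * (dividend -2^31) with v == -1 would need quotient +2^31, which is
 * unrepresentable: after the absolute-value division, negu ^ negv == 0
 * but (long)u[0] < 0, so we return 1 and the caller raises #DE, exactly
 * as real idiv would.
 */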
796 static int
797 test_cc(
798 unsigned int condition, unsigned int flags)
799 {
800 int rc = 0;
802 switch ( (condition & 15) >> 1 )
803 {
804 case 0: /* o */
805 rc |= (flags & EFLG_OF);
806 break;
807 case 1: /* b/c/nae */
808 rc |= (flags & EFLG_CF);
809 break;
810 case 2: /* z/e */
811 rc |= (flags & EFLG_ZF);
812 break;
813 case 3: /* be/na */
814 rc |= (flags & (EFLG_CF|EFLG_ZF));
815 break;
816 case 4: /* s */
817 rc |= (flags & EFLG_SF);
818 break;
819 case 5: /* p/pe */
820 rc |= (flags & EFLG_PF);
821 break;
822 case 7: /* le/ng */
823 rc |= (flags & EFLG_ZF);
824 /* fall through */
825 case 6: /* l/nge */
826 rc |= (!(flags & EFLG_SF) != !(flags & EFLG_OF));
827 break;
828 }
830 /* Odd condition identifiers (lsb == 1) have inverted sense. */
831 return (!!rc ^ (condition & 1));
832 }
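/*
 * Example (illustrative): "jle" (opcode 0x7e) has condition == 0xe, so
 * case 7 computes ZF | (SF != OF); the lsb is clear, so the sense is not
 * inverted and the branch is taken when ZF is set or SF != OF.
 */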
834 static int
835 get_cpl(
836 struct x86_emulate_ctxt *ctxt,
837 struct x86_emulate_ops *ops)
838 {
839 struct segment_register reg;
841 if ( ctxt->regs->eflags & EFLG_VM )
842 return 3;
844 if ( (ops->read_segment == NULL) ||
845 ops->read_segment(x86_seg_ss, &reg, ctxt) )
846 return -1;
848 return reg.attr.fields.dpl;
849 }
851 static int
852 _mode_iopl(
853 struct x86_emulate_ctxt *ctxt,
854 struct x86_emulate_ops *ops)
855 {
856 int cpl = get_cpl(ctxt, ops);
857 if ( cpl == -1 )
858 return -1;
859 return (cpl <= ((ctxt->regs->eflags >> 12) & 3));
860 }
862 #define mode_ring0() ({ \
863 int _cpl = get_cpl(ctxt, ops); \
864 fail_if(_cpl < 0); \
865 (_cpl == 0); \
866 })
867 #define mode_iopl() ({ \
868 int _iopl = _mode_iopl(ctxt, ops); \
869 fail_if(_iopl < 0); \
870 _iopl; \
871 })
873 static int ioport_access_check(
874 unsigned int first_port,
875 unsigned int bytes,
876 struct x86_emulate_ctxt *ctxt,
877 struct x86_emulate_ops *ops)
878 {
879 unsigned long iobmp;
880 struct segment_register tr;
881 int rc = X86EMUL_OKAY;
883 if ( !(ctxt->regs->eflags & EFLG_VM) && mode_iopl() )
884 return X86EMUL_OKAY;
886 fail_if(ops->read_segment == NULL);
887 if ( (rc = ops->read_segment(x86_seg_tr, &tr, ctxt)) != 0 )
888 return rc;
890 /* Ensure that the TSS is valid and has an io-bitmap-offset field. */
891 if ( !tr.attr.fields.p ||
892 ((tr.attr.fields.type & 0xd) != 0x9) ||
893 (tr.limit < 0x67) )
894 goto raise_exception;
896 if ( (rc = read_ulong(x86_seg_none, tr.base + 0x66,
897 &iobmp, 2, ctxt, ops)) )
898 return rc;
900 /* Ensure the TSS covers two bitmap bytes, starting with the byte containing the first port. */
901 iobmp += first_port / 8;
902 if ( tr.limit <= iobmp )
903 goto raise_exception;
905 if ( (rc = read_ulong(x86_seg_none, tr.base + iobmp,
906 &iobmp, 2, ctxt, ops)) )
907 return rc;
908 if ( (iobmp & (((1<<bytes)-1) << (first_port&7))) != 0 )
909 goto raise_exception;
911 done:
912 return rc;
914 raise_exception:
915 fail_if(ops->inject_hw_exception == NULL);
916 return ops->inject_hw_exception(EXC_GP, 0, ctxt) ? : X86EMUL_EXCEPTION;
917 }
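/*
 * Example (illustrative): a 1-byte access to port 0x3f9 with the I/O
 * bitmap at offset 0x68 of the TSS reads two bitmap bytes at
 * tr.base + 0x68 + 0x3f9/8 == tr.base + 0xe7; the access faults with
 * #GP(0) if bit (0x3f9 & 7) == 1 of that byte is set.
 */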
919 static int
920 in_realmode(
921 struct x86_emulate_ctxt *ctxt,
922 struct x86_emulate_ops *ops)
923 {
924 unsigned long cr0;
925 int rc;
927 if ( ops->read_cr == NULL )
928 return 0;
930 rc = ops->read_cr(0, &cr0, ctxt);
931 return (!rc && !(cr0 & CR0_PE));
932 }
934 static int
935 in_protmode(
936 struct x86_emulate_ctxt *ctxt,
937 struct x86_emulate_ops *ops)
938 {
939 return !(in_realmode(ctxt, ops) || (ctxt->regs->eflags & EFLG_VM));
940 }
942 static int
943 realmode_load_seg(
944 enum x86_segment seg,
945 uint16_t sel,
946 struct x86_emulate_ctxt *ctxt,
947 struct x86_emulate_ops *ops)
948 {
949 struct segment_register reg;
950 int rc;
952 if ( (rc = ops->read_segment(seg, &reg, ctxt)) != 0 )
953 return rc;
955 reg.sel = sel;
956 reg.base = (uint32_t)sel << 4;
958 return ops->write_segment(seg, &reg, ctxt);
959 }
961 static int
962 protmode_load_seg(
963 enum x86_segment seg,
964 uint16_t sel,
965 struct x86_emulate_ctxt *ctxt,
966 struct x86_emulate_ops *ops)
967 {
968 struct segment_register desctab, ss, segr;
969 struct { uint32_t a, b; } desc;
970 unsigned long val;
971 uint8_t dpl, rpl, cpl;
972 uint32_t new_desc_b;
973 int rc, fault_type = EXC_TS;
975 /* NULL selector? */
976 if ( (sel & 0xfffc) == 0 )
977 {
978 if ( (seg == x86_seg_cs) || (seg == x86_seg_ss) )
979 goto raise_exn;
980 memset(&segr, 0, sizeof(segr));
981 return ops->write_segment(seg, &segr, ctxt);
982 }
984 /* LDT descriptor must be in the GDT. */
985 if ( (seg == x86_seg_ldtr) && (sel & 4) )
986 goto raise_exn;
988 if ( (rc = ops->read_segment(x86_seg_ss, &ss, ctxt)) ||
989 (rc = ops->read_segment((sel & 4) ? x86_seg_ldtr : x86_seg_gdtr,
990 &desctab, ctxt)) )
991 return rc;
993 /* Check against descriptor table limit. */
994 if ( ((sel & 0xfff8) + 7) > desctab.limit )
995 goto raise_exn;
997 do {
998 if ( (rc = read_ulong(x86_seg_none, desctab.base + (sel & 0xfff8),
999 &val, 4, ctxt, ops)) )
1000 return rc;
1001 desc.a = val;
1002 if ( (rc = read_ulong(x86_seg_none, desctab.base + (sel & 0xfff8) + 4,
1003 &val, 4, ctxt, ops)) )
1004 return rc;
1005 desc.b = val;
1007 /* Segment present in memory? */
1008 if ( !(desc.b & (1u<<15)) )
1009 {
1010 fault_type = EXC_NP;
1011 goto raise_exn;
1012 }
1014 /* LDT descriptor is a system segment. All others are code/data. */
1015 if ( (desc.b & (1u<<12)) == ((seg == x86_seg_ldtr) << 12) )
1016 goto raise_exn;
1018 dpl = (desc.b >> 13) & 3;
1019 rpl = sel & 3;
1020 cpl = ss.attr.fields.dpl;
1022 switch ( seg )
1023 {
1024 case x86_seg_cs:
1025 /* Code segment? */
1026 if ( !(desc.b & (1u<<11)) )
1027 goto raise_exn;
1028 /* Non-conforming segment: check DPL against RPL. */
1029 if ( ((desc.b & (6u<<9)) != (6u<<9)) && (dpl != rpl) )
1030 goto raise_exn;
1031 break;
1032 case x86_seg_ss:
1033 /* Writable data segment? */
1034 if ( (desc.b & (5u<<9)) != (1u<<9) )
1035 goto raise_exn;
1036 if ( (dpl != cpl) || (dpl != rpl) )
1037 goto raise_exn;
1038 break;
1039 case x86_seg_ldtr:
1040 /* LDT system segment? */
1041 if ( (desc.b & (15u<<8)) != (2u<<8) )
1042 goto raise_exn;
1043 goto skip_accessed_flag;
1044 default:
1045 /* Readable code or data segment? */
1046 if ( (desc.b & (5u<<9)) == (4u<<9) )
1047 goto raise_exn;
1048 /* Non-conforming segment: check DPL against RPL and CPL. */
1049 if ( ((desc.b & (6u<<9)) != (6u<<9)) &&
1050 ((dpl < cpl) || (dpl < rpl)) )
1051 goto raise_exn;
1052 break;
1053 }
1055 /* Ensure Accessed flag is set. */
1056 new_desc_b = desc.b | 0x100;
1057 rc = ((desc.b & 0x100) ? X86EMUL_OKAY :
1058 ops->cmpxchg(
1059 x86_seg_none, desctab.base + (sel & 0xfff8) + 4,
1060 &desc.b, &new_desc_b, 4, ctxt));
1061 } while ( rc == X86EMUL_CMPXCHG_FAILED );
1063 if ( rc )
1064 return rc;
1066 /* Force the Accessed flag in our local copy. */
1067 desc.b |= 0x100;
1069 skip_accessed_flag:
1070 segr.base = (((desc.b << 0) & 0xff000000u) |
1071 ((desc.b << 16) & 0x00ff0000u) |
1072 ((desc.a >> 16) & 0x0000ffffu));
1073 segr.attr.bytes = (((desc.b >> 8) & 0x00ffu) |
1074 ((desc.b >> 12) & 0x0f00u));
1075 segr.limit = (desc.b & 0x000f0000u) | (desc.a & 0x0000ffffu);
1076 if ( segr.attr.fields.g )
1077 segr.limit = (segr.limit << 12) | 0xfffu;
1078 segr.sel = sel;
1079 return ops->write_segment(seg, &segr, ctxt);
1081 raise_exn:
1082 if ( ops->inject_hw_exception == NULL )
1083 return X86EMUL_UNHANDLEABLE;
1084 if ( (rc = ops->inject_hw_exception(fault_type, sel & 0xfffc, ctxt)) )
1085 return rc;
1086 return X86EMUL_EXCEPTION;
1087 }
1089 static int
1090 load_seg(
1091 enum x86_segment seg,
1092 uint16_t sel,
1093 struct x86_emulate_ctxt *ctxt,
1094 struct x86_emulate_ops *ops)
1095 {
1096 if ( (ops->read_segment == NULL) ||
1097 (ops->write_segment == NULL) )
1098 return X86EMUL_UNHANDLEABLE;
1100 if ( in_protmode(ctxt, ops) )
1101 return protmode_load_seg(seg, sel, ctxt, ops);
1103 return realmode_load_seg(seg, sel, ctxt, ops);
1104 }
1106 void *
1107 decode_register(
1108 uint8_t modrm_reg, struct cpu_user_regs *regs, int highbyte_regs)
1109 {
1110 void *p;
1112 switch ( modrm_reg )
1113 {
1114 case 0: p = &regs->eax; break;
1115 case 1: p = &regs->ecx; break;
1116 case 2: p = &regs->edx; break;
1117 case 3: p = &regs->ebx; break;
1118 case 4: p = (highbyte_regs ?
1119 ((unsigned char *)&regs->eax + 1) :
1120 (unsigned char *)&regs->esp); break;
1121 case 5: p = (highbyte_regs ?
1122 ((unsigned char *)&regs->ecx + 1) :
1123 (unsigned char *)&regs->ebp); break;
1124 case 6: p = (highbyte_regs ?
1125 ((unsigned char *)&regs->edx + 1) :
1126 (unsigned char *)&regs->esi); break;
1127 case 7: p = (highbyte_regs ?
1128 ((unsigned char *)&regs->ebx + 1) :
1129 (unsigned char *)&regs->edi); break;
1130 #if defined(__x86_64__)
1131 case 8: p = &regs->r8; break;
1132 case 9: p = &regs->r9; break;
1133 case 10: p = &regs->r10; break;
1134 case 11: p = &regs->r11; break;
1135 case 12: p = &regs->r12; break;
1136 case 13: p = &regs->r13; break;
1137 case 14: p = &regs->r14; break;
1138 case 15: p = &regs->r15; break;
1139 #endif
1140 default: p = NULL; break;
1141 }
1143 return p;
1144 }
1146 #define decode_segment_failed x86_seg_tr
1147 enum x86_segment
1148 decode_segment(
1149 uint8_t modrm_reg)
1150 {
1151 switch ( modrm_reg )
1152 {
1153 case 0: return x86_seg_es;
1154 case 1: return x86_seg_cs;
1155 case 2: return x86_seg_ss;
1156 case 3: return x86_seg_ds;
1157 case 4: return x86_seg_fs;
1158 case 5: return x86_seg_gs;
1159 default: break;
1160 }
1161 return decode_segment_failed;
1162 }
1164 int
1165 x86_emulate(
1166 struct x86_emulate_ctxt *ctxt,
1167 struct x86_emulate_ops *ops)
1168 {
1169 /* Shadow copy of register state. Committed on successful emulation. */
1170 struct cpu_user_regs _regs = *ctxt->regs;
1172 uint8_t b, d, sib, sib_index, sib_base, twobyte = 0, rex_prefix = 0;
1173 uint8_t modrm = 0, modrm_mod = 0, modrm_reg = 0, modrm_rm = 0;
1174 unsigned int op_bytes, def_op_bytes, ad_bytes, def_ad_bytes;
1175 #define REPE_PREFIX 1
1176 #define REPNE_PREFIX 2
1177 unsigned int lock_prefix = 0, rep_prefix = 0;
1178 int override_seg = -1, rc = X86EMUL_OKAY;
1179 struct operand src, dst;
1181 /*
1182 * Data operand effective address (usually computed from ModRM).
1183 * Default is a memory operand relative to segment DS.
1184 */
1185 struct operand ea = { .type = OP_MEM };
1186 ea.mem.seg = x86_seg_ds; /* gcc may reject anon union initializer */
1188 ctxt->retire.byte = 0;
1190 op_bytes = def_op_bytes = ad_bytes = def_ad_bytes = ctxt->addr_size/8;
1191 if ( op_bytes == 8 )
1192 {
1193 op_bytes = def_op_bytes = 4;
1194 #ifndef __x86_64__
1195 return X86EMUL_UNHANDLEABLE;
1196 #endif
1197 }
1199 /* Prefix bytes. */
1200 for ( ; ; )
1201 {
1202 switch ( b = insn_fetch_type(uint8_t) )
1203 {
1204 case 0x66: /* operand-size override */
1205 op_bytes = def_op_bytes ^ 6;
1206 break;
1207 case 0x67: /* address-size override */
1208 ad_bytes = def_ad_bytes ^ (mode_64bit() ? 12 : 6);
1209 break;
1210 case 0x2e: /* CS override */
1211 override_seg = x86_seg_cs;
1212 break;
1213 case 0x3e: /* DS override */
1214 override_seg = x86_seg_ds;
1215 break;
1216 case 0x26: /* ES override */
1217 override_seg = x86_seg_es;
1218 break;
1219 case 0x64: /* FS override */
1220 override_seg = x86_seg_fs;
1221 break;
1222 case 0x65: /* GS override */
1223 override_seg = x86_seg_gs;
1224 break;
1225 case 0x36: /* SS override */
1226 override_seg = x86_seg_ss;
1227 break;
1228 case 0xf0: /* LOCK */
1229 lock_prefix = 1;
1230 break;
1231 case 0xf2: /* REPNE/REPNZ */
1232 rep_prefix = REPNE_PREFIX;
1233 break;
1234 case 0xf3: /* REP/REPE/REPZ */
1235 rep_prefix = REPE_PREFIX;
1236 break;
1237 case 0x40 ... 0x4f: /* REX */
1238 if ( !mode_64bit() )
1239 goto done_prefixes;
1240 rex_prefix = b;
1241 continue;
1242 default:
1243 goto done_prefixes;
1244 }
1246 /* Any legacy prefix after a REX prefix nullifies its effect. */
1247 rex_prefix = 0;
1248 }
1249 done_prefixes:
1251 if ( rex_prefix & 8 ) /* REX.W */
1252 op_bytes = 8;
1254 /* Opcode byte(s). */
1255 d = opcode_table[b];
1256 if ( d == 0 )
1257 {
1258 /* Two-byte opcode? */
1259 if ( b == 0x0f )
1260 {
1261 twobyte = 1;
1262 b = insn_fetch_type(uint8_t);
1263 d = twobyte_table[b];
1264 }
1266 /* Unrecognised? */
1267 if ( d == 0 )
1268 goto cannot_emulate;
1269 }
1271 /* Lock prefix is allowed only on RMW instructions. */
1272 generate_exception_if((d & Mov) && lock_prefix, EXC_GP, 0);
1274 /* ModRM and SIB bytes. */
1275 if ( d & ModRM )
1276 {
1277 modrm = insn_fetch_type(uint8_t);
1278 modrm_mod = (modrm & 0xc0) >> 6;
1279 modrm_reg = ((rex_prefix & 4) << 1) | ((modrm & 0x38) >> 3);
1280 modrm_rm = modrm & 0x07;
1282 if ( modrm_mod == 3 )
1283 {
1284 modrm_rm |= (rex_prefix & 1) << 3;
1285 ea.type = OP_REG;
1286 ea.reg = decode_register(
1287 modrm_rm, &_regs, (d & ByteOp) && (rex_prefix == 0));
1288 }
1289 else if ( ad_bytes == 2 )
1290 {
1291 /* 16-bit ModR/M decode. */
1292 switch ( modrm_rm )
1293 {
1294 case 0:
1295 ea.mem.off = _regs.ebx + _regs.esi;
1296 break;
1297 case 1:
1298 ea.mem.off = _regs.ebx + _regs.edi;
1299 break;
1300 case 2:
1301 ea.mem.seg = x86_seg_ss;
1302 ea.mem.off = _regs.ebp + _regs.esi;
1303 break;
1304 case 3:
1305 ea.mem.seg = x86_seg_ss;
1306 ea.mem.off = _regs.ebp + _regs.edi;
1307 break;
1308 case 4:
1309 ea.mem.off = _regs.esi;
1310 break;
1311 case 5:
1312 ea.mem.off = _regs.edi;
1313 break;
1314 case 6:
1315 if ( modrm_mod == 0 )
1316 break;
1317 ea.mem.seg = x86_seg_ss;
1318 ea.mem.off = _regs.ebp;
1319 break;
1320 case 7:
1321 ea.mem.off = _regs.ebx;
1322 break;
1323 }
1324 switch ( modrm_mod )
1325 {
1326 case 0:
1327 if ( modrm_rm == 6 )
1328 ea.mem.off = insn_fetch_type(int16_t);
1329 break;
1330 case 1:
1331 ea.mem.off += insn_fetch_type(int8_t);
1332 break;
1333 case 2:
1334 ea.mem.off += insn_fetch_type(int16_t);
1335 break;
1336 }
1337 ea.mem.off = truncate_ea(ea.mem.off);
1338 }
1339 else
1340 {
1341 /* 32/64-bit ModR/M decode. */
1342 if ( modrm_rm == 4 )
1343 {
1344 sib = insn_fetch_type(uint8_t);
1345 sib_index = ((sib >> 3) & 7) | ((rex_prefix << 2) & 8);
1346 sib_base = (sib & 7) | ((rex_prefix << 3) & 8);
1347 if ( sib_index != 4 )
1348 ea.mem.off = *(long*)decode_register(sib_index, &_regs, 0);
1349 ea.mem.off <<= (sib >> 6) & 3;
1350 if ( (modrm_mod == 0) && ((sib_base & 7) == 5) )
1351 ea.mem.off += insn_fetch_type(int32_t);
1352 else if ( sib_base == 4 )
1353 {
1354 ea.mem.seg = x86_seg_ss;
1355 ea.mem.off += _regs.esp;
1356 if ( !twobyte && (b == 0x8f) )
1357 /* POP <rm> computes its EA post increment. */
1358 ea.mem.off += ((mode_64bit() && (op_bytes == 4))
1359 ? 8 : op_bytes);
1360 }
1361 else if ( sib_base == 5 )
1362 {
1363 ea.mem.seg = x86_seg_ss;
1364 ea.mem.off += _regs.ebp;
1365 }
1366 else
1367 ea.mem.off += *(long*)decode_register(sib_base, &_regs, 0);
1368 }
1369 else
1370 {
1371 modrm_rm |= (rex_prefix & 1) << 3;
1372 ea.mem.off = *(long *)decode_register(modrm_rm, &_regs, 0);
1373 if ( (modrm_rm == 5) && (modrm_mod != 0) )
1374 ea.mem.seg = x86_seg_ss;
1376 switch ( modrm_mod )
1377 {
1378 case 0:
1379 if ( (modrm_rm & 7) != 5 )
1380 break;
1381 ea.mem.off = insn_fetch_type(int32_t);
1382 if ( !mode_64bit() )
1383 break;
1384 /* Relative to RIP of next instruction. Argh! */
1385 ea.mem.off += _regs.eip;
1386 if ( (d & SrcMask) == SrcImm )
1387 ea.mem.off += (d & ByteOp) ? 1 :
1388 ((op_bytes == 8) ? 4 : op_bytes);
1389 else if ( (d & SrcMask) == SrcImmByte )
1390 ea.mem.off += 1;
1391 else if ( !twobyte && ((b & 0xfe) == 0xf6) &&
1392 ((modrm_reg & 7) <= 1) )
1393 /* Special case in Grp3: test has immediate operand. */
1394 ea.mem.off += (d & ByteOp) ? 1
1395 : ((op_bytes == 8) ? 4 : op_bytes);
1396 else if ( twobyte && ((b & 0xf7) == 0xa4) )
1397 /* SHLD/SHRD with immediate byte third operand. */
1398 ea.mem.off++;
1399 break;
1400 case 1:
1401 ea.mem.off += insn_fetch_type(int8_t);
1402 break;
1403 case 2:
1404 ea.mem.off += insn_fetch_type(int32_t);
1405 break;
1406 }
1407 ea.mem.off = truncate_ea(ea.mem.off);
1408 }
1409 }
1411 if ( override_seg != -1 )
1412 ea.mem.seg = override_seg;
1414 /* Decode and fetch the source operand: register, memory or immediate. */
1415 switch ( d & SrcMask )
1416 {
1417 case SrcNone: /* case SrcImplicit: */
1418 src.type = OP_NONE;
1419 break;
1420 case SrcReg:
1421 src.type = OP_REG;
1422 if ( d & ByteOp )
1423 {
1424 src.reg = decode_register(modrm_reg, &_regs, (rex_prefix == 0));
1425 src.val = *(uint8_t *)src.reg;
1426 src.bytes = 1;
1427 }
1428 else
1429 {
1430 src.reg = decode_register(modrm_reg, &_regs, 0);
1431 switch ( (src.bytes = op_bytes) )
1432 {
1433 case 2: src.val = *(uint16_t *)src.reg; break;
1434 case 4: src.val = *(uint32_t *)src.reg; break;
1435 case 8: src.val = *(uint64_t *)src.reg; break;
1436 }
1437 }
1438 break;
1439 case SrcMem16:
1440 ea.bytes = 2;
1441 goto srcmem_common;
1442 case SrcMem:
1443 ea.bytes = (d & ByteOp) ? 1 : op_bytes;
1444 srcmem_common:
1445 src = ea;
1446 if ( src.type == OP_REG )
1447 {
1448 switch ( src.bytes )
1449 {
1450 case 1: src.val = *(uint8_t *)src.reg; break;
1451 case 2: src.val = *(uint16_t *)src.reg; break;
1452 case 4: src.val = *(uint32_t *)src.reg; break;
1453 case 8: src.val = *(uint64_t *)src.reg; break;
1454 }
1455 }
1456 else if ( (rc = read_ulong(src.mem.seg, src.mem.off,
1457 &src.val, src.bytes, ctxt, ops)) )
1458 goto done;
1459 break;
1460 case SrcImm:
1461 src.type = OP_IMM;
1462 src.bytes = (d & ByteOp) ? 1 : op_bytes;
1463 if ( src.bytes == 8 ) src.bytes = 4;
1464 /* NB. Immediates are sign-extended as necessary. */
1465 switch ( src.bytes )
1466 {
1467 case 1: src.val = insn_fetch_type(int8_t); break;
1468 case 2: src.val = insn_fetch_type(int16_t); break;
1469 case 4: src.val = insn_fetch_type(int32_t); break;
1470 }
1471 break;
1472 case SrcImmByte:
1473 src.type = OP_IMM;
1474 src.bytes = 1;
1475 src.val = insn_fetch_type(int8_t);
1476 break;
1477 }
1479 /* Decode and fetch the destination operand: register or memory. */
1480 switch ( d & DstMask )
1481 {
1482 case DstNone: /* case DstImplicit: */
1483 /*
1484 * The only implicit-operands instructions that allow a LOCK prefix are
1485 * CMPXCHG{8,16}B, MOV CRn, MOV DRn.
1486 */
1487 generate_exception_if(
1488 lock_prefix &&
1489 ((b < 0x20) || (b > 0x23)) && /* MOV CRn/DRn */
1490 (b != 0xc7), /* CMPXCHG{8,16}B */
1491 EXC_GP, 0);
1492 dst.type = OP_NONE;
1493 break;
1495 case DstReg:
1496 generate_exception_if(lock_prefix, EXC_GP, 0);
1497 dst.type = OP_REG;
1498 if ( d & ByteOp )
1499 {
1500 dst.reg = decode_register(modrm_reg, &_regs, (rex_prefix == 0));
1501 dst.val = *(uint8_t *)dst.reg;
1502 dst.bytes = 1;
1503 }
1504 else
1505 {
1506 dst.reg = decode_register(modrm_reg, &_regs, 0);
1507 switch ( (dst.bytes = op_bytes) )
1508 {
1509 case 2: dst.val = *(uint16_t *)dst.reg; break;
1510 case 4: dst.val = *(uint32_t *)dst.reg; break;
1511 case 8: dst.val = *(uint64_t *)dst.reg; break;
1512 }
1513 }
1514 break;
1515 case DstBitBase:
1516 if ( ((d & SrcMask) == SrcImmByte) || (ea.type == OP_REG) )
1517 {
1518 src.val &= (op_bytes << 3) - 1;
1519 }
1520 else
1521 {
1522 /*
1523 * EA += BitOffset DIV op_bytes*8
1524 * BitOffset = BitOffset MOD op_bytes*8
1525 * DIV truncates towards negative infinity.
1526 * MOD always produces a positive result.
1527 */
1528 if ( op_bytes == 2 )
1529 src.val = (int16_t)src.val;
1530 else if ( op_bytes == 4 )
1531 src.val = (int32_t)src.val;
1532 if ( (long)src.val < 0 )
1533 {
1534 unsigned long byte_offset;
1535 byte_offset = op_bytes + (((-src.val-1) >> 3) & ~(op_bytes-1));
1536 ea.mem.off -= byte_offset;
1537 src.val = (byte_offset << 3) + src.val;
1538 }
1539 else
1540 {
1541 ea.mem.off += (src.val >> 3) & ~(op_bytes - 1);
1542 src.val &= (op_bytes << 3) - 1;
1543 }
1544 }
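/*
 * Example (illustrative): "bt %eax,(%ebx)" with op_bytes == 4 and
 * %eax == -1: byte_offset == 4 + ((0 >> 3) & ~3) == 4, the EA moves down
 * one dword, and src.val becomes (4 << 3) - 1 == 31 -- i.e. bit -1 is
 * bit 31 of the dword immediately below the original address.
 */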
1545 /* Becomes a normal DstMem operation from here on. */
1546 d = (d & ~DstMask) | DstMem;
1547 case DstMem:
1548 ea.bytes = (d & ByteOp) ? 1 : op_bytes;
1549 dst = ea;
1550 if ( dst.type == OP_REG )
1551 {
1552 generate_exception_if(lock_prefix, EXC_GP, 0);
1553 switch ( dst.bytes )
1554 {
1555 case 1: dst.val = *(uint8_t *)dst.reg; break;
1556 case 2: dst.val = *(uint16_t *)dst.reg; break;
1557 case 4: dst.val = *(uint32_t *)dst.reg; break;
1558 case 8: dst.val = *(uint64_t *)dst.reg; break;
1559 }
1560 }
1561 else if ( !(d & Mov) ) /* optimisation - avoid slow emulated read */
1562 {
1563 if ( (rc = read_ulong(dst.mem.seg, dst.mem.off,
1564 &dst.val, dst.bytes, ctxt, ops)) )
1565 goto done;
1566 dst.orig_val = dst.val;
1567 }
1568 break;
1569 }
1571 if ( twobyte )
1572 goto twobyte_insn;
1574 switch ( b )
1575 {
1576 case 0x04 ... 0x05: /* add imm,%%eax */
1577 dst.reg = (unsigned long *)&_regs.eax;
1578 dst.val = _regs.eax;
1579 case 0x00 ... 0x03: add: /* add */
1580 emulate_2op_SrcV("add", src, dst, _regs.eflags);
1581 break;
1583 case 0x0c ... 0x0d: /* or imm,%%eax */
1584 dst.reg = (unsigned long *)&_regs.eax;
1585 dst.val = _regs.eax;
1586 case 0x08 ... 0x0b: or: /* or */
1587 emulate_2op_SrcV("or", src, dst, _regs.eflags);
1588 break;
1590 case 0x14 ... 0x15: /* adc imm,%%eax */
1591 dst.reg = (unsigned long *)&_regs.eax;
1592 dst.val = _regs.eax;
1593 case 0x10 ... 0x13: adc: /* adc */
1594 emulate_2op_SrcV("adc", src, dst, _regs.eflags);
1595 break;
1597 case 0x1c ... 0x1d: /* sbb imm,%%eax */
1598 dst.reg = (unsigned long *)&_regs.eax;
1599 dst.val = _regs.eax;
1600 case 0x18 ... 0x1b: sbb: /* sbb */
1601 emulate_2op_SrcV("sbb", src, dst, _regs.eflags);
1602 break;
1604 case 0x24 ... 0x25: /* and imm,%%eax */
1605 dst.reg = (unsigned long *)&_regs.eax;
1606 dst.val = _regs.eax;
1607 case 0x20 ... 0x23: and: /* and */
1608 emulate_2op_SrcV("and", src, dst, _regs.eflags);
1609 break;
1611 case 0x2c ... 0x2d: /* sub imm,%%eax */
1612 dst.reg = (unsigned long *)&_regs.eax;
1613 dst.val = _regs.eax;
1614 case 0x28 ... 0x2b: sub: /* sub */
1615 emulate_2op_SrcV("sub", src, dst, _regs.eflags);
1616 break;
1618 case 0x34 ... 0x35: /* xor imm,%%eax */
1619 dst.reg = (unsigned long *)&_regs.eax;
1620 dst.val = _regs.eax;
1621 case 0x30 ... 0x33: xor: /* xor */
1622 emulate_2op_SrcV("xor", src, dst, _regs.eflags);
1623 break;
1625 case 0x3c ... 0x3d: /* cmp imm,%%eax */
1626 dst.reg = (unsigned long *)&_regs.eax;
1627 dst.val = _regs.eax;
1628 case 0x38 ... 0x3b: cmp: /* cmp */
1629 emulate_2op_SrcV("cmp", src, dst, _regs.eflags);
1630 dst.type = OP_NONE;
1631 break;
1633 case 0x06: /* push %%es */ {
1634 struct segment_register reg;
1635 src.val = x86_seg_es;
1636 push_seg:
1637 fail_if(ops->read_segment == NULL);
1638 if ( (rc = ops->read_segment(src.val, &reg, ctxt)) != 0 )
1639 return rc;
1640 /* 64-bit mode: PUSH defaults to a 64-bit operand. */
1641 if ( mode_64bit() && (op_bytes == 4) )
1642 op_bytes = 8;
1643 if ( (rc = ops->write(x86_seg_ss, sp_pre_dec(op_bytes),
1644 &reg.sel, op_bytes, ctxt)) != 0 )
1645 goto done;
1646 break;
1647 }
1649 case 0x07: /* pop %%es */
1650 src.val = x86_seg_es;
1651 pop_seg:
1652 fail_if(ops->write_segment == NULL);
1653 /* 64-bit mode: POP defaults to a 64-bit operand. */
1654 if ( mode_64bit() && (op_bytes == 4) )
1655 op_bytes = 8;
1656 if ( (rc = read_ulong(x86_seg_ss, sp_post_inc(op_bytes),
1657 &dst.val, op_bytes, ctxt, ops)) != 0 )
1658 goto done;
1659 if ( (rc = load_seg(src.val, (uint16_t)dst.val, ctxt, ops)) != 0 )
1660 return rc;
1661 break;
1663 case 0x0e: /* push %%cs */
1664 src.val = x86_seg_cs;
1665 goto push_seg;
1667 case 0x16: /* push %%ss */
1668 src.val = x86_seg_ss;
1669 goto push_seg;
1671 case 0x17: /* pop %%ss */
1672 src.val = x86_seg_ss;
1673 ctxt->retire.flags.mov_ss = 1;
1674 goto pop_seg;
1676 case 0x1e: /* push %%ds */
1677 src.val = x86_seg_ds;
1678 goto push_seg;
1680 case 0x1f: /* pop %%ds */
1681 src.val = x86_seg_ds;
1682 goto pop_seg;
1684 case 0x27: /* daa */ {
1685 uint8_t al = _regs.eax;
1686 unsigned long eflags = _regs.eflags;
1687 generate_exception_if(mode_64bit(), EXC_UD, -1);
1688 _regs.eflags &= ~(EFLG_CF|EFLG_AF);
1689 if ( ((al & 0x0f) > 9) || (eflags & EFLG_AF) )
1690 {
1691 *(uint8_t *)&_regs.eax += 6;
1692 _regs.eflags |= EFLG_AF;
1693 }
1694 if ( (al > 0x99) || (eflags & EFLG_CF) )
1695 {
1696 *(uint8_t *)&_regs.eax += 0x60;
1697 _regs.eflags |= EFLG_CF;
1698 }
1699 _regs.eflags &= ~(EFLG_SF|EFLG_ZF|EFLG_PF);
1700 _regs.eflags |= ((uint8_t)_regs.eax == 0) ? EFLG_ZF : 0;
1701 _regs.eflags |= (( int8_t)_regs.eax < 0) ? EFLG_SF : 0;
1702 _regs.eflags |= even_parity(_regs.eax) ? EFLG_PF : 0;
1703 break;
1704 }
1706 case 0x2f: /* das */ {
1707 uint8_t al = _regs.eax;
1708 unsigned long eflags = _regs.eflags;
1709 generate_exception_if(mode_64bit(), EXC_UD, -1);
1710 _regs.eflags &= ~(EFLG_CF|EFLG_AF);
1711 if ( ((al & 0x0f) > 9) || (eflags & EFLG_AF) )
1712 {
1713 _regs.eflags |= EFLG_AF;
1714 if ( (al < 6) || (eflags & EFLG_CF) )
1715 _regs.eflags |= EFLG_CF;
1716 *(uint8_t *)&_regs.eax -= 6;
1717 }
1718 if ( (al > 0x99) || (eflags & EFLG_CF) )
1719 {
1720 *(uint8_t *)&_regs.eax -= 0x60;
1721 _regs.eflags |= EFLG_CF;
1722 }
1723 _regs.eflags &= ~(EFLG_SF|EFLG_ZF|EFLG_PF);
1724 _regs.eflags |= ((uint8_t)_regs.eax == 0) ? EFLG_ZF : 0;
1725 _regs.eflags |= (( int8_t)_regs.eax < 0) ? EFLG_SF : 0;
1726 _regs.eflags |= even_parity(_regs.eax) ? EFLG_PF : 0;
1727 break;
1728 }
1730 case 0x37: /* aaa */
1731 case 0x3f: /* aas */
1732 generate_exception_if(mode_64bit(), EXC_UD, -1);
1733 _regs.eflags &= ~EFLG_CF;
1734 if ( ((uint8_t)_regs.eax > 9) || (_regs.eflags & EFLG_AF) )
1735 {
1736 ((uint8_t *)&_regs.eax)[0] += (b == 0x37) ? 6 : -6;
1737 ((uint8_t *)&_regs.eax)[1] += (b == 0x37) ? 1 : -1;
1738 _regs.eflags |= EFLG_CF | EFLG_AF;
1739 }
1740 ((uint8_t *)&_regs.eax)[0] &= 0x0f;
1741 break;
1743 case 0x40 ... 0x4f: /* inc/dec reg */
1744 dst.type = OP_REG;
1745 dst.reg = decode_register(b & 7, &_regs, 0);
1746 dst.bytes = op_bytes;
1747 dst.val = *dst.reg;
1748 if ( b & 8 )
1749 emulate_1op("dec", dst, _regs.eflags);
1750 else
1751 emulate_1op("inc", dst, _regs.eflags);
1752 break;
1754 case 0x50 ... 0x57: /* push reg */
1755 src.val = *(unsigned long *)decode_register(
1756 (b & 7) | ((rex_prefix & 1) << 3), &_regs, 0);
1757 goto push;
1759 case 0x58 ... 0x5f: /* pop reg */
1760 dst.type = OP_REG;
1761 dst.reg = decode_register(
1762 (b & 7) | ((rex_prefix & 1) << 3), &_regs, 0);
1763 dst.bytes = op_bytes;
1764 if ( mode_64bit() && (dst.bytes == 4) )
1765 dst.bytes = 8;
1766 if ( (rc = read_ulong(x86_seg_ss, sp_post_inc(dst.bytes),
1767 &dst.val, dst.bytes, ctxt, ops)) != 0 )
1768 goto done;
1769 break;
1771 case 0x60: /* pusha */ {
1772 int i;
1773 unsigned long regs[] = {
1774 _regs.eax, _regs.ecx, _regs.edx, _regs.ebx,
1775 _regs.esp, _regs.ebp, _regs.esi, _regs.edi };
1776 generate_exception_if(mode_64bit(), EXC_UD, -1);
1777 for ( i = 0; i < 8; i++ )
1778 if ( (rc = ops->write(x86_seg_ss, sp_pre_dec(op_bytes),
1779 &regs[i], op_bytes, ctxt)) != 0 )
1780 goto done;
1781 break;
1782 }
1784 case 0x61: /* popa */ {
1785 int i;
1786 unsigned long dummy_esp, *regs[] = {
1787 (unsigned long *)&_regs.edi, (unsigned long *)&_regs.esi,
1788 (unsigned long *)&_regs.ebp, (unsigned long *)&dummy_esp,
1789 (unsigned long *)&_regs.ebx, (unsigned long *)&_regs.edx,
1790 (unsigned long *)&_regs.ecx, (unsigned long *)&_regs.eax };
1791 generate_exception_if(mode_64bit(), EXC_UD, -1);
1792 for ( i = 0; i < 8; i++ )
1793 {
1794 if ( (rc = read_ulong(x86_seg_ss, sp_post_inc(op_bytes),
1795 &dst.val, op_bytes, ctxt, ops)) != 0 )
1796 goto done;
1797 switch ( op_bytes )
1798 {
1799 case 1: *(uint8_t *)regs[i] = (uint8_t)dst.val; break;
1800 case 2: *(uint16_t *)regs[i] = (uint16_t)dst.val; break;
1801 case 4: *regs[i] = (uint32_t)dst.val; break; /* 64b: zero-ext */
1802 case 8: *regs[i] = dst.val; break;
1803 }
1804 }
1805 break;
1806 }
1808 case 0x62: /* bound */ {
1809 unsigned long src_val2;
1810 int lb, ub, idx;
1811 generate_exception_if(mode_64bit() || (src.type != OP_MEM),
1812 EXC_UD, -1);
1813 if ( (rc = read_ulong(src.mem.seg, src.mem.off + op_bytes,
1814 &src_val2, op_bytes, ctxt, ops)) )
1815 goto done;
1816 ub = (op_bytes == 2) ? (int16_t)src_val2 : (int32_t)src_val2;
1817 lb = (op_bytes == 2) ? (int16_t)src.val : (int32_t)src.val;
1818 idx = (op_bytes == 2) ? (int16_t)dst.val : (int32_t)dst.val;
1819 generate_exception_if((idx < lb) || (idx > ub), EXC_BR, -1);
1820 dst.type = OP_NONE;
1821 break;
1822 }
1824 case 0x63: /* movsxd (x86/64) / arpl (x86/32) */
1825 if ( mode_64bit() )
1826 {
1827 /* movsxd */
1828 if ( src.type == OP_REG )
1829 src.val = *(int32_t *)src.reg;
1830 else if ( (rc = read_ulong(src.mem.seg, src.mem.off,
1831 &src.val, 4, ctxt, ops)) )
1832 goto done;
1833 dst.val = (int32_t)src.val;
1834 }
1835 else
1836 {
1837 /* arpl */
1838 uint16_t src_val = dst.val;
1839 dst = src;
1840 _regs.eflags &= ~EFLG_ZF;
1841 _regs.eflags |= ((src_val & 3) > (dst.val & 3)) ? EFLG_ZF : 0;
1842 if ( _regs.eflags & EFLG_ZF )
1843 dst.val = (dst.val & ~3) | (src_val & 3);
1844 else
1845 dst.type = OP_NONE;
1846 generate_exception_if(!in_protmode(ctxt, ops), EXC_UD, -1);
1847 }
1848 break;
1850 case 0x68: /* push imm{16,32,64} */
1851 src.val = ((op_bytes == 2)
1852 ? (int32_t)insn_fetch_type(int16_t)
1853 : insn_fetch_type(int32_t));
1854 goto push;
1856 case 0x69: /* imul imm16/32 */
1857 case 0x6b: /* imul imm8 */ {
1858 unsigned long src1; /* ModR/M source operand */
1859 if ( ea.type == OP_REG )
1860 src1 = *ea.reg;
1861 else if ( (rc = read_ulong(ea.mem.seg, ea.mem.off,
1862 &src1, op_bytes, ctxt, ops)) )
1863 goto done;
1864 _regs.eflags &= ~(EFLG_OF|EFLG_CF);
1865 switch ( dst.bytes )
1866 {
1867 case 2:
1868 dst.val = ((uint32_t)(int16_t)src.val *
1869 (uint32_t)(int16_t)src1);
1870 if ( (int16_t)dst.val != (uint32_t)dst.val )
1871 _regs.eflags |= EFLG_OF|EFLG_CF;
1872 break;
1873 #ifdef __x86_64__
1874 case 4:
1875 dst.val = ((uint64_t)(int32_t)src.val *
1876 (uint64_t)(int32_t)src1);
1877 if ( (int32_t)dst.val != dst.val )
1878 _regs.eflags |= EFLG_OF|EFLG_CF;
1879 break;
1880 #endif
1881 default: {
1882 unsigned long m[2] = { src.val, src1 };
1883 if ( imul_dbl(m) )
1884 _regs.eflags |= EFLG_OF|EFLG_CF;
1885 dst.val = m[0];
1886 break;
1887 }
1888 }
1889 break;
1890 }
1892 case 0x6a: /* push imm8 */
1893 src.val = insn_fetch_type(int8_t);
1894 push:
1895 d |= Mov; /* force writeback */
1896 dst.type = OP_MEM;
1897 dst.bytes = op_bytes;
1898 if ( mode_64bit() && (dst.bytes == 4) )
1899 dst.bytes = 8;
1900 dst.val = src.val;
1901 dst.mem.seg = x86_seg_ss;
1902 dst.mem.off = sp_pre_dec(dst.bytes);
1903 break;
1905 case 0x6c ... 0x6d: /* ins %dx,%es:%edi */ {
1906 unsigned long nr_reps = get_rep_prefix();
1907 unsigned int port = (uint16_t)_regs.edx;
1908 dst.bytes = !(b & 1) ? 1 : (op_bytes == 8) ? 4 : op_bytes;
1909 dst.mem.seg = x86_seg_es;
1910 dst.mem.off = truncate_ea_and_reps(_regs.edi, nr_reps, dst.bytes);
1911 if ( (rc = ioport_access_check(port, dst.bytes, ctxt, ops)) != 0 )
1912 goto done;
1913 if ( (nr_reps > 1) && (ops->rep_ins != NULL) &&
1914 ((rc = ops->rep_ins(port, dst.mem.seg, dst.mem.off, dst.bytes,
1915 &nr_reps, ctxt)) != X86EMUL_UNHANDLEABLE) )
1917 if ( rc != 0 )
1918 goto done;
1920 else
1922 fail_if(ops->read_io == NULL);
1923 if ( (rc = ops->read_io(port, dst.bytes, &dst.val, ctxt)) != 0 )
1924 goto done;
1925 dst.type = OP_MEM;
1926 nr_reps = 1;
1928 register_address_increment(
1929 _regs.edi,
1930 nr_reps * ((_regs.eflags & EFLG_DF) ? -dst.bytes : dst.bytes));
1931 put_rep_prefix(nr_reps);
1932 break;
1935 case 0x6e ... 0x6f: /* outs %esi,%dx */ {
1936 unsigned long nr_reps = get_rep_prefix();
1937 unsigned int port = (uint16_t)_regs.edx;
1938 dst.bytes = !(b & 1) ? 1 : (op_bytes == 8) ? 4 : op_bytes;
1939 ea.mem.off = truncate_ea_and_reps(_regs.esi, nr_reps, dst.bytes);
1940 if ( (rc = ioport_access_check(port, dst.bytes, ctxt, ops)) != 0 )
1941 goto done;
1942 if ( (nr_reps > 1) && (ops->rep_outs != NULL) &&
1943 ((rc = ops->rep_outs(ea.mem.seg, ea.mem.off, port, dst.bytes,
1944 &nr_reps, ctxt)) != X86EMUL_UNHANDLEABLE) )
1946 if ( rc != 0 )
1947 goto done;
1949 else
1951 if ( (rc = read_ulong(ea.mem.seg, truncate_ea(_regs.esi),
1952 &dst.val, dst.bytes, ctxt, ops)) != 0 )
1953 goto done;
1954 fail_if(ops->write_io == NULL);
1955 if ( (rc = ops->write_io(port, dst.bytes, dst.val, ctxt)) != 0 )
1956 goto done;
1957 nr_reps = 1;
1959 register_address_increment(
1960 _regs.esi,
1961 nr_reps * ((_regs.eflags & EFLG_DF) ? -dst.bytes : dst.bytes));
1962 put_rep_prefix(nr_reps);
1963 break;
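Both string-I/O cases advance the index register by the element size, negated when EFLAGS.DF is set, and scaled by the iteration count when a batched rep_ins/rep_outs hook handled several repetitions at once. A small sketch of that adjustment (TOY_EFLG_DF and string_advance() are illustrative names):

    #include <stdint.h>

    #define TOY_EFLG_DF (1u << 10) /* DF is bit 10 of EFLAGS */

    /* Toy model of the post-iteration pointer adjustment for string ops. */
    static long string_advance(uint32_t eflags, unsigned long nr_reps,
                               int elem_bytes)
    {
        return (long)nr_reps * ((eflags & TOY_EFLG_DF) ? -elem_bytes
                                                       : elem_bytes);
    }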
1966 case 0x70 ... 0x7f: /* jcc (short) */ {
1967 int rel = insn_fetch_type(int8_t);
1968 if ( test_cc(b, _regs.eflags) )
1969 jmp_rel(rel);
1970 break;
1973 case 0x82: /* Grp1 (x86/32 only) */
1974 generate_exception_if(mode_64bit(), EXC_UD, -1);
1975 case 0x80: case 0x81: case 0x83: /* Grp1 */
1976 switch ( modrm_reg & 7 )
1978 case 0: goto add;
1979 case 1: goto or;
1980 case 2: goto adc;
1981 case 3: goto sbb;
1982 case 4: goto and;
1983 case 5: goto sub;
1984 case 6: goto xor;
1985 case 7: goto cmp;
1987 break;
1989 case 0xa8 ... 0xa9: /* test imm,%%eax */
1990 dst.reg = (unsigned long *)&_regs.eax;
1991 dst.val = _regs.eax;
1992 case 0x84 ... 0x85: test: /* test */
1993 emulate_2op_SrcV("test", src, dst, _regs.eflags);
1994 dst.type = OP_NONE;
1995 break;
1997 case 0x86 ... 0x87: xchg: /* xchg */
1998 /* Write back the register source. */
1999 switch ( dst.bytes )
2001 case 1: *(uint8_t *)src.reg = (uint8_t)dst.val; break;
2002 case 2: *(uint16_t *)src.reg = (uint16_t)dst.val; break;
2003 case 4: *src.reg = (uint32_t)dst.val; break; /* 64b reg: zero-extend */
2004 case 8: *src.reg = dst.val; break;
2006 /* Write back the memory destination with implicit LOCK prefix. */
2007 dst.val = src.val;
2008 lock_prefix = 1;
2009 break;
2011 case 0xc6 ... 0xc7: /* mov (sole member of Grp11) */
2012 generate_exception_if((modrm_reg & 7) != 0, EXC_UD, -1);
2013 case 0x88 ... 0x8b: /* mov */
2014 dst.val = src.val;
2015 break;
2017 case 0x8c: /* mov Sreg,r/m */ {
2018 struct segment_register reg;
2019 enum x86_segment seg = decode_segment(modrm_reg);
2020 generate_exception_if(seg == decode_segment_failed, EXC_UD, -1);
2021 fail_if(ops->read_segment == NULL);
2022 if ( (rc = ops->read_segment(seg, &reg, ctxt)) != 0 )
2023 goto done;
2024 dst.val = reg.sel;
2025 if ( dst.type == OP_MEM )
2026 dst.bytes = 2;
2027 break;
2030 case 0x8e: /* mov r/m,Sreg */ {
2031 enum x86_segment seg = decode_segment(modrm_reg);
2032 generate_exception_if(seg == decode_segment_failed, EXC_UD, -1);
2033 if ( (rc = load_seg(seg, (uint16_t)src.val, ctxt, ops)) != 0 )
2034 goto done;
2035 if ( seg == x86_seg_ss )
2036 ctxt->retire.flags.mov_ss = 1;
2037 dst.type = OP_NONE;
2038 break;
2041 case 0x8d: /* lea */
2042 dst.val = ea.mem.off;
2043 break;
2045 case 0x8f: /* pop (sole member of Grp1a) */
2046 generate_exception_if((modrm_reg & 7) != 0, EXC_UD, -1);
2047 /* 64-bit mode: POP defaults to a 64-bit operand. */
2048 if ( mode_64bit() && (dst.bytes == 4) )
2049 dst.bytes = 8;
2050 if ( (rc = read_ulong(x86_seg_ss, sp_post_inc(dst.bytes),
2051 &dst.val, dst.bytes, ctxt, ops)) != 0 )
2052 goto done;
2053 break;
2055 case 0x90: /* nop / xchg %%r8,%%rax */
2056 if ( !(rex_prefix & 1) )
2057 break; /* nop */
2059 case 0x91 ... 0x97: /* xchg reg,%%rax */
2060 src.type = dst.type = OP_REG;
2061 src.bytes = dst.bytes = op_bytes;
2062 src.reg = (unsigned long *)&_regs.eax;
2063 src.val = *src.reg;
2064 dst.reg = decode_register(
2065 (b & 7) | ((rex_prefix & 1) << 3), &_regs, 0);
2066 dst.val = *dst.reg;
2067 goto xchg;
2069 case 0x98: /* cbw/cwde/cdqe */
2070 switch ( op_bytes )
2072 case 2: *(int16_t *)&_regs.eax = (int8_t)_regs.eax; break; /* cbw */
2073 case 4: _regs.eax = (uint32_t)(int16_t)_regs.eax; break; /* cwde */
2074 case 8: _regs.eax = (int32_t)_regs.eax; break; /* cdqe */
2076 break;
2078 case 0x99: /* cwd/cdq/cqo */
2079 switch ( op_bytes )
2081 case 2:
2082 *(int16_t *)&_regs.edx = ((int16_t)_regs.eax < 0) ? -1 : 0;
2083 break;
2084 case 4:
2085 _regs.edx = (uint32_t)(((int32_t)_regs.eax < 0) ? -1 : 0);
2086 break;
2087 case 8:
2088 _regs.edx = ((long)_regs.eax < 0) ? -1 : 0;
2089 break;
2091 break;
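Each variant replicates the accumulator's sign bit across the data register; e.g. CDQ sets EDX to all ones exactly when EAX is negative (the case-8 cast above exists because the register field is unsigned). A sketch (toy_cdq() is an illustrative name):

    #include <stdint.h>

    /* Toy CDQ: EDX becomes the sign replication of EAX. */
    static uint32_t toy_cdq(uint32_t eax)
    {
        return ((int32_t)eax < 0) ? 0xffffffffu : 0;
    }
    /* toy_cdq(0x80000000u) == 0xffffffffu, toy_cdq(5) == 0 */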
2093 case 0x9a: /* call (far, absolute) */ {
2094 struct segment_register reg;
2095 uint16_t sel;
2096 uint32_t eip;
2098 fail_if(ops->read_segment == NULL);
2099 generate_exception_if(mode_64bit(), EXC_UD, -1);
2101 eip = insn_fetch_bytes(op_bytes);
2102 sel = insn_fetch_type(uint16_t);
2104 if ( (rc = ops->read_segment(x86_seg_cs, &reg, ctxt)) ||
2105 (rc = ops->write(x86_seg_ss, sp_pre_dec(op_bytes),
2106 &reg.sel, op_bytes, ctxt)) ||
2107 (rc = ops->write(x86_seg_ss, sp_pre_dec(op_bytes),
2108 &_regs.eip, op_bytes, ctxt)) )
2109 goto done;
2111 if ( (rc = load_seg(x86_seg_cs, sel, ctxt, ops)) != 0 )
2112 goto done;
2113 _regs.eip = eip;
2114 break;
2117 case 0x9b: /* wait/fwait */
2118 emulate_fpu_insn("fwait");
2119 break;
2121 case 0x9c: /* pushf */
2122 src.val = _regs.eflags;
2123 goto push;
2125 case 0x9d: /* popf */ {
2126 uint32_t mask = EFLG_VIP | EFLG_VIF | EFLG_VM;
2127 if ( !mode_ring0() )
2128 mask |= EFLG_IOPL;
2129 if ( !mode_iopl() )
2130 mask |= EFLG_IF;
2131 /* 64-bit mode: POP defaults to a 64-bit operand. */
2132 if ( mode_64bit() && (op_bytes == 4) )
2133 op_bytes = 8;
2134 if ( (rc = read_ulong(x86_seg_ss, sp_post_inc(op_bytes),
2135 &dst.val, op_bytes, ctxt, ops)) != 0 )
2136 goto done;
2137 if ( op_bytes == 2 )
2138 dst.val = (uint16_t)dst.val | (_regs.eflags & 0xffff0000u);
2139 dst.val &= 0x257fd5;
2140 _regs.eflags &= mask;
2141 _regs.eflags |= (uint32_t)(dst.val & ~mask) | 0x02;
2142 break;
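The mask accumulates the flag bits this context may not modify: VIP/VIF/VM always, IOPL unless running in ring 0, IF unless the IOPL check passes. Protected bits keep their old values, everything else comes from the popped word filtered by the modifiable-flag mask 0x257fd5, and reserved bit 1 is forced on. A standalone sketch of the merge, assuming the caller supplies the two privilege predicates (popf_merge() and the F_* macros are illustrative names):

    #include <stdint.h>

    #define F_IF   (1u << 9)
    #define F_IOPL (3u << 12)
    #define F_VM   (1u << 17)
    #define F_VIF  (1u << 19)
    #define F_VIP  (1u << 20)

    /* Toy POPF merge: 'ring0' and 'iopl_ok' stand in for mode_ring0()
     * and mode_iopl(); protected bits keep their old values. */
    static uint32_t popf_merge(uint32_t old, uint32_t popped,
                               int ring0, int iopl_ok)
    {
        uint32_t keep = F_VIP | F_VIF | F_VM;
        if ( !ring0 )
            keep |= F_IOPL;
        if ( !iopl_ok )
            keep |= F_IF;
        popped &= 0x257fd5;                       /* modifiable-flag mask */
        return (old & keep) | (popped & ~keep) | 0x02; /* bit 1 always 1 */
    }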
2145 case 0x9e: /* sahf */
2146 *(uint8_t *)&_regs.eflags = (((uint8_t *)&_regs.eax)[1] & 0xd7) | 0x02;
2147 break;
2149 case 0x9f: /* lahf */
2150 ((uint8_t *)&_regs.eax)[1] = (_regs.eflags & 0xd7) | 0x02;
2151 break;
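LAHF and SAHF transfer only the low status byte, masked to SF|ZF|AF|PF|CF (0xd7) with the always-one bit 1 forced, between AH and EFLAGS. A sketch of the pair (toy_lahf()/toy_sahf() are illustrative names):

    #include <stdint.h>

    /* Toy LAHF/SAHF: only SF|ZF|AF|PF|CF (0xd7) move; bit 1 reads as 1. */
    static uint8_t toy_lahf(uint32_t eflags)
    {
        return (eflags & 0xd7) | 0x02;
    }

    static uint32_t toy_sahf(uint32_t eflags, uint8_t ah)
    {
        return (eflags & ~0xffu) | (ah & 0xd7) | 0x02;
    }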
2153 case 0xa0 ... 0xa1: /* mov mem.offs,{%al,%ax,%eax,%rax} */
2154 /* Source EA is not encoded via ModRM. */
2155 dst.type = OP_REG;
2156 dst.reg = (unsigned long *)&_regs.eax;
2157 dst.bytes = (d & ByteOp) ? 1 : op_bytes;
2158 if ( (rc = read_ulong(ea.mem.seg, insn_fetch_bytes(ad_bytes),
2159 &dst.val, dst.bytes, ctxt, ops)) != 0 )
2160 goto done;
2161 break;
2163 case 0xa2 ... 0xa3: /* mov {%al,%ax,%eax,%rax},mem.offs */
2164 /* Destination EA is not encoded via ModRM. */
2165 dst.type = OP_MEM;
2166 dst.mem.seg = ea.mem.seg;
2167 dst.mem.off = insn_fetch_bytes(ad_bytes);
2168 dst.bytes = (d & ByteOp) ? 1 : op_bytes;
2169 dst.val = (unsigned long)_regs.eax;
2170 break;
2172 case 0xa4 ... 0xa5: /* movs */ {
2173 unsigned long nr_reps = get_rep_prefix();
2174 dst.bytes = (d & ByteOp) ? 1 : op_bytes;
2175 dst.mem.seg = x86_seg_es;
2176 dst.mem.off = truncate_ea_and_reps(_regs.edi, nr_reps, dst.bytes);
2177 if ( (nr_reps > 1) && (ops->rep_movs != NULL) &&
2178 ((rc = ops->rep_movs(ea.mem.seg, truncate_ea(_regs.esi),
2179 dst.mem.seg, dst.mem.off, dst.bytes,
2180 &nr_reps, ctxt)) != X86EMUL_UNHANDLEABLE) )
2182 if ( rc != 0 )
2183 goto done;
2185 else
2187 if ( (rc = read_ulong(ea.mem.seg, truncate_ea(_regs.esi),
2188 &dst.val, dst.bytes, ctxt, ops)) != 0 )
2189 goto done;
2190 dst.type = OP_MEM;
2191 nr_reps = 1;
2193 register_address_increment(
2194 _regs.esi,
2195 nr_reps * ((_regs.eflags & EFLG_DF) ? -dst.bytes : dst.bytes));
2196 register_address_increment(
2197 _regs.edi,
2198 nr_reps * ((_regs.eflags & EFLG_DF) ? -dst.bytes : dst.bytes));
2199 put_rep_prefix(nr_reps);
2200 break;
2203 case 0xa6 ... 0xa7: /* cmps */ {
2204 unsigned long next_eip = _regs.eip;
2205 get_rep_prefix();
2206 src.bytes = dst.bytes = (d & ByteOp) ? 1 : op_bytes;
2207 if ( (rc = read_ulong(ea.mem.seg, truncate_ea(_regs.esi),
2208 &dst.val, dst.bytes, ctxt, ops)) ||
2209 (rc = read_ulong(x86_seg_es, truncate_ea(_regs.edi),
2210 &src.val, src.bytes, ctxt, ops)) )
2211 goto done;
2212 register_address_increment(
2213 _regs.esi, (_regs.eflags & EFLG_DF) ? -dst.bytes : dst.bytes);
2214 register_address_increment(
2215 _regs.edi, (_regs.eflags & EFLG_DF) ? -src.bytes : src.bytes);
2216 put_rep_prefix(1);
2217 /* cmp: dst - src ==> src=*%%edi,dst=*%%esi ==> *%%esi - *%%edi */
2218 emulate_2op_SrcV("cmp", src, dst, _regs.eflags);
2219 if ( ((rep_prefix == REPE_PREFIX) && !(_regs.eflags & EFLG_ZF)) ||
2220 ((rep_prefix == REPNE_PREFIX) && (_regs.eflags & EFLG_ZF)) )
2221 _regs.eip = next_eip;
2222 break;
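One iteration is emulated per pass: put_rep_prefix() rewinds EIP so the instruction re-executes, and the REPE/REPNE test above then terminates the loop by moving EIP forward to next_eip. A sketch of just the termination predicate (the enum and function names are illustrative):

    /* Toy REP termination test for cmps/scas: nonzero means stop looping
     * (the emulator then moves EIP forward to next_eip). */
    enum toy_rep { TOY_REP_NONE, TOY_REPE, TOY_REPNE };

    static int rep_terminates(enum toy_rep rep, int zf)
    {
        return (rep == TOY_REPE && !zf) || (rep == TOY_REPNE && zf);
    }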
2225 case 0xaa ... 0xab: /* stos */ {
2226 /* unsigned long max_reps = */get_rep_prefix();
2227 dst.type = OP_MEM;
2228 dst.bytes = (d & ByteOp) ? 1 : op_bytes;
2229 dst.mem.seg = x86_seg_es;
2230 dst.mem.off = truncate_ea(_regs.edi);
2231 dst.val = _regs.eax;
2232 register_address_increment(
2233 _regs.edi, (_regs.eflags & EFLG_DF) ? -dst.bytes : dst.bytes);
2234 put_rep_prefix(1);
2235 break;
2238 case 0xac ... 0xad: /* lods */ {
2239 /* unsigned long max_reps = */get_rep_prefix();
2240 dst.type = OP_REG;
2241 dst.bytes = (d & ByteOp) ? 1 : op_bytes;
2242 dst.reg = (unsigned long *)&_regs.eax;
2243 if ( (rc = read_ulong(ea.mem.seg, truncate_ea(_regs.esi),
2244 &dst.val, dst.bytes, ctxt, ops)) != 0 )
2245 goto done;
2246 register_address_increment(
2247 _regs.esi, (_regs.eflags & EFLG_DF) ? -dst.bytes : dst.bytes);
2248 put_rep_prefix(1);
2249 break;
2252 case 0xae ... 0xaf: /* scas */ {
2253 unsigned long next_eip = _regs.eip;
2254 get_rep_prefix();
2255 src.bytes = dst.bytes = (d & ByteOp) ? 1 : op_bytes;
2256 dst.val = _regs.eax;
2257 if ( (rc = read_ulong(x86_seg_es, truncate_ea(_regs.edi),
2258 &src.val, src.bytes, ctxt, ops)) != 0 )
2259 goto done;
2260 register_address_increment(
2261 _regs.edi, (_regs.eflags & EFLG_DF) ? -src.bytes : src.bytes);
2262 put_rep_prefix(1);
2263 /* cmp: dst - src ==> src=*%%edi,dst=%%eax ==> %%eax - *%%edi */
2264 emulate_2op_SrcV("cmp", src, dst, _regs.eflags);
2265 if ( ((rep_prefix == REPE_PREFIX) && !(_regs.eflags & EFLG_ZF)) ||
2266 ((rep_prefix == REPNE_PREFIX) && (_regs.eflags & EFLG_ZF)) )
2267 _regs.eip = next_eip;
2268 break;
2271 case 0xb0 ... 0xb7: /* mov imm8,r8 */
2272 dst.reg = decode_register(
2273 (b & 7) | ((rex_prefix & 1) << 3), &_regs, (rex_prefix == 0));
2274 dst.val = src.val;
2275 break;
2277 case 0xb8 ... 0xbf: /* mov imm{16,32,64},r{16,32,64} */
2278 if ( dst.bytes == 8 ) /* Fetch more bytes to obtain imm64 */
2279 src.val = ((uint32_t)src.val |
2280 ((uint64_t)insn_fetch_type(uint32_t) << 32));
2281 dst.reg = decode_register(
2282 (b & 7) | ((rex_prefix & 1) << 3), &_regs, 0);
2283 dst.val = src.val;
2284 break;
2286 case 0xc0 ... 0xc1: grp2: /* Grp2 */
2287 switch ( modrm_reg & 7 )
2289 case 0: /* rol */
2290 emulate_2op_SrcB("rol", src, dst, _regs.eflags);
2291 break;
2292 case 1: /* ror */
2293 emulate_2op_SrcB("ror", src, dst, _regs.eflags);
2294 break;
2295 case 2: /* rcl */
2296 emulate_2op_SrcB("rcl", src, dst, _regs.eflags);
2297 break;
2298 case 3: /* rcr */
2299 emulate_2op_SrcB("rcr", src, dst, _regs.eflags);
2300 break;
2301 case 4: /* sal/shl */
2302 case 6: /* sal/shl */
2303 emulate_2op_SrcB("sal", src, dst, _regs.eflags);
2304 break;
2305 case 5: /* shr */
2306 emulate_2op_SrcB("shr", src, dst, _regs.eflags);
2307 break;
2308 case 7: /* sar */
2309 emulate_2op_SrcB("sar", src, dst, _regs.eflags);
2310 break;
2312 break;
2314 case 0xc2: /* ret imm16 (near) */
2315 case 0xc3: /* ret (near) */ {
2316 int offset = (b == 0xc2) ? insn_fetch_type(uint16_t) : 0;
2317 op_bytes = mode_64bit() ? 8 : op_bytes;
2318 if ( (rc = read_ulong(x86_seg_ss, sp_post_inc(op_bytes + offset),
2319 &dst.val, op_bytes, ctxt, ops)) != 0 )
2320 goto done;
2321 _regs.eip = dst.val;
2322 break;
2325 case 0xc4: /* les */ {
2326 unsigned long sel;
2327 dst.val = x86_seg_es;
2328 les: /* dst.val identifies the segment */
2329 generate_exception_if(src.type != OP_MEM, EXC_UD, -1);
2330 if ( (rc = read_ulong(src.mem.seg, src.mem.off + src.bytes,
2331 &sel, 2, ctxt, ops)) != 0 )
2332 goto done;
2333 if ( (rc = load_seg(dst.val, (uint16_t)sel, ctxt, ops)) != 0 )
2334 goto done;
2335 dst.val = src.val;
2336 break;
2339 case 0xc5: /* lds */
2340 dst.val = x86_seg_ds;
2341 goto les;
2343 case 0xc8: /* enter imm16,imm8 */ {
2344 uint16_t size = insn_fetch_type(uint16_t);
2345 uint8_t depth = insn_fetch_type(uint8_t) & 31;
2346 int i;
2348 dst.type = OP_REG;
2349 dst.bytes = (mode_64bit() && (op_bytes == 4)) ? 8 : op_bytes;
2350 dst.reg = (unsigned long *)&_regs.ebp;
2351 if ( (rc = ops->write(x86_seg_ss, sp_pre_dec(dst.bytes),
2352 &_regs.ebp, dst.bytes, ctxt)) )
2353 goto done;
2354 dst.val = _regs.esp;
2356 if ( depth > 0 )
2358 for ( i = 1; i < depth; i++ )
2360 unsigned long ebp, temp_data;
2361 ebp = truncate_word(_regs.ebp - i*dst.bytes, ctxt->sp_size/8);
2362 if ( (rc = read_ulong(x86_seg_ss, ebp,
2363 &temp_data, dst.bytes, ctxt, ops)) ||
2364 (rc = ops->write(x86_seg_ss, sp_pre_dec(dst.bytes),
2365 &temp_data, dst.bytes, ctxt)) )
2366 goto done;
2368 if ( (rc = ops->write(x86_seg_ss, sp_pre_dec(dst.bytes),
2369 &dst.val, dst.bytes, ctxt)) )
2370 goto done;
2373 sp_pre_dec(size);
2374 break;
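Apart from the nesting-level copies, ENTER amounts to: push the old frame pointer, make the frame pointer point at it, then reserve the immediate-sized local area. A simplified 32-bit, depth-0 sketch tracking only the two registers (toy_enter() is an illustrative name; the store of the old %ebp value is elided):

    #include <stdint.h>

    /* Toy ENTER, depth 0, 32-bit. */
    static void toy_enter(uint32_t *esp, uint32_t *ebp, uint16_t size)
    {
        *esp -= 4;      /* slot for the old frame pointer */
        *ebp  = *esp;   /* new frame pointer, as dst.val = _regs.esp above */
        *esp -= size;   /* local area, as the final sp_pre_dec(size) */
    }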
2377 case 0xc9: /* leave */
2378 /* First writeback, to %%esp. */
2379 dst.type = OP_REG;
2380 dst.bytes = (mode_64bit() && (op_bytes == 4)) ? 8 : op_bytes;
2381 dst.reg = (unsigned long *)&_regs.esp;
2382 dst.val = _regs.ebp;
2384 /* Flush first writeback, since there is a second. */
2385 switch ( dst.bytes )
2387 case 1: *(uint8_t *)dst.reg = (uint8_t)dst.val; break;
2388 case 2: *(uint16_t *)dst.reg = (uint16_t)dst.val; break;
2389 case 4: *dst.reg = (uint32_t)dst.val; break; /* 64b: zero-ext */
2390 case 8: *dst.reg = dst.val; break;
2393 /* Second writeback, to %%ebp. */
2394 dst.reg = (unsigned long *)&_regs.ebp;
2395 if ( (rc = read_ulong(x86_seg_ss, sp_post_inc(dst.bytes),
2396 &dst.val, dst.bytes, ctxt, ops)) )
2397 goto done;
2398 break;
2400 case 0xca: /* ret imm16 (far) */
2401 case 0xcb: /* ret (far) */ {
2402 int offset = (b == 0xca) ? insn_fetch_type(uint16_t) : 0;
2403 op_bytes = mode_64bit() ? 8 : op_bytes;
2404 if ( (rc = read_ulong(x86_seg_ss, sp_post_inc(op_bytes),
2405 &dst.val, op_bytes, ctxt, ops)) ||
2406 (rc = read_ulong(x86_seg_ss, sp_post_inc(op_bytes + offset),
2407 &src.val, op_bytes, ctxt, ops)) ||
2408 (rc = load_seg(x86_seg_cs, (uint16_t)src.val, ctxt, ops)) )
2409 goto done;
2410 _regs.eip = dst.val;
2411 break;
2414 case 0xcc: /* int3 */
2415 src.val = EXC_BP;
2416 goto swint;
2418 case 0xcd: /* int imm8 */
2419 src.val = insn_fetch_type(uint8_t);
2420 swint:
2421 fail_if(ops->inject_sw_interrupt == NULL);
2422 rc = ops->inject_sw_interrupt(src.val, _regs.eip - ctxt->regs->eip,
2423 ctxt) ? : X86EMUL_EXCEPTION;
2424 goto done;
2426 case 0xce: /* into */
2427 generate_exception_if(mode_64bit(), EXC_UD, -1);
2428 if ( !(_regs.eflags & EFLG_OF) )
2429 break;
2430 src.val = EXC_OF;
2431 goto swint;
2433 case 0xcf: /* iret */ {
2434 unsigned long cs, eip, eflags;
2435 uint32_t mask = EFLG_VIP | EFLG_VIF | EFLG_VM;
2436 if ( !mode_ring0() )
2437 mask |= EFLG_IOPL;
2438 if ( !mode_iopl() )
2439 mask |= EFLG_IF;
2440 fail_if(!in_realmode(ctxt, ops));
2441 if ( (rc = read_ulong(x86_seg_ss, sp_post_inc(op_bytes),
2442 &eip, op_bytes, ctxt, ops)) ||
2443 (rc = read_ulong(x86_seg_ss, sp_post_inc(op_bytes),
2444 &cs, op_bytes, ctxt, ops)) ||
2445 (rc = read_ulong(x86_seg_ss, sp_post_inc(op_bytes),
2446 &eflags, op_bytes, ctxt, ops)) )
2447 goto done;
2448 if ( op_bytes == 2 )
2449 eflags = (uint16_t)eflags | (_regs.eflags & 0xffff0000u);
2450 eflags &= 0x257fd5;
2451 _regs.eflags &= mask;
2452 _regs.eflags |= (uint32_t)(eflags & ~mask) | 0x02;
2453 _regs.eip = eip;
2454 if ( (rc = load_seg(x86_seg_cs, (uint16_t)cs, ctxt, ops)) != 0 )
2455 goto done;
2456 break;
2459 case 0xd0 ... 0xd1: /* Grp2 */
2460 src.val = 1;
2461 goto grp2;
2463 case 0xd2 ... 0xd3: /* Grp2 */
2464 src.val = _regs.ecx;
2465 goto grp2;
2467 case 0xd4: /* aam */ {
2468 unsigned int base = insn_fetch_type(uint8_t);
2469 uint8_t al = _regs.eax;
2470 generate_exception_if(mode_64bit(), EXC_UD, -1);
2471 generate_exception_if(base == 0, EXC_DE, -1);
2472 *(uint16_t *)&_regs.eax = ((al / base) << 8) | (al % base);
2473 _regs.eflags &= ~(EFLG_SF|EFLG_ZF|EFLG_PF);
2474 _regs.eflags |= ((uint8_t)_regs.eax == 0) ? EFLG_ZF : 0;
2475 _regs.eflags |= (( int8_t)_regs.eax < 0) ? EFLG_SF : 0;
2476 _regs.eflags |= even_parity(_regs.eax) ? EFLG_PF : 0;
2477 break;
2480 case 0xd5: /* aad */ {
2481 unsigned int base = insn_fetch_type(uint8_t);
2482 uint16_t ax = _regs.eax;
2483 generate_exception_if(mode_64bit(), EXC_UD, -1);
2484 *(uint16_t *)&_regs.eax = (uint8_t)(ax + ((ax >> 8) * base));
2485 _regs.eflags &= ~(EFLG_SF|EFLG_ZF|EFLG_PF);
2486 _regs.eflags |= ((uint8_t)_regs.eax == 0) ? EFLG_ZF : 0;
2487 _regs.eflags |= (( int8_t)_regs.eax < 0) ? EFLG_SF : 0;
2488 _regs.eflags |= even_parity(_regs.eax) ? EFLG_PF : 0;
2489 break;
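AAM divides AL by the immediate base into AH (quotient) and AL (remainder); AAD folds AH back into AL as AL + AH*base and zeroes AH; both then recompute SF/ZF/PF from the new AL. A worked sketch with base 10 (toy_aam()/toy_aad() are illustrative names, flag recomputation omitted):

    #include <stdint.h>
    #include <stdio.h>

    /* Toy AAM/AAD arithmetic on a 16-bit AX value. */
    static uint16_t toy_aam(uint8_t al, uint8_t base)
    {
        return (uint16_t)(((al / base) << 8) | (al % base));
    }

    static uint16_t toy_aad(uint16_t ax, uint8_t base)
    {
        return (uint8_t)(ax + ((ax >> 8) * base)); /* AH ends up zero */
    }

    int main(void)
    {
        uint16_t ax = toy_aam(47, 10);  /* AH=4, AL=7: 0x0407 */
        printf("%#06x -> %#06x\n", ax, toy_aad(ax, 10)); /* back to 0x002f */
        return 0;
    }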
2492 case 0xd6: /* salc */
2493 generate_exception_if(mode_64bit(), EXC_UD, -1);
2494 *(uint8_t *)&_regs.eax = (_regs.eflags & EFLG_CF) ? 0xff : 0x00;
2495 break;
2497 case 0xd7: /* xlat */ {
2498 unsigned long al = (uint8_t)_regs.eax;
2499 if ( (rc = read_ulong(ea.mem.seg, truncate_ea(_regs.ebx + al),
2500 &al, 1, ctxt, ops)) != 0 )
2501 goto done;
2502 *(uint8_t *)&_regs.eax = al;
2503 break;
2506 case 0xd8: /* FPU 0xd8 */
2507 switch ( modrm )
2509 case 0xc0 ... 0xc7: /* fadd %stN,%stN */
2510 case 0xc8 ... 0xcf: /* fmul %stN,%stN */
2511 case 0xd0 ... 0xd7: /* fcom %stN,%stN */
2512 case 0xd8 ... 0xdf: /* fcomp %stN,%stN */
2513 case 0xe0 ... 0xe7: /* fsub %stN,%stN */
2514 case 0xe8 ... 0xef: /* fsubr %stN,%stN */
2515 case 0xf0 ... 0xf7: /* fdiv %stN,%stN */
2516 case 0xf8 ... 0xff: /* fdivr %stN,%stN */
2517 emulate_fpu_insn_stub(0xd8, modrm);
2518 break;
2519 default:
2520 fail_if(modrm >= 0xc0);
2521 ea.bytes = 4;
2522 src = ea;
2523 if ( (rc = ops->read(src.mem.seg, src.mem.off, &src.val,
2524 src.bytes, ctxt)) != 0 )
2525 goto done;
2526 switch ( modrm_reg & 7 )
2528 case 0: /* fadd */
2529 emulate_fpu_insn_memsrc("fadds", src.val);
2530 break;
2531 case 1: /* fmul */
2532 emulate_fpu_insn_memsrc("fmuls", src.val);
2533 break;
2534 case 2: /* fcom */
2535 emulate_fpu_insn_memsrc("fcoms", src.val);
2536 break;
2537 case 3: /* fcomp */
2538 emulate_fpu_insn_memsrc("fcomps", src.val);
2539 break;
2540 case 4: /* fsub */
2541 emulate_fpu_insn_memsrc("fsubs", src.val);
2542 break;
2543 case 5: /* fsubr */
2544 emulate_fpu_insn_memsrc("fsubrs", src.val);
2545 break;
2546 case 6: /* fdiv */
2547 emulate_fpu_insn_memsrc("fdivs", src.val);
2548 break;
2549 case 7: /* fdivr */
2550 emulate_fpu_insn_memsrc("fdivrs", src.val);
2551 break;
2552 default:
2553 goto cannot_emulate;
2556 break;
2558 case 0xd9: /* FPU 0xd9 */
2559 switch ( modrm )
2561 case 0xc0 ... 0xc7: /* fld %stN */
2562 case 0xc8 ... 0xcf: /* fxch %stN */
2563 case 0xd0: /* fnop */
2564 case 0xe0: /* fchs */
2565 case 0xe1: /* fabs */
2566 case 0xe4: /* ftst */
2567 case 0xe5: /* fxam */
2568 case 0xe8: /* fld1 */
2569 case 0xe9: /* fldl2t */
2570 case 0xea: /* fldl2e */
2571 case 0xeb: /* fldpi */
2572 case 0xec: /* fldlg2 */
2573 case 0xed: /* fldln2 */
2574 case 0xee: /* fldz */
2575 case 0xf0: /* f2xm1 */
2576 case 0xf1: /* fyl2x */
2577 case 0xf2: /* fptan */
2578 case 0xf3: /* fpatan */
2579 case 0xf4: /* fxtract */
2580 case 0xf5: /* fprem1 */
2581 case 0xf6: /* fdecstp */
2582 case 0xf7: /* fincstp */
2583 case 0xf8: /* fprem */
2584 case 0xf9: /* fyl2xp1 */
2585 case 0xfa: /* fsqrt */
2586 case 0xfb: /* fsincos */
2587 case 0xfc: /* frndint */
2588 case 0xfd: /* fscale */
2589 case 0xfe: /* fsin */
2590 case 0xff: /* fcos */
2591 emulate_fpu_insn_stub(0xd9, modrm);
2592 break;
2593 default:
2594 fail_if(modrm >= 0xc0);
2595 switch ( modrm_reg & 7 )
2597 case 0: /* fld m32fp */
2598 ea.bytes = 4;
2599 src = ea;
2600 if ( (rc = ops->read(ea.mem.seg, ea.mem.off, &src.val,
2601 src.bytes, ctxt)) != 0 )
2602 goto done;
2603 emulate_fpu_insn_memsrc("flds", src.val);
2604 break;
2605 case 2: /* fst m32fp */
2606 ea.bytes = 4;
2607 dst = ea;
2608 dst.type = OP_MEM;
2609 emulate_fpu_insn_memdst("fsts", dst.val);
2610 break;
2611 case 3: /* fstp m32fp */
2612 ea.bytes = 4;
2613 dst = ea;
2614 dst.type = OP_MEM;
2615 emulate_fpu_insn_memdst("fstps", dst.val);
2616 break;
2617 /* case 4: fldenv - TODO */
2618 case 5: /* fldcw m2byte */
2619 ea.bytes = 2;
2620 src = ea;
2621 if ( (rc = ops->read(src.mem.seg, src.mem.off, &src.val,
2622 src.bytes, ctxt)) != 0 )
2623 goto done;
2624 emulate_fpu_insn_memsrc("fldcw", src.val);
2625 break;
2626 /* case 6: fstenv - TODO */
2627 case 7: /* fnstcw m2byte */
2628 ea.bytes = 2;
2629 dst = ea;
2630 dst.type = OP_MEM;
2631 emulate_fpu_insn_memdst("fnstcw", dst.val);
2632 break;
2633 default:
2634 goto cannot_emulate;
2637 break;
2639 case 0xda: /* FPU 0xda */
2640 switch ( modrm )
2642 case 0xc0 ... 0xc7: /* fcmovb %stN */
2643 case 0xc8 ... 0xcf: /* fcmove %stN */
2644 case 0xd0 ... 0xd7: /* fcmovbe %stN */
2645 case 0xd8 ... 0xdf: /* fcmovu %stN */
2646 case 0xe9: /* fucompp */
2647 emulate_fpu_insn_stub(0xda, modrm);
2648 break;
2649 default:
2650 fail_if(modrm >= 0xc0);
2651 ea.bytes = 4;
2652 src = ea;
2653 if ( (rc = ops->read(src.mem.seg, src.mem.off, &src.val,
2654 src.bytes, ctxt)) != 0 )
2655 goto done;
2656 switch ( modrm_reg & 7 )
2658 case 0: /* fiadd m32i */
2659 emulate_fpu_insn_memsrc("fiaddl", src.val);
2660 break;
2661 case 1: /* fimul m32i */
2662 emulate_fpu_insn_memsrc("fimull", src.val);
2663 break;
2664 case 2: /* ficom m32i */
2665 emulate_fpu_insn_memsrc("ficoml", src.val);
2666 break;
2667 case 3: /* ficomp m32i */
2668 emulate_fpu_insn_memsrc("ficompl", src.val);
2669 break;
2670 case 4: /* fisub m32i */
2671 emulate_fpu_insn_memsrc("fisubl", src.val);
2672 break;
2673 case 5: /* fisubr m32i */
2674 emulate_fpu_insn_memsrc("fisubrl", src.val);
2675 break;
2676 case 6: /* fidiv m32i */
2677 emulate_fpu_insn_memsrc("fidivl", src.val);
2678 break;
2679 case 7: /* fidivr m32i */
2680 emulate_fpu_insn_memsrc("fidivrl", src.val);
2681 break;
2682 default:
2683 goto cannot_emulate;
2686 break;
2688 case 0xdb: /* FPU 0xdb */
2689 switch ( modrm )
2691 case 0xc0 ... 0xc7: /* fcmovnb %stN */
2692 case 0xc8 ... 0xcf: /* fcmovne %stN */
2693 case 0xd0 ... 0xd7: /* fcmovnbe %stN */
2694 case 0xd8 ... 0xdf: /* fcmovnu %stN */
2695 emulate_fpu_insn_stub(0xdb, modrm);
2696 break;
2697 case 0xe2: /* fnclex */
2698 emulate_fpu_insn("fnclex");
2699 break;
2700 case 0xe3: /* fninit */
2701 emulate_fpu_insn("fninit");
2702 break;
2703 case 0xe4: /* fsetpm - 287 only, ignored by 387 */
2704 break;
2705 case 0xe8 ... 0xef: /* fucomi %stN */
2706 case 0xf0 ... 0xf7: /* fcomi %stN */
2707 emulate_fpu_insn_stub(0xdb, modrm);
2708 break;
2709 default:
2710 fail_if(modrm >= 0xc0);
2711 switch ( modrm_reg & 7 )
2713 case 0: /* fild m32i */
2714 ea.bytes = 4;
2715 src = ea;
2716 if ( (rc = ops->read(src.mem.seg, src.mem.off, &src.val,
2717 src.bytes, ctxt)) != 0 )
2718 goto done;
2719 emulate_fpu_insn_memsrc("fildl", src.val);
2720 break;
2721 case 1: /* fisttp m32i */
2722 ea.bytes = 4;
2723 dst = ea;
2724 dst.type = OP_MEM;
2725 emulate_fpu_insn_memdst("fisttpl", dst.val);
2726 break;
2727 case 2: /* fist m32i */
2728 ea.bytes = 4;
2729 dst = ea;
2730 dst.type = OP_MEM;
2731 emulate_fpu_insn_memdst("fistl", dst.val);
2732 break;
2733 case 3: /* fistp m32i */
2734 ea.bytes = 4;
2735 dst = ea;
2736 dst.type = OP_MEM;
2737 emulate_fpu_insn_memdst("fistpl", dst.val);
2738 break;
2739 case 5: /* fld m80fp */
2740 ea.bytes = 10;
2741 src = ea;
2742 if ( (rc = ops->read(src.mem.seg, src.mem.off,
2743 &src.val, src.bytes, ctxt)) != 0 )
2744 goto done;
2745 emulate_fpu_insn_memdst("fldt", src.val);
2746 break;
2747 case 7: /* fstp m80fp */
2748 ea.bytes = 10;
2749 dst = ea;
2750 dst.type = OP_MEM;
2751 emulate_fpu_insn_memdst("fstpt", dst.val);
2752 break;
2753 default:
2754 goto cannot_emulate;
2757 break;
2759 case 0xdc: /* FPU 0xdc */
2760 switch ( modrm )
2762 case 0xc0 ... 0xc7: /* fadd %stN */
2763 case 0xc8 ... 0xcf: /* fmul %stN */
2764 case 0xe0 ... 0xe7: /* fsubr %stN */
2765 case 0xe8 ... 0xef: /* fsub %stN */
2766 case 0xf0 ... 0xf7: /* fdivr %stN */
2767 case 0xf8 ... 0xff: /* fdiv %stN */
2768 emulate_fpu_insn_stub(0xdc, modrm);
2769 break;
2770 default:
2771 fail_if(modrm >= 0xc0);
2772 ea.bytes = 8;
2773 src = ea;
2774 if ( (rc = ops->read(src.mem.seg, src.mem.off, &src.val,
2775 src.bytes, ctxt)) != 0 )
2776 goto done;
2777 switch ( modrm_reg & 7 )
2779 case 0: /* fadd m64fp */
2780 emulate_fpu_insn_memsrc("faddl", src.val);
2781 break;
2782 case 1: /* fmul m64fp */
2783 emulate_fpu_insn_memsrc("fmull", src.val);
2784 break;
2785 case 2: /* fcom m64fp */
2786 emulate_fpu_insn_memsrc("fcoml", src.val);
2787 break;
2788 case 3: /* fcomp m64fp */
2789 emulate_fpu_insn_memsrc("fcompl", src.val);
2790 break;
2791 case 4: /* fsub m64fp */
2792 emulate_fpu_insn_memsrc("fsubl", src.val);
2793 break;
2794 case 5: /* fsubr m64fp */
2795 emulate_fpu_insn_memsrc("fsubrl", src.val);
2796 break;
2797 case 6: /* fdiv m64fp */
2798 emulate_fpu_insn_memsrc("fdivl", src.val);
2799 break;
2800 case 7: /* fdivr m64fp */
2801 emulate_fpu_insn_memsrc("fdivrl", src.val);
2802 break;
2805 break;
2807 case 0xdd: /* FPU 0xdd */
2808 switch ( modrm )
2810 case 0xc0 ... 0xc7: /* ffree %stN */
2811 case 0xd0 ... 0xd7: /* fst %stN */
2812 case 0xd8 ... 0xdf: /* fstp %stN */
2813 case 0xe0 ... 0xe7: /* fucom %stN */
2814 case 0xe8 ... 0xef: /* fucomp %stN */
2815 emulate_fpu_insn_stub(0xdd, modrm);
2816 break;
2817 default:
2818 fail_if(modrm >= 0xc0);
2819 switch ( modrm_reg & 7 )
2821 case 0: /* fld m64fp */
2822 ea.bytes = 8;
2823 src = ea;
2824 if ( (rc = ops->read(src.mem.seg, src.mem.off, &src.val,
2825 src.bytes, ctxt)) != 0 )
2826 goto done;
2827 emulate_fpu_insn_memsrc("fldl", src.val);
2828 break;
2829 case 1: /* fisttp m64i */
2830 ea.bytes = 8;
2831 dst = ea;
2832 dst.type = OP_MEM;
2833 emulate_fpu_insn_memdst("fisttpll", dst.val);
2834 break;
2835 case 2: /* fst m64fp */
2836 ea.bytes = 8;
2837 dst = ea;
2838 dst.type = OP_MEM;
2839 emulate_fpu_insn_memsrc("fstl", dst.val);
2840 break;
2841 case 3: /* fstp m64fp */
2842 ea.bytes = 8;
2843 dst = ea;
2844 dst.type = OP_MEM;
2845 emulate_fpu_insn_memdst("fstpl", dst.val);
2846 break;
2847 case 7: /* fnstsw m2byte */
2848 ea.bytes = 2;
2849 dst = ea;
2850 dst.type = OP_MEM;
2851 emulate_fpu_insn_memdst("fnstsw", dst.val);
2852 break;
2853 default:
2854 goto cannot_emulate;
2857 break;
2859 case 0xde: /* FPU 0xde */
2860 switch ( modrm )
2862 case 0xc0 ... 0xc7: /* faddp %stN */
2863 case 0xc8 ... 0xcf: /* fmulp %stN */
2864 case 0xd9: /* fcompp */
2865 case 0xe0 ... 0xe7: /* fsubrp %stN */
2866 case 0xe8 ... 0xef: /* fsubp %stN */
2867 case 0xf0 ... 0xf7: /* fdivrp %stN */
2868 case 0xf8 ... 0xff: /* fdivp %stN */
2869 emulate_fpu_insn_stub(0xde, modrm);
2870 break;
2871 default:
2872 fail_if(modrm >= 0xc0);
2873 ea.bytes = 2;
2874 src = ea;
2875 if ( (rc = ops->read(src.mem.seg, src.mem.off, &src.val,
2876 src.bytes, ctxt)) != 0 )
2877 goto done;
2878 switch ( modrm_reg & 7 )
2880 case 0: /* fiadd m16i */
2881 emulate_fpu_insn_memsrc("fiadd", src.val);
2882 break;
2883 case 1: /* fimul m16i */
2884 emulate_fpu_insn_memsrc("fimul", src.val);
2885 break;
2886 case 2: /* ficom m16i */
2887 emulate_fpu_insn_memsrc("ficom", src.val);
2888 break;
2889 case 3: /* ficomp m16i */
2890 emulate_fpu_insn_memsrc("ficomp", src.val);
2891 break;
2892 case 4: /* fisub m16i */
2893 emulate_fpu_insn_memsrc("fisub", src.val);
2894 break;
2895 case 5: /* fisubr m16i */
2896 emulate_fpu_insn_memsrc("fisubr", src.val);
2897 break;
2898 case 6: /* fidiv m16i */
2899 emulate_fpu_insn_memsrc("fidiv", src.val);
2900 break;
2901 case 7: /* fidivr m16i */
2902 emulate_fpu_insn_memsrc("fidivr", src.val);
2903 break;
2904 default:
2905 goto cannot_emulate;
2908 break;
2910 case 0xdf: /* FPU 0xdf */
2911 switch ( modrm )
2913 case 0xe0:
2914 /* fnstsw %ax */
2915 dst.bytes = 2;
2916 dst.type = OP_REG;
2917 dst.reg = (unsigned long *)&_regs.eax;
2918 emulate_fpu_insn_memdst("fnstsw", dst.val);
2919 break;
2920 case 0xf0 ... 0xf7: /* fcomip %stN */
2921 case 0xf8 ... 0xff: /* fucomip %stN */
2922 emulate_fpu_insn_stub(0xdf, modrm);
2923 break;
2924 default:
2925 fail_if(modrm >= 0xc0);
2926 switch ( modrm_reg & 7 )
2928 case 0: /* fild m16i */
2929 ea.bytes = 2;
2930 src = ea;
2931 if ( (rc = ops->read(src.mem.seg, src.mem.off, &src.val,
2932 src.bytes, ctxt)) != 0 )
2933 goto done;
2934 emulate_fpu_insn_memsrc("fild", src.val);
2935 break;
2936 case 1: /* fisttp m16i */
2937 ea.bytes = 2;
2938 dst = ea;
2939 dst.type = OP_MEM;
2940 emulate_fpu_insn_memdst("fisttp", dst.val);
2941 break;
2942 case 2: /* fist m16i */
2943 ea.bytes = 2;
2944 dst = ea;
2945 dst.type = OP_MEM;
2946 emulate_fpu_insn_memdst("fist", dst.val);
2947 break;
2948 case 3: /* fistp m16i */
2949 ea.bytes = 2;
2950 dst = ea;
2951 dst.type = OP_MEM;
2952 emulate_fpu_insn_memdst("fistp", dst.val);
2953 break;
2954 case 4: /* fbld m80dec */
2955 ea.bytes = 10;
2956 src = ea;
2957 if ( (rc = ops->read(src.mem.seg, src.mem.off,
2958 &src.val, src.bytes, ctxt)) != 0 )
2959 goto done;
2960 emulate_fpu_insn_memdst("fbld", src.val);
2961 break;
2962 case 5: /* fild m64i */
2963 ea.bytes = 8;
2964 src = ea;
2965 if ( (rc = ops->read(src.mem.seg, src.mem.off, &src.val,
2966 src.bytes, ctxt)) != 0 )
2967 goto done;
2968 emulate_fpu_insn_memsrc("fildll", src.val);
2969 break;
2970 case 6: /* fbstp packed bcd */
2971 ea.bytes = 10;
2972 dst = ea;
2973 dst.type = OP_MEM;
2974 emulate_fpu_insn_memdst("fbstp", dst.val);
2975 break;
2976 case 7: /* fistp m64i */
2977 ea.bytes = 8;
2978 dst = ea;
2979 dst.type = OP_MEM;
2980 emulate_fpu_insn_memdst("fistpll", dst.val);
2981 break;
2982 default:
2983 goto cannot_emulate;
2986 break;
2988 case 0xe0 ... 0xe2: /* loop{,z,nz} */ {
2989 int rel = insn_fetch_type(int8_t);
2990 int do_jmp = !(_regs.eflags & EFLG_ZF); /* loopnz */
2991 if ( b == 0xe1 )
2992 do_jmp = !do_jmp; /* loopz */
2993 else if ( b == 0xe2 )
2994 do_jmp = 1; /* loop */
2995 switch ( ad_bytes )
2997 case 2:
2998 do_jmp &= --(*(uint16_t *)&_regs.ecx) != 0;
2999 break;
3000 case 4:
3001 do_jmp &= --(*(uint32_t *)&_regs.ecx) != 0;
3002 _regs.ecx = (uint32_t)_regs.ecx; /* zero extend in x86/64 mode */
3003 break;
3004 default: /* case 8: */
3005 do_jmp &= --_regs.ecx != 0;
3006 break;
3008 if ( do_jmp )
3009 jmp_rel(rel);
3010 break;
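The counter width follows the address size: only the low 16 or 32 bits of rCX are decremented and tested, with the usual x86-64 rule that a 32-bit write zero-extends into the full register while a 16-bit write leaves the upper bits alone. A sketch (toy_loop_step() is an illustrative name):

    #include <stdint.h>

    /* Toy LOOP counter step: decrement the ad_bytes-wide view of %rcx
     * and report whether it is still nonzero. */
    static int toy_loop_step(uint64_t *rcx, int ad_bytes)
    {
        switch ( ad_bytes )
        {
        case 2: {
            uint16_t cx = (uint16_t)*rcx - 1;
            *rcx = (*rcx & ~(uint64_t)0xffff) | cx; /* upper bits kept */
            return cx != 0;
        }
        case 4: {
            uint32_t ecx = (uint32_t)*rcx - 1;
            *rcx = ecx;                    /* 32-bit write zero-extends */
            return ecx != 0;
        }
        default:
            return --*rcx != 0;
        }
    }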
3013 case 0xe3: /* jcxz/jecxz (short) */ {
3014 int rel = insn_fetch_type(int8_t);
3015 if ( (ad_bytes == 2) ? !(uint16_t)_regs.ecx :
3016 (ad_bytes == 4) ? !(uint32_t)_regs.ecx : !_regs.ecx )
3017 jmp_rel(rel);
3018 break;
3021 case 0xe4: /* in imm8,%al */
3022 case 0xe5: /* in imm8,%eax */
3023 case 0xe6: /* out %al,imm8 */
3024 case 0xe7: /* out %eax,imm8 */
3025 case 0xec: /* in %dx,%al */
3026 case 0xed: /* in %dx,%eax */
3027 case 0xee: /* out %al,%dx */
3028 case 0xef: /* out %eax,%dx */ {
3029 unsigned int port = ((b < 0xe8)
3030 ? insn_fetch_type(uint8_t)
3031 : (uint16_t)_regs.edx);
3032 op_bytes = !(b & 1) ? 1 : (op_bytes == 8) ? 4 : op_bytes;
3033 if ( (rc = ioport_access_check(port, op_bytes, ctxt, ops)) != 0 )
3034 goto done;
3035 if ( b & 2 )
3037 /* out */
3038 fail_if(ops->write_io == NULL);
3039 rc = ops->write_io(port, op_bytes, _regs.eax, ctxt);
3041 else
3043 /* in */
3044 dst.type = OP_REG;
3045 dst.bytes = op_bytes;
3046 dst.reg = (unsigned long *)&_regs.eax;
3047 fail_if(ops->read_io == NULL);
3048 rc = ops->read_io(port, dst.bytes, &dst.val, ctxt);
3050 if ( rc != 0 )
3051 goto done;
3052 break;
3055 case 0xe8: /* call (near) */ {
3056 int rel = (((op_bytes == 2) && !mode_64bit())
3057 ? (int32_t)insn_fetch_type(int16_t)
3058 : insn_fetch_type(int32_t));
3059 op_bytes = mode_64bit() ? 8 : op_bytes;
3060 src.val = _regs.eip;
3061 jmp_rel(rel);
3062 goto push;
3065 case 0xe9: /* jmp (near) */ {
3066 int rel = (((op_bytes == 2) && !mode_64bit())
3067 ? (int32_t)insn_fetch_type(int16_t)
3068 : insn_fetch_type(int32_t));
3069 jmp_rel(rel);
3070 break;
3073 case 0xea: /* jmp (far, absolute) */ {
3074 uint16_t sel;
3075 uint32_t eip;
3076 generate_exception_if(mode_64bit(), EXC_UD, -1);
3077 eip = insn_fetch_bytes(op_bytes);
3078 sel = insn_fetch_type(uint16_t);
3079 if ( (rc = load_seg(x86_seg_cs, sel, ctxt, ops)) != 0 )
3080 goto done;
3081 _regs.eip = eip;
3082 break;
3085 case 0xeb: /* jmp (short) */ {
3086 int rel = insn_fetch_type(int8_t);
3087 jmp_rel(rel);
3088 break;
3091 case 0xf1: /* int1 (icebp) */
3092 src.val = EXC_DB;
3093 goto swint;
3095 case 0xf4: /* hlt */
3096 ctxt->retire.flags.hlt = 1;
3097 break;
3099 case 0xf5: /* cmc */
3100 _regs.eflags ^= EFLG_CF;
3101 break;
3103 case 0xf6 ... 0xf7: /* Grp3 */
3104 switch ( modrm_reg & 7 )
3106 case 0 ... 1: /* test */
3107 /* Special case in Grp3: test has an immediate source operand. */
3108 src.type = OP_IMM;
3109 src.bytes = (d & ByteOp) ? 1 : op_bytes;
3110 if ( src.bytes == 8 ) src.bytes = 4;
3111 switch ( src.bytes )
3113 case 1: src.val = insn_fetch_type(int8_t); break;
3114 case 2: src.val = insn_fetch_type(int16_t); break;
3115 case 4: src.val = insn_fetch_type(int32_t); break;
3117 goto test;
3118 case 2: /* not */
3119 dst.val = ~dst.val;
3120 break;
3121 case 3: /* neg */
3122 emulate_1op("neg", dst, _regs.eflags);
3123 break;
3124 case 4: /* mul */
3125 src = dst;
3126 dst.type = OP_REG;
3127 dst.reg = (unsigned long *)&_regs.eax;
3128 dst.val = *dst.reg;
3129 _regs.eflags &= ~(EFLG_OF|EFLG_CF);
3130 switch ( src.bytes )
3132 case 1:
3133 dst.val = (uint8_t)dst.val;
3134 dst.val *= src.val;
3135 if ( (uint8_t)dst.val != (uint16_t)dst.val )
3136 _regs.eflags |= EFLG_OF|EFLG_CF;
3137 dst.bytes = 2;
3138 break;
3139 case 2:
3140 dst.val = (uint16_t)dst.val;
3141 dst.val *= src.val;
3142 if ( (uint16_t)dst.val != (uint32_t)dst.val )
3143 _regs.eflags |= EFLG_OF|EFLG_CF;
3144 *(uint16_t *)&_regs.edx = dst.val >> 16;
3145 break;
3146 #ifdef __x86_64__
3147 case 4:
3148 dst.val = (uint32_t)dst.val;
3149 dst.val *= src.val;
3150 if ( (uint32_t)dst.val != dst.val )
3151 _regs.eflags |= EFLG_OF|EFLG_CF;
3152 _regs.edx = (uint32_t)(dst.val >> 32);
3153 break;
3154 #endif
3155 default: {
3156 unsigned long m[2] = { src.val, dst.val };
3157 if ( mul_dbl(m) )
3158 _regs.eflags |= EFLG_OF|EFLG_CF;
3159 _regs.edx = m[1];
3160 dst.val = m[0];
3161 break;
3164 break;
3165 case 5: /* imul */
3166 src = dst;
3167 dst.type = OP_REG;
3168 dst.reg = (unsigned long *)&_regs.eax;
3169 dst.val = *dst.reg;
3170 _regs.eflags &= ~(EFLG_OF|EFLG_CF);
3171 switch ( src.bytes )
3173 case 1:
3174 dst.val = ((uint16_t)(int8_t)src.val *
3175 (uint16_t)(int8_t)dst.val);
3176 if ( (int8_t)dst.val != (uint16_t)dst.val )
3177 _regs.eflags |= EFLG_OF|EFLG_CF;
3178 dst.bytes = 2;
3179 break;
3180 case 2:
3181 dst.val = ((uint32_t)(int16_t)src.val *
3182 (uint32_t)(int16_t)dst.val);
3183 if ( (int16_t)dst.val != (uint32_t)dst.val )
3184 _regs.eflags |= EFLG_OF|EFLG_CF;
3185 *(uint16_t *)&_regs.edx = dst.val >> 16;
3186 break;
3187 #ifdef __x86_64__
3188 case 4:
3189 dst.val = ((uint64_t)(int32_t)src.val *
3190 (uint64_t)(int32_t)dst.val);
3191 if ( (int32_t)dst.val != dst.val )
3192 _regs.eflags |= EFLG_OF|EFLG_CF;
3193 _regs.edx = (uint32_t)(dst.val >> 32);
3194 break;
3195 #endif
3196 default: {
3197 unsigned long m[2] = { src.val, dst.val };
3198 if ( imul_dbl(m) )
3199 _regs.eflags |= EFLG_OF|EFLG_CF;
3200 _regs.edx = m[1];
3201 dst.val = m[0];
3202 break;
3205 break;
3206 case 6: /* div */ {
3207 unsigned long u[2], v;
3208 src = dst;
3209 dst.type = OP_REG;
3210 dst.reg = (unsigned long *)&_regs.eax;
3211 switch ( src.bytes )
3213 case 1:
3214 u[0] = (uint16_t)_regs.eax;
3215 u[1] = 0;
3216 v = (uint8_t)src.val;
3217 generate_exception_if(
3218 div_dbl(u, v) || ((uint8_t)u[0] != (uint16_t)u[0]),
3219 EXC_DE, -1);
3220 dst.val = (uint8_t)u[0];
3221 ((uint8_t *)&_regs.eax)[1] = u[1];
3222 break;
3223 case 2:
3224 u[0] = ((uint32_t)_regs.edx << 16) | (uint16_t)_regs.eax;
3225 u[1] = 0;
3226 v = (uint16_t)src.val;
3227 generate_exception_if(
3228 div_dbl(u, v) || ((uint16_t)u[0] != (uint32_t)u[0]),
3229 EXC_DE, -1);
3230 dst.val = (uint16_t)u[0];
3231 *(uint16_t *)&_regs.edx = u[1];
3232 break;
3233 #ifdef __x86_64__
3234 case 4:
3235 u[0] = (_regs.edx << 32) | (uint32_t)_regs.eax;
3236 u[1] = 0;
3237 v = (uint32_t)src.val;
3238 generate_exception_if(
3239 div_dbl(u, v) || ((uint32_t)u[0] != u[0]),
3240 EXC_DE, -1);
3241 dst.val = (uint32_t)u[0];
3242 _regs.edx = (uint32_t)u[1];
3243 break;
3244 #endif
3245 default:
3246 u[0] = _regs.eax;
3247 u[1] = _regs.edx;
3248 v = src.val;
3249 generate_exception_if(div_dbl(u, v), EXC_DE, -1);
3250 dst.val = u[0];
3251 _regs.edx = u[1];
3252 break;
3254 break;
3256 case 7: /* idiv */ {
3257 unsigned long u[2], v;
3258 src = dst;
3259 dst.type = OP_REG;
3260 dst.reg = (unsigned long *)&_regs.eax;
3261 switch ( src.bytes )
3263 case 1:
3264 u[0] = (int16_t)_regs.eax;
3265 u[1] = ((long)u[0] < 0) ? ~0UL : 0UL;
3266 v = (int8_t)src.val;
3267 generate_exception_if(
3268 idiv_dbl(u, v) || ((int8_t)u[0] != (int16_t)u[0]),
3269 EXC_DE, -1);
3270 dst.val = (int8_t)u[0];
3271 ((int8_t *)&_regs.eax)[1] = u[1];
3272 break;
3273 case 2:
3274 u[0] = (int32_t)((_regs.edx << 16) | (uint16_t)_regs.eax);
3275 u[1] = ((long)u[0] < 0) ? ~0UL : 0UL;
3276 v = (int16_t)src.val;
3277 generate_exception_if(
3278 idiv_dbl(u, v) || ((int16_t)u[0] != (int32_t)u[0]),
3279 EXC_DE, -1);
3280 dst.val = (int16_t)u[0];
3281 *(int16_t *)&_regs.edx = u[1];
3282 break;
3283 #ifdef __x86_64__
3284 case 4:
3285 u[0] = (_regs.edx << 32) | (uint32_t)_regs.eax;
3286 u[1] = ((long)u[0] < 0) ? ~0UL : 0UL;
3287 v = (int32_t)src.val;
3288 generate_exception_if(
3289 idiv_dbl(u, v) || ((int32_t)u[0] != u[0]),
3290 EXC_DE, -1);
3291 dst.val = (int32_t)u[0];
3292 _regs.edx = (uint32_t)u[1];
3293 break;
3294 #endif
3295 default:
3296 u[0] = _regs.eax;
3297 u[1] = _regs.edx;
3298 v = src.val;
3299 generate_exception_if(idiv_dbl(u, v), EXC_DE, -1);
3300 dst.val = u[0];
3301 _regs.edx = u[1];
3302 break;
3304 break;
3306 default:
3307 goto cannot_emulate;
3309 break;
3311 case 0xf8: /* clc */
3312 _regs.eflags &= ~EFLG_CF;
3313 break;
3315 case 0xf9: /* stc */
3316 _regs.eflags |= EFLG_CF;
3317 break;
3319 case 0xfa: /* cli */
3320 generate_exception_if(!mode_iopl(), EXC_GP, 0);
3321 _regs.eflags &= ~EFLG_IF;
3322 break;
3324 case 0xfb: /* sti */
3325 generate_exception_if(!mode_iopl(), EXC_GP, 0);
3326 if ( !(_regs.eflags & EFLG_IF) )
3328 _regs.eflags |= EFLG_IF;
3329 ctxt->retire.flags.sti = 1;
3331 break;
3333 case 0xfc: /* cld */
3334 _regs.eflags &= ~EFLG_DF;
3335 break;
3337 case 0xfd: /* std */
3338 _regs.eflags |= EFLG_DF;
3339 break;
3341 case 0xfe: /* Grp4 */
3342 generate_exception_if((modrm_reg & 7) >= 2, EXC_UD, -1);
3343 case 0xff: /* Grp5 */
3344 switch ( modrm_reg & 7 )
3346 case 0: /* inc */
3347 emulate_1op("inc", dst, _regs.eflags);
3348 break;
3349 case 1: /* dec */
3350 emulate_1op("dec", dst, _regs.eflags);
3351 break;
3352 case 2: /* call (near) */
3353 case 4: /* jmp (near) */
3354 if ( (dst.bytes != 8) && mode_64bit() )
3356 dst.bytes = op_bytes = 8;
3357 if ( dst.type == OP_REG )
3358 dst.val = *dst.reg;
3359 else if ( (rc = read_ulong(dst.mem.seg, dst.mem.off,
3360 &dst.val, 8, ctxt, ops)) != 0 )
3361 goto done;
3363 src.val = _regs.eip;
3364 _regs.eip = dst.val;
3365 if ( (modrm_reg & 7) == 2 )
3366 goto push; /* call */
3367 dst.type = OP_NONE;
3368 break;
3369 case 3: /* call (far, absolute indirect) */
3370 case 5: /* jmp (far, absolute indirect) */ {
3371 unsigned long sel;
3373 generate_exception_if(dst.type != OP_MEM, EXC_UD, -1);
3375 if ( (rc = read_ulong(dst.mem.seg, dst.mem.off+dst.bytes,
3376 &sel, 2, ctxt, ops)) )
3377 goto done;
3379 if ( (modrm_reg & 7) == 3 ) /* call */
3381 struct segment_register reg;
3382 fail_if(ops->read_segment == NULL);
3383 if ( (rc = ops->read_segment(x86_seg_cs, &reg, ctxt)) ||
3384 (rc = ops->write(x86_seg_ss, sp_pre_dec(op_bytes),
3385 &reg.sel, op_bytes, ctxt)) ||
3386 (rc = ops->write(x86_seg_ss, sp_pre_dec(op_bytes),
3387 &_regs.eip, op_bytes, ctxt)) )
3388 goto done;
3391 if ( (rc = load_seg(x86_seg_cs, sel, ctxt, ops)) != 0 )
3392 goto done;
3393 _regs.eip = dst.val;
3395 dst.type = OP_NONE;
3396 break;
3398 case 6: /* push */
3399 /* 64-bit mode: PUSH defaults to a 64-bit operand. */
3400 if ( mode_64bit() && (dst.bytes == 4) )
3402 dst.bytes = 8;
3403 if ( dst.type == OP_REG )
3404 dst.val = *dst.reg;
3405 else if ( (rc = read_ulong(dst.mem.seg, dst.mem.off,
3406 &dst.val, 8, ctxt, ops)) != 0 )
3407 goto done;
3409 if ( (rc = ops->write(x86_seg_ss, sp_pre_dec(dst.bytes),
3410 &dst.val, dst.bytes, ctxt)) != 0 )
3411 goto done;
3412 dst.type = OP_NONE;
3413 break;
3414 case 7:
3415 generate_exception_if(1, EXC_UD, -1);
3416 default:
3417 goto cannot_emulate;
3419 break;
3422 writeback:
3423 switch ( dst.type )
3425 case OP_REG:
3426 /* The 4-byte case *is* correct: in 64-bit mode we zero-extend. */
3427 switch ( dst.bytes )
3429 case 1: *(uint8_t *)dst.reg = (uint8_t)dst.val; break;
3430 case 2: *(uint16_t *)dst.reg = (uint16_t)dst.val; break;
3431 case 4: *dst.reg = (uint32_t)dst.val; break; /* 64b: zero-ext */
3432 case 8: *dst.reg = dst.val; break;
3434 break;
3435 case OP_MEM:
3436 if ( !(d & Mov) && (dst.orig_val == dst.val) &&
3437 !ctxt->force_writeback )
3438 /* nothing to do */;
3439 else if ( lock_prefix )
3440 rc = ops->cmpxchg(
3441 dst.mem.seg, dst.mem.off, &dst.orig_val,
3442 &dst.val, dst.bytes, ctxt);
3443 else
3444 rc = ops->write(
3445 dst.mem.seg, dst.mem.off, &dst.val, dst.bytes, ctxt);
3446 if ( rc != 0 )
3447 goto done;
3448 default:
3449 break;
3452 /* Inject #DB if single-step tracing was enabled at instruction start. */
3453 if ( (ctxt->regs->eflags & EFLG_TF) && (rc == X86EMUL_OKAY) &&
3454 (ops->inject_hw_exception != NULL) )
3455 rc = ops->inject_hw_exception(EXC_DB, -1, ctxt) ? : X86EMUL_EXCEPTION;
3457 /* Commit shadow register state. */
3458 _regs.eflags &= ~EFLG_RF;
3459 *ctxt->regs = _regs;
3461 done:
3462 return rc;
3464 twobyte_insn:
3465 switch ( b )
3467 case 0x01: /* Grp7 */ {
3468 struct segment_register reg;
3469 unsigned long base, limit, cr0, cr0w;
3471 if ( modrm == 0xdf ) /* invlpga */
3473 generate_exception_if(!in_protmode(ctxt, ops), EXC_UD, -1);
3474 generate_exception_if(!mode_ring0(), EXC_GP, 0);
3475 fail_if(ops->invlpg == NULL);
3476 if ( (rc = ops->invlpg(x86_seg_none, truncate_ea(_regs.eax),
3477 ctxt)) )
3478 goto done;
3479 break;
3482 switch ( modrm_reg & 7 )
3484 case 0: /* sgdt */
3485 case 1: /* sidt */
3486 generate_exception_if(ea.type != OP_MEM, EXC_UD, -1);
3487 fail_if(ops->read_segment == NULL);
3488 if ( (rc = ops->read_segment((modrm_reg & 1) ?
3489 x86_seg_idtr : x86_seg_gdtr,
3490 &reg, ctxt)) )
3491 goto done;
3492 if ( op_bytes == 2 )
3493 reg.base &= 0xffffff;
3494 if ( (rc = ops->write(ea.mem.seg, ea.mem.off+0,
3495 &reg.limit, 2, ctxt)) ||
3496 (rc = ops->write(ea.mem.seg, ea.mem.off+2,
3497 &reg.base, mode_64bit() ? 8 : 4, ctxt)) )
3498 goto done;
3499 break;
3500 case 2: /* lgdt */
3501 case 3: /* lidt */
3502 generate_exception_if(ea.type != OP_MEM, EXC_UD, -1);
3503 fail_if(ops->write_segment == NULL);
3504 memset(&reg, 0, sizeof(reg));
3505 if ( (rc = read_ulong(ea.mem.seg, ea.mem.off+0,
3506 &limit, 2, ctxt, ops)) ||
3507 (rc = read_ulong(ea.mem.seg, ea.mem.off+2,
3508 &base, mode_64bit() ? 8 : 4, ctxt, ops)) )
3509 goto done;
3510 reg.base = base;
3511 reg.limit = limit;
3512 if ( op_bytes == 2 )
3513 reg.base &= 0xffffff;
3514 if ( (rc = ops->write_segment((modrm_reg & 1) ?
3515 x86_seg_idtr : x86_seg_gdtr,
3516 &reg, ctxt)) )
3517 goto done;
3518 break;
3519 case 4: /* smsw */
3520 if ( ea.type == OP_MEM )
3521 ea.bytes = 2;
3522 dst = ea;
3523 fail_if(ops->read_cr == NULL);
3524 if ( (rc = ops->read_cr(0, &dst.val, ctxt)) )
3525 goto done;
3526 d |= Mov; /* force writeback */
3527 break;
3528 case 6: /* lmsw */
3529 fail_if(ops->read_cr == NULL);
3530 fail_if(ops->write_cr == NULL);
3531 if ( (rc = ops->read_cr(0, &cr0, ctxt)) )
3532 goto done;
3533 if ( ea.type == OP_REG )
3534 cr0w = *ea.reg;
3535 else if ( (rc = read_ulong(ea.mem.seg, ea.mem.off,
3536 &cr0w, 2, ctxt, ops)) )
3537 goto done;
3538 /* LMSW can: (1) set bits 0-3; (2) clear bits 1-3. */
3539 cr0 = (cr0 & ~0xe) | (cr0w & 0xf);
3540 if ( (rc = ops->write_cr(0, cr0, ctxt)) )
3541 goto done;
3542 break;
3543 case 7: /* invlpg */
3544 generate_exception_if(!mode_ring0(), EXC_GP, 0);
3545 generate_exception_if(ea.type != OP_MEM, EXC_UD, -1);
3546 fail_if(ops->invlpg == NULL);
3547 if ( (rc = ops->invlpg(ea.mem.seg, ea.mem.off, ctxt)) )
3548 goto done;
3549 break;
3550 default:
3551 goto cannot_emulate;
3553 break;
3556 case 0x06: /* clts */
3557 generate_exception_if(!mode_ring0(), EXC_GP, 0);
3558 fail_if((ops->read_cr == NULL) || (ops->write_cr == NULL));
3559 if ( (rc = ops->read_cr(0, &dst.val, ctxt)) ||
3560 (rc = ops->write_cr(0, dst.val&~8, ctxt)) )
3561 goto done;
3562 break;
3564 case 0x08: /* invd */
3565 case 0x09: /* wbinvd */
3566 generate_exception_if(!mode_ring0(), EXC_GP, 0);
3567 fail_if(ops->wbinvd == NULL);
3568 if ( (rc = ops->wbinvd(ctxt)) != 0 )
3569 goto done;
3570 break;
3572 case 0x0d: /* GrpP (prefetch) */
3573 case 0x18: /* Grp16 (prefetch/nop) */
3574 case 0x19 ... 0x1f: /* nop (amd-defined) */
3575 break;
3577 case 0x20: /* mov cr,reg */
3578 case 0x21: /* mov dr,reg */
3579 case 0x22: /* mov reg,cr */
3580 case 0x23: /* mov reg,dr */
3581 generate_exception_if(ea.type != OP_REG, EXC_UD, -1);
3582 generate_exception_if(!mode_ring0(), EXC_GP, 0);
3583 modrm_reg |= lock_prefix << 3;
3584 if ( b & 2 )
3586 /* Write to CR/DR. */
3587 src.val = *(unsigned long *)decode_register(modrm_rm, &_regs, 0);
3588 if ( !mode_64bit() )
3589 src.val = (uint32_t)src.val;
3590 rc = ((b & 1)
3591 ? (ops->write_dr
3592 ? ops->write_dr(modrm_reg, src.val, ctxt)
3593 : X86EMUL_UNHANDLEABLE)
3594 : (ops->write_cr
3595 ? ops->write_cr(modrm_reg, src.val, ctxt)
3596 : X86EMUL_UNHANDLEABLE));
3598 else
3600 /* Read from CR/DR. */
3601 dst.type = OP_REG;
3602 dst.bytes = mode_64bit() ? 8 : 4;
3603 dst.reg = decode_register(modrm_rm, &_regs, 0);
3604 rc = ((b & 1)
3605 ? (ops->read_dr
3606 ? ops->read_dr(modrm_reg, &dst.val, ctxt)
3607 : X86EMUL_UNHANDLEABLE)
3608 : (ops->read_cr
3609 ? ops->read_cr(modrm_reg, &dst.val, ctxt)
3610 : X86EMUL_UNHANDLEABLE));
3612 if ( rc != 0 )
3613 goto done;
3614 break;
3616 case 0x30: /* wrmsr */ {
3617 uint64_t val = ((uint64_t)_regs.edx << 32) | (uint32_t)_regs.eax;
3618 generate_exception_if(!mode_ring0(), EXC_GP, 0);
3619 fail_if(ops->write_msr == NULL);
3620 if ( (rc = ops->write_msr((uint32_t)_regs.ecx, val, ctxt)) != 0 )
3621 goto done;
3622 break;
3625 case 0x31: /* rdtsc */ {
3626 unsigned long cr4;
3627 uint64_t val;
3628 fail_if(ops->read_cr == NULL);
3629 if ( (rc = ops->read_cr(4, &cr4, ctxt)) )
3630 goto done;
3631 generate_exception_if((cr4 & CR4_TSD) && !mode_ring0(), EXC_GP, 0);
3632 fail_if(ops->read_msr == NULL);
3633 if ( (rc = ops->read_msr(MSR_TSC, &val, ctxt)) != 0 )
3634 goto done;
3635 _regs.edx = (uint32_t)(val >> 32);
3636 _regs.eax = (uint32_t)(val >> 0);
3637 break;
3640 case 0x32: /* rdmsr */ {
3641 uint64_t val;
3642 generate_exception_if(!mode_ring0(), EXC_GP, 0);
3643 fail_if(ops->read_msr == NULL);
3644 if ( (rc = ops->read_msr((uint32_t)_regs.ecx, &val, ctxt)) != 0 )
3645 goto done;
3646 _regs.edx = (uint32_t)(val >> 32);
3647 _regs.eax = (uint32_t)(val >> 0);
3648 break;
3651 case 0x40 ... 0x4f: /* cmovcc */
3652 dst.val = src.val;
3653 if ( !test_cc(b, _regs.eflags) )
3654 dst.type = OP_NONE;
3655 break;
3657 case 0x6f: /* movq mm/m64,mm */ {
3658 uint8_t stub[] = { 0x0f, 0x6f, modrm, 0xc3 };
3659 struct fpu_insn_ctxt fic = { .insn_bytes = sizeof(stub)-1 };
3660 uint64_t val;
3661 if ( ea.type == OP_MEM )
3663 unsigned long lval, hval;
3664 if ( (rc = read_ulong(ea.mem.seg, ea.mem.off+0,
3665 &lval, 4, ctxt, ops)) ||
3666 (rc = read_ulong(ea.mem.seg, ea.mem.off+4,
3667 &hval, 4, ctxt, ops)) )
3668 goto done;
3669 val = ((uint64_t)hval << 32) | (uint32_t)lval;
3670 stub[2] = modrm & 0x38; /* movq (%eax),%mmN */
3672 get_fpu(X86EMUL_FPU_mmx, &fic);
3673 asm volatile ( "call *%0" : : "r" (stub), "a" (&val) : "memory" );
3674 put_fpu(&fic);
3675 break;
3678 case 0x7f: /* movq mm,mm/m64 */ {
3679 uint8_t stub[] = { 0x0f, 0x7f, modrm, 0xc3 };
3680 struct fpu_insn_ctxt fic = { .insn_bytes = sizeof(stub)-1 };
3681 uint64_t val;
3682 if ( ea.type == OP_MEM )
3683 stub[2] = modrm & 0x38; /* movq %mmN,(%eax) */
3684 get_fpu(X86EMUL_FPU_mmx, &fic);
3685 asm volatile ( "call *%0" : : "r" (stub), "a" (&val) : "memory" );
3686 put_fpu(&fic);
3687 if ( ea.type == OP_MEM )
3689 unsigned long lval = (uint32_t)val, hval = (uint32_t)(val >> 32);
3690 if ( (rc = ops->write(ea.mem.seg, ea.mem.off+0, &lval, 4, ctxt)) ||
3691 (rc = ops->write(ea.mem.seg, ea.mem.off+4, &hval, 4, ctxt)) )
3692 goto done;
3694 break;
3697 case 0x80 ... 0x8f: /* jcc (near) */ {
3698 int rel = (((op_bytes == 2) && !mode_64bit())
3699 ? (int32_t)insn_fetch_type(int16_t)
3700 : insn_fetch_type(int32_t));
3701 if ( test_cc(b, _regs.eflags) )
3702 jmp_rel(rel);
3703 break;
3706 case 0x90 ... 0x9f: /* setcc */
3707 dst.val = test_cc(b, _regs.eflags);
3708 break;
3710 case 0xa0: /* push %%fs */
3711 src.val = x86_seg_fs;
3712 goto push_seg;
3714 case 0xa1: /* pop %%fs */
3715 src.val = x86_seg_fs;
3716 goto pop_seg;
3718 case 0xa2: /* cpuid */ {
3719 unsigned int eax = _regs.eax, ebx = _regs.ebx;
3720 unsigned int ecx = _regs.ecx, edx = _regs.edx;
3721 fail_if(ops->cpuid == NULL);
3722 if ( (rc = ops->cpuid(&eax, &ebx, &ecx, &edx, ctxt)) != 0 )
3723 goto done;
3724 _regs.eax = eax; _regs.ebx = ebx;
3725 _regs.ecx = ecx; _regs.edx = edx;
3726 break;
3729 case 0xa8: /* push %%gs */
3730 src.val = x86_seg_gs;
3731 goto push_seg;
3733 case 0xa9: /* pop %%gs */
3734 src.val = x86_seg_gs;
3735 goto pop_seg;
3737 case 0xb0 ... 0xb1: /* cmpxchg */
3738 /* Save real source value, then compare EAX against destination. */
3739 src.orig_val = src.val;
3740 src.val = _regs.eax;
3741 emulate_2op_SrcV("cmp", src, dst, _regs.eflags);
3742 if ( _regs.eflags & EFLG_ZF )
3744 /* Success: write back to memory. */
3745 dst.val = src.orig_val;
3747 else
3749 /* Failure: write the value we saw to EAX. */
3750 dst.type = OP_REG;
3751 dst.reg = (unsigned long *)&_regs.eax;
3753 break;
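CMPXCHG compares the accumulator with the destination: on equality the source operand is stored and ZF is set, otherwise the destination's value is loaded into the accumulator and ZF is cleared, which is exactly the two branches above. A non-atomic sketch (toy_cmpxchg() is an illustrative name):

    #include <stdint.h>

    /* Toy CMPXCHG (single width, non-atomic); returns the resulting ZF. */
    static int toy_cmpxchg(uint32_t *dst, uint32_t *eax, uint32_t src)
    {
        if ( *eax == *dst )
        {
            *dst = src;    /* success: store the source operand */
            return 1;      /* ZF = 1 */
        }
        *eax = *dst;       /* failure: destination value to accumulator */
        return 0;          /* ZF = 0 */
    }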
3755 case 0xa3: bt: /* bt */
3756 emulate_2op_SrcV_nobyte("bt", src, dst, _regs.eflags);
3757 dst.type = OP_NONE;
3758 break;
3760 case 0xa4: /* shld imm8,r,r/m */
3761 case 0xa5: /* shld %%cl,r,r/m */
3762 case 0xac: /* shrd imm8,r,r/m */
3763 case 0xad: /* shrd %%cl,r,r/m */ {
3764 uint8_t shift, width = dst.bytes << 3;
3765 shift = (b & 1) ? (uint8_t)_regs.ecx : insn_fetch_type(uint8_t);
3766 if ( (shift &= width - 1) == 0 )
3767 break;
3768 dst.orig_val = truncate_word(dst.val, dst.bytes);
3769 dst.val = ((shift == width) ? src.val :
3770 (b & 8) ?
3771 /* shrd */
3772 ((dst.orig_val >> shift) |
3773 truncate_word(src.val << (width - shift), dst.bytes)) :
3774 /* shld */
3775 ((dst.orig_val << shift) |
3776 ((src.val >> (width - shift)) & ((1ull << shift) - 1))));
3777 dst.val = truncate_word(dst.val, dst.bytes);
3778 _regs.eflags &= ~(EFLG_OF|EFLG_SF|EFLG_ZF|EFLG_PF|EFLG_CF);
3779 if ( (dst.val >> ((b & 8) ? (shift - 1) : (width - shift))) & 1 )
3780 _regs.eflags |= EFLG_CF;
3781 if ( ((dst.val ^ dst.orig_val) >> (width - 1)) & 1 )
3782 _regs.eflags |= EFLG_OF;
3783 _regs.eflags |= ((dst.val >> (width - 1)) & 1) ? EFLG_SF : 0;
3784 _regs.eflags |= (dst.val == 0) ? EFLG_ZF : 0;
3785 _regs.eflags |= even_parity(dst.val) ? EFLG_PF : 0;
3786 break;
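For 0 < shift < width, the double shift fills the bits vacated in the destination from the adjacent end of the source: SHLD computes (dst << s) | (src >> (width - s)) truncated to the operand width, and SHRD is the mirror image, which is what the expression above encodes for all three widths. A 16-bit SHLD sketch (toy_shld16() is an illustrative name):

    #include <stdint.h>

    /* Toy 16-bit SHLD for 0 < s < 16: vacated low bits come from the
     * top of the source operand. */
    static uint16_t toy_shld16(uint16_t dst, uint16_t src, unsigned int s)
    {
        s &= 15;
        if ( s == 0 )
            return dst;
        return (uint16_t)((dst << s) | (src >> (16 - s)));
    }
    /* toy_shld16(0x1234, 0xabcd, 4) == 0x234a */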
3789 case 0xb3: btr: /* btr */
3790 emulate_2op_SrcV_nobyte("btr", src, dst, _regs.eflags);
3791 break;
3793 case 0xab: bts: /* bts */
3794 emulate_2op_SrcV_nobyte("bts", src, dst, _regs.eflags);
3795 break;
3797 case 0xaf: /* imul */
3798 _regs.eflags &= ~(EFLG_OF|EFLG_CF);
3799 switch ( dst.bytes )
3801 case 2:
3802 dst.val = ((uint32_t)(int16_t)src.val *
3803 (uint32_t)(int16_t)dst.val);
3804 if ( (int16_t)dst.val != (uint32_t)dst.val )
3805 _regs.eflags |= EFLG_OF|EFLG_CF;
3806 break;
3807 #ifdef __x86_64__
3808 case 4:
3809 dst.val = ((uint64_t)(int32_t)src.val *
3810 (uint64_t)(int32_t)dst.val);
3811 if ( (int32_t)dst.val != dst.val )
3812 _regs.eflags |= EFLG_OF|EFLG_CF;
3813 break;
3814 #endif
3815 default: {
3816 unsigned long m[2] = { src.val, dst.val };
3817 if ( imul_dbl(m) )
3818 _regs.eflags |= EFLG_OF|EFLG_CF;
3819 dst.val = m[0];
3820 break;
3823 break;
3825 case 0xb2: /* lss */
3826 dst.val = x86_seg_ss;
3827 goto les;
3829 case 0xb4: /* lfs */
3830 dst.val = x86_seg_fs;
3831 goto les;
3833 case 0xb5: /* lgs */
3834 dst.val = x86_seg_gs;
3835 goto les;
3837 case 0xb6: /* movzx rm8,r{16,32,64} */
3838 /* Recompute DstReg as we may have decoded AH/BH/CH/DH. */
3839 dst.reg = decode_register(modrm_reg, &_regs, 0);
3840 dst.bytes = op_bytes;
3841 dst.val = (uint8_t)src.val;
3842 break;
3844 case 0xbc: /* bsf */ {
3845 int zf;
3846 asm ( "bsf %2,%0; setz %b1"
3847 : "=r" (dst.val), "=q" (zf)
3848 : "r" (src.val), "1" (0) );
3849 _regs.eflags &= ~EFLG_ZF;
3850 if ( zf )
3852 _regs.eflags |= EFLG_ZF;
3853 dst.type = OP_NONE;
3855 break;
3858 case 0xbd: /* bsr */ {
3859 int zf;
3860 asm ( "bsr %2,%0; setz %b1"
3861 : "=r" (dst.val), "=q" (zf)
3862 : "r" (src.val), "1" (0) );
3863 _regs.eflags &= ~EFLG_ZF;
3864 if ( zf )
3866 _regs.eflags |= EFLG_ZF;
3867 dst.type = OP_NONE;
3869 break;
3872 case 0xb7: /* movzx rm16,r{16,32,64} */
3873 dst.val = (uint16_t)src.val;
3874 break;
3876 case 0xbb: btc: /* btc */
3877 emulate_2op_SrcV_nobyte("btc", src, dst, _regs.eflags);
3878 break;
3880 case 0xba: /* Grp8 */
3881 switch ( modrm_reg & 7 )
3883 case 4: goto bt;
3884 case 5: goto bts;
3885 case 6: goto btr;
3886 case 7: goto btc;
3887 default: generate_exception_if(1, EXC_UD, -1);
3889 break;
3891 case 0xbe: /* movsx rm8,r{16,32,64} */
3892 /* Recompute DstReg as we may have decoded AH/BH/CH/DH. */
3893 dst.reg = decode_register(modrm_reg, &_regs, 0);
3894 dst.bytes = op_bytes;
3895 dst.val = (int8_t)src.val;
3896 break;
3898 case 0xbf: /* movsx rm16,r{16,32,64} */
3899 dst.val = (int16_t)src.val;
3900 break;
3902 case 0xc0 ... 0xc1: /* xadd */
3903 /* Write back the register source. */
3904 switch ( dst.bytes )
3906 case 1: *(uint8_t *)src.reg = (uint8_t)dst.val; break;
3907 case 2: *(uint16_t *)src.reg = (uint16_t)dst.val; break;
3908 case 4: *src.reg = (uint32_t)dst.val; break; /* 64b reg: zero-extend */
3909 case 8: *src.reg = dst.val; break;
3911 goto add;
3913 case 0xc7: /* Grp9 (cmpxchg8b/cmpxchg16b) */ {
3914 unsigned long old[2], exp[2], new[2];
3915 unsigned int i;
3917 generate_exception_if((modrm_reg & 7) != 1, EXC_UD, -1);
3918 generate_exception_if(ea.type != OP_MEM, EXC_UD, -1);
3919 op_bytes *= 2;
3921 /* Get actual old value. */
3922 for ( i = 0; i < (op_bytes/sizeof(long)); i++ )
3923 if ( (rc = read_ulong(ea.mem.seg, ea.mem.off + i*sizeof(long),
3924 &old[i], sizeof(long), ctxt, ops)) != 0 )
3925 goto done;
3927 /* Get expected and proposed values. */
3928 if ( op_bytes == 8 )
3930 ((uint32_t *)exp)[0] = _regs.eax; ((uint32_t *)exp)[1] = _regs.edx;
3931 ((uint32_t *)new)[0] = _regs.ebx; ((uint32_t *)new)[1] = _regs.ecx;
3933 else
3935 exp[0] = _regs.eax; exp[1] = _regs.edx;
3936 new[0] = _regs.ebx; new[1] = _regs.ecx;
3939 if ( memcmp(old, exp, op_bytes) )
3941 /* Expected != actual: store actual to rDX:rAX and clear ZF. */
3942 _regs.eax = (op_bytes == 8) ? ((uint32_t *)old)[0] : old[0];
3943 _regs.edx = (op_bytes == 8) ? ((uint32_t *)old)[1] : old[1];
3944 _regs.eflags &= ~EFLG_ZF;
3946 else
3948 /* Expected == actual: attempt atomic cmpxchg and set ZF. */
3949 if ( (rc = ops->cmpxchg(ea.mem.seg, ea.mem.off, old,
3950 new, op_bytes, ctxt)) != 0 )
3951 goto done;
3952 _regs.eflags |= EFLG_ZF;
3954 break;
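CMPXCHG8B compares EDX:EAX with the 8-byte memory operand: on match it stores ECX:EBX and sets ZF, otherwise it loads the memory value into EDX:EAX and clears ZF. The code above performs the comparison itself and delegates only the successful swap to the atomic ops->cmpxchg hook. A non-atomic sketch of the semantics (toy_cmpxchg8b() is an illustrative name):

    #include <stdint.h>

    /* Toy CMPXCHG8B (non-atomic); returns the resulting ZF. */
    static int toy_cmpxchg8b(uint64_t *mem, uint32_t *eax, uint32_t *edx,
                             uint32_t ebx, uint32_t ecx)
    {
        uint64_t expected = ((uint64_t)*edx << 32) | *eax;
        if ( *mem == expected )
        {
            *mem = ((uint64_t)ecx << 32) | ebx;   /* store ECX:EBX */
            return 1;
        }
        *eax = (uint32_t)*mem;                    /* reload EDX:EAX */
        *edx = (uint32_t)(*mem >> 32);
        return 0;
    }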
3957 case 0xc8 ... 0xcf: /* bswap */
3958 dst.type = OP_REG;
3959 dst.reg = decode_register(
3960 (b & 7) | ((rex_prefix & 1) << 3), &_regs, 0);
3961 switch ( dst.bytes = op_bytes )
3963 default: /* case 2: */
3964 /* Undefined behaviour. Writes zero on all tested CPUs. */
3965 dst.val = 0;
3966 break;
3967 case 4:
3968 #ifdef __x86_64__
3969 asm ( "bswap %k0" : "=r" (dst.val) : "0" (*dst.reg) );
3970 break;
3971 case 8:
3972 #endif
3973 asm ( "bswap %0" : "=r" (dst.val) : "0" (*dst.reg) );
3974 break;
3976 break;
3978 goto writeback;
3980 cannot_emulate:
3981 return X86EMUL_UNHANDLEABLE;