xen/arch/x86/x86_emulate/x86_emulate.c @ 18664:c4be040bef6f

x86: add movnti emulation

Linux added the use of movnti for copying from user to kernel space in
certain cases, and per reports we have received, this may happen with the
destination being in MMIO.

Signed-off-by: Jan Beulich <jbeulich@novell.com>
Author: Keir Fraser <keir.fraser@citrix.com>
Date:   Mon Oct 20 15:22:58 2008 +0100
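The change itself is visible in twobyte_table[] below: opcode 0F C3 (movnti)
is tagged DstMem|SrcReg|ModRM|Mov, i.e. a register-to-memory move whose
destination is written but never read. Illustratively, "movnti %eax,(%rdi)"
encodes as 0f c3 07; under emulation the non-temporal hint needs no special
handling, so the instruction can be written back like an ordinary mov to
memory.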
/******************************************************************************
 * x86_emulate.c
 *
 * Generic x86 (32-bit and 64-bit) instruction decoder and emulator.
 *
 * Copyright (c) 2005-2007 Keir Fraser
 * Copyright (c) 2005-2007 XenSource Inc.
 *
 * This program is free software; you can redistribute it and/or modify
 * it under the terms of the GNU General Public License as published by
 * the Free Software Foundation; either version 2 of the License, or
 * (at your option) any later version.
 *
 * This program is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
 * GNU General Public License for more details.
 *
 * You should have received a copy of the GNU General Public License
 * along with this program; if not, write to the Free Software
 * Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
 */

/* Operand sizes: 8-bit operands or specified/overridden size. */
#define ByteOp      (1<<0) /* 8-bit operands. */
/* Destination operand type. */
#define DstNone     (0<<1) /* No destination operand. */
#define DstImplicit (0<<1) /* Destination operand is implicit in the opcode. */
#define DstBitBase  (1<<1) /* Memory operand, bit string. */
#define DstReg      (2<<1) /* Register operand. */
#define DstMem      (3<<1) /* Memory operand. */
#define DstMask     (3<<1)
/* Source operand type. */
#define SrcInvalid  (0<<3) /* Unimplemented opcode. */
#define SrcNone     (1<<3) /* No source operand. */
#define SrcImplicit (1<<3) /* Source operand is implicit in the opcode. */
#define SrcReg      (2<<3) /* Register operand. */
#define SrcMem      (3<<3) /* Memory operand. */
#define SrcMem16    (4<<3) /* Memory operand (16-bit). */
#define SrcImm      (5<<3) /* Immediate operand. */
#define SrcImmByte  (6<<3) /* 8-bit sign-extended immediate operand. */
#define SrcMask     (7<<3)
/* Generic ModRM decode. */
#define ModRM       (1<<6)
/* Destination is only written; never read. */
#define Mov         (1<<7)
/* All operands are implicit in the opcode. */
#define ImplicitOps (DstImplicit|SrcImplicit)

static uint8_t opcode_table[256] = {
    /* 0x00 - 0x07 */
    ByteOp|DstMem|SrcReg|ModRM, DstMem|SrcReg|ModRM,
    ByteOp|DstReg|SrcMem|ModRM, DstReg|SrcMem|ModRM,
    ByteOp|DstReg|SrcImm, DstReg|SrcImm, ImplicitOps, ImplicitOps,
    /* 0x08 - 0x0F */
    ByteOp|DstMem|SrcReg|ModRM, DstMem|SrcReg|ModRM,
    ByteOp|DstReg|SrcMem|ModRM, DstReg|SrcMem|ModRM,
    ByteOp|DstReg|SrcImm, DstReg|SrcImm, ImplicitOps, 0,
    /* 0x10 - 0x17 */
    ByteOp|DstMem|SrcReg|ModRM, DstMem|SrcReg|ModRM,
    ByteOp|DstReg|SrcMem|ModRM, DstReg|SrcMem|ModRM,
    ByteOp|DstReg|SrcImm, DstReg|SrcImm, ImplicitOps, ImplicitOps,
    /* 0x18 - 0x1F */
    ByteOp|DstMem|SrcReg|ModRM, DstMem|SrcReg|ModRM,
    ByteOp|DstReg|SrcMem|ModRM, DstReg|SrcMem|ModRM,
    ByteOp|DstReg|SrcImm, DstReg|SrcImm, ImplicitOps, ImplicitOps,
    /* 0x20 - 0x27 */
    ByteOp|DstMem|SrcReg|ModRM, DstMem|SrcReg|ModRM,
    ByteOp|DstReg|SrcMem|ModRM, DstReg|SrcMem|ModRM,
    ByteOp|DstReg|SrcImm, DstReg|SrcImm, 0, ImplicitOps,
    /* 0x28 - 0x2F */
    ByteOp|DstMem|SrcReg|ModRM, DstMem|SrcReg|ModRM,
    ByteOp|DstReg|SrcMem|ModRM, DstReg|SrcMem|ModRM,
    ByteOp|DstReg|SrcImm, DstReg|SrcImm, 0, ImplicitOps,
    /* 0x30 - 0x37 */
    ByteOp|DstMem|SrcReg|ModRM, DstMem|SrcReg|ModRM,
    ByteOp|DstReg|SrcMem|ModRM, DstReg|SrcMem|ModRM,
    ByteOp|DstReg|SrcImm, DstReg|SrcImm, 0, ImplicitOps,
    /* 0x38 - 0x3F */
    ByteOp|DstMem|SrcReg|ModRM, DstMem|SrcReg|ModRM,
    ByteOp|DstReg|SrcMem|ModRM, DstReg|SrcMem|ModRM,
    ByteOp|DstReg|SrcImm, DstReg|SrcImm, 0, ImplicitOps,
    /* 0x40 - 0x4F */
    ImplicitOps, ImplicitOps, ImplicitOps, ImplicitOps,
    ImplicitOps, ImplicitOps, ImplicitOps, ImplicitOps,
    ImplicitOps, ImplicitOps, ImplicitOps, ImplicitOps,
    ImplicitOps, ImplicitOps, ImplicitOps, ImplicitOps,
    /* 0x50 - 0x5F */
    ImplicitOps|Mov, ImplicitOps|Mov, ImplicitOps|Mov, ImplicitOps|Mov,
    ImplicitOps|Mov, ImplicitOps|Mov, ImplicitOps|Mov, ImplicitOps|Mov,
    ImplicitOps|Mov, ImplicitOps|Mov, ImplicitOps|Mov, ImplicitOps|Mov,
    ImplicitOps|Mov, ImplicitOps|Mov, ImplicitOps|Mov, ImplicitOps|Mov,
    /* 0x60 - 0x67 */
    ImplicitOps, ImplicitOps, DstReg|SrcMem|ModRM, DstReg|SrcMem16|ModRM|Mov,
    0, 0, 0, 0,
    /* 0x68 - 0x6F */
    ImplicitOps|Mov, DstReg|SrcImm|ModRM|Mov,
    ImplicitOps|Mov, DstReg|SrcImmByte|ModRM|Mov,
    ImplicitOps|Mov, ImplicitOps|Mov, ImplicitOps|Mov, ImplicitOps|Mov,
    /* 0x70 - 0x77 */
    ImplicitOps, ImplicitOps, ImplicitOps, ImplicitOps,
    ImplicitOps, ImplicitOps, ImplicitOps, ImplicitOps,
    /* 0x78 - 0x7F */
    ImplicitOps, ImplicitOps, ImplicitOps, ImplicitOps,
    ImplicitOps, ImplicitOps, ImplicitOps, ImplicitOps,
    /* 0x80 - 0x87 */
    ByteOp|DstMem|SrcImm|ModRM, DstMem|SrcImm|ModRM,
    ByteOp|DstMem|SrcImm|ModRM, DstMem|SrcImmByte|ModRM,
    ByteOp|DstMem|SrcReg|ModRM, DstMem|SrcReg|ModRM,
    ByteOp|DstMem|SrcReg|ModRM, DstMem|SrcReg|ModRM,
    /* 0x88 - 0x8F */
    ByteOp|DstMem|SrcReg|ModRM|Mov, DstMem|SrcReg|ModRM|Mov,
    ByteOp|DstReg|SrcMem|ModRM|Mov, DstReg|SrcMem|ModRM|Mov,
    DstMem|SrcReg|ModRM|Mov, DstReg|SrcNone|ModRM,
    DstReg|SrcMem|ModRM|Mov, DstMem|SrcNone|ModRM|Mov,
    /* 0x90 - 0x97 */
    ImplicitOps, ImplicitOps, ImplicitOps, ImplicitOps,
    ImplicitOps, ImplicitOps, ImplicitOps, ImplicitOps,
    /* 0x98 - 0x9F */
    ImplicitOps, ImplicitOps, ImplicitOps, ImplicitOps,
    ImplicitOps, ImplicitOps, ImplicitOps, ImplicitOps,
    /* 0xA0 - 0xA7 */
    ByteOp|ImplicitOps|Mov, ImplicitOps|Mov,
    ByteOp|ImplicitOps|Mov, ImplicitOps|Mov,
    ByteOp|ImplicitOps|Mov, ImplicitOps|Mov,
    ByteOp|ImplicitOps, ImplicitOps,
    /* 0xA8 - 0xAF */
    ByteOp|DstReg|SrcImm, DstReg|SrcImm,
    ByteOp|ImplicitOps|Mov, ImplicitOps|Mov,
    ByteOp|ImplicitOps|Mov, ImplicitOps|Mov,
    ByteOp|ImplicitOps, ImplicitOps,
    /* 0xB0 - 0xB7 */
    ByteOp|DstReg|SrcImm|Mov, ByteOp|DstReg|SrcImm|Mov,
    ByteOp|DstReg|SrcImm|Mov, ByteOp|DstReg|SrcImm|Mov,
    ByteOp|DstReg|SrcImm|Mov, ByteOp|DstReg|SrcImm|Mov,
    ByteOp|DstReg|SrcImm|Mov, ByteOp|DstReg|SrcImm|Mov,
    /* 0xB8 - 0xBF */
    DstReg|SrcImm|Mov, DstReg|SrcImm|Mov, DstReg|SrcImm|Mov, DstReg|SrcImm|Mov,
    DstReg|SrcImm|Mov, DstReg|SrcImm|Mov, DstReg|SrcImm|Mov, DstReg|SrcImm|Mov,
    /* 0xC0 - 0xC7 */
    ByteOp|DstMem|SrcImm|ModRM, DstMem|SrcImmByte|ModRM,
    ImplicitOps, ImplicitOps,
    DstReg|SrcMem|ModRM|Mov, DstReg|SrcMem|ModRM|Mov,
    ByteOp|DstMem|SrcImm|ModRM|Mov, DstMem|SrcImm|ModRM|Mov,
    /* 0xC8 - 0xCF */
    ImplicitOps, ImplicitOps, ImplicitOps, ImplicitOps,
    ImplicitOps, ImplicitOps, ImplicitOps, ImplicitOps,
    /* 0xD0 - 0xD7 */
    ByteOp|DstMem|SrcImplicit|ModRM, DstMem|SrcImplicit|ModRM,
    ByteOp|DstMem|SrcImplicit|ModRM, DstMem|SrcImplicit|ModRM,
    ImplicitOps, ImplicitOps, ImplicitOps, ImplicitOps,
    /* 0xD8 - 0xDF */
    ImplicitOps|ModRM|Mov, ImplicitOps|ModRM|Mov,
    ImplicitOps|ModRM|Mov, ImplicitOps|ModRM|Mov,
    ImplicitOps|ModRM|Mov, ImplicitOps|ModRM|Mov,
    ImplicitOps|ModRM|Mov, ImplicitOps|ModRM|Mov,
    /* 0xE0 - 0xE7 */
    ImplicitOps, ImplicitOps, ImplicitOps, ImplicitOps,
    ImplicitOps, ImplicitOps, ImplicitOps, ImplicitOps,
    /* 0xE8 - 0xEF */
    ImplicitOps, ImplicitOps, ImplicitOps, ImplicitOps,
    ImplicitOps, ImplicitOps, ImplicitOps, ImplicitOps,
    /* 0xF0 - 0xF7 */
    0, ImplicitOps, 0, 0,
    ImplicitOps, ImplicitOps,
    ByteOp|DstMem|SrcNone|ModRM, DstMem|SrcNone|ModRM,
    /* 0xF8 - 0xFF */
    ImplicitOps, ImplicitOps, ImplicitOps, ImplicitOps,
    ImplicitOps, ImplicitOps, ByteOp|DstMem|SrcNone|ModRM, DstMem|SrcNone|ModRM
};

static uint8_t twobyte_table[256] = {
    /* 0x00 - 0x07 */
    0, ImplicitOps|ModRM, 0, 0, 0, 0, ImplicitOps, 0,
    /* 0x08 - 0x0F */
    ImplicitOps, ImplicitOps, 0, 0, 0, ImplicitOps|ModRM, 0, 0,
    /* 0x10 - 0x17 */
    0, 0, 0, 0, 0, 0, 0, 0,
    /* 0x18 - 0x1F */
    ImplicitOps|ModRM, ImplicitOps|ModRM, ImplicitOps|ModRM, ImplicitOps|ModRM,
    ImplicitOps|ModRM, ImplicitOps|ModRM, ImplicitOps|ModRM, ImplicitOps|ModRM,
    /* 0x20 - 0x27 */
    ImplicitOps|ModRM, ImplicitOps|ModRM, ImplicitOps|ModRM, ImplicitOps|ModRM,
    0, 0, 0, 0,
    /* 0x28 - 0x2F */
    0, 0, 0, 0, 0, 0, 0, 0,
    /* 0x30 - 0x37 */
    ImplicitOps, ImplicitOps, ImplicitOps, 0, 0, 0, 0, 0,
    /* 0x38 - 0x3F */
    0, 0, 0, 0, 0, 0, 0, 0,
    /* 0x40 - 0x47 */
    DstReg|SrcMem|ModRM|Mov, DstReg|SrcMem|ModRM|Mov,
    DstReg|SrcMem|ModRM|Mov, DstReg|SrcMem|ModRM|Mov,
    DstReg|SrcMem|ModRM|Mov, DstReg|SrcMem|ModRM|Mov,
    DstReg|SrcMem|ModRM|Mov, DstReg|SrcMem|ModRM|Mov,
    /* 0x48 - 0x4F */
    DstReg|SrcMem|ModRM|Mov, DstReg|SrcMem|ModRM|Mov,
    DstReg|SrcMem|ModRM|Mov, DstReg|SrcMem|ModRM|Mov,
    DstReg|SrcMem|ModRM|Mov, DstReg|SrcMem|ModRM|Mov,
    DstReg|SrcMem|ModRM|Mov, DstReg|SrcMem|ModRM|Mov,
    /* 0x50 - 0x5F */
    0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
    /* 0x60 - 0x6F */
    0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, ImplicitOps|ModRM,
    /* 0x70 - 0x7F */
    0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, ImplicitOps|ModRM,
    /* 0x80 - 0x87 */
    ImplicitOps, ImplicitOps, ImplicitOps, ImplicitOps,
    ImplicitOps, ImplicitOps, ImplicitOps, ImplicitOps,
    /* 0x88 - 0x8F */
    ImplicitOps, ImplicitOps, ImplicitOps, ImplicitOps,
    ImplicitOps, ImplicitOps, ImplicitOps, ImplicitOps,
    /* 0x90 - 0x97 */
    ByteOp|DstMem|SrcNone|ModRM|Mov, ByteOp|DstMem|SrcNone|ModRM|Mov,
    ByteOp|DstMem|SrcNone|ModRM|Mov, ByteOp|DstMem|SrcNone|ModRM|Mov,
    ByteOp|DstMem|SrcNone|ModRM|Mov, ByteOp|DstMem|SrcNone|ModRM|Mov,
    ByteOp|DstMem|SrcNone|ModRM|Mov, ByteOp|DstMem|SrcNone|ModRM|Mov,
    /* 0x98 - 0x9F */
    ByteOp|DstMem|SrcNone|ModRM|Mov, ByteOp|DstMem|SrcNone|ModRM|Mov,
    ByteOp|DstMem|SrcNone|ModRM|Mov, ByteOp|DstMem|SrcNone|ModRM|Mov,
    ByteOp|DstMem|SrcNone|ModRM|Mov, ByteOp|DstMem|SrcNone|ModRM|Mov,
    ByteOp|DstMem|SrcNone|ModRM|Mov, ByteOp|DstMem|SrcNone|ModRM|Mov,
    /* 0xA0 - 0xA7 */
    ImplicitOps, ImplicitOps, ImplicitOps, DstBitBase|SrcReg|ModRM,
    DstMem|SrcReg|ModRM, DstMem|SrcReg|ModRM, 0, 0,
    /* 0xA8 - 0xAF */
    ImplicitOps, ImplicitOps, 0, DstBitBase|SrcReg|ModRM,
    DstMem|SrcReg|ModRM, DstMem|SrcReg|ModRM, 0, DstReg|SrcMem|ModRM,
    /* 0xB0 - 0xB7 */
    ByteOp|DstMem|SrcReg|ModRM, DstMem|SrcReg|ModRM,
    DstReg|SrcMem|ModRM|Mov, DstBitBase|SrcReg|ModRM,
    DstReg|SrcMem|ModRM|Mov, DstReg|SrcMem|ModRM|Mov,
    ByteOp|DstReg|SrcMem|ModRM|Mov, DstReg|SrcMem16|ModRM|Mov,
    /* 0xB8 - 0xBF */
    0, 0, DstBitBase|SrcImmByte|ModRM, DstBitBase|SrcReg|ModRM,
    DstReg|SrcMem|ModRM, DstReg|SrcMem|ModRM,
    ByteOp|DstReg|SrcMem|ModRM|Mov, DstReg|SrcMem16|ModRM|Mov,
    /* 0xC0 - 0xC7 */
    ByteOp|DstMem|SrcReg|ModRM, DstMem|SrcReg|ModRM,
    0, DstMem|SrcReg|ModRM|Mov,
    0, 0, 0, ImplicitOps|ModRM,
    /* 0xC8 - 0xCF */
    ImplicitOps, ImplicitOps, ImplicitOps, ImplicitOps,
    ImplicitOps, ImplicitOps, ImplicitOps, ImplicitOps,
    /* 0xD0 - 0xDF */
    0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
    /* 0xE0 - 0xEF */
    0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
    /* 0xF0 - 0xFF */
    0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0
};

/* Type, address-of, and value of an instruction's operand. */
struct operand {
    enum { OP_REG, OP_MEM, OP_IMM, OP_NONE } type;
    unsigned int bytes;

    /* Up to 128-bit operand value, addressable as ulong or uint32_t[]. */
    union {
        unsigned long val;
        uint32_t bigval[4];
    };

    /* Up to 128-bit operand value, addressable as ulong or uint32_t[]. */
    union {
        unsigned long orig_val;
        uint32_t orig_bigval[4];
    };

    union {
        /* OP_REG: Pointer to register field. */
        unsigned long *reg;
        /* OP_MEM: Segment and offset. */
        struct {
            enum x86_segment seg;
            unsigned long off;
        } mem;
    };
};

/* MSRs. */
#define MSR_TSC   0x10

/* Control register flags. */
#define CR0_PE    (1<<0)
#define CR4_TSD   (1<<2)

/* EFLAGS bit definitions. */
#define EFLG_VIP  (1<<20)
#define EFLG_VIF  (1<<19)
#define EFLG_AC   (1<<18)
#define EFLG_VM   (1<<17)
#define EFLG_RF   (1<<16)
#define EFLG_NT   (1<<14)
#define EFLG_IOPL (3<<12)
#define EFLG_OF   (1<<11)
#define EFLG_DF   (1<<10)
#define EFLG_IF   (1<<9)
#define EFLG_TF   (1<<8)
#define EFLG_SF   (1<<7)
#define EFLG_ZF   (1<<6)
#define EFLG_AF   (1<<4)
#define EFLG_PF   (1<<2)
#define EFLG_CF   (1<<0)

/* Exception definitions. */
#define EXC_DE  0
#define EXC_DB  1
#define EXC_BP  3
#define EXC_OF  4
#define EXC_BR  5
#define EXC_UD  6
#define EXC_TS 10
#define EXC_NP 11
#define EXC_SS 12
#define EXC_GP 13
#define EXC_PF 14
#define EXC_MF 16

/*
 * Instruction emulation:
 * Most instructions are emulated directly via a fragment of inline assembly
 * code. This allows us to save/restore EFLAGS and thus very easily pick up
 * any modified flags.
 */

#if defined(__x86_64__)
#define _LO32 "k"          /* force 32-bit operand */
#define _STK  "%%rsp"      /* stack pointer */
#define _BYTES_PER_LONG "8"
#elif defined(__i386__)
#define _LO32 ""           /* force 32-bit operand */
#define _STK  "%%esp"      /* stack pointer */
#define _BYTES_PER_LONG "4"
#endif

/*
 * These EFLAGS bits are restored from saved value during emulation, and
 * any changes are written back to the saved value after emulation.
 */
#define EFLAGS_MASK (EFLG_OF|EFLG_SF|EFLG_ZF|EFLG_AF|EFLG_PF|EFLG_CF)

/* Before executing instruction: restore necessary bits in EFLAGS. */
#define _PRE_EFLAGS(_sav, _msk, _tmp)                           \
/* EFLAGS = (_sav & _msk) | (EFLAGS & ~_msk); _sav &= ~_msk; */ \
"movl %"_sav",%"_LO32 _tmp"; "                                  \
"push %"_tmp"; "                                                \
"push %"_tmp"; "                                                \
"movl %"_msk",%"_LO32 _tmp"; "                                  \
"andl %"_LO32 _tmp",("_STK"); "                                 \
"pushf; "                                                       \
"notl %"_LO32 _tmp"; "                                          \
"andl %"_LO32 _tmp",("_STK"); "                                 \
"andl %"_LO32 _tmp",2*"_BYTES_PER_LONG"("_STK"); "              \
"pop %"_tmp"; "                                                 \
"orl %"_LO32 _tmp",("_STK"); "                                  \
"popf; "                                                        \
"pop %"_sav"; "

/* After executing instruction: write-back necessary bits in EFLAGS. */
#define _POST_EFLAGS(_sav, _msk, _tmp) \
/* _sav |= EFLAGS & _msk; */           \
"pushf; "                              \
"pop %"_tmp"; "                        \
"andl %"_msk",%"_LO32 _tmp"; "         \
"orl %"_LO32 _tmp",%"_sav"; "

/* Raw emulation: instruction has two explicit operands. */
#define __emulate_2op_nobyte(_op,_src,_dst,_eflags,_wx,_wy,_lx,_ly,_qx,_qy)\
do{ unsigned long _tmp;                                                    \
    switch ( (_dst).bytes )                                                \
    {                                                                      \
    case 2:                                                                \
        asm volatile (                                                     \
            _PRE_EFLAGS("0","4","2")                                       \
            _op"w %"_wx"3,%1; "                                            \
            _POST_EFLAGS("0","4","2")                                      \
            : "=m" (_eflags), "=m" ((_dst).val), "=&r" (_tmp)              \
            : _wy ((_src).val), "i" (EFLAGS_MASK),                         \
              "m" (_eflags), "m" ((_dst).val) );                           \
        break;                                                             \
    case 4:                                                                \
        asm volatile (                                                     \
            _PRE_EFLAGS("0","4","2")                                       \
            _op"l %"_lx"3,%1; "                                            \
            _POST_EFLAGS("0","4","2")                                      \
            : "=m" (_eflags), "=m" ((_dst).val), "=&r" (_tmp)              \
            : _ly ((_src).val), "i" (EFLAGS_MASK),                         \
              "m" (_eflags), "m" ((_dst).val) );                           \
        break;                                                             \
    case 8:                                                                \
        __emulate_2op_8byte(_op, _src, _dst, _eflags, _qx, _qy);           \
        break;                                                             \
    }                                                                      \
} while (0)
#define __emulate_2op(_op,_src,_dst,_eflags,_bx,_by,_wx,_wy,_lx,_ly,_qx,_qy)\
do{ unsigned long _tmp;                                                    \
    switch ( (_dst).bytes )                                                \
    {                                                                      \
    case 1:                                                                \
        asm volatile (                                                     \
            _PRE_EFLAGS("0","4","2")                                       \
            _op"b %"_bx"3,%1; "                                            \
            _POST_EFLAGS("0","4","2")                                      \
            : "=m" (_eflags), "=m" ((_dst).val), "=&r" (_tmp)              \
            : _by ((_src).val), "i" (EFLAGS_MASK),                         \
              "m" (_eflags), "m" ((_dst).val) );                           \
        break;                                                             \
    default:                                                               \
        __emulate_2op_nobyte(_op,_src,_dst,_eflags,_wx,_wy,_lx,_ly,_qx,_qy);\
        break;                                                             \
    }                                                                      \
} while (0)
/* Source operand is byte-sized and may be restricted to just %cl. */
#define emulate_2op_SrcB(_op, _src, _dst, _eflags) \
    __emulate_2op(_op, _src, _dst, _eflags,        \
                  "b", "c", "b", "c", "b", "c", "b", "c")
/* Source operand is byte, word, long or quad sized. */
#define emulate_2op_SrcV(_op, _src, _dst, _eflags) \
    __emulate_2op(_op, _src, _dst, _eflags,        \
                  "b", "q", "w", "r", _LO32, "r", "", "r")
/* Source operand is word, long or quad sized. */
#define emulate_2op_SrcV_nobyte(_op, _src, _dst, _eflags) \
    __emulate_2op_nobyte(_op, _src, _dst, _eflags,        \
                         "w", "r", _LO32, "r", "", "r")

/* Instruction has only one explicit operand (no source operand). */
#define emulate_1op(_op,_dst,_eflags)                                      \
do{ unsigned long _tmp;                                                    \
    switch ( (_dst).bytes )                                                \
    {                                                                      \
    case 1:                                                                \
        asm volatile (                                                     \
            _PRE_EFLAGS("0","3","2")                                       \
            _op"b %1; "                                                    \
            _POST_EFLAGS("0","3","2")                                      \
            : "=m" (_eflags), "=m" ((_dst).val), "=&r" (_tmp)              \
            : "i" (EFLAGS_MASK), "m" (_eflags), "m" ((_dst).val) );        \
        break;                                                             \
    case 2:                                                                \
        asm volatile (                                                     \
            _PRE_EFLAGS("0","3","2")                                       \
            _op"w %1; "                                                    \
            _POST_EFLAGS("0","3","2")                                      \
            : "=m" (_eflags), "=m" ((_dst).val), "=&r" (_tmp)              \
            : "i" (EFLAGS_MASK), "m" (_eflags), "m" ((_dst).val) );        \
        break;                                                             \
    case 4:                                                                \
        asm volatile (                                                     \
            _PRE_EFLAGS("0","3","2")                                       \
            _op"l %1; "                                                    \
            _POST_EFLAGS("0","3","2")                                      \
            : "=m" (_eflags), "=m" ((_dst).val), "=&r" (_tmp)              \
            : "i" (EFLAGS_MASK), "m" (_eflags), "m" ((_dst).val) );        \
        break;                                                             \
    case 8:                                                                \
        __emulate_1op_8byte(_op, _dst, _eflags);                           \
        break;                                                             \
    }                                                                      \
} while (0)

/* Emulate an instruction with quadword operands (x86/64 only). */
#if defined(__x86_64__)
#define __emulate_2op_8byte(_op, _src, _dst, _eflags, _qx, _qy) \
do{ asm volatile (                                              \
        _PRE_EFLAGS("0","4","2")                                \
        _op"q %"_qx"3,%1; "                                     \
        _POST_EFLAGS("0","4","2")                               \
        : "=m" (_eflags), "=m" ((_dst).val), "=&r" (_tmp)       \
        : _qy ((_src).val), "i" (EFLAGS_MASK),                  \
          "m" (_eflags), "m" ((_dst).val) );                    \
} while (0)
#define __emulate_1op_8byte(_op, _dst, _eflags)                 \
do{ asm volatile (                                              \
        _PRE_EFLAGS("0","3","2")                                \
        _op"q %1; "                                             \
        _POST_EFLAGS("0","3","2")                               \
        : "=m" (_eflags), "=m" ((_dst).val), "=&r" (_tmp)       \
        : "i" (EFLAGS_MASK), "m" (_eflags), "m" ((_dst).val) ); \
} while (0)
#elif defined(__i386__)
#define __emulate_2op_8byte(_op, _src, _dst, _eflags, _qx, _qy)
#define __emulate_1op_8byte(_op, _dst, _eflags)
#endif /* __i386__ */

/* Fetch next part of the instruction being emulated. */
#define insn_fetch_bytes(_size)                                         \
({ unsigned long _x = 0, _eip = _regs.eip;                              \
   if ( !mode_64bit() ) _eip = (uint32_t)_eip; /* ignore upper dword */ \
   _regs.eip += (_size); /* real hardware doesn't truncate */           \
   generate_exception_if((uint8_t)(_regs.eip - ctxt->regs->eip) > 15,   \
                         EXC_GP, 0);                                    \
   rc = ops->insn_fetch(x86_seg_cs, _eip, &_x, (_size), ctxt);          \
   if ( rc ) goto done;                                                 \
   _x;                                                                  \
})
#define insn_fetch_type(_type) ((_type)insn_fetch_bytes(sizeof(_type)))

#define truncate_word(ea, byte_width)           \
({  unsigned long __ea = (ea);                  \
    unsigned int _width = (byte_width);         \
    ((_width == sizeof(unsigned long)) ? __ea : \
     (__ea & ((1UL << (_width << 3)) - 1)));    \
})
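/* Illustrative: truncate_word(0x12345678UL, 2) == 0x5678; a byte_width equal
 * to sizeof(unsigned long) leaves the value unchanged. */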
#define truncate_ea(ea) truncate_word((ea), ad_bytes)

#define mode_64bit() (def_ad_bytes == 8)

#define fail_if(p)                                  \
do {                                                \
    rc = (p) ? X86EMUL_UNHANDLEABLE : X86EMUL_OKAY; \
    if ( rc ) goto done;                            \
} while (0)

#define generate_exception_if(p, e, ec)                                   \
({  if ( (p) ) {                                                          \
        fail_if(ops->inject_hw_exception == NULL);                        \
        rc = ops->inject_hw_exception(e, ec, ctxt) ? : X86EMUL_EXCEPTION; \
        goto done;                                                        \
    }                                                                     \
})

/*
 * Given byte has even parity (even number of 1s)? SDM Vol. 1 Sec. 3.4.3.1,
 * "Status Flags": EFLAGS.PF reflects parity of least-sig. byte of result only.
 */
static int even_parity(uint8_t v)
{
    asm ( "test %b0,%b0; setp %b0" : "=a" (v) : "0" (v) );
    return v;
}
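/* Illustrative: even_parity(0x03) == 1 (two bits set);
 * even_parity(0x07) == 0 (three bits set). */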

/* Update address held in a register, based on addressing mode. */
#define _register_address_increment(reg, inc, byte_width)              \
do {                                                                   \
    int _inc = (inc); /* signed type ensures sign extension to long */ \
    unsigned int _width = (byte_width);                                \
    if ( _width == sizeof(unsigned long) )                             \
        (reg) += _inc;                                                 \
    else if ( mode_64bit() )                                           \
        (reg) = ((reg) + _inc) & ((1UL << (_width << 3)) - 1);         \
    else                                                               \
        (reg) = ((reg) & ~((1UL << (_width << 3)) - 1)) |              \
                (((reg) + _inc) & ((1UL << (_width << 3)) - 1));       \
} while (0)
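/* Illustrative: with byte_width == 2 and reg == 0x1234ffff, an increment of
 * 1 outside 64-bit mode wraps only the low word, giving 0x12340000. */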
#define register_address_increment(reg, inc) \
    _register_address_increment((reg), (inc), ad_bytes)

#define sp_pre_dec(dec) ({                                           \
    _register_address_increment(_regs.esp, -(dec), ctxt->sp_size/8); \
    truncate_word(_regs.esp, ctxt->sp_size/8);                       \
})
#define sp_post_inc(inc) ({                                          \
    unsigned long __esp = truncate_word(_regs.esp, ctxt->sp_size/8); \
    _register_address_increment(_regs.esp, (inc), ctxt->sp_size/8);  \
    __esp;                                                           \
})

#define jmp_rel(rel)                                           \
do {                                                           \
    int _rel = (int)(rel);                                     \
    _regs.eip += _rel;                                         \
    if ( !mode_64bit() )                                       \
        _regs.eip = ((op_bytes == 2)                           \
                     ? (uint16_t)_regs.eip : (uint32_t)_regs.eip); \
} while (0)

struct fpu_insn_ctxt {
    uint8_t insn_bytes;
    uint8_t exn_raised;
};

static void fpu_handle_exception(void *_fic, struct cpu_user_regs *regs)
{
    struct fpu_insn_ctxt *fic = _fic;
    fic->exn_raised = 1;
    regs->eip += fic->insn_bytes;
}

#define get_fpu(_type, _fic)                                        \
do{ (_fic)->exn_raised = 0;                                         \
    fail_if(ops->get_fpu == NULL);                                  \
    rc = ops->get_fpu(fpu_handle_exception, _fic, _type, ctxt);     \
    if ( rc ) goto done;                                            \
} while (0)
#define put_fpu(_fic)                                               \
do{                                                                 \
    if ( ops->put_fpu != NULL )                                     \
        ops->put_fpu(ctxt);                                         \
    generate_exception_if((_fic)->exn_raised, EXC_MF, -1);          \
} while (0)

#define emulate_fpu_insn(_op)                   \
do{ struct fpu_insn_ctxt fic;                   \
    get_fpu(X86EMUL_FPU_fpu, &fic);             \
    asm volatile (                              \
        "movb $2f-1f,%0 \n"                     \
        "1: " _op "     \n"                     \
        "2:             \n"                     \
        : "=m" (fic.insn_bytes) : : "memory" ); \
    put_fpu(&fic);                              \
} while (0)

#define emulate_fpu_insn_memdst(_op, _arg)      \
do{ struct fpu_insn_ctxt fic;                   \
    get_fpu(X86EMUL_FPU_fpu, &fic);             \
    asm volatile (                              \
        "movb $2f-1f,%0 \n"                     \
        "1: " _op " %1  \n"                     \
        "2:             \n"                     \
        : "=m" (fic.insn_bytes), "=m" (_arg)    \
        : : "memory" );                         \
    put_fpu(&fic);                              \
} while (0)

#define emulate_fpu_insn_memsrc(_op, _arg)      \
do{ struct fpu_insn_ctxt fic;                   \
    get_fpu(X86EMUL_FPU_fpu, &fic);             \
    asm volatile (                              \
        "movb $2f-1f,%0 \n"                     \
        "1: " _op " %1  \n"                     \
        "2:             \n"                     \
        : "=m" (fic.insn_bytes)                 \
        : "m" (_arg) : "memory" );              \
    put_fpu(&fic);                              \
} while (0)

#define emulate_fpu_insn_stub(_bytes...)                            \
do{ uint8_t stub[] = { _bytes, 0xc3 };                              \
    struct fpu_insn_ctxt fic = { .insn_bytes = sizeof(stub)-1 };    \
    get_fpu(X86EMUL_FPU_fpu, &fic);                                 \
    (*(void(*)(void))stub)();                                       \
    put_fpu(&fic);                                                  \
} while (0)

static unsigned long __get_rep_prefix(
    struct cpu_user_regs *int_regs,
    struct cpu_user_regs *ext_regs,
    int ad_bytes)
{
    unsigned long ecx = ((ad_bytes == 2) ? (uint16_t)int_regs->ecx :
                         (ad_bytes == 4) ? (uint32_t)int_regs->ecx :
                         int_regs->ecx);

    /* Skip the instruction if no repetitions are required. */
    if ( ecx == 0 )
        ext_regs->eip = int_regs->eip;

    return ecx;
}

#define get_rep_prefix() ({                                        \
    unsigned long max_reps = 1;                                    \
    if ( rep_prefix )                                              \
        max_reps = __get_rep_prefix(&_regs, ctxt->regs, ad_bytes); \
    if ( max_reps == 0 )                                           \
        goto done;                                                 \
    max_reps;                                                      \
})

static void __put_rep_prefix(
    struct cpu_user_regs *int_regs,
    struct cpu_user_regs *ext_regs,
    int ad_bytes,
    unsigned long reps_completed)
{
    unsigned long ecx = ((ad_bytes == 2) ? (uint16_t)int_regs->ecx :
                         (ad_bytes == 4) ? (uint32_t)int_regs->ecx :
                         int_regs->ecx);

    /* Reduce counter appropriately, and repeat instruction if non-zero. */
    ecx -= reps_completed;
    if ( ecx != 0 )
        int_regs->eip = ext_regs->eip;

    if ( ad_bytes == 2 )
        *(uint16_t *)&int_regs->ecx = ecx;
    else if ( ad_bytes == 4 )
        int_regs->ecx = (uint32_t)ecx;
    else
        int_regs->ecx = ecx;
}

#define put_rep_prefix(reps_completed) ({                               \
    if ( rep_prefix )                                                   \
        __put_rep_prefix(&_regs, ctxt->regs, ad_bytes, reps_completed); \
})

/* Clip maximum repetitions so that the index register only just wraps. */
#define truncate_ea_and_reps(ea, reps, bytes_per_rep) ({                \
    unsigned long __todo = (ctxt->regs->eflags & EF_DF) ? (ea) : ~(ea); \
    __todo = truncate_word(__todo, ad_bytes);                           \
    __todo = (__todo / (bytes_per_rep)) + 1;                            \
    (reps) = (__todo < (reps)) ? __todo : (reps);                       \
    truncate_word((ea), ad_bytes);                                      \
})
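/* Illustrative: ad_bytes == 2, DF clear, ea == 0xfffe, one byte per rep:
 * __todo = truncate_word(~0xfffe, 2) / 1 + 1 == 2, so at most two
 * repetitions (offsets 0xfffe and 0xffff) before the index would wrap. */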

/* Compatibility function: read guest memory, zero-extend result to a ulong. */
static int read_ulong(
    enum x86_segment seg,
    unsigned long offset,
    unsigned long *val,
    unsigned int bytes,
    struct x86_emulate_ctxt *ctxt,
    struct x86_emulate_ops *ops)
{
    *val = 0;
    return ops->read(seg, offset, val, bytes, ctxt);
}

/*
 * Unsigned multiplication with double-word result.
 * IN:  Multiplicand=m[0], Multiplier=m[1]
 * OUT: Return CF/OF (overflow status); Result=m[1]:m[0]
 */
static int mul_dbl(unsigned long m[2])
{
    int rc;
    asm ( "mul %4; seto %b2"
          : "=a" (m[0]), "=d" (m[1]), "=q" (rc)
          : "0" (m[0]), "1" (m[1]), "2" (0) );
    return rc;
}
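/* Illustrative (32-bit build): m = { 0x80000000, 2 } yields m[0] == 0,
 * m[1] == 1 (i.e. 0x100000000) and a return value of 1 (overflow). */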

/*
 * Signed multiplication with double-word result.
 * IN:  Multiplicand=m[0], Multiplier=m[1]
 * OUT: Return CF/OF (overflow status); Result=m[1]:m[0]
 */
static int imul_dbl(unsigned long m[2])
{
    int rc;
    asm ( "imul %4; seto %b2"
          : "=a" (m[0]), "=d" (m[1]), "=q" (rc)
          : "0" (m[0]), "1" (m[1]), "2" (0) );
    return rc;
}

/*
 * Unsigned division of double-word dividend.
 * IN:  Dividend=u[1]:u[0], Divisor=v
 * OUT: Return 1: #DE
 *      Return 0: Quotient=u[0], Remainder=u[1]
 */
static int div_dbl(unsigned long u[2], unsigned long v)
{
    if ( (v == 0) || (u[1] >= v) )
        return 1;
    asm ( "div %4"
          : "=a" (u[0]), "=d" (u[1])
          : "0" (u[0]), "1" (u[1]), "r" (v) );
    return 0;
}
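/* Illustrative: u = { 7, 0 } (dividend 7), v == 2 yields quotient
 * u[0] == 3, remainder u[1] == 1, return 0; v == 0 or u[1] >= v returns
 * 1 (#DE). */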

/*
 * Signed division of double-word dividend.
 * IN:  Dividend=u[1]:u[0], Divisor=v
 * OUT: Return 1: #DE
 *      Return 0: Quotient=u[0], Remainder=u[1]
 * NB. We don't use idiv directly as it's moderately hard to work out
 *     ahead of time whether it will #DE, which we cannot allow to happen.
 */
static int idiv_dbl(unsigned long u[2], unsigned long v)
{
    int negu = (long)u[1] < 0, negv = (long)v < 0;

    /* u = abs(u) */
    if ( negu )
    {
        u[1] = ~u[1];
        if ( (u[0] = -u[0]) == 0 )
            u[1]++;
    }

    /* abs(u) / abs(v) */
    if ( div_dbl(u, negv ? -v : v) )
        return 1;

    /* Remainder has same sign as dividend. It cannot overflow. */
    if ( negu )
        u[1] = -u[1];

    /* Quotient is overflowed if sign bit is set. */
    if ( negu ^ negv )
    {
        if ( (long)u[0] >= 0 )
            u[0] = -u[0];
        else if ( (u[0] << 1) != 0 ) /* == 0x80...0 is okay */
            return 1;
    }
    else if ( (long)u[0] < 0 )
        return 1;

    return 0;
}

static int
test_cc(
    unsigned int condition, unsigned int flags)
{
    int rc = 0;

    switch ( (condition & 15) >> 1 )
    {
    case 0: /* o */
        rc |= (flags & EFLG_OF);
        break;
    case 1: /* b/c/nae */
        rc |= (flags & EFLG_CF);
        break;
    case 2: /* z/e */
        rc |= (flags & EFLG_ZF);
        break;
    case 3: /* be/na */
        rc |= (flags & (EFLG_CF|EFLG_ZF));
        break;
    case 4: /* s */
        rc |= (flags & EFLG_SF);
        break;
    case 5: /* p/pe */
        rc |= (flags & EFLG_PF);
        break;
    case 7: /* le/ng */
        rc |= (flags & EFLG_ZF);
        /* fall through */
    case 6: /* l/nge */
        rc |= (!(flags & EFLG_SF) != !(flags & EFLG_OF));
        break;
    }

    /* Odd condition identifiers (lsb == 1) have inverted sense. */
    return (!!rc ^ (condition & 1));
}
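/* Illustrative: for "jnz" (opcode 0x75), (condition & 15) >> 1 == 2 selects
 * the ZF test and the set lsb inverts it, so the branch is taken iff ZF is
 * clear. */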

static int
get_cpl(
    struct x86_emulate_ctxt *ctxt,
    struct x86_emulate_ops *ops)
{
    struct segment_register reg;

    if ( ctxt->regs->eflags & EFLG_VM )
        return 3;

    if ( (ops->read_segment == NULL) ||
         ops->read_segment(x86_seg_ss, &reg, ctxt) )
        return -1;

    return reg.attr.fields.dpl;
}

static int
_mode_iopl(
    struct x86_emulate_ctxt *ctxt,
    struct x86_emulate_ops *ops)
{
    int cpl = get_cpl(ctxt, ops);
    if ( cpl == -1 )
        return -1;
    return (cpl <= ((ctxt->regs->eflags >> 12) & 3));
}

#define mode_ring0() ({                    \
    int _cpl = get_cpl(ctxt, ops);         \
    fail_if(_cpl < 0);                     \
    (_cpl == 0);                           \
})
#define mode_iopl() ({                     \
    int _iopl = _mode_iopl(ctxt, ops);     \
    fail_if(_iopl < 0);                    \
    _iopl;                                 \
})

static int ioport_access_check(
    unsigned int first_port,
    unsigned int bytes,
    struct x86_emulate_ctxt *ctxt,
    struct x86_emulate_ops *ops)
{
    unsigned long iobmp;
    struct segment_register tr;
    int rc = X86EMUL_OKAY;

    if ( !(ctxt->regs->eflags & EFLG_VM) && mode_iopl() )
        return X86EMUL_OKAY;

    fail_if(ops->read_segment == NULL);
    if ( (rc = ops->read_segment(x86_seg_tr, &tr, ctxt)) != 0 )
        return rc;

    /* Ensure that the TSS is valid and has an io-bitmap-offset field. */
    if ( !tr.attr.fields.p ||
         ((tr.attr.fields.type & 0xd) != 0x9) ||
         (tr.limit < 0x67) )
        goto raise_exception;

    if ( (rc = read_ulong(x86_seg_none, tr.base + 0x66,
                          &iobmp, 2, ctxt, ops)) )
        return rc;

    /* Ensure TSS includes two bytes including byte containing first port. */
    iobmp += first_port / 8;
    if ( tr.limit <= iobmp )
        goto raise_exception;

    if ( (rc = read_ulong(x86_seg_none, tr.base + iobmp,
                          &iobmp, 2, ctxt, ops)) )
        return rc;
    if ( (iobmp & (((1<<bytes)-1) << (first_port&7))) != 0 )
        goto raise_exception;

 done:
    return rc;

 raise_exception:
    fail_if(ops->inject_hw_exception == NULL);
    return ops->inject_hw_exception(EXC_GP, 0, ctxt) ? : X86EMUL_EXCEPTION;
}
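/* Illustrative: for first_port == 0x3f9 and bytes == 2, the word read at
 * bitmap offset (first_port / 8) covers ports 0x3f8-0x407, and the mask
 * 3 << (0x3f9 & 7) tests the two bits for ports 0x3f9-0x3fa; any set bit
 * raises #GP. */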

static int
in_realmode(
    struct x86_emulate_ctxt *ctxt,
    struct x86_emulate_ops *ops)
{
    unsigned long cr0;
    int rc;

    if ( ops->read_cr == NULL )
        return 0;

    rc = ops->read_cr(0, &cr0, ctxt);
    return (!rc && !(cr0 & CR0_PE));
}

static int
in_protmode(
    struct x86_emulate_ctxt *ctxt,
    struct x86_emulate_ops *ops)
{
    return !(in_realmode(ctxt, ops) || (ctxt->regs->eflags & EFLG_VM));
}

static int
realmode_load_seg(
    enum x86_segment seg,
    uint16_t sel,
    struct x86_emulate_ctxt *ctxt,
    struct x86_emulate_ops *ops)
{
    struct segment_register reg;
    int rc;

    if ( (rc = ops->read_segment(seg, &reg, ctxt)) != 0 )
        return rc;

    reg.sel  = sel;
    reg.base = (uint32_t)sel << 4;

    return ops->write_segment(seg, &reg, ctxt);
}

static int
protmode_load_seg(
    enum x86_segment seg,
    uint16_t sel,
    struct x86_emulate_ctxt *ctxt,
    struct x86_emulate_ops *ops)
{
    struct segment_register desctab, ss, segr;
    struct { uint32_t a, b; } desc;
    unsigned long val;
    uint8_t dpl, rpl, cpl;
    uint32_t new_desc_b;
    int rc, fault_type = EXC_TS;

    /* NULL selector? */
    if ( (sel & 0xfffc) == 0 )
    {
        if ( (seg == x86_seg_cs) || (seg == x86_seg_ss) )
            goto raise_exn;
        memset(&segr, 0, sizeof(segr));
        return ops->write_segment(seg, &segr, ctxt);
    }

    /* LDT descriptor must be in the GDT. */
    if ( (seg == x86_seg_ldtr) && (sel & 4) )
        goto raise_exn;

    if ( (rc = ops->read_segment(x86_seg_ss, &ss, ctxt)) ||
         (rc = ops->read_segment((sel & 4) ? x86_seg_ldtr : x86_seg_gdtr,
                                 &desctab, ctxt)) )
        return rc;

    /* Check against descriptor table limit. */
    if ( ((sel & 0xfff8) + 7) > desctab.limit )
        goto raise_exn;

    do {
        if ( (rc = read_ulong(x86_seg_none, desctab.base + (sel & 0xfff8),
                              &val, 4, ctxt, ops)) )
            return rc;
        desc.a = val;
        if ( (rc = read_ulong(x86_seg_none, desctab.base + (sel & 0xfff8) + 4,
                              &val, 4, ctxt, ops)) )
            return rc;
        desc.b = val;

        /* Segment present in memory? */
        if ( !(desc.b & (1u<<15)) )
        {
            fault_type = EXC_NP;
            goto raise_exn;
        }

        /* LDT descriptor is a system segment. All others are code/data. */
        if ( (desc.b & (1u<<12)) == ((seg == x86_seg_ldtr) << 12) )
            goto raise_exn;

        dpl = (desc.b >> 13) & 3;
        rpl = sel & 3;
        cpl = ss.attr.fields.dpl;

        switch ( seg )
        {
        case x86_seg_cs:
            /* Code segment? */
            if ( !(desc.b & (1u<<11)) )
                goto raise_exn;
            /* Non-conforming segment: check DPL against RPL. */
            if ( ((desc.b & (6u<<9)) != (6u<<9)) && (dpl != rpl) )
                goto raise_exn;
            break;
        case x86_seg_ss:
            /* Writable data segment? */
            if ( (desc.b & (5u<<9)) != (1u<<9) )
                goto raise_exn;
            if ( (dpl != cpl) || (dpl != rpl) )
                goto raise_exn;
            break;
        case x86_seg_ldtr:
            /* LDT system segment? */
            if ( (desc.b & (15u<<8)) != (2u<<8) )
                goto raise_exn;
            goto skip_accessed_flag;
        default:
            /* Readable code or data segment? */
            if ( (desc.b & (5u<<9)) == (4u<<9) )
                goto raise_exn;
            /* Non-conforming segment: check DPL against RPL and CPL. */
            if ( ((desc.b & (6u<<9)) != (6u<<9)) &&
                 ((dpl < cpl) || (dpl < rpl)) )
                goto raise_exn;
            break;
        }

        /* Ensure Accessed flag is set. */
        new_desc_b = desc.b | 0x100;
        rc = ((desc.b & 0x100) ? X86EMUL_OKAY :
              ops->cmpxchg(
                  x86_seg_none, desctab.base + (sel & 0xfff8) + 4,
                  &desc.b, &new_desc_b, 4, ctxt));
    } while ( rc == X86EMUL_CMPXCHG_FAILED );

    if ( rc )
        return rc;

    /* Force the Accessed flag in our local copy. */
    desc.b |= 0x100;

 skip_accessed_flag:
    segr.base = (((desc.b <<  0) & 0xff000000u) |
                 ((desc.b << 16) & 0x00ff0000u) |
                 ((desc.a >> 16) & 0x0000ffffu));
    segr.attr.bytes = (((desc.b >>  8) & 0x00ffu) |
                       ((desc.b >> 12) & 0x0f00u));
    segr.limit = (desc.b & 0x000f0000u) | (desc.a & 0x0000ffffu);
    if ( segr.attr.fields.g )
        segr.limit = (segr.limit << 12) | 0xfffu;
    segr.sel = sel;
    return ops->write_segment(seg, &segr, ctxt);

 raise_exn:
    if ( ops->inject_hw_exception == NULL )
        return X86EMUL_UNHANDLEABLE;
    if ( (rc = ops->inject_hw_exception(fault_type, sel & 0xfffc, ctxt)) )
        return rc;
    return X86EMUL_EXCEPTION;
}
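/* Illustrative: a flat 32-bit data descriptor 0x00cf93000000ffff decodes as
 * base 0 and attr 0x0c93 (present, DPL0, writable, G=1, D/B=1); its raw
 * limit of 0xfffff pages is scaled above to a byte-granular 0xffffffff. */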

static int
load_seg(
    enum x86_segment seg,
    uint16_t sel,
    struct x86_emulate_ctxt *ctxt,
    struct x86_emulate_ops *ops)
{
    if ( (ops->read_segment == NULL) ||
         (ops->write_segment == NULL) )
        return X86EMUL_UNHANDLEABLE;

    if ( in_protmode(ctxt, ops) )
        return protmode_load_seg(seg, sel, ctxt, ops);

    return realmode_load_seg(seg, sel, ctxt, ops);
}

void *
decode_register(
    uint8_t modrm_reg, struct cpu_user_regs *regs, int highbyte_regs)
{
    void *p;

    switch ( modrm_reg )
    {
    case  0: p = &regs->eax; break;
    case  1: p = &regs->ecx; break;
    case  2: p = &regs->edx; break;
    case  3: p = &regs->ebx; break;
    case  4: p = (highbyte_regs ?
                  ((unsigned char *)&regs->eax + 1) :
                  (unsigned char *)&regs->esp); break;
    case  5: p = (highbyte_regs ?
                  ((unsigned char *)&regs->ecx + 1) :
                  (unsigned char *)&regs->ebp); break;
    case  6: p = (highbyte_regs ?
                  ((unsigned char *)&regs->edx + 1) :
                  (unsigned char *)&regs->esi); break;
    case  7: p = (highbyte_regs ?
                  ((unsigned char *)&regs->ebx + 1) :
                  (unsigned char *)&regs->edi); break;
#if defined(__x86_64__)
    case  8: p = &regs->r8;  break;
    case  9: p = &regs->r9;  break;
    case 10: p = &regs->r10; break;
    case 11: p = &regs->r11; break;
    case 12: p = &regs->r12; break;
    case 13: p = &regs->r13; break;
    case 14: p = &regs->r14; break;
    case 15: p = &regs->r15; break;
#endif
    default: p = NULL; break;
    }

    return p;
}
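/* Illustrative: decode_register(4, regs, 1) returns &regs->eax + 1 (%ah),
 * while decode_register(4, regs, 0) returns (void *)&regs->esp. */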

#define decode_segment_failed x86_seg_tr
enum x86_segment
decode_segment(
    uint8_t modrm_reg)
{
    switch ( modrm_reg )
    {
    case 0: return x86_seg_es;
    case 1: return x86_seg_cs;
    case 2: return x86_seg_ss;
    case 3: return x86_seg_ds;
    case 4: return x86_seg_fs;
    case 5: return x86_seg_gs;
    default: break;
    }
    return decode_segment_failed;
}

int
x86_emulate(
    struct x86_emulate_ctxt *ctxt,
    struct x86_emulate_ops *ops)
{
    /* Shadow copy of register state. Committed on successful emulation. */
    struct cpu_user_regs _regs = *ctxt->regs;

    uint8_t b, d, sib, sib_index, sib_base, twobyte = 0, rex_prefix = 0;
    uint8_t modrm = 0, modrm_mod = 0, modrm_reg = 0, modrm_rm = 0;
    unsigned int op_bytes, def_op_bytes, ad_bytes, def_ad_bytes;
#define REPE_PREFIX 1
#define REPNE_PREFIX 2
    unsigned int lock_prefix = 0, rep_prefix = 0;
    int override_seg = -1, rc = X86EMUL_OKAY;
    struct operand src, dst;

    /*
     * Data operand effective address (usually computed from ModRM).
     * Default is a memory operand relative to segment DS.
     */
    struct operand ea = { .type = OP_MEM };
    ea.mem.seg = x86_seg_ds; /* gcc may reject anon union initializer */

    ctxt->retire.byte = 0;

    op_bytes = def_op_bytes = ad_bytes = def_ad_bytes = ctxt->addr_size/8;
    if ( op_bytes == 8 )
    {
        op_bytes = def_op_bytes = 4;
#ifndef __x86_64__
        return X86EMUL_UNHANDLEABLE;
#endif
    }

    /* Prefix bytes. */
    for ( ; ; )
    {
        switch ( b = insn_fetch_type(uint8_t) )
        {
        case 0x66: /* operand-size override */
            op_bytes = def_op_bytes ^ 6;
            break;
        case 0x67: /* address-size override */
            ad_bytes = def_ad_bytes ^ (mode_64bit() ? 12 : 6);
            break;
        case 0x2e: /* CS override */
            override_seg = x86_seg_cs;
            break;
        case 0x3e: /* DS override */
            override_seg = x86_seg_ds;
            break;
        case 0x26: /* ES override */
            override_seg = x86_seg_es;
            break;
        case 0x64: /* FS override */
            override_seg = x86_seg_fs;
            break;
        case 0x65: /* GS override */
            override_seg = x86_seg_gs;
            break;
        case 0x36: /* SS override */
            override_seg = x86_seg_ss;
            break;
        case 0xf0: /* LOCK */
            lock_prefix = 1;
            break;
        case 0xf2: /* REPNE/REPNZ */
            rep_prefix = REPNE_PREFIX;
            break;
        case 0xf3: /* REP/REPE/REPZ */
            rep_prefix = REPE_PREFIX;
            break;
        case 0x40 ... 0x4f: /* REX */
            if ( !mode_64bit() )
                goto done_prefixes;
            rex_prefix = b;
            continue;
        default:
            goto done_prefixes;
        }

        /* Any legacy prefix after a REX prefix nullifies its effect. */
        rex_prefix = 0;
    }
 done_prefixes:

    if ( rex_prefix & 8 ) /* REX.W */
        op_bytes = 8;

    /* Opcode byte(s). */
    d = opcode_table[b];
    if ( d == 0 )
    {
        /* Two-byte opcode? */
        if ( b == 0x0f )
        {
            twobyte = 1;
            b = insn_fetch_type(uint8_t);
            d = twobyte_table[b];
        }

        /* Unrecognised? */
        if ( d == 0 )
            goto cannot_emulate;
    }

    /* Lock prefix is allowed only on RMW instructions. */
    generate_exception_if((d & Mov) && lock_prefix, EXC_GP, 0);

    /* ModRM and SIB bytes. */
    if ( d & ModRM )
    {
        modrm = insn_fetch_type(uint8_t);
        modrm_mod = (modrm & 0xc0) >> 6;
        modrm_reg = ((rex_prefix & 4) << 1) | ((modrm & 0x38) >> 3);
        modrm_rm  = modrm & 0x07;

        if ( modrm_mod == 3 )
        {
            modrm_rm |= (rex_prefix & 1) << 3;
            ea.type = OP_REG;
            ea.reg  = decode_register(
                modrm_rm, &_regs, (d & ByteOp) && (rex_prefix == 0));
        }
        else if ( ad_bytes == 2 )
        {
            /* 16-bit ModR/M decode. */
            switch ( modrm_rm )
            {
            case 0:
                ea.mem.off = _regs.ebx + _regs.esi;
                break;
            case 1:
                ea.mem.off = _regs.ebx + _regs.edi;
                break;
            case 2:
                ea.mem.seg = x86_seg_ss;
                ea.mem.off = _regs.ebp + _regs.esi;
                break;
            case 3:
                ea.mem.seg = x86_seg_ss;
                ea.mem.off = _regs.ebp + _regs.edi;
                break;
            case 4:
                ea.mem.off = _regs.esi;
                break;
            case 5:
                ea.mem.off = _regs.edi;
                break;
            case 6:
                if ( modrm_mod == 0 )
                    break;
                ea.mem.seg = x86_seg_ss;
                ea.mem.off = _regs.ebp;
                break;
            case 7:
                ea.mem.off = _regs.ebx;
                break;
            }
            switch ( modrm_mod )
            {
            case 0:
                if ( modrm_rm == 6 )
                    ea.mem.off = insn_fetch_type(int16_t);
                break;
            case 1:
                ea.mem.off += insn_fetch_type(int8_t);
                break;
            case 2:
                ea.mem.off += insn_fetch_type(int16_t);
                break;
            }
            ea.mem.off = truncate_ea(ea.mem.off);
        }
        else
        {
            /* 32/64-bit ModR/M decode. */
            if ( modrm_rm == 4 )
            {
                sib = insn_fetch_type(uint8_t);
                sib_index = ((sib >> 3) & 7) | ((rex_prefix << 2) & 8);
                sib_base  = (sib & 7) | ((rex_prefix << 3) & 8);
                if ( sib_index != 4 )
                    ea.mem.off = *(long*)decode_register(sib_index, &_regs, 0);
                ea.mem.off <<= (sib >> 6) & 3;
                if ( (modrm_mod == 0) && ((sib_base & 7) == 5) )
                    ea.mem.off += insn_fetch_type(int32_t);
                else if ( sib_base == 4 )
                {
                    ea.mem.seg  = x86_seg_ss;
                    ea.mem.off += _regs.esp;
                    if ( !twobyte && (b == 0x8f) )
                        /* POP <rm> computes its EA post increment. */
                        ea.mem.off += ((mode_64bit() && (op_bytes == 4))
                                       ? 8 : op_bytes);
                }
                else if ( sib_base == 5 )
                {
                    ea.mem.seg  = x86_seg_ss;
                    ea.mem.off += _regs.ebp;
                }
                else
                    ea.mem.off += *(long*)decode_register(sib_base, &_regs, 0);
            }
            else
            {
                modrm_rm |= (rex_prefix & 1) << 3;
                ea.mem.off = *(long *)decode_register(modrm_rm, &_regs, 0);
                if ( (modrm_rm == 5) && (modrm_mod != 0) )
                    ea.mem.seg = x86_seg_ss;
            }
            switch ( modrm_mod )
            {
            case 0:
                if ( (modrm_rm & 7) != 5 )
                    break;
                ea.mem.off = insn_fetch_type(int32_t);
                if ( !mode_64bit() )
                    break;
                /* Relative to RIP of next instruction. Argh! */
                ea.mem.off += _regs.eip;
                if ( (d & SrcMask) == SrcImm )
                    ea.mem.off += (d & ByteOp) ? 1 :
                        ((op_bytes == 8) ? 4 : op_bytes);
                else if ( (d & SrcMask) == SrcImmByte )
                    ea.mem.off += 1;
                else if ( !twobyte && ((b & 0xfe) == 0xf6) &&
                          ((modrm_reg & 7) <= 1) )
                    /* Special case in Grp3: test has immediate operand. */
                    ea.mem.off += (d & ByteOp) ? 1
                        : ((op_bytes == 8) ? 4 : op_bytes);
                else if ( twobyte && ((b & 0xf7) == 0xa4) )
                    /* SHLD/SHRD with immediate byte third operand. */
                    ea.mem.off++;
                break;
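            /*
             * Illustrative note on the RIP-relative case above: the EA is
             * disp32 plus the RIP of the *next* instruction, but _regs.eip
             * has only advanced past the bytes fetched so far, so the size
             * of any immediate still to be fetched is added by hand.
             */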
            case 1:
                ea.mem.off += insn_fetch_type(int8_t);
                break;
            case 2:
                ea.mem.off += insn_fetch_type(int32_t);
                break;
            }
            ea.mem.off = truncate_ea(ea.mem.off);
        }
    }

    if ( override_seg != -1 )
        ea.mem.seg = override_seg;

    /* Decode and fetch the source operand: register, memory or immediate. */
    switch ( d & SrcMask )
    {
    case SrcNone: /* case SrcImplicit: */
        src.type = OP_NONE;
        break;
    case SrcReg:
        src.type = OP_REG;
        if ( d & ByteOp )
        {
            src.reg = decode_register(modrm_reg, &_regs, (rex_prefix == 0));
            src.val = *(uint8_t *)src.reg;
            src.bytes = 1;
        }
        else
        {
            src.reg = decode_register(modrm_reg, &_regs, 0);
            switch ( (src.bytes = op_bytes) )
            {
            case 2: src.val = *(uint16_t *)src.reg; break;
            case 4: src.val = *(uint32_t *)src.reg; break;
            case 8: src.val = *(uint64_t *)src.reg; break;
            }
        }
        break;
    case SrcMem16:
        ea.bytes = 2;
        goto srcmem_common;
    case SrcMem:
        ea.bytes = (d & ByteOp) ? 1 : op_bytes;
    srcmem_common:
        src = ea;
        if ( src.type == OP_REG )
        {
            switch ( src.bytes )
            {
            case 1: src.val = *(uint8_t  *)src.reg; break;
            case 2: src.val = *(uint16_t *)src.reg; break;
            case 4: src.val = *(uint32_t *)src.reg; break;
            case 8: src.val = *(uint64_t *)src.reg; break;
            }
        }
        else if ( (rc = read_ulong(src.mem.seg, src.mem.off,
                                   &src.val, src.bytes, ctxt, ops)) )
            goto done;
        break;
    case SrcImm:
        src.type  = OP_IMM;
        src.bytes = (d & ByteOp) ? 1 : op_bytes;
        if ( src.bytes == 8 ) src.bytes = 4;
        /* NB. Immediates are sign-extended as necessary. */
        switch ( src.bytes )
        {
        case 1: src.val = insn_fetch_type(int8_t);  break;
        case 2: src.val = insn_fetch_type(int16_t); break;
        case 4: src.val = insn_fetch_type(int32_t); break;
        }
        break;
    case SrcImmByte:
        src.type  = OP_IMM;
        src.bytes = 1;
        src.val   = insn_fetch_type(int8_t);
        break;
    }

    /* Decode and fetch the destination operand: register or memory. */
    switch ( d & DstMask )
    {
    case DstNone: /* case DstImplicit: */
        /*
         * The only implicit-operands instructions allowed a LOCK prefix are
         * CMPXCHG{8,16}B, MOV CRn, MOV DRn.
         */
        generate_exception_if(
            lock_prefix &&
            ((b < 0x20) || (b > 0x23)) && /* MOV CRn/DRn */
            (b != 0xc7),                  /* CMPXCHG{8,16}B */
            EXC_GP, 0);
        dst.type = OP_NONE;
        break;

    case DstReg:
        generate_exception_if(lock_prefix, EXC_GP, 0);
        dst.type = OP_REG;
        if ( d & ByteOp )
        {
            dst.reg = decode_register(modrm_reg, &_regs, (rex_prefix == 0));
            dst.val = *(uint8_t *)dst.reg;
            dst.bytes = 1;
        }
        else
        {
            dst.reg = decode_register(modrm_reg, &_regs, 0);
            switch ( (dst.bytes = op_bytes) )
            {
            case 2: dst.val = *(uint16_t *)dst.reg; break;
            case 4: dst.val = *(uint32_t *)dst.reg; break;
            case 8: dst.val = *(uint64_t *)dst.reg; break;
            }
        }
        break;
    case DstBitBase:
        if ( ((d & SrcMask) == SrcImmByte) || (ea.type == OP_REG) )
        {
            src.val &= (op_bytes << 3) - 1;
        }
        else
        {
            /*
             * EA       += BitOffset DIV op_bytes*8
             * BitOffset = BitOffset MOD op_bytes*8
             * DIV truncates towards negative infinity.
             * MOD always produces a positive result.
             */
            if ( op_bytes == 2 )
                src.val = (int16_t)src.val;
            else if ( op_bytes == 4 )
                src.val = (int32_t)src.val;
            if ( (long)src.val < 0 )
            {
                unsigned long byte_offset;
                byte_offset = op_bytes + (((-src.val-1) >> 3) & ~(op_bytes-1));
                ea.mem.off -= byte_offset;
                src.val = (byte_offset << 3) + src.val;
            }
            else
            {
                ea.mem.off += (src.val >> 3) & ~(op_bytes - 1);
                src.val &= (op_bytes << 3) - 1;
            }
        }
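        /*
         * Illustrative: "bt %eax,(%edi)" with op_bytes == 4 and a bit offset
         * of -1 yields byte_offset == 4, so the EA moves back one dword and
         * the bit offset becomes 31, i.e. the top bit of the preceding dword.
         */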
        /* Becomes a normal DstMem operation from here on. */
        d = (d & ~DstMask) | DstMem;
    case DstMem:
        ea.bytes = (d & ByteOp) ? 1 : op_bytes;
        dst = ea;
        if ( dst.type == OP_REG )
        {
            generate_exception_if(lock_prefix, EXC_GP, 0);
            switch ( dst.bytes )
            {
            case 1: dst.val = *(uint8_t  *)dst.reg; break;
            case 2: dst.val = *(uint16_t *)dst.reg; break;
            case 4: dst.val = *(uint32_t *)dst.reg; break;
            case 8: dst.val = *(uint64_t *)dst.reg; break;
            }
        }
        else if ( !(d & Mov) ) /* optimisation - avoid slow emulated read */
        {
            if ( (rc = read_ulong(dst.mem.seg, dst.mem.off,
                                  &dst.val, dst.bytes, ctxt, ops)) )
                goto done;
            dst.orig_val = dst.val;
        }
        break;
    }

    if ( twobyte )
        goto twobyte_insn;

    switch ( b )
    {
    case 0x04 ... 0x05: /* add imm,%%eax */
        dst.reg = (unsigned long *)&_regs.eax;
        dst.val = _regs.eax;
    case 0x00 ... 0x03: add: /* add */
        emulate_2op_SrcV("add", src, dst, _regs.eflags);
        break;

    case 0x0c ... 0x0d: /* or imm,%%eax */
        dst.reg = (unsigned long *)&_regs.eax;
        dst.val = _regs.eax;
    case 0x08 ... 0x0b: or: /* or */
        emulate_2op_SrcV("or", src, dst, _regs.eflags);
        break;

    case 0x14 ... 0x15: /* adc imm,%%eax */
        dst.reg = (unsigned long *)&_regs.eax;
        dst.val = _regs.eax;
    case 0x10 ... 0x13: adc: /* adc */
        emulate_2op_SrcV("adc", src, dst, _regs.eflags);
        break;

    case 0x1c ... 0x1d: /* sbb imm,%%eax */
        dst.reg = (unsigned long *)&_regs.eax;
        dst.val = _regs.eax;
    case 0x18 ... 0x1b: sbb: /* sbb */
        emulate_2op_SrcV("sbb", src, dst, _regs.eflags);
        break;

    case 0x24 ... 0x25: /* and imm,%%eax */
        dst.reg = (unsigned long *)&_regs.eax;
        dst.val = _regs.eax;
    case 0x20 ... 0x23: and: /* and */
        emulate_2op_SrcV("and", src, dst, _regs.eflags);
        break;

    case 0x2c ... 0x2d: /* sub imm,%%eax */
        dst.reg = (unsigned long *)&_regs.eax;
        dst.val = _regs.eax;
    case 0x28 ... 0x2b: sub: /* sub */
        emulate_2op_SrcV("sub", src, dst, _regs.eflags);
        break;

    case 0x34 ... 0x35: /* xor imm,%%eax */
        dst.reg = (unsigned long *)&_regs.eax;
        dst.val = _regs.eax;
    case 0x30 ... 0x33: xor: /* xor */
        emulate_2op_SrcV("xor", src, dst, _regs.eflags);
        break;

    case 0x3c ... 0x3d: /* cmp imm,%%eax */
        dst.reg = (unsigned long *)&_regs.eax;
        dst.val = _regs.eax;
    case 0x38 ... 0x3b: cmp: /* cmp */
        emulate_2op_SrcV("cmp", src, dst, _regs.eflags);
        dst.type = OP_NONE;
        break;

    case 0x06: /* push %%es */ {
        struct segment_register reg;
        src.val = x86_seg_es;
    push_seg:
        fail_if(ops->read_segment == NULL);
        if ( (rc = ops->read_segment(src.val, &reg, ctxt)) != 0 )
            return rc;
        /* 64-bit mode: PUSH defaults to a 64-bit operand. */
        if ( mode_64bit() && (op_bytes == 4) )
            op_bytes = 8;
        if ( (rc = ops->write(x86_seg_ss, sp_pre_dec(op_bytes),
                              &reg.sel, op_bytes, ctxt)) != 0 )
            goto done;
        break;
    }

    case 0x07: /* pop %%es */
        src.val = x86_seg_es;
    pop_seg:
        fail_if(ops->write_segment == NULL);
        /* 64-bit mode: POP defaults to a 64-bit operand. */
        if ( mode_64bit() && (op_bytes == 4) )
            op_bytes = 8;
        if ( (rc = read_ulong(x86_seg_ss, sp_post_inc(op_bytes),
                              &dst.val, op_bytes, ctxt, ops)) != 0 )
            goto done;
        if ( (rc = load_seg(src.val, (uint16_t)dst.val, ctxt, ops)) != 0 )
            return rc;
        break;

    case 0x0e: /* push %%cs */
        src.val = x86_seg_cs;
        goto push_seg;

    case 0x16: /* push %%ss */
        src.val = x86_seg_ss;
        goto push_seg;

    case 0x17: /* pop %%ss */
        src.val = x86_seg_ss;
        ctxt->retire.flags.mov_ss = 1;
        goto pop_seg;

    case 0x1e: /* push %%ds */
        src.val = x86_seg_ds;
        goto push_seg;

    case 0x1f: /* pop %%ds */
        src.val = x86_seg_ds;
        goto pop_seg;

    case 0x27: /* daa */ {
        uint8_t al = _regs.eax;
        unsigned long eflags = _regs.eflags;
        generate_exception_if(mode_64bit(), EXC_UD, -1);
        _regs.eflags &= ~(EFLG_CF|EFLG_AF);
        if ( ((al & 0x0f) > 9) || (eflags & EFLG_AF) )
        {
            *(uint8_t *)&_regs.eax += 6;
            _regs.eflags |= EFLG_AF;
        }
        if ( (al > 0x99) || (eflags & EFLG_CF) )
        {
            *(uint8_t *)&_regs.eax += 0x60;
            _regs.eflags |= EFLG_CF;
        }
        _regs.eflags &= ~(EFLG_SF|EFLG_ZF|EFLG_PF);
        _regs.eflags |= ((uint8_t)_regs.eax == 0) ? EFLG_ZF : 0;
        _regs.eflags |= (( int8_t)_regs.eax <  0) ? EFLG_SF : 0;
        _regs.eflags |= even_parity(_regs.eax) ? EFLG_PF : 0;
        break;
    }

    case 0x2f: /* das */ {
        uint8_t al = _regs.eax;
        unsigned long eflags = _regs.eflags;
        generate_exception_if(mode_64bit(), EXC_UD, -1);
        _regs.eflags &= ~(EFLG_CF|EFLG_AF);
        if ( ((al & 0x0f) > 9) || (eflags & EFLG_AF) )
        {
            _regs.eflags |= EFLG_AF;
            if ( (al < 6) || (eflags & EFLG_CF) )
                _regs.eflags |= EFLG_CF;
            *(uint8_t *)&_regs.eax -= 6;
        }
        if ( (al > 0x99) || (eflags & EFLG_CF) )
        {
            *(uint8_t *)&_regs.eax -= 0x60;
            _regs.eflags |= EFLG_CF;
        }
        _regs.eflags &= ~(EFLG_SF|EFLG_ZF|EFLG_PF);
        _regs.eflags |= ((uint8_t)_regs.eax == 0) ? EFLG_ZF : 0;
        _regs.eflags |= (( int8_t)_regs.eax <  0) ? EFLG_SF : 0;
        _regs.eflags |= even_parity(_regs.eax) ? EFLG_PF : 0;
        break;
    }

    case 0x37: /* aaa */
    case 0x3f: /* aas */
        generate_exception_if(mode_64bit(), EXC_UD, -1);
        _regs.eflags &= ~EFLG_CF;
        if ( ((uint8_t)_regs.eax > 9) || (_regs.eflags & EFLG_AF) )
        {
            ((uint8_t *)&_regs.eax)[0] += (b == 0x37) ? 6 : -6;
            ((uint8_t *)&_regs.eax)[1] += (b == 0x37) ? 1 : -1;
            _regs.eflags |= EFLG_CF | EFLG_AF;
        }
        ((uint8_t *)&_regs.eax)[0] &= 0x0f;
        break;

    case 0x40 ... 0x4f: /* inc/dec reg */
        dst.type  = OP_REG;
        dst.reg   = decode_register(b & 7, &_regs, 0);
        dst.bytes = op_bytes;
        dst.val   = *dst.reg;
        if ( b & 8 )
            emulate_1op("dec", dst, _regs.eflags);
        else
            emulate_1op("inc", dst, _regs.eflags);
        break;

    case 0x50 ... 0x57: /* push reg */
        src.val = *(unsigned long *)decode_register(
            (b & 7) | ((rex_prefix & 1) << 3), &_regs, 0);
        goto push;

    case 0x58 ... 0x5f: /* pop reg */
        dst.type  = OP_REG;
        dst.reg   = decode_register(
            (b & 7) | ((rex_prefix & 1) << 3), &_regs, 0);
        dst.bytes = op_bytes;
        if ( mode_64bit() && (dst.bytes == 4) )
            dst.bytes = 8;
        if ( (rc = read_ulong(x86_seg_ss, sp_post_inc(dst.bytes),
                              &dst.val, dst.bytes, ctxt, ops)) != 0 )
            goto done;
        break;

    case 0x60: /* pusha */ {
        int i;
        unsigned long regs[] = {
            _regs.eax, _regs.ecx, _regs.edx, _regs.ebx,
            _regs.esp, _regs.ebp, _regs.esi, _regs.edi };
        generate_exception_if(mode_64bit(), EXC_UD, -1);
        for ( i = 0; i < 8; i++ )
            if ( (rc = ops->write(x86_seg_ss, sp_pre_dec(op_bytes),
                                  &regs[i], op_bytes, ctxt)) != 0 )
                goto done;
        break;
    }

    case 0x61: /* popa */ {
        int i;
        unsigned long dummy_esp, *regs[] = {
            (unsigned long *)&_regs.edi, (unsigned long *)&_regs.esi,
            (unsigned long *)&_regs.ebp, (unsigned long *)&dummy_esp,
            (unsigned long *)&_regs.ebx, (unsigned long *)&_regs.edx,
            (unsigned long *)&_regs.ecx, (unsigned long *)&_regs.eax };
        generate_exception_if(mode_64bit(), EXC_UD, -1);
        for ( i = 0; i < 8; i++ )
        {
            if ( (rc = read_ulong(x86_seg_ss, sp_post_inc(op_bytes),
                                  &dst.val, op_bytes, ctxt, ops)) != 0 )
                goto done;
            switch ( op_bytes )
            {
            case 1: *(uint8_t  *)regs[i] = (uint8_t)dst.val; break;
            case 2: *(uint16_t *)regs[i] = (uint16_t)dst.val; break;
            case 4: *regs[i] = (uint32_t)dst.val; break; /* 64b: zero-ext */
            case 8: *regs[i] = dst.val; break;
            }
        }
        break;
    }

    case 0x62: /* bound */ {
        unsigned long src_val2;
        int lb, ub, idx;
        generate_exception_if(mode_64bit() || (src.type != OP_MEM),
                              EXC_UD, -1);
        if ( (rc = read_ulong(src.mem.seg, src.mem.off + op_bytes,
                              &src_val2, op_bytes, ctxt, ops)) )
            goto done;
        ub  = (op_bytes == 2) ? (int16_t)src_val2 : (int32_t)src_val2;
        lb  = (op_bytes == 2) ? (int16_t)src.val  : (int32_t)src.val;
        idx = (op_bytes == 2) ? (int16_t)dst.val  : (int32_t)dst.val;
        generate_exception_if((idx < lb) || (idx > ub), EXC_BR, -1);
        dst.type = OP_NONE;
        break;
    }

    case 0x63: /* movsxd (x86/64) / arpl (x86/32) */
        if ( mode_64bit() )
        {
            /* movsxd */
            if ( src.type == OP_REG )
                src.val = *(int32_t *)src.reg;
            else if ( (rc = read_ulong(src.mem.seg, src.mem.off,
                                       &src.val, 4, ctxt, ops)) )
                goto done;
            dst.val = (int32_t)src.val;
        }
        else
        {
            /* arpl */
            uint16_t src_val = dst.val;
            dst = src;
            _regs.eflags &= ~EFLG_ZF;
            _regs.eflags |= ((src_val & 3) > (dst.val & 3)) ? EFLG_ZF : 0;
            if ( _regs.eflags & EFLG_ZF )
                dst.val  = (dst.val & ~3) | (src_val & 3);
            else
                dst.type = OP_NONE;
            generate_exception_if(!in_protmode(ctxt, ops), EXC_UD, -1);
        }
        break;

    case 0x68: /* push imm{16,32,64} */
        src.val = ((op_bytes == 2)
                   ? (int32_t)insn_fetch_type(int16_t)
                   : insn_fetch_type(int32_t));
        goto push;

1857 case 0x69: /* imul imm16/32 */
1858 case 0x6b: /* imul imm8 */ {
1859 unsigned long src1; /* ModR/M source operand */
1860 if ( ea.type == OP_REG )
1861 src1 = *ea.reg;
1862 else if ( (rc = read_ulong(ea.mem.seg, ea.mem.off,
1863 &src1, op_bytes, ctxt, ops)) )
1864 goto done;
1865 _regs.eflags &= ~(EFLG_OF|EFLG_CF);
1866 switch ( dst.bytes )
1868 case 2:
1869 dst.val = ((uint32_t)(int16_t)src.val *
1870 (uint32_t)(int16_t)src1);
1871 if ( (int16_t)dst.val != (uint32_t)dst.val )
1872 _regs.eflags |= EFLG_OF|EFLG_CF;
1873 break;
1874 #ifdef __x86_64__
1875 case 4:
1876 dst.val = ((uint64_t)(int32_t)src.val *
1877 (uint64_t)(int32_t)src1);
1878 if ( (int32_t)dst.val != dst.val )
1879 _regs.eflags |= EFLG_OF|EFLG_CF;
1880 break;
1881 #endif
1882 default: {
1883 unsigned long m[2] = { src.val, src1 };
1884 if ( imul_dbl(m) )
1885 _regs.eflags |= EFLG_OF|EFLG_CF;
1886 dst.val = m[0];
1887 break;
1890 break;
1893 case 0x6a: /* push imm8 */
1894 src.val = insn_fetch_type(int8_t);
1895 push:
1896 d |= Mov; /* force writeback */
1897 dst.type = OP_MEM;
1898 dst.bytes = op_bytes;
1899 if ( mode_64bit() && (dst.bytes == 4) )
1900 dst.bytes = 8;
1901 dst.val = src.val;
1902 dst.mem.seg = x86_seg_ss;
1903 dst.mem.off = sp_pre_dec(dst.bytes);
1904 break;
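/*
 * The 'push' label above is the common tail for PUSH imm, PUSHF and
 * CALL: the value to be pushed arrives in src.val and the store is
 * expressed as an OP_MEM writeback to SS:[decremented ESP]. Mov is
 * OR'ed into d because the generic writeback skips stores whose value
 * matches dst.orig_val, and orig_val was never fetched for a push.
 */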
1906 case 0x6c ... 0x6d: /* ins %dx,%es:%edi */ {
1907 unsigned long nr_reps = get_rep_prefix();
1908 unsigned int port = (uint16_t)_regs.edx;
1909 dst.bytes = !(b & 1) ? 1 : (op_bytes == 8) ? 4 : op_bytes;
1910 dst.mem.seg = x86_seg_es;
1911 dst.mem.off = truncate_ea_and_reps(_regs.edi, nr_reps, dst.bytes);
1912 if ( (rc = ioport_access_check(port, dst.bytes, ctxt, ops)) != 0 )
1913 goto done;
1914 if ( (nr_reps > 1) && (ops->rep_ins != NULL) &&
1915 ((rc = ops->rep_ins(port, dst.mem.seg, dst.mem.off, dst.bytes,
1916 &nr_reps, ctxt)) != X86EMUL_UNHANDLEABLE) )
1918 if ( rc != 0 )
1919 goto done;
1921 else
1923 fail_if(ops->read_io == NULL);
1924 if ( (rc = ops->read_io(port, dst.bytes, &dst.val, ctxt)) != 0 )
1925 goto done;
1926 dst.type = OP_MEM;
1927 nr_reps = 1;
1929 register_address_increment(
1930 _regs.edi,
1931 nr_reps * ((_regs.eflags & EFLG_DF) ? -dst.bytes : dst.bytes));
1932 put_rep_prefix(nr_reps);
1933 break;
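/*
 * INS prefers the batched ops->rep_ins() hook; when it is absent or
 * returns X86EMUL_UNHANDLEABLE, a single element is transferred via
 * read_io() plus the generic memory writeback, and put_rep_prefix()
 * arranges for a REP-prefixed instruction to be re-executed for the
 * remaining iterations.
 */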
1936 case 0x6e ... 0x6f: /* outs %esi,%dx */ {
1937 unsigned long nr_reps = get_rep_prefix();
1938 unsigned int port = (uint16_t)_regs.edx;
1939 dst.bytes = !(b & 1) ? 1 : (op_bytes == 8) ? 4 : op_bytes;
1940 ea.mem.off = truncate_ea_and_reps(_regs.esi, nr_reps, dst.bytes);
1941 if ( (rc = ioport_access_check(port, dst.bytes, ctxt, ops)) != 0 )
1942 goto done;
1943 if ( (nr_reps > 1) && (ops->rep_outs != NULL) &&
1944 ((rc = ops->rep_outs(ea.mem.seg, ea.mem.off, port, dst.bytes,
1945 &nr_reps, ctxt)) != X86EMUL_UNHANDLEABLE) )
1947 if ( rc != 0 )
1948 goto done;
1950 else
1952 if ( (rc = read_ulong(ea.mem.seg, truncate_ea(_regs.esi),
1953 &dst.val, dst.bytes, ctxt, ops)) != 0 )
1954 goto done;
1955 fail_if(ops->write_io == NULL);
1956 if ( (rc = ops->write_io(port, dst.bytes, dst.val, ctxt)) != 0 )
1957 goto done;
1958 nr_reps = 1;
1960 register_address_increment(
1961 _regs.esi,
1962 nr_reps * ((_regs.eflags & EFLG_DF) ? -dst.bytes : dst.bytes));
1963 put_rep_prefix(nr_reps);
1964 break;
1967 case 0x70 ... 0x7f: /* jcc (short) */ {
1968 int rel = insn_fetch_type(int8_t);
1969 if ( test_cc(b, _regs.eflags) )
1970 jmp_rel(rel);
1971 break;
1974 case 0x82: /* Grp1 (x86/32 only) */
1975 generate_exception_if(mode_64bit(), EXC_UD, -1);
1976 case 0x80: case 0x81: case 0x83: /* Grp1 */
1977 switch ( modrm_reg & 7 )
1979 case 0: goto add;
1980 case 1: goto or;
1981 case 2: goto adc;
1982 case 3: goto sbb;
1983 case 4: goto and;
1984 case 5: goto sub;
1985 case 6: goto xor;
1986 case 7: goto cmp;
1988 break;
1990 case 0xa8 ... 0xa9: /* test imm,%%eax */
1991 dst.reg = (unsigned long *)&_regs.eax;
1992 dst.val = _regs.eax;
1993 case 0x84 ... 0x85: test: /* test */
1994 emulate_2op_SrcV("test", src, dst, _regs.eflags);
1995 dst.type = OP_NONE;
1996 break;
1998 case 0x86 ... 0x87: xchg: /* xchg */
1999 /* Write back the register source. */
2000 switch ( dst.bytes )
2002 case 1: *(uint8_t *)src.reg = (uint8_t)dst.val; break;
2003 case 2: *(uint16_t *)src.reg = (uint16_t)dst.val; break;
2004 case 4: *src.reg = (uint32_t)dst.val; break; /* 64b reg: zero-extend */
2005 case 8: *src.reg = dst.val; break;
2007 /* Write back the memory destination with implicit LOCK prefix. */
2008 dst.val = src.val;
2009 lock_prefix = 1;
2010 break;
2012 case 0xc6 ... 0xc7: /* mov (sole member of Grp11) */
2013 generate_exception_if((modrm_reg & 7) != 0, EXC_UD, -1);
2014 case 0x88 ... 0x8b: /* mov */
2015 dst.val = src.val;
2016 break;
2018 case 0x8c: /* mov Sreg,r/m */ {
2019 struct segment_register reg;
2020 enum x86_segment seg = decode_segment(modrm_reg);
2021 generate_exception_if(seg == decode_segment_failed, EXC_UD, -1);
2022 fail_if(ops->read_segment == NULL);
2023 if ( (rc = ops->read_segment(seg, &reg, ctxt)) != 0 )
2024 goto done;
2025 dst.val = reg.sel;
2026 if ( dst.type == OP_MEM )
2027 dst.bytes = 2;
2028 break;
2031 case 0x8e: /* mov r/m,Sreg */ {
2032 enum x86_segment seg = decode_segment(modrm_reg);
2033 generate_exception_if(seg == decode_segment_failed, EXC_UD, -1);
2034 if ( (rc = load_seg(seg, (uint16_t)src.val, ctxt, ops)) != 0 )
2035 goto done;
2036 if ( seg == x86_seg_ss )
2037 ctxt->retire.flags.mov_ss = 1;
2038 dst.type = OP_NONE;
2039 break;
2042 case 0x8d: /* lea */
2043 dst.val = ea.mem.off;
2044 break;
2046 case 0x8f: /* pop (sole member of Grp1a) */
2047 generate_exception_if((modrm_reg & 7) != 0, EXC_UD, -1);
2048 /* 64-bit mode: POP defaults to a 64-bit operand. */
2049 if ( mode_64bit() && (dst.bytes == 4) )
2050 dst.bytes = 8;
2051 if ( (rc = read_ulong(x86_seg_ss, sp_post_inc(dst.bytes),
2052 &dst.val, dst.bytes, ctxt, ops)) != 0 )
2053 goto done;
2054 break;
2056 case 0x90: /* nop / xchg %%r8,%%rax */
2057 if ( !(rex_prefix & 1) )
2058 break; /* nop */
2060 case 0x91 ... 0x97: /* xchg reg,%%rax */
2061 src.type = dst.type = OP_REG;
2062 src.bytes = dst.bytes = op_bytes;
2063 src.reg = (unsigned long *)&_regs.eax;
2064 src.val = *src.reg;
2065 dst.reg = decode_register(
2066 (b & 7) | ((rex_prefix & 1) << 3), &_regs, 0);
2067 dst.val = *dst.reg;
2068 goto xchg;
2070 case 0x98: /* cbw/cwde/cdqe */
2071 switch ( op_bytes )
2073 case 2: *(int16_t *)&_regs.eax = (int8_t)_regs.eax; break; /* cbw */
2074 case 4: _regs.eax = (uint32_t)(int16_t)_regs.eax; break; /* cwde */
2075 case 8: _regs.eax = (int32_t)_regs.eax; break; /* cdqe */
2077 break;
2079 case 0x99: /* cwd/cdq/cqo */
2080 switch ( op_bytes )
2082 case 2:
2083 *(int16_t *)&_regs.edx = ((int16_t)_regs.eax < 0) ? -1 : 0;
2084 break;
2085 case 4:
2086 _regs.edx = (uint32_t)(((int32_t)_regs.eax < 0) ? -1 : 0);
2087 break;
2088 case 8:
2089 _regs.edx = ((long)_regs.eax < 0) ? -1 : 0;
2090 break;
2092 break;
2094 case 0x9a: /* call (far, absolute) */ {
2095 struct segment_register reg;
2096 uint16_t sel;
2097 uint32_t eip;
2099 fail_if(ops->read_segment == NULL);
2100 generate_exception_if(mode_64bit(), EXC_UD, -1);
2102 eip = insn_fetch_bytes(op_bytes);
2103 sel = insn_fetch_type(uint16_t);
2105 if ( (rc = ops->read_segment(x86_seg_cs, &reg, ctxt)) ||
2106 (rc = ops->write(x86_seg_ss, sp_pre_dec(op_bytes),
2107 &reg.sel, op_bytes, ctxt)) ||
2108 (rc = ops->write(x86_seg_ss, sp_pre_dec(op_bytes),
2109 &_regs.eip, op_bytes, ctxt)) )
2110 goto done;
2112 if ( (rc = load_seg(x86_seg_cs, sel, ctxt, ops)) != 0 )
2113 goto done;
2114 _regs.eip = eip;
2115 break;
2118 case 0x9b: /* wait/fwait */
2119 emulate_fpu_insn("fwait");
2120 break;
2122 case 0x9c: /* pushf */
2123 src.val = _regs.eflags;
2124 goto push;
2126 case 0x9d: /* popf */ {
2127 uint32_t mask = EFLG_VIP | EFLG_VIF | EFLG_VM;
2128 if ( !mode_ring0() )
2129 mask |= EFLG_IOPL;
2130 if ( !mode_iopl() )
2131 mask |= EFLG_IF;
2132 /* 64-bit mode: POP defaults to a 64-bit operand. */
2133 if ( mode_64bit() && (op_bytes == 4) )
2134 op_bytes = 8;
2135 if ( (rc = read_ulong(x86_seg_ss, sp_post_inc(op_bytes),
2136 &dst.val, op_bytes, ctxt, ops)) != 0 )
2137 goto done;
2138 if ( op_bytes == 2 )
2139 dst.val = (uint16_t)dst.val | (_regs.eflags & 0xffff0000u);
2140 dst.val &= 0x257fd5;
2141 _regs.eflags &= mask;
2142 _regs.eflags |= (uint32_t)(dst.val & ~mask) | 0x02;
2143 break;
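/*
 * 'mask' collects the EFLAGS bits POPF must preserve at the current
 * privilege level: IOPL is writable only in ring 0, IF only with
 * sufficient IOPL, and VIP/VIF/VM never change via POPF. Bit 1 (0x02)
 * is architecturally always set.
 */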
2146 case 0x9e: /* sahf */
2147 *(uint8_t *)&_regs.eflags = (((uint8_t *)&_regs.eax)[1] & 0xd7) | 0x02;
2148 break;
2150 case 0x9f: /* lahf */
2151 ((uint8_t *)&_regs.eax)[1] = (_regs.eflags & 0xd7) | 0x02;
2152 break;
2154 case 0xa0 ... 0xa1: /* mov mem.offs,{%al,%ax,%eax,%rax} */
2155 /* Source EA is not encoded via ModRM. */
2156 dst.type = OP_REG;
2157 dst.reg = (unsigned long *)&_regs.eax;
2158 dst.bytes = (d & ByteOp) ? 1 : op_bytes;
2159 if ( (rc = read_ulong(ea.mem.seg, insn_fetch_bytes(ad_bytes),
2160 &dst.val, dst.bytes, ctxt, ops)) != 0 )
2161 goto done;
2162 break;
2164 case 0xa2 ... 0xa3: /* mov {%al,%ax,%eax,%rax},mem.offs */
2165 /* Destination EA is not encoded via ModRM. */
2166 dst.type = OP_MEM;
2167 dst.mem.seg = ea.mem.seg;
2168 dst.mem.off = insn_fetch_bytes(ad_bytes);
2169 dst.bytes = (d & ByteOp) ? 1 : op_bytes;
2170 dst.val = (unsigned long)_regs.eax;
2171 break;
2173 case 0xa4 ... 0xa5: /* movs */ {
2174 unsigned long nr_reps = get_rep_prefix();
2175 dst.bytes = (d & ByteOp) ? 1 : op_bytes;
2176 dst.mem.seg = x86_seg_es;
2177 dst.mem.off = truncate_ea_and_reps(_regs.edi, nr_reps, dst.bytes);
2178 if ( (nr_reps > 1) && (ops->rep_movs != NULL) &&
2179 ((rc = ops->rep_movs(ea.mem.seg, truncate_ea(_regs.esi),
2180 dst.mem.seg, dst.mem.off, dst.bytes,
2181 &nr_reps, ctxt)) != X86EMUL_UNHANDLEABLE) )
2183 if ( rc != 0 )
2184 goto done;
2186 else
2188 if ( (rc = read_ulong(ea.mem.seg, truncate_ea(_regs.esi),
2189 &dst.val, dst.bytes, ctxt, ops)) != 0 )
2190 goto done;
2191 dst.type = OP_MEM;
2192 nr_reps = 1;
2194 register_address_increment(
2195 _regs.esi,
2196 nr_reps * ((_regs.eflags & EFLG_DF) ? -dst.bytes : dst.bytes));
2197 register_address_increment(
2198 _regs.edi,
2199 nr_reps * ((_regs.eflags & EFLG_DF) ? -dst.bytes : dst.bytes));
2200 put_rep_prefix(nr_reps);
2201 break;
2204 case 0xa6 ... 0xa7: /* cmps */ {
2205 unsigned long next_eip = _regs.eip;
2206 get_rep_prefix();
2207 src.bytes = dst.bytes = (d & ByteOp) ? 1 : op_bytes;
2208 if ( (rc = read_ulong(ea.mem.seg, truncate_ea(_regs.esi),
2209 &dst.val, dst.bytes, ctxt, ops)) ||
2210 (rc = read_ulong(x86_seg_es, truncate_ea(_regs.edi),
2211 &src.val, src.bytes, ctxt, ops)) )
2212 goto done;
2213 register_address_increment(
2214 _regs.esi, (_regs.eflags & EFLG_DF) ? -dst.bytes : dst.bytes);
2215 register_address_increment(
2216 _regs.edi, (_regs.eflags & EFLG_DF) ? -src.bytes : src.bytes);
2217 put_rep_prefix(1);
2218 /* cmp: dst - src ==> src=*%%edi,dst=*%%esi ==> *%%esi - *%%edi */
2219 emulate_2op_SrcV("cmp", src, dst, _regs.eflags);
2220 if ( ((rep_prefix == REPE_PREFIX) && !(_regs.eflags & EFLG_ZF)) ||
2221 ((rep_prefix == REPNE_PREFIX) && (_regs.eflags & EFLG_ZF)) )
2222 _regs.eip = next_eip;
2223 break;
2226 case 0xaa ... 0xab: /* stos */ {
2227 /* unsigned long max_reps = */get_rep_prefix();
2228 dst.type = OP_MEM;
2229 dst.bytes = (d & ByteOp) ? 1 : op_bytes;
2230 dst.mem.seg = x86_seg_es;
2231 dst.mem.off = truncate_ea(_regs.edi);
2232 dst.val = _regs.eax;
2233 register_address_increment(
2234 _regs.edi, (_regs.eflags & EFLG_DF) ? -dst.bytes : dst.bytes);
2235 put_rep_prefix(1);
2236 break;
2239 case 0xac ... 0xad: /* lods */ {
2240 /* unsigned long max_reps = */get_rep_prefix();
2241 dst.type = OP_REG;
2242 dst.bytes = (d & ByteOp) ? 1 : op_bytes;
2243 dst.reg = (unsigned long *)&_regs.eax;
2244 if ( (rc = read_ulong(ea.mem.seg, truncate_ea(_regs.esi),
2245 &dst.val, dst.bytes, ctxt, ops)) != 0 )
2246 goto done;
2247 register_address_increment(
2248 _regs.esi, (_regs.eflags & EFLG_DF) ? -dst.bytes : dst.bytes);
2249 put_rep_prefix(1);
2250 break;
2253 case 0xae ... 0xaf: /* scas */ {
2254 unsigned long next_eip = _regs.eip;
2255 get_rep_prefix();
2256 src.bytes = dst.bytes = (d & ByteOp) ? 1 : op_bytes;
2257 dst.val = _regs.eax;
2258 if ( (rc = read_ulong(x86_seg_es, truncate_ea(_regs.edi),
2259 &src.val, src.bytes, ctxt, ops)) != 0 )
2260 goto done;
2261 register_address_increment(
2262 _regs.edi, (_regs.eflags & EFLG_DF) ? -src.bytes : src.bytes);
2263 put_rep_prefix(1);
2264 /* cmp: dst - src ==> src=*%%edi,dst=%%eax ==> %%eax - *%%edi */
2265 emulate_2op_SrcV("cmp", src, dst, _regs.eflags);
2266 if ( ((rep_prefix == REPE_PREFIX) && !(_regs.eflags & EFLG_ZF)) ||
2267 ((rep_prefix == REPNE_PREFIX) && (_regs.eflags & EFLG_ZF)) )
2268 _regs.eip = next_eip;
2269 break;
2272 case 0xb0 ... 0xb7: /* mov imm8,r8 */
2273 dst.reg = decode_register(
2274 (b & 7) | ((rex_prefix & 1) << 3), &_regs, (rex_prefix == 0));
2275 dst.val = src.val;
2276 break;
2278 case 0xb8 ... 0xbf: /* mov imm{16,32,64},r{16,32,64} */
2279 if ( dst.bytes == 8 ) /* Fetch more bytes to obtain imm64 */
2280 src.val = ((uint32_t)src.val |
2281 ((uint64_t)insn_fetch_type(uint32_t) << 32));
2282 dst.reg = decode_register(
2283 (b & 7) | ((rex_prefix & 1) << 3), &_regs, 0);
2284 dst.val = src.val;
2285 break;
2287 case 0xc0 ... 0xc1: grp2: /* Grp2 */
2288 switch ( modrm_reg & 7 )
2290 case 0: /* rol */
2291 emulate_2op_SrcB("rol", src, dst, _regs.eflags);
2292 break;
2293 case 1: /* ror */
2294 emulate_2op_SrcB("ror", src, dst, _regs.eflags);
2295 break;
2296 case 2: /* rcl */
2297 emulate_2op_SrcB("rcl", src, dst, _regs.eflags);
2298 break;
2299 case 3: /* rcr */
2300 emulate_2op_SrcB("rcr", src, dst, _regs.eflags);
2301 break;
2302 case 4: /* sal/shl */
2303 case 6: /* sal/shl */
2304 emulate_2op_SrcB("sal", src, dst, _regs.eflags);
2305 break;
2306 case 5: /* shr */
2307 emulate_2op_SrcB("shr", src, dst, _regs.eflags);
2308 break;
2309 case 7: /* sar */
2310 emulate_2op_SrcB("sar", src, dst, _regs.eflags);
2311 break;
2313 break;
2315 case 0xc2: /* ret imm16 (near) */
2316 case 0xc3: /* ret (near) */ {
2317 int offset = (b == 0xc2) ? insn_fetch_type(uint16_t) : 0;
2318 op_bytes = mode_64bit() ? 8 : op_bytes;
2319 if ( (rc = read_ulong(x86_seg_ss, sp_post_inc(op_bytes + offset),
2320 &dst.val, op_bytes, ctxt, ops)) != 0 )
2321 goto done;
2322 _regs.eip = dst.val;
2323 break;
2326 case 0xc4: /* les */ {
2327 unsigned long sel;
2328 dst.val = x86_seg_es;
2329 les: /* dst.val identifies the segment */
2330 generate_exception_if(src.type != OP_MEM, EXC_UD, -1);
2331 if ( (rc = read_ulong(src.mem.seg, src.mem.off + src.bytes,
2332 &sel, 2, ctxt, ops)) != 0 )
2333 goto done;
2334 if ( (rc = load_seg(dst.val, (uint16_t)sel, ctxt, ops)) != 0 )
2335 goto done;
2336 dst.val = src.val;
2337 break;
2340 case 0xc5: /* lds */
2341 dst.val = x86_seg_ds;
2342 goto les;
2344 case 0xc8: /* enter imm16,imm8 */ {
2345 uint16_t size = insn_fetch_type(uint16_t);
2346 uint8_t depth = insn_fetch_type(uint8_t) & 31;
2347 int i;
2349 dst.type = OP_REG;
2350 dst.bytes = (mode_64bit() && (op_bytes == 4)) ? 8 : op_bytes;
2351 dst.reg = (unsigned long *)&_regs.ebp;
2352 if ( (rc = ops->write(x86_seg_ss, sp_pre_dec(dst.bytes),
2353 &_regs.ebp, dst.bytes, ctxt)) )
2354 goto done;
2355 dst.val = _regs.esp;
2357 if ( depth > 0 )
2359 for ( i = 1; i < depth; i++ )
2361 unsigned long ebp, temp_data;
2362 ebp = truncate_word(_regs.ebp - i*dst.bytes, ctxt->sp_size/8);
2363 if ( (rc = read_ulong(x86_seg_ss, ebp,
2364 &temp_data, dst.bytes, ctxt, ops)) ||
2365 (rc = ops->write(x86_seg_ss, sp_pre_dec(dst.bytes),
2366 &temp_data, dst.bytes, ctxt)) )
2367 goto done;
2369 if ( (rc = ops->write(x86_seg_ss, sp_pre_dec(dst.bytes),
2370 &dst.val, dst.bytes, ctxt)) )
2371 goto done;
2374 sp_pre_dec(size);
2375 break;
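/*
 * ENTER with nesting depth N > 0 copies N-1 frame pointers from the
 * old frame and then pushes the new frame pointer (saved in dst.val
 * above and written back to %ebp by the generic path); the final
 * sp_pre_dec(size) merely reserves the local-variable area.
 */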
2378 case 0xc9: /* leave */
2379 /* First writeback, to %%esp. */
2380 dst.type = OP_REG;
2381 dst.bytes = (mode_64bit() && (op_bytes == 4)) ? 8 : op_bytes;
2382 dst.reg = (unsigned long *)&_regs.esp;
2383 dst.val = _regs.ebp;
2385 /* Flush first writeback, since there is a second. */
2386 switch ( dst.bytes )
2388 case 1: *(uint8_t *)dst.reg = (uint8_t)dst.val; break;
2389 case 2: *(uint16_t *)dst.reg = (uint16_t)dst.val; break;
2390 case 4: *dst.reg = (uint32_t)dst.val; break; /* 64b: zero-ext */
2391 case 8: *dst.reg = dst.val; break;
2394 /* Second writeback, to %%ebp. */
2395 dst.reg = (unsigned long *)&_regs.ebp;
2396 if ( (rc = read_ulong(x86_seg_ss, sp_post_inc(dst.bytes),
2397 &dst.val, dst.bytes, ctxt, ops)) )
2398 goto done;
2399 break;
2401 case 0xca: /* ret imm16 (far) */
2402 case 0xcb: /* ret (far) */ {
2403 int offset = (b == 0xca) ? insn_fetch_type(uint16_t) : 0;
2404 op_bytes = mode_64bit() ? 8 : op_bytes;
2405 if ( (rc = read_ulong(x86_seg_ss, sp_post_inc(op_bytes),
2406 &dst.val, op_bytes, ctxt, ops)) ||
2407 (rc = read_ulong(x86_seg_ss, sp_post_inc(op_bytes + offset),
2408 &src.val, op_bytes, ctxt, ops)) ||
2409 (rc = load_seg(x86_seg_cs, (uint16_t)src.val, ctxt, ops)) )
2410 goto done;
2411 _regs.eip = dst.val;
2412 break;
2415 case 0xcc: /* int3 */
2416 src.val = EXC_BP;
2417 goto swint;
2419 case 0xcd: /* int imm8 */
2420 src.val = insn_fetch_type(uint8_t);
2421 swint:
2422 fail_if(ops->inject_sw_interrupt == NULL);
2423 rc = ops->inject_sw_interrupt(src.val, _regs.eip - ctxt->regs->eip,
2424 ctxt) ? : X86EMUL_EXCEPTION;
2425 goto done;
2427 case 0xce: /* into */
2428 generate_exception_if(mode_64bit(), EXC_UD, -1);
2429 if ( !(_regs.eflags & EFLG_OF) )
2430 break;
2431 src.val = EXC_OF;
2432 goto swint;
2434 case 0xcf: /* iret */ {
2435 unsigned long cs, eip, eflags;
2436 uint32_t mask = EFLG_VIP | EFLG_VIF | EFLG_VM;
2437 if ( !mode_ring0() )
2438 mask |= EFLG_IOPL;
2439 if ( !mode_iopl() )
2440 mask |= EFLG_IF;
2441 fail_if(!in_realmode(ctxt, ops));
2442 if ( (rc = read_ulong(x86_seg_ss, sp_post_inc(op_bytes),
2443 &eip, op_bytes, ctxt, ops)) ||
2444 (rc = read_ulong(x86_seg_ss, sp_post_inc(op_bytes),
2445 &cs, op_bytes, ctxt, ops)) ||
2446 (rc = read_ulong(x86_seg_ss, sp_post_inc(op_bytes),
2447 &eflags, op_bytes, ctxt, ops)) )
2448 goto done;
2449 if ( op_bytes == 2 )
2450 eflags = (uint16_t)eflags | (_regs.eflags & 0xffff0000u);
2451 eflags &= 0x257fd5;
2452 _regs.eflags &= mask;
2453 _regs.eflags |= (uint32_t)(eflags & ~mask) | 0x02;
2454 _regs.eip = eip;
2455 if ( (rc = load_seg(x86_seg_cs, (uint16_t)cs, ctxt, ops)) != 0 )
2456 goto done;
2457 break;
2460 case 0xd0 ... 0xd1: /* Grp2 */
2461 src.val = 1;
2462 goto grp2;
2464 case 0xd2 ... 0xd3: /* Grp2 */
2465 src.val = _regs.ecx;
2466 goto grp2;
2468 case 0xd4: /* aam */ {
2469 unsigned int base = insn_fetch_type(uint8_t);
2470 uint8_t al = _regs.eax;
2471 generate_exception_if(mode_64bit(), EXC_UD, -1);
2472 generate_exception_if(base == 0, EXC_DE, -1);
2473 *(uint16_t *)&_regs.eax = ((al / base) << 8) | (al % base);
2474 _regs.eflags &= ~(EFLG_SF|EFLG_ZF|EFLG_PF);
2475 _regs.eflags |= ((uint8_t)_regs.eax == 0) ? EFLG_ZF : 0;
2476 _regs.eflags |= (( int8_t)_regs.eax < 0) ? EFLG_SF : 0;
2477 _regs.eflags |= even_parity(_regs.eax) ? EFLG_PF : 0;
2478 break;
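/*
 * Worked example: AAM with the default base 10 and %al = 0x37 (55)
 * yields %ah = 55 / 10 = 5 and %al = 55 % 10 = 5, i.e. %ax = 0x0505.
 * An immediate base of zero raises #DE, as on hardware.
 */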
2481 case 0xd5: /* aad */ {
2482 unsigned int base = insn_fetch_type(uint8_t);
2483 uint16_t ax = _regs.eax;
2484 generate_exception_if(mode_64bit(), EXC_UD, -1);
2485 *(uint16_t *)&_regs.eax = (uint8_t)(ax + ((ax >> 8) * base));
2486 _regs.eflags &= ~(EFLG_SF|EFLG_ZF|EFLG_PF);
2487 _regs.eflags |= ((uint8_t)_regs.eax == 0) ? EFLG_ZF : 0;
2488 _regs.eflags |= (( int8_t)_regs.eax < 0) ? EFLG_SF : 0;
2489 _regs.eflags |= even_parity(_regs.eax) ? EFLG_PF : 0;
2490 break;
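/*
 * Worked example: AAD with base 10 and %ax = 0x0505 computes
 * %al = 5 + 5 * 10 = 0x37 (55) and clears %ah, undoing the AAM above.
 */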
2493 case 0xd6: /* salc */
2494 generate_exception_if(mode_64bit(), EXC_UD, -1);
2495 *(uint8_t *)&_regs.eax = (_regs.eflags & EFLG_CF) ? 0xff : 0x00;
2496 break;
2498 case 0xd7: /* xlat */ {
2499 unsigned long al = (uint8_t)_regs.eax;
2500 if ( (rc = read_ulong(ea.mem.seg, truncate_ea(_regs.ebx + al),
2501 &al, 1, ctxt, ops)) != 0 )
2502 goto done;
2503 *(uint8_t *)&_regs.eax = al;
2504 break;
2507 case 0xd8: /* FPU 0xd8 */
2508 switch ( modrm )
2510 case 0xc0 ... 0xc7: /* fadd %stN,%stN */
2511 case 0xc8 ... 0xcf: /* fmul %stN,%stN */
2512 case 0xd0 ... 0xd7: /* fcom %stN,%stN */
2513 case 0xd8 ... 0xdf: /* fcomp %stN,%stN */
2514 case 0xe0 ... 0xe7: /* fsub %stN,%stN */
2515 case 0xe8 ... 0xef: /* fsubr %stN,%stN */
2516 case 0xf0 ... 0xf7: /* fdiv %stN,%stN */
2517 case 0xf8 ... 0xff: /* fdivr %stN,%stN */
2518 emulate_fpu_insn_stub(0xd8, modrm);
2519 break;
2520 default:
2521 fail_if(modrm >= 0xc0);
2522 ea.bytes = 4;
2523 src = ea;
2524 if ( (rc = ops->read(src.mem.seg, src.mem.off, &src.val,
2525 src.bytes, ctxt)) != 0 )
2526 goto done;
2527 switch ( modrm_reg & 7 )
2529 case 0: /* fadd */
2530 emulate_fpu_insn_memsrc("fadds", src.val);
2531 break;
2532 case 1: /* fmul */
2533 emulate_fpu_insn_memsrc("fmuls", src.val);
2534 break;
2535 case 2: /* fcom */
2536 emulate_fpu_insn_memsrc("fcoms", src.val);
2537 break;
2538 case 3: /* fcomp */
2539 emulate_fpu_insn_memsrc("fcomps", src.val);
2540 break;
2541 case 4: /* fsub */
2542 emulate_fpu_insn_memsrc("fsubs", src.val);
2543 break;
2544 case 5: /* fsubr */
2545 emulate_fpu_insn_memsrc("fsubrs", src.val);
2546 break;
2547 case 6: /* fdiv */
2548 emulate_fpu_insn_memsrc("fdivs", src.val);
2549 break;
2550 case 7: /* fdivr */
2551 emulate_fpu_insn_memsrc("fdivrs", src.val);
2552 break;
2553 default:
2554 goto cannot_emulate;
2557 break;
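/*
 * Pattern for the FPU cases 0xd8-0xdf: register-only forms are bounced
 * through emulate_fpu_insn_stub(), which executes a copy of the
 * original opcode bytes on the host FPU, while memory forms first copy
 * the operand into src.val/dst.val and then run an equivalent
 * instruction on that local copy via the _memsrc()/_memdst() variants.
 */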
2559 case 0xd9: /* FPU 0xd9 */
2560 switch ( modrm )
2562 case 0xc0 ... 0xc7: /* fld %stN */
2563 case 0xc8 ... 0xcf: /* fxch %stN */
2564 case 0xd0: /* fnop */
2565 case 0xe0: /* fchs */
2566 case 0xe1: /* fabs */
2567 case 0xe4: /* ftst */
2568 case 0xe5: /* fxam */
2569 case 0xe8: /* fld1 */
2570 case 0xe9: /* fldl2t */
2571 case 0xea: /* fldl2e */
2572 case 0xeb: /* fldpi */
2573 case 0xec: /* fldlg2 */
2574 case 0xed: /* fldln2 */
2575 case 0xee: /* fldz */
2576 case 0xf0: /* f2xm1 */
2577 case 0xf1: /* fyl2x */
2578 case 0xf2: /* fptan */
2579 case 0xf3: /* fpatan */
2580 case 0xf4: /* fxtract */
2581 case 0xf5: /* fprem1 */
2582 case 0xf6: /* fdecstp */
2583 case 0xf7: /* fincstp */
2584 case 0xf8: /* fprem */
2585 case 0xf9: /* fyl2xp1 */
2586 case 0xfa: /* fsqrt */
2587 case 0xfb: /* fsincos */
2588 case 0xfc: /* frndint */
2589 case 0xfd: /* fscale */
2590 case 0xfe: /* fsin */
2591 case 0xff: /* fcos */
2592 emulate_fpu_insn_stub(0xd9, modrm);
2593 break;
2594 default:
2595 fail_if(modrm >= 0xc0);
2596 switch ( modrm_reg & 7 )
2598 case 0: /* fld m32fp */
2599 ea.bytes = 4;
2600 src = ea;
2601 if ( (rc = ops->read(ea.mem.seg, ea.mem.off, &src.val,
2602 src.bytes, ctxt)) != 0 )
2603 goto done;
2604 emulate_fpu_insn_memsrc("flds", src.val);
2605 break;
2606 case 2: /* fst m32fp */
2607 ea.bytes = 4;
2608 dst = ea;
2609 dst.type = OP_MEM;
2610 emulate_fpu_insn_memdst("fsts", dst.val);
2611 break;
2612 case 3: /* fstp m32fp */
2613 ea.bytes = 4;
2614 dst = ea;
2615 dst.type = OP_MEM;
2616 emulate_fpu_insn_memdst("fstps", dst.val);
2617 break;
2618 /* case 4: fldenv - TODO */
2619 case 5: /* fldcw m2byte */
2620 ea.bytes = 2;
2621 src = ea;
2622 if ( (rc = ops->read(src.mem.seg, src.mem.off, &src.val,
2623 src.bytes, ctxt)) != 0 )
2624 goto done;
2625 emulate_fpu_insn_memsrc("fldcw", src.val);
2626 break;
2627 /* case 6: fstenv - TODO */
2628 case 7: /* fnstcw m2byte */
2629 ea.bytes = 2;
2630 dst = ea;
2631 dst.type = OP_MEM;
2632 emulate_fpu_insn_memdst("fnstcw", dst.val);
2633 break;
2634 default:
2635 goto cannot_emulate;
2638 break;
2640 case 0xda: /* FPU 0xda */
2641 switch ( modrm )
2643 case 0xc0 ... 0xc7: /* fcmovb %stN */
2644 case 0xc8 ... 0xcf: /* fcmove %stN */
2645 case 0xd0 ... 0xd7: /* fcmovbe %stN */
2646 case 0xd8 ... 0xdf: /* fcmovu %stN */
2647 case 0xe9: /* fucompp */
2648 emulate_fpu_insn_stub(0xda, modrm);
2649 break;
2650 default:
2651 fail_if(modrm >= 0xc0);
2652 ea.bytes = 4;
2653 src = ea;
2654 if ( (rc = ops->read(src.mem.seg, src.mem.off, &src.val,
2655 src.bytes, ctxt)) != 0 )
2656 goto done;
2657 switch ( modrm_reg & 7 )
2659 case 0: /* fiadd m32i */
2660 emulate_fpu_insn_memsrc("fiaddl", src.val);
2661 break;
2662 case 1: /* fimul m32i */
2663 emulate_fpu_insn_memsrc("fimull", src.val);
2664 break;
2665 case 2: /* ficom m32i */
2666 emulate_fpu_insn_memsrc("ficoml", src.val);
2667 break;
2668 case 3: /* ficomp m32i */
2669 emulate_fpu_insn_memsrc("ficompl", src.val);
2670 break;
2671 case 4: /* fisub m32i */
2672 emulate_fpu_insn_memsrc("fisubl", src.val);
2673 break;
2674 case 5: /* fisubr m32i */
2675 emulate_fpu_insn_memsrc("fisubrl", src.val);
2676 break;
2677 case 6: /* fidiv m32i */
2678 emulate_fpu_insn_memsrc("fidivl", src.val);
2679 break;
2680 case 7: /* fidivr m32i */
2681 emulate_fpu_insn_memsrc("fidivrl", src.val);
2682 break;
2683 default:
2684 goto cannot_emulate;
2687 break;
2689 case 0xdb: /* FPU 0xdb */
2690 switch ( modrm )
2692 case 0xc0 ... 0xc7: /* fcmovnb %stN */
2693 case 0xc8 ... 0xcf: /* fcmovne %stN */
2694 case 0xd0 ... 0xd7: /* fcmovnbe %stN */
2695 case 0xd8 ... 0xdf: /* fcmovnu %stN */
2696 emulate_fpu_insn_stub(0xdb, modrm);
2697 break;
2698 case 0xe2: /* fnclex */
2699 emulate_fpu_insn("fnclex");
2700 break;
2701 case 0xe3: /* fninit */
2702 emulate_fpu_insn("fninit");
2703 break;
2704 case 0xe4: /* fsetpm - 287 only, ignored by 387 */
2705 break;
2706 case 0xe8 ... 0xef: /* fucomi %stN */
2707 case 0xf0 ... 0xf7: /* fcomi %stN */
2708 emulate_fpu_insn_stub(0xdb, modrm);
2709 break;
2710 default:
2711 fail_if(modrm >= 0xc0);
2712 switch ( modrm_reg & 7 )
2714 case 0: /* fild m32i */
2715 ea.bytes = 4;
2716 src = ea;
2717 if ( (rc = ops->read(src.mem.seg, src.mem.off, &src.val,
2718 src.bytes, ctxt)) != 0 )
2719 goto done;
2720 emulate_fpu_insn_memsrc("fildl", src.val);
2721 break;
2722 case 1: /* fisttp m32i */
2723 ea.bytes = 4;
2724 dst = ea;
2725 dst.type = OP_MEM;
2726 emulate_fpu_insn_memdst("fisttpl", dst.val);
2727 break;
2728 case 2: /* fist m32i */
2729 ea.bytes = 4;
2730 dst = ea;
2731 dst.type = OP_MEM;
2732 emulate_fpu_insn_memdst("fistl", dst.val);
2733 break;
2734 case 3: /* fistp m32i */
2735 ea.bytes = 4;
2736 dst = ea;
2737 dst.type = OP_MEM;
2738 emulate_fpu_insn_memdst("fistpl", dst.val);
2739 break;
2740 case 5: /* fld m80fp */
2741 ea.bytes = 10;
2742 src = ea;
2743 if ( (rc = ops->read(src.mem.seg, src.mem.off,
2744 &src.val, src.bytes, ctxt)) != 0 )
2745 goto done;
2746 emulate_fpu_insn_memdst("fldt", src.val);
2747 break;
2748 case 7: /* fstp m80fp */
2749 ea.bytes = 10;
2750 dst = ea;
2751 dst.type = OP_MEM;
2752 emulate_fpu_insn_memdst("fstpt", dst.val);
2753 break;
2754 default:
2755 goto cannot_emulate;
2758 break;
2760 case 0xdc: /* FPU 0xdc */
2761 switch ( modrm )
2763 case 0xc0 ... 0xc7: /* fadd %stN */
2764 case 0xc8 ... 0xcf: /* fmul %stN */
2765 case 0xe0 ... 0xe7: /* fsubr %stN */
2766 case 0xe8 ... 0xef: /* fsub %stN */
2767 case 0xf0 ... 0xf7: /* fdivr %stN */
2768 case 0xf8 ... 0xff: /* fdiv %stN */
2769 emulate_fpu_insn_stub(0xdc, modrm);
2770 break;
2771 default:
2772 fail_if(modrm >= 0xc0);
2773 ea.bytes = 8;
2774 src = ea;
2775 if ( (rc = ops->read(src.mem.seg, src.mem.off, &src.val,
2776 src.bytes, ctxt)) != 0 )
2777 goto done;
2778 switch ( modrm_reg & 7 )
2780 case 0: /* fadd m64fp */
2781 emulate_fpu_insn_memsrc("faddl", src.val);
2782 break;
2783 case 1: /* fmul m64fp */
2784 emulate_fpu_insn_memsrc("fmull", src.val);
2785 break;
2786 case 2: /* fcom m64fp */
2787 emulate_fpu_insn_memsrc("fcoml", src.val);
2788 break;
2789 case 3: /* fcomp m64fp */
2790 emulate_fpu_insn_memsrc("fcompl", src.val);
2791 break;
2792 case 4: /* fsub m64fp */
2793 emulate_fpu_insn_memsrc("fsubl", src.val);
2794 break;
2795 case 5: /* fsubr m64fp */
2796 emulate_fpu_insn_memsrc("fsubrl", src.val);
2797 break;
2798 case 6: /* fdiv m64fp */
2799 emulate_fpu_insn_memsrc("fdivl", src.val);
2800 break;
2801 case 7: /* fdivr m64fp */
2802 emulate_fpu_insn_memsrc("fdivrl", src.val);
2803 break;
2806 break;
2808 case 0xdd: /* FPU 0xdd */
2809 switch ( modrm )
2811 case 0xc0 ... 0xc7: /* ffree %stN */
2812 case 0xd0 ... 0xd7: /* fst %stN */
2813 case 0xd8 ... 0xdf: /* fstp %stN */
2814 case 0xe0 ... 0xe7: /* fucom %stN */
2815 case 0xe8 ... 0xef: /* fucomp %stN */
2816 emulate_fpu_insn_stub(0xdd, modrm);
2817 break;
2818 default:
2819 fail_if(modrm >= 0xc0);
2820 switch ( modrm_reg & 7 )
2822 case 0: /* fld m64fp */
2823 ea.bytes = 8;
2824 src = ea;
2825 if ( (rc = ops->read(src.mem.seg, src.mem.off, &src.val,
2826 src.bytes, ctxt)) != 0 )
2827 goto done;
2828 emulate_fpu_insn_memsrc("fldl", src.val);
2829 break;
2830 case 1: /* fisttp m64i */
2831 ea.bytes = 8;
2832 dst = ea;
2833 dst.type = OP_MEM;
2834 emulate_fpu_insn_memdst("fisttpll", dst.val);
2835 break;
2836 case 2: /* fst m64fp */
2837 ea.bytes = 8;
2838 dst = ea;
2839 dst.type = OP_MEM;
2840 emulate_fpu_insn_memdst("fstl", dst.val);
2841 break;
2842 case 3: /* fstp m64fp */
2843 ea.bytes = 8;
2844 dst = ea;
2845 dst.type = OP_MEM;
2846 emulate_fpu_insn_memdst("fstpl", dst.val);
2847 break;
2848 case 7: /* fnstsw m2byte */
2849 ea.bytes = 2;
2850 dst = ea;
2851 dst.type = OP_MEM;
2852 emulate_fpu_insn_memdst("fnstsw", dst.val);
2853 break;
2854 default:
2855 goto cannot_emulate;
2858 break;
2860 case 0xde: /* FPU 0xde */
2861 switch ( modrm )
2863 case 0xc0 ... 0xc7: /* faddp %stN */
2864 case 0xc8 ... 0xcf: /* fmulp %stN */
2865 case 0xd9: /* fcompp */
2866 case 0xe0 ... 0xe7: /* fsubrp %stN */
2867 case 0xe8 ... 0xef: /* fsubp %stN */
2868 case 0xf0 ... 0xf7: /* fdivrp %stN */
2869 case 0xf8 ... 0xff: /* fdivp %stN */
2870 emulate_fpu_insn_stub(0xde, modrm);
2871 break;
2872 default:
2873 fail_if(modrm >= 0xc0);
2874 ea.bytes = 2;
2875 src = ea;
2876 if ( (rc = ops->read(src.mem.seg, src.mem.off, &src.val,
2877 src.bytes, ctxt)) != 0 )
2878 goto done;
2879 switch ( modrm_reg & 7 )
2881 case 0: /* fiadd m16i */
2882 emulate_fpu_insn_memsrc("fiadd", src.val);
2883 break;
2884 case 1: /* fimul m16i */
2885 emulate_fpu_insn_memsrc("fimul", src.val);
2886 break;
2887 case 2: /* ficom m16i */
2888 emulate_fpu_insn_memsrc("ficom", src.val);
2889 break;
2890 case 3: /* ficomp m16i */
2891 emulate_fpu_insn_memsrc("ficomp", src.val);
2892 break;
2893 case 4: /* fisub m16i */
2894 emulate_fpu_insn_memsrc("fisub", src.val);
2895 break;
2896 case 5: /* fisubr m16i */
2897 emulate_fpu_insn_memsrc("fisubr", src.val);
2898 break;
2899 case 6: /* fidiv m16i */
2900 emulate_fpu_insn_memsrc("fidiv", src.val);
2901 break;
2902 case 7: /* fidivr m16i */
2903 emulate_fpu_insn_memsrc("fidivr", src.val);
2904 break;
2905 default:
2906 goto cannot_emulate;
2909 break;
2911 case 0xdf: /* FPU 0xdf */
2912 switch ( modrm )
2914 case 0xe0:
2915 /* fnstsw %ax */
2916 dst.bytes = 2;
2917 dst.type = OP_REG;
2918 dst.reg = (unsigned long *)&_regs.eax;
2919 emulate_fpu_insn_memdst("fnstsw", dst.val);
2920 break;
2921 case 0xf0 ... 0xf7: /* fcomip %stN */
2922 case 0xf8 ... 0xff: /* fucomip %stN */
2923 emulate_fpu_insn_stub(0xdf, modrm);
2924 break;
2925 default:
2926 fail_if(modrm >= 0xc0);
2927 switch ( modrm_reg & 7 )
2929 case 0: /* fild m16i */
2930 ea.bytes = 2;
2931 src = ea;
2932 if ( (rc = ops->read(src.mem.seg, src.mem.off, &src.val,
2933 src.bytes, ctxt)) != 0 )
2934 goto done;
2935 emulate_fpu_insn_memsrc("fild", src.val);
2936 break;
2937 case 1: /* fisttp m16i */
2938 ea.bytes = 2;
2939 dst = ea;
2940 dst.type = OP_MEM;
2941 emulate_fpu_insn_memdst("fisttp", dst.val);
2942 break;
2943 case 2: /* fist m16i */
2944 ea.bytes = 2;
2945 dst = ea;
2946 dst.type = OP_MEM;
2947 emulate_fpu_insn_memdst("fist", dst.val);
2948 break;
2949 case 3: /* fistp m16i */
2950 ea.bytes = 2;
2951 dst = ea;
2952 dst.type = OP_MEM;
2953 emulate_fpu_insn_memdst("fistp", dst.val);
2954 break;
2955 case 4: /* fbld m80dec */
2956 ea.bytes = 10;
2957 src = ea;
2958 if ( (rc = ops->read(src.mem.seg, src.mem.off,
2959 &src.val, src.bytes, ctxt)) != 0 )
2960 goto done;
2961 emulate_fpu_insn_memdst("fbld", src.val);
2962 break;
2963 case 5: /* fild m64i */
2964 ea.bytes = 8;
2965 src = ea;
2966 if ( (rc = ops->read(src.mem.seg, src.mem.off, &src.val,
2967 src.bytes, ctxt)) != 0 )
2968 goto done;
2969 emulate_fpu_insn_memsrc("fildll", src.val);
2970 break;
2971 case 6: /* fbstp packed bcd */
2972 ea.bytes = 10;
2973 dst = ea;
2974 dst.type = OP_MEM;
2975 emulate_fpu_insn_memdst("fbstp", dst.val);
2976 break;
2977 case 7: /* fistp m64i */
2978 ea.bytes = 8;
2979 dst = ea;
2980 dst.type = OP_MEM;
2981 emulate_fpu_insn_memdst("fistpll", dst.val);
2982 break;
2983 default:
2984 goto cannot_emulate;
2987 break;
2989 case 0xe0 ... 0xe2: /* loop{,z,nz} */ {
2990 int rel = insn_fetch_type(int8_t);
2991 int do_jmp = !(_regs.eflags & EFLG_ZF); /* loopnz */
2992 if ( b == 0xe1 )
2993 do_jmp = !do_jmp; /* loopz */
2994 else if ( b == 0xe2 )
2995 do_jmp = 1; /* loop */
2996 switch ( ad_bytes )
2998 case 2:
2999 do_jmp &= --(*(uint16_t *)&_regs.ecx) != 0;
3000 break;
3001 case 4:
3002 do_jmp &= --(*(uint32_t *)&_regs.ecx) != 0;
3003 _regs.ecx = (uint32_t)_regs.ecx; /* zero extend in x86/64 mode */
3004 break;
3005 default: /* case 8: */
3006 do_jmp &= --_regs.ecx != 0;
3007 break;
3009 if ( do_jmp )
3010 jmp_rel(rel);
3011 break;
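/*
 * LOOP/LOOPZ/LOOPNZ decrement the count register at the current
 * address size and branch while it is non-zero (the Z variants also
 * test ZF). The uint16_t/uint32_t punning above leaves the upper bits
 * of the count register untouched for 16-bit counts and zero-extends
 * for 32-bit counts, matching 64-bit hardware.
 */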
3014 case 0xe3: /* jcxz/jecxz (short) */ {
3015 int rel = insn_fetch_type(int8_t);
3016 if ( (ad_bytes == 2) ? !(uint16_t)_regs.ecx :
3017 (ad_bytes == 4) ? !(uint32_t)_regs.ecx : !_regs.ecx )
3018 jmp_rel(rel);
3019 break;
3022 case 0xe4: /* in imm8,%al */
3023 case 0xe5: /* in imm8,%eax */
3024 case 0xe6: /* out %al,imm8 */
3025 case 0xe7: /* out %eax,imm8 */
3026 case 0xec: /* in %dx,%al */
3027 case 0xed: /* in %dx,%eax */
3028 case 0xee: /* out %al,%dx */
3029 case 0xef: /* out %eax,%dx */ {
3030 unsigned int port = ((b < 0xe8)
3031 ? insn_fetch_type(uint8_t)
3032 : (uint16_t)_regs.edx);
3033 op_bytes = !(b & 1) ? 1 : (op_bytes == 8) ? 4 : op_bytes;
3034 if ( (rc = ioport_access_check(port, op_bytes, ctxt, ops)) != 0 )
3035 goto done;
3036 if ( b & 2 )
3038 /* out */
3039 fail_if(ops->write_io == NULL);
3040 rc = ops->write_io(port, op_bytes, _regs.eax, ctxt);
3042 else
3044 /* in */
3045 dst.type = OP_REG;
3046 dst.bytes = op_bytes;
3047 dst.reg = (unsigned long *)&_regs.eax;
3048 fail_if(ops->read_io == NULL);
3049 rc = ops->read_io(port, dst.bytes, &dst.val, ctxt);
3051 if ( rc != 0 )
3052 goto done;
3053 break;
3056 case 0xe8: /* call (near) */ {
3057 int rel = (((op_bytes == 2) && !mode_64bit())
3058 ? (int32_t)insn_fetch_type(int16_t)
3059 : insn_fetch_type(int32_t));
3060 op_bytes = mode_64bit() ? 8 : op_bytes;
3061 src.val = _regs.eip;
3062 jmp_rel(rel);
3063 goto push;
3066 case 0xe9: /* jmp (near) */ {
3067 int rel = (((op_bytes == 2) && !mode_64bit())
3068 ? (int32_t)insn_fetch_type(int16_t)
3069 : insn_fetch_type(int32_t));
3070 jmp_rel(rel);
3071 break;
3074 case 0xea: /* jmp (far, absolute) */ {
3075 uint16_t sel;
3076 uint32_t eip;
3077 generate_exception_if(mode_64bit(), EXC_UD, -1);
3078 eip = insn_fetch_bytes(op_bytes);
3079 sel = insn_fetch_type(uint16_t);
3080 if ( (rc = load_seg(x86_seg_cs, sel, ctxt, ops)) != 0 )
3081 goto done;
3082 _regs.eip = eip;
3083 break;
3086 case 0xeb: /* jmp (short) */ {
3087 int rel = insn_fetch_type(int8_t);
3088 jmp_rel(rel);
3089 break;
3092 case 0xf1: /* int1 (icebp) */
3093 src.val = EXC_DB;
3094 goto swint;
3096 case 0xf4: /* hlt */
3097 ctxt->retire.flags.hlt = 1;
3098 break;
3100 case 0xf5: /* cmc */
3101 _regs.eflags ^= EFLG_CF;
3102 break;
3104 case 0xf6 ... 0xf7: /* Grp3 */
3105 switch ( modrm_reg & 7 )
3107 case 0 ... 1: /* test */
3108 /* Special case in Grp3: test has an immediate source operand. */
3109 src.type = OP_IMM;
3110 src.bytes = (d & ByteOp) ? 1 : op_bytes;
3111 if ( src.bytes == 8 ) src.bytes = 4;
3112 switch ( src.bytes )
3114 case 1: src.val = insn_fetch_type(int8_t); break;
3115 case 2: src.val = insn_fetch_type(int16_t); break;
3116 case 4: src.val = insn_fetch_type(int32_t); break;
3118 goto test;
3119 case 2: /* not */
3120 dst.val = ~dst.val;
3121 break;
3122 case 3: /* neg */
3123 emulate_1op("neg", dst, _regs.eflags);
3124 break;
3125 case 4: /* mul */
3126 src = dst;
3127 dst.type = OP_REG;
3128 dst.reg = (unsigned long *)&_regs.eax;
3129 dst.val = *dst.reg;
3130 _regs.eflags &= ~(EFLG_OF|EFLG_CF);
3131 switch ( src.bytes )
3133 case 1:
3134 dst.val = (uint8_t)dst.val;
3135 dst.val *= src.val;
3136 if ( (uint8_t)dst.val != (uint16_t)dst.val )
3137 _regs.eflags |= EFLG_OF|EFLG_CF;
3138 dst.bytes = 2;
3139 break;
3140 case 2:
3141 dst.val = (uint16_t)dst.val;
3142 dst.val *= src.val;
3143 if ( (uint16_t)dst.val != (uint32_t)dst.val )
3144 _regs.eflags |= EFLG_OF|EFLG_CF;
3145 *(uint16_t *)&_regs.edx = dst.val >> 16;
3146 break;
3147 #ifdef __x86_64__
3148 case 4:
3149 dst.val = (uint32_t)dst.val;
3150 dst.val *= src.val;
3151 if ( (uint32_t)dst.val != dst.val )
3152 _regs.eflags |= EFLG_OF|EFLG_CF;
3153 _regs.edx = (uint32_t)(dst.val >> 32);
3154 break;
3155 #endif
3156 default: {
3157 unsigned long m[2] = { src.val, dst.val };
3158 if ( mul_dbl(m) )
3159 _regs.eflags |= EFLG_OF|EFLG_CF;
3160 _regs.edx = m[1];
3161 dst.val = m[0];
3162 break;
3165 break;
3166 case 5: /* imul */
3167 src = dst;
3168 dst.type = OP_REG;
3169 dst.reg = (unsigned long *)&_regs.eax;
3170 dst.val = *dst.reg;
3171 _regs.eflags &= ~(EFLG_OF|EFLG_CF);
3172 switch ( src.bytes )
3174 case 1:
3175 dst.val = ((uint16_t)(int8_t)src.val *
3176 (uint16_t)(int8_t)dst.val);
3177 if ( (int8_t)dst.val != (uint16_t)dst.val )
3178 _regs.eflags |= EFLG_OF|EFLG_CF;
3179 dst.bytes = 2;
3180 break;
3181 case 2:
3182 dst.val = ((uint32_t)(int16_t)src.val *
3183 (uint32_t)(int16_t)dst.val);
3184 if ( (int16_t)dst.val != (uint32_t)dst.val )
3185 _regs.eflags |= EFLG_OF|EFLG_CF;
3186 *(uint16_t *)&_regs.edx = dst.val >> 16;
3187 break;
3188 #ifdef __x86_64__
3189 case 4:
3190 dst.val = ((uint64_t)(int32_t)src.val *
3191 (uint64_t)(int32_t)dst.val);
3192 if ( (int32_t)dst.val != dst.val )
3193 _regs.eflags |= EFLG_OF|EFLG_CF;
3194 _regs.edx = (uint32_t)(dst.val >> 32);
3195 break;
3196 #endif
3197 default: {
3198 unsigned long m[2] = { src.val, dst.val };
3199 if ( imul_dbl(m) )
3200 _regs.eflags |= EFLG_OF|EFLG_CF;
3201 _regs.edx = m[1];
3202 dst.val = m[0];
3203 break;
3206 break;
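/*
 * MUL/IMUL form the product at double width and set CF/OF when the
 * high half is not a plain zero/sign extension of the low half. For
 * example, 16-bit IMUL of 0x0200 by 0x0200 gives 0x00040000; its low
 * word 0x0000 does not sign-extend to the full result, so CF=OF=1.
 */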
3207 case 6: /* div */ {
3208 unsigned long u[2], v;
3209 src = dst;
3210 dst.type = OP_REG;
3211 dst.reg = (unsigned long *)&_regs.eax;
3212 switch ( src.bytes )
3214 case 1:
3215 u[0] = (uint16_t)_regs.eax;
3216 u[1] = 0;
3217 v = (uint8_t)src.val;
3218 generate_exception_if(
3219 div_dbl(u, v) || ((uint8_t)u[0] != (uint16_t)u[0]),
3220 EXC_DE, -1);
3221 dst.val = (uint8_t)u[0];
3222 ((uint8_t *)&_regs.eax)[1] = u[1];
3223 break;
3224 case 2:
3225 u[0] = ((uint32_t)_regs.edx << 16) | (uint16_t)_regs.eax;
3226 u[1] = 0;
3227 v = (uint16_t)src.val;
3228 generate_exception_if(
3229 div_dbl(u, v) || ((uint16_t)u[0] != (uint32_t)u[0]),
3230 EXC_DE, -1);
3231 dst.val = (uint16_t)u[0];
3232 *(uint16_t *)&_regs.edx = u[1];
3233 break;
3234 #ifdef __x86_64__
3235 case 4:
3236 u[0] = (_regs.edx << 32) | (uint32_t)_regs.eax;
3237 u[1] = 0;
3238 v = (uint32_t)src.val;
3239 generate_exception_if(
3240 div_dbl(u, v) || ((uint32_t)u[0] != u[0]),
3241 EXC_DE, -1);
3242 dst.val = (uint32_t)u[0];
3243 _regs.edx = (uint32_t)u[1];
3244 break;
3245 #endif
3246 default:
3247 u[0] = _regs.eax;
3248 u[1] = _regs.edx;
3249 v = src.val;
3250 generate_exception_if(div_dbl(u, v), EXC_DE, -1);
3251 dst.val = u[0];
3252 _regs.edx = u[1];
3253 break;
3255 break;
3257 case 7: /* idiv */ {
3258 unsigned long u[2], v;
3259 src = dst;
3260 dst.type = OP_REG;
3261 dst.reg = (unsigned long *)&_regs.eax;
3262 switch ( src.bytes )
3264 case 1:
3265 u[0] = (int16_t)_regs.eax;
3266 u[1] = ((long)u[0] < 0) ? ~0UL : 0UL;
3267 v = (int8_t)src.val;
3268 generate_exception_if(
3269 idiv_dbl(u, v) || ((int8_t)u[0] != (int16_t)u[0]),
3270 EXC_DE, -1);
3271 dst.val = (int8_t)u[0];
3272 ((int8_t *)&_regs.eax)[1] = u[1];
3273 break;
3274 case 2:
3275 u[0] = (int32_t)((_regs.edx << 16) | (uint16_t)_regs.eax);
3276 u[1] = ((long)u[0] < 0) ? ~0UL : 0UL;
3277 v = (int16_t)src.val;
3278 generate_exception_if(
3279 idiv_dbl(u, v) || ((int16_t)u[0] != (int32_t)u[0]),
3280 EXC_DE, -1);
3281 dst.val = (int16_t)u[0];
3282 *(int16_t *)&_regs.edx = u[1];
3283 break;
3284 #ifdef __x86_64__
3285 case 4:
3286 u[0] = (_regs.edx << 32) | (uint32_t)_regs.eax;
3287 u[1] = ((long)u[0] < 0) ? ~0UL : 0UL;
3288 v = (int32_t)src.val;
3289 generate_exception_if(
3290 idiv_dbl(u, v) || ((int32_t)u[0] != u[0]),
3291 EXC_DE, -1);
3292 dst.val = (int32_t)u[0];
3293 _regs.edx = (uint32_t)u[1];
3294 break;
3295 #endif
3296 default:
3297 u[0] = _regs.eax;
3298 u[1] = _regs.edx;
3299 v = src.val;
3300 generate_exception_if(idiv_dbl(u, v), EXC_DE, -1);
3301 dst.val = u[0];
3302 _regs.edx = u[1];
3303 break;
3305 break;
3307 default:
3308 goto cannot_emulate;
3310 break;
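/*
 * DIV/IDIV build the double-width dividend in u[1]:u[0] and divide by
 * v using the div_dbl()/idiv_dbl() helpers. #DE is raised both for a
 * zero divisor and for a quotient that does not fit the destination:
 * e.g. 8-bit DIV with %ax = 0x0400 and divisor 2 quotients to 0x200,
 * which exceeds 0xff.
 */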
3312 case 0xf8: /* clc */
3313 _regs.eflags &= ~EFLG_CF;
3314 break;
3316 case 0xf9: /* stc */
3317 _regs.eflags |= EFLG_CF;
3318 break;
3320 case 0xfa: /* cli */
3321 generate_exception_if(!mode_iopl(), EXC_GP, 0);
3322 _regs.eflags &= ~EFLG_IF;
3323 break;
3325 case 0xfb: /* sti */
3326 generate_exception_if(!mode_iopl(), EXC_GP, 0);
3327 if ( !(_regs.eflags & EFLG_IF) )
3329 _regs.eflags |= EFLG_IF;
3330 ctxt->retire.flags.sti = 1;
3332 break;
3334 case 0xfc: /* cld */
3335 _regs.eflags &= ~EFLG_DF;
3336 break;
3338 case 0xfd: /* std */
3339 _regs.eflags |= EFLG_DF;
3340 break;
3342 case 0xfe: /* Grp4 */
3343 generate_exception_if((modrm_reg & 7) >= 2, EXC_UD, -1);
3344 case 0xff: /* Grp5 */
3345 switch ( modrm_reg & 7 )
3347 case 0: /* inc */
3348 emulate_1op("inc", dst, _regs.eflags);
3349 break;
3350 case 1: /* dec */
3351 emulate_1op("dec", dst, _regs.eflags);
3352 break;
3353 case 2: /* call (near) */
3354 case 4: /* jmp (near) */
3355 if ( (dst.bytes != 8) && mode_64bit() )
3357 dst.bytes = op_bytes = 8;
3358 if ( dst.type == OP_REG )
3359 dst.val = *dst.reg;
3360 else if ( (rc = read_ulong(dst.mem.seg, dst.mem.off,
3361 &dst.val, 8, ctxt, ops)) != 0 )
3362 goto done;
3364 src.val = _regs.eip;
3365 _regs.eip = dst.val;
3366 if ( (modrm_reg & 7) == 2 )
3367 goto push; /* call */
3368 dst.type = OP_NONE;
3369 break;
3370 case 3: /* call (far, absolute indirect) */
3371 case 5: /* jmp (far, absolute indirect) */ {
3372 unsigned long sel;
3374 generate_exception_if(dst.type != OP_MEM, EXC_UD, -1);
3376 if ( (rc = read_ulong(dst.mem.seg, dst.mem.off+dst.bytes,
3377 &sel, 2, ctxt, ops)) )
3378 goto done;
3380 if ( (modrm_reg & 7) == 3 ) /* call */
3382 struct segment_register reg;
3383 fail_if(ops->read_segment == NULL);
3384 if ( (rc = ops->read_segment(x86_seg_cs, &reg, ctxt)) ||
3385 (rc = ops->write(x86_seg_ss, sp_pre_dec(op_bytes),
3386 &reg.sel, op_bytes, ctxt)) ||
3387 (rc = ops->write(x86_seg_ss, sp_pre_dec(op_bytes),
3388 &_regs.eip, op_bytes, ctxt)) )
3389 goto done;
3392 if ( (rc = load_seg(x86_seg_cs, sel, ctxt, ops)) != 0 )
3393 goto done;
3394 _regs.eip = dst.val;
3396 dst.type = OP_NONE;
3397 break;
3399 case 6: /* push */
3400 /* 64-bit mode: PUSH defaults to a 64-bit operand. */
3401 if ( mode_64bit() && (dst.bytes == 4) )
3403 dst.bytes = 8;
3404 if ( dst.type == OP_REG )
3405 dst.val = *dst.reg;
3406 else if ( (rc = read_ulong(dst.mem.seg, dst.mem.off,
3407 &dst.val, 8, ctxt, ops)) != 0 )
3408 goto done;
3410 if ( (rc = ops->write(x86_seg_ss, sp_pre_dec(dst.bytes),
3411 &dst.val, dst.bytes, ctxt)) != 0 )
3412 goto done;
3413 dst.type = OP_NONE;
3414 break;
3415 case 7:
3416 generate_exception_if(1, EXC_UD, -1);
3417 default:
3418 goto cannot_emulate;
3420 break;
3423 writeback:
3424 switch ( dst.type )
3426 case OP_REG:
3427 /* The 4-byte case *is* correct: in 64-bit mode we zero-extend. */
3428 switch ( dst.bytes )
3430 case 1: *(uint8_t *)dst.reg = (uint8_t)dst.val; break;
3431 case 2: *(uint16_t *)dst.reg = (uint16_t)dst.val; break;
3432 case 4: *dst.reg = (uint32_t)dst.val; break; /* 64b: zero-ext */
3433 case 8: *dst.reg = dst.val; break;
3435 break;
3436 case OP_MEM:
3437 if ( !(d & Mov) && (dst.orig_val == dst.val) &&
3438 !ctxt->force_writeback )
3439 /* nothing to do */;
3440 else if ( lock_prefix )
3441 rc = ops->cmpxchg(
3442 dst.mem.seg, dst.mem.off, &dst.orig_val,
3443 &dst.val, dst.bytes, ctxt);
3444 else
3445 rc = ops->write(
3446 dst.mem.seg, dst.mem.off, &dst.val, dst.bytes, ctxt);
3447 if ( rc != 0 )
3448 goto done;
3449 default:
3450 break;
3453 /* Inject #DB if single-step tracing was enabled at instruction start. */
3454 if ( (ctxt->regs->eflags & EFLG_TF) && (rc == X86EMUL_OKAY) &&
3455 (ops->inject_hw_exception != NULL) )
3456 rc = ops->inject_hw_exception(EXC_DB, -1, ctxt) ? : X86EMUL_EXCEPTION;
3458 /* Commit shadow register state. */
3459 _regs.eflags &= ~EFLG_RF;
3460 *ctxt->regs = _regs;
3462 done:
3463 return rc;
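/*
 * Caller sketch (illustrative only; the my_*() callbacks are
 * hypothetical):
 *
 *     struct x86_emulate_ops ops = {
 *         .read = my_read, .write = my_write,
 *         .cmpxchg = my_cmpxchg, .insn_fetch = my_fetch,
 *     };
 *     rc = x86_emulate(&ctxt, &ops);
 *
 * On X86EMUL_OKAY the shadow register set has been committed to
 * *ctxt->regs; other return codes leave guest state unchanged except
 * for effects already performed through the ops callbacks.
 */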
3465 twobyte_insn:
3466 switch ( b )
3468 case 0x01: /* Grp7 */ {
3469 struct segment_register reg;
3470 unsigned long base, limit, cr0, cr0w;
3472 if ( modrm == 0xdf ) /* invlpga */
3474 generate_exception_if(!in_protmode(ctxt, ops), EXC_UD, -1);
3475 generate_exception_if(!mode_ring0(), EXC_GP, 0);
3476 fail_if(ops->invlpg == NULL);
3477 if ( (rc = ops->invlpg(x86_seg_none, truncate_ea(_regs.eax),
3478 ctxt)) )
3479 goto done;
3480 break;
3483 switch ( modrm_reg & 7 )
3485 case 0: /* sgdt */
3486 case 1: /* sidt */
3487 generate_exception_if(ea.type != OP_MEM, EXC_UD, -1);
3488 fail_if(ops->read_segment == NULL);
3489 if ( (rc = ops->read_segment((modrm_reg & 1) ?
3490 x86_seg_idtr : x86_seg_gdtr,
3491 &reg, ctxt)) )
3492 goto done;
3493 if ( op_bytes == 2 )
3494 reg.base &= 0xffffff;
3495 if ( (rc = ops->write(ea.mem.seg, ea.mem.off+0,
3496 &reg.limit, 2, ctxt)) ||
3497 (rc = ops->write(ea.mem.seg, ea.mem.off+2,
3498 &reg.base, mode_64bit() ? 8 : 4, ctxt)) )
3499 goto done;
3500 break;
3501 case 2: /* lgdt */
3502 case 3: /* lidt */
3503 generate_exception_if(ea.type != OP_MEM, EXC_UD, -1);
3504 fail_if(ops->write_segment == NULL);
3505 memset(&reg, 0, sizeof(reg));
3506 if ( (rc = read_ulong(ea.mem.seg, ea.mem.off+0,
3507 &limit, 2, ctxt, ops)) ||
3508 (rc = read_ulong(ea.mem.seg, ea.mem.off+2,
3509 &base, mode_64bit() ? 8 : 4, ctxt, ops)) )
3510 goto done;
3511 reg.base = base;
3512 reg.limit = limit;
3513 if ( op_bytes == 2 )
3514 reg.base &= 0xffffff;
3515 if ( (rc = ops->write_segment((modrm_reg & 1) ?
3516 x86_seg_idtr : x86_seg_gdtr,
3517 &reg, ctxt)) )
3518 goto done;
3519 break;
3520 case 4: /* smsw */
3521 if ( ea.type == OP_MEM )
3522 ea.bytes = 2;
3523 dst = ea;
3524 fail_if(ops->read_cr == NULL);
3525 if ( (rc = ops->read_cr(0, &dst.val, ctxt)) )
3526 goto done;
3527 d |= Mov; /* force writeback */
3528 break;
3529 case 6: /* lmsw */
3530 fail_if(ops->read_cr == NULL);
3531 fail_if(ops->write_cr == NULL);
3532 if ( (rc = ops->read_cr(0, &cr0, ctxt)) )
3533 goto done;
3534 if ( ea.type == OP_REG )
3535 cr0w = *ea.reg;
3536 else if ( (rc = read_ulong(ea.mem.seg, ea.mem.off,
3537 &cr0w, 2, ctxt, ops)) )
3538 goto done;
3539 /* LMSW can: (1) set bits 0-3; (2) clear bits 1-3. */
3540 cr0 = (cr0 & ~0xe) | (cr0w & 0xf);
3541 if ( (rc = ops->write_cr(0, cr0, ctxt)) )
3542 goto done;
3543 break;
3544 case 7: /* invlpg */
3545 generate_exception_if(!mode_ring0(), EXC_GP, 0);
3546 generate_exception_if(ea.type != OP_MEM, EXC_UD, -1);
3547 fail_if(ops->invlpg == NULL);
3548 if ( (rc = ops->invlpg(ea.mem.seg, ea.mem.off, ctxt)) )
3549 goto done;
3550 break;
3551 default:
3552 goto cannot_emulate;
3554 break;
3557 case 0x06: /* clts */
3558 generate_exception_if(!mode_ring0(), EXC_GP, 0);
3559 fail_if((ops->read_cr == NULL) || (ops->write_cr == NULL));
3560 if ( (rc = ops->read_cr(0, &dst.val, ctxt)) ||
3561 (rc = ops->write_cr(0, dst.val&~8, ctxt)) )
3562 goto done;
3563 break;
3565 case 0x08: /* invd */
3566 case 0x09: /* wbinvd */
3567 generate_exception_if(!mode_ring0(), EXC_GP, 0);
3568 fail_if(ops->wbinvd == NULL);
3569 if ( (rc = ops->wbinvd(ctxt)) != 0 )
3570 goto done;
3571 break;
3573 case 0x0d: /* GrpP (prefetch) */
3574 case 0x18: /* Grp16 (prefetch/nop) */
3575 case 0x19 ... 0x1f: /* nop (amd-defined) */
3576 break;
3578 case 0x20: /* mov cr,reg */
3579 case 0x21: /* mov dr,reg */
3580 case 0x22: /* mov reg,cr */
3581 case 0x23: /* mov reg,dr */
3582 generate_exception_if(ea.type != OP_REG, EXC_UD, -1);
3583 generate_exception_if(!mode_ring0(), EXC_GP, 0);
3584 modrm_reg |= lock_prefix << 3;
3585 if ( b & 2 )
3587 /* Write to CR/DR. */
3588 src.val = *(unsigned long *)decode_register(modrm_rm, &_regs, 0);
3589 if ( !mode_64bit() )
3590 src.val = (uint32_t)src.val;
3591 rc = ((b & 1)
3592 ? (ops->write_dr
3593 ? ops->write_dr(modrm_reg, src.val, ctxt)
3594 : X86EMUL_UNHANDLEABLE)
3595 : (ops->write_cr
3596 ? ops->write_cr(modrm_reg, src.val, ctxt)
3597 : X86EMUL_UNHANDLEABLE));
3599 else
3601 /* Read from CR/DR. */
3602 dst.type = OP_REG;
3603 dst.bytes = mode_64bit() ? 8 : 4;
3604 dst.reg = decode_register(modrm_rm, &_regs, 0);
3605 rc = ((b & 1)
3606 ? (ops->read_dr
3607 ? ops->read_dr(modrm_reg, &dst.val, ctxt)
3608 : X86EMUL_UNHANDLEABLE)
3609 : (ops->read_cr
3610 ? ops->read_cr(modrm_reg, &dst.val, ctxt)
3611 : X86EMUL_UNHANDLEABLE));
3613 if ( rc != 0 )
3614 goto done;
3615 break;
3617 case 0x30: /* wrmsr */ {
3618 uint64_t val = ((uint64_t)_regs.edx << 32) | (uint32_t)_regs.eax;
3619 generate_exception_if(!mode_ring0(), EXC_GP, 0);
3620 fail_if(ops->write_msr == NULL);
3621 if ( (rc = ops->write_msr((uint32_t)_regs.ecx, val, ctxt)) != 0 )
3622 goto done;
3623 break;
3626 case 0x31: /* rdtsc */ {
3627 unsigned long cr4;
3628 uint64_t val;
3629 fail_if(ops->read_cr == NULL);
3630 if ( (rc = ops->read_cr(4, &cr4, ctxt)) )
3631 goto done;
3632 generate_exception_if((cr4 & CR4_TSD) && !mode_ring0(), EXC_GP, 0);
3633 fail_if(ops->read_msr == NULL);
3634 if ( (rc = ops->read_msr(MSR_TSC, &val, ctxt)) != 0 )
3635 goto done;
3636 _regs.edx = (uint32_t)(val >> 32);
3637 _regs.eax = (uint32_t)(val >> 0);
3638 break;
3641 case 0x32: /* rdmsr */ {
3642 uint64_t val;
3643 generate_exception_if(!mode_ring0(), EXC_GP, 0);
3644 fail_if(ops->read_msr == NULL);
3645 if ( (rc = ops->read_msr((uint32_t)_regs.ecx, &val, ctxt)) != 0 )
3646 goto done;
3647 _regs.edx = (uint32_t)(val >> 32);
3648 _regs.eax = (uint32_t)(val >> 0);
3649 break;
3652 case 0x40 ... 0x4f: /* cmovcc */
3653 dst.val = src.val;
3654 if ( !test_cc(b, _regs.eflags) )
3655 dst.type = OP_NONE;
3656 break;
3658 case 0x6f: /* movq mm/m64,mm */ {
3659 uint8_t stub[] = { 0x0f, 0x6f, modrm, 0xc3 };
3660 struct fpu_insn_ctxt fic = { .insn_bytes = sizeof(stub)-1 };
3661 uint64_t val;
3662 if ( ea.type == OP_MEM )
3664 unsigned long lval, hval;
3665 if ( (rc = read_ulong(ea.mem.seg, ea.mem.off+0,
3666 &lval, 4, ctxt, ops)) ||
3667 (rc = read_ulong(ea.mem.seg, ea.mem.off+4,
3668 &hval, 4, ctxt, ops)) )
3669 goto done;
3670 val = ((uint64_t)hval << 32) | (uint32_t)lval;
3671 stub[2] = modrm & 0x38; /* movq (%eax),%mmN */
3673 get_fpu(X86EMUL_FPU_mmx, &fic);
3674 asm volatile ( "call *%0" : : "r" (stub), "a" (&val) : "memory" );
3675 put_fpu(&fic);
3676 break;
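/*
 * The MMX moves are executed by patching a 4-byte stub: the ModRM byte
 * is rewritten to mod=00, r/m=000 (i.e. (%eax)) while keeping the
 * register field, and the stub is called with %eax pointing at the
 * local 64-bit buffer 'val'.
 */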
3679 case 0x7f: /* movq mm,mm/m64 */ {
3680 uint8_t stub[] = { 0x0f, 0x7f, modrm, 0xc3 };
3681 struct fpu_insn_ctxt fic = { .insn_bytes = sizeof(stub)-1 };
3682 uint64_t val;
3683 if ( ea.type == OP_MEM )
3684 stub[2] = modrm & 0x38; /* movq %mmN,(%eax) */
3685 get_fpu(X86EMUL_FPU_mmx, &fic);
3686 asm volatile ( "call *%0" : : "r" (stub), "a" (&val) : "memory" );
3687 put_fpu(&fic);
3688 if ( ea.type == OP_MEM )
3690 unsigned long lval = (uint32_t)val, hval = (uint32_t)(val >> 32);
3691 if ( (rc = ops->write(ea.mem.seg, ea.mem.off+0, &lval, 4, ctxt)) ||
3692 (rc = ops->write(ea.mem.seg, ea.mem.off+4, &hval, 4, ctxt)) )
3693 goto done;
3695 break;
3698 case 0x80 ... 0x8f: /* jcc (near) */ {
3699 int rel = (((op_bytes == 2) && !mode_64bit())
3700 ? (int32_t)insn_fetch_type(int16_t)
3701 : insn_fetch_type(int32_t));
3702 if ( test_cc(b, _regs.eflags) )
3703 jmp_rel(rel);
3704 break;
3707 case 0x90 ... 0x9f: /* setcc */
3708 dst.val = test_cc(b, _regs.eflags);
3709 break;
3711 case 0xa0: /* push %%fs */
3712 src.val = x86_seg_fs;
3713 goto push_seg;
3715 case 0xa1: /* pop %%fs */
3716 src.val = x86_seg_fs;
3717 goto pop_seg;
3719 case 0xa2: /* cpuid */ {
3720 unsigned int eax = _regs.eax, ebx = _regs.ebx;
3721 unsigned int ecx = _regs.ecx, edx = _regs.edx;
3722 fail_if(ops->cpuid == NULL);
3723 if ( (rc = ops->cpuid(&eax, &ebx, &ecx, &edx, ctxt)) != 0 )
3724 goto done;
3725 _regs.eax = eax; _regs.ebx = ebx;
3726 _regs.ecx = ecx; _regs.edx = edx;
3727 break;
3730 case 0xa8: /* push %%gs */
3731 src.val = x86_seg_gs;
3732 goto push_seg;
3734 case 0xa9: /* pop %%gs */
3735 src.val = x86_seg_gs;
3736 goto pop_seg;
3738 case 0xb0 ... 0xb1: /* cmpxchg */
3739 /* Save real source value, then compare EAX against destination. */
3740 src.orig_val = src.val;
3741 src.val = _regs.eax;
3742 emulate_2op_SrcV("cmp", src, dst, _regs.eflags);
3743 if ( _regs.eflags & EFLG_ZF )
3745 /* Success: write back to memory. */
3746 dst.val = src.orig_val;
3748 else
3750 /* Failure: write the value we saw to EAX. */
3751 dst.type = OP_REG;
3752 dst.reg = (unsigned long *)&_regs.eax;
3754 break;
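/*
 * CMPXCHG: flags come from comparing the accumulator with the
 * destination (the CMP above). On a match, ZF=1 and the saved source
 * value is stored to the destination; on a mismatch, ZF=0 and the
 * destination value observed is written back to the accumulator.
 */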
3756 case 0xa3: bt: /* bt */
3757 emulate_2op_SrcV_nobyte("bt", src, dst, _regs.eflags);
3758 dst.type = OP_NONE;
3759 break;
3761 case 0xa4: /* shld imm8,r,r/m */
3762 case 0xa5: /* shld %%cl,r,r/m */
3763 case 0xac: /* shrd imm8,r,r/m */
3764 case 0xad: /* shrd %%cl,r,r/m */ {
3765 uint8_t shift, width = dst.bytes << 3;
3766 shift = (b & 1) ? (uint8_t)_regs.ecx : insn_fetch_type(uint8_t);
3767 if ( (shift &= width - 1) == 0 )
3768 break;
3769 dst.orig_val = truncate_word(dst.val, dst.bytes);
3770 dst.val = ((shift == width) ? src.val :
3771 (b & 8) ?
3772 /* shrd */
3773 ((dst.orig_val >> shift) |
3774 truncate_word(src.val << (width - shift), dst.bytes)) :
3775 /* shld */
3776 ((dst.orig_val << shift) |
3777 ((src.val >> (width - shift)) & ((1ull << shift) - 1))));
3778 dst.val = truncate_word(dst.val, dst.bytes);
3779 _regs.eflags &= ~(EFLG_OF|EFLG_SF|EFLG_ZF|EFLG_PF|EFLG_CF);
3780 if ( (dst.val >> ((b & 8) ? (shift - 1) : (width - shift))) & 1 )
3781 _regs.eflags |= EFLG_CF;
3782 if ( ((dst.val ^ dst.orig_val) >> (width - 1)) & 1 )
3783 _regs.eflags |= EFLG_OF;
3784 _regs.eflags |= ((dst.val >> (width - 1)) & 1) ? EFLG_SF : 0;
3785 _regs.eflags |= (dst.val == 0) ? EFLG_ZF : 0;
3786 _regs.eflags |= even_parity(dst.val) ? EFLG_PF : 0;
3787 break;
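/*
 * Worked example: 16-bit SHLD with dst = 0x1234, src = 0xABCD and a
 * count of 4 yields (0x1234 << 4) | (0xABCD >> 12) = 0x234A, after
 * which SF/ZF/PF are recomputed from the truncated result. A count
 * that masks to zero against width - 1 changes nothing at all.
 */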
3790 case 0xb3: btr: /* btr */
3791 emulate_2op_SrcV_nobyte("btr", src, dst, _regs.eflags);
3792 break;
3794 case 0xab: bts: /* bts */
3795 emulate_2op_SrcV_nobyte("bts", src, dst, _regs.eflags);
3796 break;
3798 case 0xaf: /* imul */
3799 _regs.eflags &= ~(EFLG_OF|EFLG_CF);
3800 switch ( dst.bytes )
3802 case 2:
3803 dst.val = ((uint32_t)(int16_t)src.val *
3804 (uint32_t)(int16_t)dst.val);
3805 if ( (int16_t)dst.val != (uint32_t)dst.val )
3806 _regs.eflags |= EFLG_OF|EFLG_CF;
3807 break;
3808 #ifdef __x86_64__
3809 case 4:
3810 dst.val = ((uint64_t)(int32_t)src.val *
3811 (uint64_t)(int32_t)dst.val);
3812 if ( (int32_t)dst.val != dst.val )
3813 _regs.eflags |= EFLG_OF|EFLG_CF;
3814 break;
3815 #endif
3816 default: {
3817 unsigned long m[2] = { src.val, dst.val };
3818 if ( imul_dbl(m) )
3819 _regs.eflags |= EFLG_OF|EFLG_CF;
3820 dst.val = m[0];
3821 break;
3824 break;
3826 case 0xb2: /* lss */
3827 dst.val = x86_seg_ss;
3828 goto les;
3830 case 0xb4: /* lfs */
3831 dst.val = x86_seg_fs;
3832 goto les;
3834 case 0xb5: /* lgs */
3835 dst.val = x86_seg_gs;
3836 goto les;
3838 case 0xb6: /* movzx rm8,r{16,32,64} */
3839 /* Recompute DstReg as we may have decoded AH/BH/CH/DH. */
3840 dst.reg = decode_register(modrm_reg, &_regs, 0);
3841 dst.bytes = op_bytes;
3842 dst.val = (uint8_t)src.val;
3843 break;
3845 case 0xbc: /* bsf */ {
3846 int zf;
3847 asm ( "bsf %2,%0; setz %b1"
3848 : "=r" (dst.val), "=q" (zf)
3849 : "r" (src.val), "1" (0) );
3850 _regs.eflags &= ~EFLG_ZF;
3851 if ( zf )
3853 _regs.eflags |= EFLG_ZF;
3854 dst.type = OP_NONE;
3856 break;
3859 case 0xbd: /* bsr */ {
3860 int zf;
3861 asm ( "bsr %2,%0; setz %b1"
3862 : "=r" (dst.val), "=q" (zf)
3863 : "r" (src.val), "1" (0) );
3864 _regs.eflags &= ~EFLG_ZF;
3865 if ( zf )
3867 _regs.eflags |= EFLG_ZF;
3868 dst.type = OP_NONE;
3870 break;
3873 case 0xb7: /* movzx rm16,r{16,32,64} */
3874 dst.val = (uint16_t)src.val;
3875 break;
3877 case 0xbb: btc: /* btc */
3878 emulate_2op_SrcV_nobyte("btc", src, dst, _regs.eflags);
3879 break;
3881 case 0xba: /* Grp8 */
3882 switch ( modrm_reg & 7 )
3884 case 4: goto bt;
3885 case 5: goto bts;
3886 case 6: goto btr;
3887 case 7: goto btc;
3888 default: generate_exception_if(1, EXC_UD, -1);
3890 break;
3892 case 0xbe: /* movsx rm8,r{16,32,64} */
3893 /* Recompute DstReg as we may have decoded AH/BH/CH/DH. */
3894 dst.reg = decode_register(modrm_reg, &_regs, 0);
3895 dst.bytes = op_bytes;
3896 dst.val = (int8_t)src.val;
3897 break;
3899 case 0xbf: /* movsx rm16,r{16,32,64} */
3900 dst.val = (int16_t)src.val;
3901 break;
3903 case 0xc0 ... 0xc1: /* xadd */
3904 /* Write back the register source. */
3905 switch ( dst.bytes )
3907 case 1: *(uint8_t *)src.reg = (uint8_t)dst.val; break;
3908 case 2: *(uint16_t *)src.reg = (uint16_t)dst.val; break;
3909 case 4: *src.reg = (uint32_t)dst.val; break; /* 64b reg: zero-extend */
3910 case 8: *src.reg = dst.val; break;
3912 goto add;
3914 case 0xc3: /* movnti */
3915 /* Ignore the non-temporal hint for now. */
3916 generate_exception_if(dst.bytes <= 2, EXC_UD, -1);
3917 dst.val = src.val;
3918 break;
3920 case 0xc7: /* Grp9 (cmpxchg8b/cmpxchg16b) */ {
3921 unsigned long old[2], exp[2], new[2];
3922 unsigned int i;
3924 generate_exception_if((modrm_reg & 7) != 1, EXC_UD, -1);
3925 generate_exception_if(ea.type != OP_MEM, EXC_UD, -1);
3926 op_bytes *= 2;
3928 /* Get actual old value. */
3929 for ( i = 0; i < (op_bytes/sizeof(long)); i++ )
3930 if ( (rc = read_ulong(ea.mem.seg, ea.mem.off + i*sizeof(long),
3931 &old[i], sizeof(long), ctxt, ops)) != 0 )
3932 goto done;
3934 /* Get expected and proposed values. */
3935 if ( op_bytes == 8 )
3937 ((uint32_t *)exp)[0] = _regs.eax; ((uint32_t *)exp)[1] = _regs.edx;
3938 ((uint32_t *)new)[0] = _regs.ebx; ((uint32_t *)new)[1] = _regs.ecx;
3940 else
3942 exp[0] = _regs.eax; exp[1] = _regs.edx;
3943 new[0] = _regs.ebx; new[1] = _regs.ecx;
3946 if ( memcmp(old, exp, op_bytes) )
3948 /* Expected != actual: store actual to rDX:rAX and clear ZF. */
3949 _regs.eax = (op_bytes == 8) ? ((uint32_t *)old)[0] : old[0];
3950 _regs.edx = (op_bytes == 8) ? ((uint32_t *)old)[1] : old[1];
3951 _regs.eflags &= ~EFLG_ZF;
3953 else
3955 /* Expected == actual: attempt atomic cmpxchg and set ZF. */
3956 if ( (rc = ops->cmpxchg(ea.mem.seg, ea.mem.off, old,
3957 new, op_bytes, ctxt)) != 0 )
3958 goto done;
3959 _regs.eflags |= EFLG_ZF;
3961 break;
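/*
 * CMPXCHG8B/16B flow: the current memory value is read and compared
 * with rDX:rAX locally; only on a match is ops->cmpxchg() invoked to
 * install rCX:rBX atomically (ZF=1). On a mismatch the observed value
 * is returned in rDX:rAX and ZF is cleared without writing memory.
 */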
3964 case 0xc8 ... 0xcf: /* bswap */
3965 dst.type = OP_REG;
3966 dst.reg = decode_register(
3967 (b & 7) | ((rex_prefix & 1) << 3), &_regs, 0);
3968 switch ( dst.bytes = op_bytes )
3970 default: /* case 2: */
3971 /* Undefined behaviour. Writes zero on all tested CPUs. */
3972 dst.val = 0;
3973 break;
3974 case 4:
3975 #ifdef __x86_64__
3976 asm ( "bswap %k0" : "=r" (dst.val) : "0" (*dst.reg) );
3977 break;
3978 case 8:
3979 #endif
3980 asm ( "bswap %0" : "=r" (dst.val) : "0" (*dst.reg) );
3981 break;
3983 break;
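/*
 * Worked example: 32-bit BSWAP of 0x12345678 gives 0x78563412. The
 * 16-bit form is architecturally undefined, and writing zero matches
 * the behaviour observed on the CPUs noted above.
 */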
3985 goto writeback;
3987 cannot_emulate:
3988 return X86EMUL_UNHANDLEABLE;