xen/arch/x86/x86_emulate/x86_emulate.c @ 19614:e421fd04e150 (xen-unstable)

x86_emulate: Emulate LLDT and LTR instructions.

Signed-off-by: Keir Fraser <keir.fraser@citrix.com>
Author:   Keir Fraser <keir.fraser@citrix.com>
Date:     Tue May 19 02:09:36 2009 +0100
Parent:   90ed7af65570
Child:    f0e2df69a8eb
/******************************************************************************
 * x86_emulate.c
 *
 * Generic x86 (32-bit and 64-bit) instruction decoder and emulator.
 *
 * Copyright (c) 2005-2007 Keir Fraser
 * Copyright (c) 2005-2007 XenSource Inc.
 *
 * This program is free software; you can redistribute it and/or modify
 * it under the terms of the GNU General Public License as published by
 * the Free Software Foundation; either version 2 of the License, or
 * (at your option) any later version.
 *
 * This program is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
 * GNU General Public License for more details.
 *
 * You should have received a copy of the GNU General Public License
 * along with this program; if not, write to the Free Software
 * Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
 */
/* Operand sizes: 8-bit operands or specified/overridden size. */
#define ByteOp      (1<<0) /* 8-bit operands. */
/* Destination operand type. */
#define DstNone     (0<<1) /* No destination operand. */
#define DstImplicit (0<<1) /* Destination operand is implicit in the opcode. */
#define DstBitBase  (1<<1) /* Memory operand, bit string. */
#define DstReg      (2<<1) /* Register operand. */
#define DstEax      DstReg /* Register EAX (aka DstReg with no ModRM) */
#define DstMem      (3<<1) /* Memory operand. */
#define DstMask     (3<<1)
/* Source operand type. */
#define SrcInvalid  (0<<3) /* Unimplemented opcode. */
#define SrcNone     (1<<3) /* No source operand. */
#define SrcImplicit (1<<3) /* Source operand is implicit in the opcode. */
#define SrcReg      (2<<3) /* Register operand. */
#define SrcMem      (3<<3) /* Memory operand. */
#define SrcMem16    (4<<3) /* Memory operand (16-bit). */
#define SrcImm      (5<<3) /* Immediate operand. */
#define SrcImmByte  (6<<3) /* 8-bit sign-extended immediate operand. */
#define SrcMask     (7<<3)
/* Generic ModRM decode. */
#define ModRM       (1<<6)
/* Destination is only written; never read. */
#define Mov         (1<<7)
/* All operands are implicit in the opcode. */
#define ImplicitOps (DstImplicit|SrcImplicit)
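
/*
 * Example of how these flags compose: opcode 0x00 (ADD r/m8,r8) decodes
 * below as ByteOp|DstMem|SrcReg|ModRM -- an 8-bit read-modify-write whose
 * destination is the ModRM-decoded r/m operand and whose source is the
 * ModRM reg field.
 */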
static uint8_t opcode_table[256] = {
    /* 0x00 - 0x07 */
    ByteOp|DstMem|SrcReg|ModRM, DstMem|SrcReg|ModRM,
    ByteOp|DstReg|SrcMem|ModRM, DstReg|SrcMem|ModRM,
    ByteOp|DstEax|SrcImm, DstEax|SrcImm, ImplicitOps, ImplicitOps,
    /* 0x08 - 0x0F */
    ByteOp|DstMem|SrcReg|ModRM, DstMem|SrcReg|ModRM,
    ByteOp|DstReg|SrcMem|ModRM, DstReg|SrcMem|ModRM,
    ByteOp|DstEax|SrcImm, DstEax|SrcImm, ImplicitOps, 0,
    /* 0x10 - 0x17 */
    ByteOp|DstMem|SrcReg|ModRM, DstMem|SrcReg|ModRM,
    ByteOp|DstReg|SrcMem|ModRM, DstReg|SrcMem|ModRM,
    ByteOp|DstEax|SrcImm, DstEax|SrcImm, ImplicitOps, ImplicitOps,
    /* 0x18 - 0x1F */
    ByteOp|DstMem|SrcReg|ModRM, DstMem|SrcReg|ModRM,
    ByteOp|DstReg|SrcMem|ModRM, DstReg|SrcMem|ModRM,
    ByteOp|DstEax|SrcImm, DstEax|SrcImm, ImplicitOps, ImplicitOps,
    /* 0x20 - 0x27 */
    ByteOp|DstMem|SrcReg|ModRM, DstMem|SrcReg|ModRM,
    ByteOp|DstReg|SrcMem|ModRM, DstReg|SrcMem|ModRM,
    ByteOp|DstEax|SrcImm, DstEax|SrcImm, 0, ImplicitOps,
    /* 0x28 - 0x2F */
    ByteOp|DstMem|SrcReg|ModRM, DstMem|SrcReg|ModRM,
    ByteOp|DstReg|SrcMem|ModRM, DstReg|SrcMem|ModRM,
    ByteOp|DstEax|SrcImm, DstEax|SrcImm, 0, ImplicitOps,
    /* 0x30 - 0x37 */
    ByteOp|DstMem|SrcReg|ModRM, DstMem|SrcReg|ModRM,
    ByteOp|DstReg|SrcMem|ModRM, DstReg|SrcMem|ModRM,
    ByteOp|DstEax|SrcImm, DstEax|SrcImm, 0, ImplicitOps,
    /* 0x38 - 0x3F */
    ByteOp|DstMem|SrcReg|ModRM, DstMem|SrcReg|ModRM,
    ByteOp|DstReg|SrcMem|ModRM, DstReg|SrcMem|ModRM,
    ByteOp|DstEax|SrcImm, DstEax|SrcImm, 0, ImplicitOps,
    /* 0x40 - 0x4F */
    ImplicitOps, ImplicitOps, ImplicitOps, ImplicitOps,
    ImplicitOps, ImplicitOps, ImplicitOps, ImplicitOps,
    ImplicitOps, ImplicitOps, ImplicitOps, ImplicitOps,
    ImplicitOps, ImplicitOps, ImplicitOps, ImplicitOps,
    /* 0x50 - 0x5F */
    ImplicitOps|Mov, ImplicitOps|Mov, ImplicitOps|Mov, ImplicitOps|Mov,
    ImplicitOps|Mov, ImplicitOps|Mov, ImplicitOps|Mov, ImplicitOps|Mov,
    ImplicitOps|Mov, ImplicitOps|Mov, ImplicitOps|Mov, ImplicitOps|Mov,
    ImplicitOps|Mov, ImplicitOps|Mov, ImplicitOps|Mov, ImplicitOps|Mov,
    /* 0x60 - 0x67 */
    ImplicitOps, ImplicitOps, DstReg|SrcMem|ModRM, DstReg|SrcMem16|ModRM|Mov,
    0, 0, 0, 0,
    /* 0x68 - 0x6F */
    ImplicitOps|Mov, DstReg|SrcImm|ModRM|Mov,
    ImplicitOps|Mov, DstReg|SrcImmByte|ModRM|Mov,
    ImplicitOps|Mov, ImplicitOps|Mov, ImplicitOps|Mov, ImplicitOps|Mov,
    /* 0x70 - 0x77 */
    ImplicitOps, ImplicitOps, ImplicitOps, ImplicitOps,
    ImplicitOps, ImplicitOps, ImplicitOps, ImplicitOps,
    /* 0x78 - 0x7F */
    ImplicitOps, ImplicitOps, ImplicitOps, ImplicitOps,
    ImplicitOps, ImplicitOps, ImplicitOps, ImplicitOps,
    /* 0x80 - 0x87 */
    ByteOp|DstMem|SrcImm|ModRM, DstMem|SrcImm|ModRM,
    ByteOp|DstMem|SrcImm|ModRM, DstMem|SrcImmByte|ModRM,
    ByteOp|DstMem|SrcReg|ModRM, DstMem|SrcReg|ModRM,
    ByteOp|DstMem|SrcReg|ModRM, DstMem|SrcReg|ModRM,
    /* 0x88 - 0x8F */
    ByteOp|DstMem|SrcReg|ModRM|Mov, DstMem|SrcReg|ModRM|Mov,
    ByteOp|DstReg|SrcMem|ModRM|Mov, DstReg|SrcMem|ModRM|Mov,
    DstMem|SrcReg|ModRM|Mov, DstReg|SrcNone|ModRM,
    DstReg|SrcMem|ModRM|Mov, DstMem|SrcNone|ModRM|Mov,
    /* 0x90 - 0x97 */
    ImplicitOps, ImplicitOps, ImplicitOps, ImplicitOps,
    ImplicitOps, ImplicitOps, ImplicitOps, ImplicitOps,
    /* 0x98 - 0x9F */
    ImplicitOps, ImplicitOps, ImplicitOps, ImplicitOps,
    ImplicitOps, ImplicitOps, ImplicitOps, ImplicitOps,
    /* 0xA0 - 0xA7 */
    ByteOp|ImplicitOps|Mov, ImplicitOps|Mov,
    ByteOp|ImplicitOps|Mov, ImplicitOps|Mov,
    ByteOp|ImplicitOps|Mov, ImplicitOps|Mov,
    ByteOp|ImplicitOps, ImplicitOps,
    /* 0xA8 - 0xAF */
    ByteOp|DstEax|SrcImm, DstEax|SrcImm,
    ByteOp|ImplicitOps|Mov, ImplicitOps|Mov,
    ByteOp|ImplicitOps|Mov, ImplicitOps|Mov,
    ByteOp|ImplicitOps, ImplicitOps,
    /* 0xB0 - 0xB7 */
    ByteOp|DstReg|SrcImm|Mov, ByteOp|DstReg|SrcImm|Mov,
    ByteOp|DstReg|SrcImm|Mov, ByteOp|DstReg|SrcImm|Mov,
    ByteOp|DstReg|SrcImm|Mov, ByteOp|DstReg|SrcImm|Mov,
    ByteOp|DstReg|SrcImm|Mov, ByteOp|DstReg|SrcImm|Mov,
    /* 0xB8 - 0xBF */
    DstReg|SrcImm|Mov, DstReg|SrcImm|Mov, DstReg|SrcImm|Mov, DstReg|SrcImm|Mov,
    DstReg|SrcImm|Mov, DstReg|SrcImm|Mov, DstReg|SrcImm|Mov, DstReg|SrcImm|Mov,
    /* 0xC0 - 0xC7 */
    ByteOp|DstMem|SrcImm|ModRM, DstMem|SrcImmByte|ModRM,
    ImplicitOps, ImplicitOps,
    DstReg|SrcMem|ModRM|Mov, DstReg|SrcMem|ModRM|Mov,
    ByteOp|DstMem|SrcImm|ModRM|Mov, DstMem|SrcImm|ModRM|Mov,
    /* 0xC8 - 0xCF */
    ImplicitOps, ImplicitOps, ImplicitOps, ImplicitOps,
    ImplicitOps, ImplicitOps, ImplicitOps, ImplicitOps,
    /* 0xD0 - 0xD7 */
    ByteOp|DstMem|SrcImplicit|ModRM, DstMem|SrcImplicit|ModRM,
    ByteOp|DstMem|SrcImplicit|ModRM, DstMem|SrcImplicit|ModRM,
    ImplicitOps, ImplicitOps, ImplicitOps, ImplicitOps,
    /* 0xD8 - 0xDF */
    ImplicitOps|ModRM|Mov, ImplicitOps|ModRM|Mov,
    ImplicitOps|ModRM|Mov, ImplicitOps|ModRM|Mov,
    ImplicitOps|ModRM|Mov, ImplicitOps|ModRM|Mov,
    ImplicitOps|ModRM|Mov, ImplicitOps|ModRM|Mov,
    /* 0xE0 - 0xE7 */
    ImplicitOps, ImplicitOps, ImplicitOps, ImplicitOps,
    ImplicitOps, ImplicitOps, ImplicitOps, ImplicitOps,
    /* 0xE8 - 0xEF */
    ImplicitOps, ImplicitOps, ImplicitOps, ImplicitOps,
    ImplicitOps, ImplicitOps, ImplicitOps, ImplicitOps,
    /* 0xF0 - 0xF7 */
    0, ImplicitOps, 0, 0,
    ImplicitOps, ImplicitOps,
    ByteOp|DstMem|SrcNone|ModRM, DstMem|SrcNone|ModRM,
    /* 0xF8 - 0xFF */
    ImplicitOps, ImplicitOps, ImplicitOps, ImplicitOps,
    ImplicitOps, ImplicitOps, ByteOp|DstMem|SrcNone|ModRM, DstMem|SrcNone|ModRM
};
static uint8_t twobyte_table[256] = {
    /* 0x00 - 0x07 */
    SrcMem16|ModRM, ImplicitOps|ModRM, 0, 0, 0, 0, ImplicitOps, 0,
    /* 0x08 - 0x0F */
    ImplicitOps, ImplicitOps, 0, 0, 0, ImplicitOps|ModRM, 0, 0,
    /* 0x10 - 0x17 */
    0, 0, 0, 0, 0, 0, 0, 0,
    /* 0x18 - 0x1F */
    ImplicitOps|ModRM, ImplicitOps|ModRM, ImplicitOps|ModRM, ImplicitOps|ModRM,
    ImplicitOps|ModRM, ImplicitOps|ModRM, ImplicitOps|ModRM, ImplicitOps|ModRM,
    /* 0x20 - 0x27 */
    ImplicitOps|ModRM, ImplicitOps|ModRM, ImplicitOps|ModRM, ImplicitOps|ModRM,
    0, 0, 0, 0,
    /* 0x28 - 0x2F */
    0, 0, 0, 0, 0, 0, 0, 0,
    /* 0x30 - 0x37 */
    ImplicitOps, ImplicitOps, ImplicitOps, 0, 0, 0, 0, 0,
    /* 0x38 - 0x3F */
    0, 0, 0, 0, 0, 0, 0, 0,
    /* 0x40 - 0x47 */
    DstReg|SrcMem|ModRM|Mov, DstReg|SrcMem|ModRM|Mov,
    DstReg|SrcMem|ModRM|Mov, DstReg|SrcMem|ModRM|Mov,
    DstReg|SrcMem|ModRM|Mov, DstReg|SrcMem|ModRM|Mov,
    DstReg|SrcMem|ModRM|Mov, DstReg|SrcMem|ModRM|Mov,
    /* 0x48 - 0x4F */
    DstReg|SrcMem|ModRM|Mov, DstReg|SrcMem|ModRM|Mov,
    DstReg|SrcMem|ModRM|Mov, DstReg|SrcMem|ModRM|Mov,
    DstReg|SrcMem|ModRM|Mov, DstReg|SrcMem|ModRM|Mov,
    DstReg|SrcMem|ModRM|Mov, DstReg|SrcMem|ModRM|Mov,
    /* 0x50 - 0x5F */
    0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
    /* 0x60 - 0x6F */
    0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, ImplicitOps|ModRM,
    /* 0x70 - 0x7F */
    0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, ImplicitOps|ModRM,
    /* 0x80 - 0x87 */
    ImplicitOps, ImplicitOps, ImplicitOps, ImplicitOps,
    ImplicitOps, ImplicitOps, ImplicitOps, ImplicitOps,
    /* 0x88 - 0x8F */
    ImplicitOps, ImplicitOps, ImplicitOps, ImplicitOps,
    ImplicitOps, ImplicitOps, ImplicitOps, ImplicitOps,
    /* 0x90 - 0x97 */
    ByteOp|DstMem|SrcNone|ModRM|Mov, ByteOp|DstMem|SrcNone|ModRM|Mov,
    ByteOp|DstMem|SrcNone|ModRM|Mov, ByteOp|DstMem|SrcNone|ModRM|Mov,
    ByteOp|DstMem|SrcNone|ModRM|Mov, ByteOp|DstMem|SrcNone|ModRM|Mov,
    ByteOp|DstMem|SrcNone|ModRM|Mov, ByteOp|DstMem|SrcNone|ModRM|Mov,
    /* 0x98 - 0x9F */
    ByteOp|DstMem|SrcNone|ModRM|Mov, ByteOp|DstMem|SrcNone|ModRM|Mov,
    ByteOp|DstMem|SrcNone|ModRM|Mov, ByteOp|DstMem|SrcNone|ModRM|Mov,
    ByteOp|DstMem|SrcNone|ModRM|Mov, ByteOp|DstMem|SrcNone|ModRM|Mov,
    ByteOp|DstMem|SrcNone|ModRM|Mov, ByteOp|DstMem|SrcNone|ModRM|Mov,
    /* 0xA0 - 0xA7 */
    ImplicitOps, ImplicitOps, ImplicitOps, DstBitBase|SrcReg|ModRM,
    DstMem|SrcReg|ModRM, DstMem|SrcReg|ModRM, 0, 0,
    /* 0xA8 - 0xAF */
    ImplicitOps, ImplicitOps, 0, DstBitBase|SrcReg|ModRM,
    DstMem|SrcReg|ModRM, DstMem|SrcReg|ModRM, 0, DstReg|SrcMem|ModRM,
    /* 0xB0 - 0xB7 */
    ByteOp|DstMem|SrcReg|ModRM, DstMem|SrcReg|ModRM,
    DstReg|SrcMem|ModRM|Mov, DstBitBase|SrcReg|ModRM,
    DstReg|SrcMem|ModRM|Mov, DstReg|SrcMem|ModRM|Mov,
    ByteOp|DstReg|SrcMem|ModRM|Mov, DstReg|SrcMem16|ModRM|Mov,
    /* 0xB8 - 0xBF */
    0, 0, DstBitBase|SrcImmByte|ModRM, DstBitBase|SrcReg|ModRM,
    DstReg|SrcMem|ModRM, DstReg|SrcMem|ModRM,
    ByteOp|DstReg|SrcMem|ModRM|Mov, DstReg|SrcMem16|ModRM|Mov,
    /* 0xC0 - 0xC7 */
    ByteOp|DstMem|SrcReg|ModRM, DstMem|SrcReg|ModRM,
    0, DstMem|SrcReg|ModRM|Mov,
    0, 0, 0, ImplicitOps|ModRM,
    /* 0xC8 - 0xCF */
    ImplicitOps, ImplicitOps, ImplicitOps, ImplicitOps,
    ImplicitOps, ImplicitOps, ImplicitOps, ImplicitOps,
    /* 0xD0 - 0xDF */
    0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
    /* 0xE0 - 0xEF */
    0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
    /* 0xF0 - 0xFF */
    0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0
};
/* Type, address-of, and value of an instruction's operand. */
struct operand {
    enum { OP_REG, OP_MEM, OP_IMM, OP_NONE } type;
    unsigned int bytes;

    /* Up to 128-bit operand value, addressable as ulong or uint32_t[]. */
    union {
        unsigned long val;
        uint32_t bigval[4];
    };

    /* Up to 128-bit original operand value, same layout as above. */
    union {
        unsigned long orig_val;
        uint32_t orig_bigval[4];
    };

    union {
        /* OP_REG: Pointer to register field. */
        unsigned long *reg;
        /* OP_MEM: Segment and offset. */
        struct {
            enum x86_segment seg;
            unsigned long off;
        } mem;
    };
};
/* MSRs. */
#define MSR_TSC   0x10

/* Control register flags. */
#define CR0_PE    (1<<0)
#define CR4_TSD   (1<<2)

/* EFLAGS bit definitions. */
#define EFLG_VIP  (1<<20)
#define EFLG_VIF  (1<<19)
#define EFLG_AC   (1<<18)
#define EFLG_VM   (1<<17)
#define EFLG_RF   (1<<16)
#define EFLG_NT   (1<<14)
#define EFLG_IOPL (3<<12)
#define EFLG_OF   (1<<11)
#define EFLG_DF   (1<<10)
#define EFLG_IF   (1<<9)
#define EFLG_TF   (1<<8)
#define EFLG_SF   (1<<7)
#define EFLG_ZF   (1<<6)
#define EFLG_AF   (1<<4)
#define EFLG_PF   (1<<2)
#define EFLG_CF   (1<<0)

/* Exception definitions. */
#define EXC_DE  0
#define EXC_DB  1
#define EXC_BP  3
#define EXC_OF  4
#define EXC_BR  5
#define EXC_UD  6
#define EXC_TS 10
#define EXC_NP 11
#define EXC_SS 12
#define EXC_GP 13
#define EXC_PF 14
#define EXC_MF 16
/*
 * Instruction emulation:
 * Most instructions are emulated directly via a fragment of inline assembly
 * code. This allows us to save/restore EFLAGS and thus very easily pick up
 * any modified flags.
 */

#if defined(__x86_64__)
#define _LO32 "k"          /* force 32-bit operand */
#define _STK  "%%rsp"      /* stack pointer */
#define _BYTES_PER_LONG "8"
#elif defined(__i386__)
#define _LO32 ""           /* force 32-bit operand */
#define _STK  "%%esp"      /* stack pointer */
#define _BYTES_PER_LONG "4"
#endif

/*
 * These EFLAGS bits are restored from saved value during emulation, and
 * any changes are written back to the saved value after emulation.
 */
#define EFLAGS_MASK (EFLG_OF|EFLG_SF|EFLG_ZF|EFLG_AF|EFLG_PF|EFLG_CF)
/* Before executing instruction: restore necessary bits in EFLAGS. */
#define _PRE_EFLAGS(_sav, _msk, _tmp)                           \
/* EFLAGS = (_sav & _msk) | (EFLAGS & ~_msk); _sav &= ~_msk; */ \
"movl %"_sav",%"_LO32 _tmp"; "                                  \
"push %"_tmp"; "                                                \
"push %"_tmp"; "                                                \
"movl %"_msk",%"_LO32 _tmp"; "                                  \
"andl %"_LO32 _tmp",("_STK"); "                                 \
"pushf; "                                                       \
"notl %"_LO32 _tmp"; "                                          \
"andl %"_LO32 _tmp",("_STK"); "                                 \
"andl %"_LO32 _tmp",2*"_BYTES_PER_LONG"("_STK"); "              \
"pop %"_tmp"; "                                                 \
"orl %"_LO32 _tmp",("_STK"); "                                  \
"popf; "                                                        \
"pop %"_sav"; "

/* After executing instruction: write-back necessary bits in EFLAGS. */
#define _POST_EFLAGS(_sav, _msk, _tmp)          \
/* _sav |= EFLAGS & _msk; */                    \
"pushf; "                                       \
"pop %"_tmp"; "                                 \
"andl %"_msk",%"_LO32 _tmp"; "                  \
"orl %"_LO32 _tmp",%"_sav"; "
/* Raw emulation: instruction has two explicit operands. */
#define __emulate_2op_nobyte(_op,_src,_dst,_eflags,_wx,_wy,_lx,_ly,_qx,_qy)\
do{ unsigned long _tmp;                                                    \
    switch ( (_dst).bytes )                                                \
    {                                                                      \
    case 2:                                                                \
        asm volatile (                                                     \
            _PRE_EFLAGS("0","4","2")                                       \
            _op"w %"_wx"3,%1; "                                            \
            _POST_EFLAGS("0","4","2")                                      \
            : "=m" (_eflags), "=m" ((_dst).val), "=&r" (_tmp)              \
            : _wy ((_src).val), "i" (EFLAGS_MASK),                         \
              "m" (_eflags), "m" ((_dst).val) );                           \
        break;                                                             \
    case 4:                                                                \
        asm volatile (                                                     \
            _PRE_EFLAGS("0","4","2")                                       \
            _op"l %"_lx"3,%1; "                                            \
            _POST_EFLAGS("0","4","2")                                      \
            : "=m" (_eflags), "=m" ((_dst).val), "=&r" (_tmp)              \
            : _ly ((_src).val), "i" (EFLAGS_MASK),                         \
              "m" (_eflags), "m" ((_dst).val) );                           \
        break;                                                             \
    case 8:                                                                \
        __emulate_2op_8byte(_op, _src, _dst, _eflags, _qx, _qy);           \
        break;                                                             \
    }                                                                      \
} while (0)
#define __emulate_2op(_op,_src,_dst,_eflags,_bx,_by,_wx,_wy,_lx,_ly,_qx,_qy)\
do{ unsigned long _tmp;                                                    \
    switch ( (_dst).bytes )                                                \
    {                                                                      \
    case 1:                                                                \
        asm volatile (                                                     \
            _PRE_EFLAGS("0","4","2")                                       \
            _op"b %"_bx"3,%1; "                                            \
            _POST_EFLAGS("0","4","2")                                      \
            : "=m" (_eflags), "=m" ((_dst).val), "=&r" (_tmp)              \
            : _by ((_src).val), "i" (EFLAGS_MASK),                         \
              "m" (_eflags), "m" ((_dst).val) );                           \
        break;                                                             \
    default:                                                               \
        __emulate_2op_nobyte(_op,_src,_dst,_eflags,_wx,_wy,_lx,_ly,_qx,_qy);\
        break;                                                             \
    }                                                                      \
} while (0)
/* Source operand is byte-sized and may be restricted to just %cl. */
#define emulate_2op_SrcB(_op, _src, _dst, _eflags)        \
    __emulate_2op(_op, _src, _dst, _eflags,               \
                  "b", "c", "b", "c", "b", "c", "b", "c")
/* Source operand is byte, word, long or quad sized. */
#define emulate_2op_SrcV(_op, _src, _dst, _eflags)        \
    __emulate_2op(_op, _src, _dst, _eflags,               \
                  "b", "q", "w", "r", _LO32, "r", "", "r")
/* Source operand is word, long or quad sized. */
#define emulate_2op_SrcV_nobyte(_op, _src, _dst, _eflags) \
    __emulate_2op_nobyte(_op, _src, _dst, _eflags,        \
                         "w", "r", _LO32, "r", "", "r")
/* Instruction has only one explicit operand (no source operand). */
#define emulate_1op(_op,_dst,_eflags)                                      \
do{ unsigned long _tmp;                                                    \
    switch ( (_dst).bytes )                                                \
    {                                                                      \
    case 1:                                                                \
        asm volatile (                                                     \
            _PRE_EFLAGS("0","3","2")                                       \
            _op"b %1; "                                                    \
            _POST_EFLAGS("0","3","2")                                      \
            : "=m" (_eflags), "=m" ((_dst).val), "=&r" (_tmp)              \
            : "i" (EFLAGS_MASK), "m" (_eflags), "m" ((_dst).val) );        \
        break;                                                             \
    case 2:                                                                \
        asm volatile (                                                     \
            _PRE_EFLAGS("0","3","2")                                       \
            _op"w %1; "                                                    \
            _POST_EFLAGS("0","3","2")                                      \
            : "=m" (_eflags), "=m" ((_dst).val), "=&r" (_tmp)              \
            : "i" (EFLAGS_MASK), "m" (_eflags), "m" ((_dst).val) );        \
        break;                                                             \
    case 4:                                                                \
        asm volatile (                                                     \
            _PRE_EFLAGS("0","3","2")                                       \
            _op"l %1; "                                                    \
            _POST_EFLAGS("0","3","2")                                      \
            : "=m" (_eflags), "=m" ((_dst).val), "=&r" (_tmp)              \
            : "i" (EFLAGS_MASK), "m" (_eflags), "m" ((_dst).val) );        \
        break;                                                             \
    case 8:                                                                \
        __emulate_1op_8byte(_op, _dst, _eflags);                           \
        break;                                                             \
    }                                                                      \
} while (0)
/* Emulate an instruction with quadword operands (x86/64 only). */
#if defined(__x86_64__)
#define __emulate_2op_8byte(_op, _src, _dst, _eflags, _qx, _qy)  \
do{ asm volatile (                                               \
        _PRE_EFLAGS("0","4","2")                                 \
        _op"q %"_qx"3,%1; "                                      \
        _POST_EFLAGS("0","4","2")                                \
        : "=m" (_eflags), "=m" ((_dst).val), "=&r" (_tmp)        \
        : _qy ((_src).val), "i" (EFLAGS_MASK),                   \
          "m" (_eflags), "m" ((_dst).val) );                     \
} while (0)
#define __emulate_1op_8byte(_op, _dst, _eflags)                  \
do{ asm volatile (                                               \
        _PRE_EFLAGS("0","3","2")                                 \
        _op"q %1; "                                              \
        _POST_EFLAGS("0","3","2")                                \
        : "=m" (_eflags), "=m" ((_dst).val), "=&r" (_tmp)        \
        : "i" (EFLAGS_MASK), "m" (_eflags), "m" ((_dst).val) );  \
} while (0)
#elif defined(__i386__)
#define __emulate_2op_8byte(_op, _src, _dst, _eflags, _qx, _qy)
#define __emulate_1op_8byte(_op, _dst, _eflags)
#endif /* __i386__ */
/* Fetch next part of the instruction being emulated. */
#define insn_fetch_bytes(_size)                                         \
({ unsigned long _x = 0, _eip = _regs.eip;                              \
   if ( !mode_64bit() ) _eip = (uint32_t)_eip; /* ignore upper dword */ \
   _regs.eip += (_size); /* real hardware doesn't truncate */           \
   generate_exception_if((uint8_t)(_regs.eip - ctxt->regs->eip) > 15,   \
                         EXC_GP, 0);                                    \
   rc = ops->insn_fetch(x86_seg_cs, _eip, &_x, (_size), ctxt);          \
   if ( rc ) goto done;                                                 \
   _x;                                                                  \
})
#define insn_fetch_type(_type) ((_type)insn_fetch_bytes(sizeof(_type)))

#define truncate_word(ea, byte_width)           \
({  unsigned long __ea = (ea);                  \
    unsigned int _width = (byte_width);         \
    ((_width == sizeof(unsigned long)) ? __ea : \
     (__ea & ((1UL << (_width << 3)) - 1)));    \
})
#define truncate_ea(ea) truncate_word((ea), ad_bytes)
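
/*
 * Example: truncate_word(0x12345678, 2) == 0x5678, while a byte_width equal
 * to sizeof(unsigned long) passes the address through unchanged.
 */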
#define mode_64bit() (def_ad_bytes == 8)

#define fail_if(p)                                      \
do {                                                    \
    rc = (p) ? X86EMUL_UNHANDLEABLE : X86EMUL_OKAY;     \
    if ( rc ) goto done;                                \
} while (0)

#define generate_exception_if(p, e, ec)                                   \
({  if ( (p) ) {                                                          \
        fail_if(ops->inject_hw_exception == NULL);                        \
        rc = ops->inject_hw_exception(e, ec, ctxt) ? : X86EMUL_EXCEPTION; \
        goto done;                                                        \
    }                                                                     \
})
/*
 * Given byte has even parity (even number of 1s)? SDM Vol. 1 Sec. 3.4.3.1,
 * "Status Flags": EFLAGS.PF reflects parity of least-sig. byte of result only.
 */
static int even_parity(uint8_t v)
{
    asm ( "test %b0,%b0; setp %b0" : "=a" (v) : "0" (v) );
    return v;
}
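
/* Example: even_parity(0x03) == 1 (two set bits); even_parity(0x01) == 0. */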
/* Update address held in a register, based on addressing mode. */
#define _register_address_increment(reg, inc, byte_width)               \
do {                                                                    \
    int _inc = (inc); /* signed type ensures sign extension to long */  \
    unsigned int _width = (byte_width);                                 \
    if ( _width == sizeof(unsigned long) )                              \
        (reg) += _inc;                                                  \
    else if ( mode_64bit() )                                            \
        (reg) = ((reg) + _inc) & ((1UL << (_width << 3)) - 1);          \
    else                                                                \
        (reg) = ((reg) & ~((1UL << (_width << 3)) - 1)) |               \
                (((reg) + _inc) & ((1UL << (_width << 3)) - 1));        \
} while (0)
#define register_address_increment(reg, inc) \
    _register_address_increment((reg), (inc), ad_bytes)
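
/*
 * Example: with ad_bytes == 2 and %esi == 0x1234ffff, an increment of 1
 * wraps only the low word, yielding 0x12340000; in 64-bit mode the whole
 * register is instead truncated to the address width.
 */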
#define sp_pre_dec(dec) ({                                              \
    _register_address_increment(_regs.esp, -(dec), ctxt->sp_size/8);    \
    truncate_word(_regs.esp, ctxt->sp_size/8);                          \
})
#define sp_post_inc(inc) ({                                             \
    unsigned long __esp = truncate_word(_regs.esp, ctxt->sp_size/8);    \
    _register_address_increment(_regs.esp, (inc), ctxt->sp_size/8);     \
    __esp;                                                              \
})
#define jmp_rel(rel)                                                  \
do {                                                                  \
    int _rel = (int)(rel);                                            \
    _regs.eip += _rel;                                                \
    if ( !mode_64bit() )                                              \
        _regs.eip = ((op_bytes == 2)                                  \
                     ? (uint16_t)_regs.eip : (uint32_t)_regs.eip);    \
} while (0)

struct fpu_insn_ctxt {
    uint8_t insn_bytes;
    uint8_t exn_raised;
};
static void fpu_handle_exception(void *_fic, struct cpu_user_regs *regs)
{
    struct fpu_insn_ctxt *fic = _fic;
    fic->exn_raised = 1;
    regs->eip += fic->insn_bytes;
}

#define get_fpu(_type, _fic)                                    \
do{ (_fic)->exn_raised = 0;                                     \
    fail_if(ops->get_fpu == NULL);                              \
    rc = ops->get_fpu(fpu_handle_exception, _fic, _type, ctxt); \
    if ( rc ) goto done;                                        \
} while (0)
#define put_fpu(_fic)                                           \
do{                                                             \
    if ( ops->put_fpu != NULL )                                 \
        ops->put_fpu(ctxt);                                     \
    generate_exception_if((_fic)->exn_raised, EXC_MF, -1);      \
} while (0)

#define emulate_fpu_insn(_op)                   \
do{ struct fpu_insn_ctxt fic;                   \
    get_fpu(X86EMUL_FPU_fpu, &fic);             \
    asm volatile (                              \
        "movb $2f-1f,%0 \n"                     \
        "1: " _op "     \n"                     \
        "2:             \n"                     \
        : "=m" (fic.insn_bytes) : : "memory" ); \
    put_fpu(&fic);                              \
} while (0)

#define emulate_fpu_insn_memdst(_op, _arg)      \
do{ struct fpu_insn_ctxt fic;                   \
    get_fpu(X86EMUL_FPU_fpu, &fic);             \
    asm volatile (                              \
        "movb $2f-1f,%0 \n"                     \
        "1: " _op " %1  \n"                     \
        "2:             \n"                     \
        : "=m" (fic.insn_bytes), "=m" (_arg)    \
        : : "memory" );                         \
    put_fpu(&fic);                              \
} while (0)

#define emulate_fpu_insn_memsrc(_op, _arg)      \
do{ struct fpu_insn_ctxt fic;                   \
    get_fpu(X86EMUL_FPU_fpu, &fic);             \
    asm volatile (                              \
        "movb $2f-1f,%0 \n"                     \
        "1: " _op " %1  \n"                     \
        "2:             \n"                     \
        : "=m" (fic.insn_bytes)                 \
        : "m" (_arg) : "memory" );              \
    put_fpu(&fic);                              \
} while (0)

#define emulate_fpu_insn_stub(_bytes...)                         \
do{ uint8_t stub[] = { _bytes, 0xc3 };                           \
    struct fpu_insn_ctxt fic = { .insn_bytes = sizeof(stub)-1 }; \
    get_fpu(X86EMUL_FPU_fpu, &fic);                              \
    (*(void(*)(void))stub)();                                    \
    put_fpu(&fic);                                               \
} while (0)
static unsigned long __get_rep_prefix(
    struct cpu_user_regs *int_regs,
    struct cpu_user_regs *ext_regs,
    int ad_bytes)
{
    unsigned long ecx = ((ad_bytes == 2) ? (uint16_t)int_regs->ecx :
                         (ad_bytes == 4) ? (uint32_t)int_regs->ecx :
                         int_regs->ecx);

    /* Skip the instruction if no repetitions are required. */
    if ( ecx == 0 )
        ext_regs->eip = int_regs->eip;

    return ecx;
}

#define get_rep_prefix() ({                                             \
    unsigned long max_reps = 1;                                         \
    if ( rep_prefix )                                                   \
        max_reps = __get_rep_prefix(&_regs, ctxt->regs, ad_bytes);      \
    if ( max_reps == 0 )                                                \
        goto done;                                                      \
    max_reps;                                                           \
})
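
/*
 * Example of the REP protocol: for "rep movsw" with %cx == 0 the macro
 * commits the post-instruction %eip and bails out immediately; otherwise the
 * instruction body performs up to max_reps iterations and reports how many
 * actually completed via put_rep_prefix() below.
 */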
static void __put_rep_prefix(
    struct cpu_user_regs *int_regs,
    struct cpu_user_regs *ext_regs,
    int ad_bytes,
    unsigned long reps_completed)
{
    unsigned long ecx = ((ad_bytes == 2) ? (uint16_t)int_regs->ecx :
                         (ad_bytes == 4) ? (uint32_t)int_regs->ecx :
                         int_regs->ecx);

    /* Reduce counter appropriately, and repeat instruction if non-zero. */
    ecx -= reps_completed;
    if ( ecx != 0 )
        int_regs->eip = ext_regs->eip;

    if ( ad_bytes == 2 )
        *(uint16_t *)&int_regs->ecx = ecx;
    else if ( ad_bytes == 4 )
        int_regs->ecx = (uint32_t)ecx;
    else
        int_regs->ecx = ecx;
}

#define put_rep_prefix(reps_completed) ({                               \
    if ( rep_prefix )                                                   \
        __put_rep_prefix(&_regs, ctxt->regs, ad_bytes, reps_completed); \
})

/* Clip maximum repetitions so that the index register only just wraps. */
#define truncate_ea_and_reps(ea, reps, bytes_per_rep) ({                  \
    unsigned long __todo = (ctxt->regs->eflags & EFLG_DF) ? (ea) : ~(ea); \
    __todo = truncate_word(__todo, ad_bytes);                             \
    __todo = (__todo / (bytes_per_rep)) + 1;                              \
    (reps) = (__todo < (reps)) ? __todo : (reps);                         \
    truncate_word((ea), ad_bytes);                                        \
})
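
/*
 * Worked example: with ad_bytes == 2, EFLAGS.DF clear, %di == 0xfffe and
 * 2-byte elements, __todo = (~0xfffe & 0xffff) / 2 + 1 == 1, so a longer
 * REP count is clipped to a single repetition before %di would wrap.
 */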
/* Compatibility function: read guest memory, zero-extend result to a ulong. */
static int read_ulong(
    enum x86_segment seg,
    unsigned long offset,
    unsigned long *val,
    unsigned int bytes,
    struct x86_emulate_ctxt *ctxt,
    struct x86_emulate_ops *ops)
{
    *val = 0;
    return ops->read(seg, offset, val, bytes, ctxt);
}
/*
 * Unsigned multiplication with double-word result.
 * IN:  Multiplicand=m[0], Multiplier=m[1]
 * OUT: Return CF/OF (overflow status); Result=m[1]:m[0]
 */
static int mul_dbl(unsigned long m[2])
{
    int rc;
    asm ( "mul %4; seto %b2"
          : "=a" (m[0]), "=d" (m[1]), "=q" (rc)
          : "0" (m[0]), "1" (m[1]), "2" (0) );
    return rc;
}

/*
 * Signed multiplication with double-word result.
 * IN:  Multiplicand=m[0], Multiplier=m[1]
 * OUT: Return CF/OF (overflow status); Result=m[1]:m[0]
 */
static int imul_dbl(unsigned long m[2])
{
    int rc;
    asm ( "imul %4; seto %b2"
          : "=a" (m[0]), "=d" (m[1]), "=q" (rc)
          : "0" (m[0]), "1" (m[1]), "2" (0) );
    return rc;
}

/*
 * Unsigned division of double-word dividend.
 * IN:  Dividend=u[1]:u[0], Divisor=v
 * OUT: Return 1: #DE
 *      Return 0: Quotient=u[0], Remainder=u[1]
 */
static int div_dbl(unsigned long u[2], unsigned long v)
{
    if ( (v == 0) || (u[1] >= v) )
        return 1;
    asm ( "div %4"
          : "=a" (u[0]), "=d" (u[1])
          : "0" (u[0]), "1" (u[1]), "r" (v) );
    return 0;
}

/*
 * Signed division of double-word dividend.
 * IN:  Dividend=u[1]:u[0], Divisor=v
 * OUT: Return 1: #DE
 *      Return 0: Quotient=u[0], Remainder=u[1]
 * NB. We don't use idiv directly as it's moderately hard to work out
 *     ahead of time whether it will #DE, which we cannot allow to happen.
 */
static int idiv_dbl(unsigned long u[2], unsigned long v)
{
    int negu = (long)u[1] < 0, negv = (long)v < 0;

    /* u = abs(u) */
    if ( negu )
    {
        u[1] = ~u[1];
        if ( (u[0] = -u[0]) == 0 )
            u[1]++;
    }

    /* abs(u) / abs(v) */
    if ( div_dbl(u, negv ? -v : v) )
        return 1;

    /* Remainder has same sign as dividend. It cannot overflow. */
    if ( negu )
        u[1] = -u[1];

    /* Quotient is overflowed if sign bit is set. */
    if ( negu ^ negv )
    {
        if ( (long)u[0] >= 0 )
            u[0] = -u[0];
        else if ( (u[0] << 1) != 0 ) /* == 0x80...0 is okay */
            return 1;
    }
    else if ( (long)u[0] < 0 )
        return 1;

    return 0;
}
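
/*
 * Example: on a 32-bit build, idiv_dbl() with u[1]:u[0] = -7
 * (0xffffffff:0xfffffff9) and v = 2 negates the dividend, divides 7 by 2,
 * and re-applies the signs, yielding quotient u[0] = -3 and remainder
 * u[1] = -1 -- matching IDIV's truncation towards zero.
 */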
static int
test_cc(
    unsigned int condition, unsigned int flags)
{
    int rc = 0;

    switch ( (condition & 15) >> 1 )
    {
    case 0: /* o */
        rc |= (flags & EFLG_OF);
        break;
    case 1: /* b/c/nae */
        rc |= (flags & EFLG_CF);
        break;
    case 2: /* z/e */
        rc |= (flags & EFLG_ZF);
        break;
    case 3: /* be/na */
        rc |= (flags & (EFLG_CF|EFLG_ZF));
        break;
    case 4: /* s */
        rc |= (flags & EFLG_SF);
        break;
    case 5: /* p/pe */
        rc |= (flags & EFLG_PF);
        break;
    case 7: /* le/ng */
        rc |= (flags & EFLG_ZF);
        /* fall through */
    case 6: /* l/nge */
        rc |= (!(flags & EFLG_SF) != !(flags & EFLG_OF));
        break;
    }

    /* Odd condition identifiers (lsb == 1) have inverted sense. */
    return (!!rc ^ (condition & 1));
}
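
/*
 * Example: for "je" (opcode 0x74) the low nibble is 4, so case 2 applies
 * and the test is simply EFLAGS.ZF; "jne" (opcode 0x75) has its lsb set
 * and therefore inverts the result.
 */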
static int
get_cpl(
    struct x86_emulate_ctxt *ctxt,
    struct x86_emulate_ops  *ops)
{
    struct segment_register reg;

    if ( ctxt->regs->eflags & EFLG_VM )
        return 3;

    if ( (ops->read_segment == NULL) ||
         ops->read_segment(x86_seg_ss, &reg, ctxt) )
        return -1;

    return reg.attr.fields.dpl;
}

static int
_mode_iopl(
    struct x86_emulate_ctxt *ctxt,
    struct x86_emulate_ops  *ops)
{
    int cpl = get_cpl(ctxt, ops);
    if ( cpl == -1 )
        return -1;
    return (cpl <= ((ctxt->regs->eflags >> 12) & 3));
}

#define mode_ring0() ({                         \
    int _cpl = get_cpl(ctxt, ops);              \
    fail_if(_cpl < 0);                          \
    (_cpl == 0);                                \
})
#define mode_iopl() ({                          \
    int _iopl = _mode_iopl(ctxt, ops);          \
    fail_if(_iopl < 0);                         \
    _iopl;                                      \
})
static int ioport_access_check(
    unsigned int first_port,
    unsigned int bytes,
    struct x86_emulate_ctxt *ctxt,
    struct x86_emulate_ops *ops)
{
    unsigned long iobmp;
    struct segment_register tr;
    int rc = X86EMUL_OKAY;

    if ( !(ctxt->regs->eflags & EFLG_VM) && mode_iopl() )
        return X86EMUL_OKAY;

    fail_if(ops->read_segment == NULL);
    if ( (rc = ops->read_segment(x86_seg_tr, &tr, ctxt)) != 0 )
        return rc;

    /* Ensure that the TSS is valid and has an io-bitmap-offset field. */
    if ( !tr.attr.fields.p ||
         ((tr.attr.fields.type & 0xd) != 0x9) ||
         (tr.limit < 0x67) )
        goto raise_exception;

    if ( (rc = read_ulong(x86_seg_none, tr.base + 0x66,
                          &iobmp, 2, ctxt, ops)) )
        return rc;

    /* Ensure the TSS covers the two bitmap bytes containing the first port. */
    iobmp += first_port / 8;
    if ( tr.limit <= iobmp )
        goto raise_exception;

    if ( (rc = read_ulong(x86_seg_none, tr.base + iobmp,
                          &iobmp, 2, ctxt, ops)) )
        return rc;
    if ( (iobmp & (((1<<bytes)-1) << (first_port&7))) != 0 )
        goto raise_exception;

 done:
    return rc;

 raise_exception:
    fail_if(ops->inject_hw_exception == NULL);
    return ops->inject_hw_exception(EXC_GP, 0, ctxt) ? : X86EMUL_EXCEPTION;
}
static int
in_realmode(
    struct x86_emulate_ctxt *ctxt,
    struct x86_emulate_ops  *ops)
{
    unsigned long cr0;
    int rc;

    if ( ops->read_cr == NULL )
        return 0;

    rc = ops->read_cr(0, &cr0, ctxt);
    return (!rc && !(cr0 & CR0_PE));
}

static int
in_protmode(
    struct x86_emulate_ctxt *ctxt,
    struct x86_emulate_ops  *ops)
{
    return !(in_realmode(ctxt, ops) || (ctxt->regs->eflags & EFLG_VM));
}
static int
realmode_load_seg(
    enum x86_segment seg,
    uint16_t sel,
    struct x86_emulate_ctxt *ctxt,
    struct x86_emulate_ops *ops)
{
    struct segment_register reg;
    int rc;

    if ( (rc = ops->read_segment(seg, &reg, ctxt)) != 0 )
        return rc;

    reg.sel  = sel;
    reg.base = (uint32_t)sel << 4;

    return ops->write_segment(seg, &reg, ctxt);
}
static int
protmode_load_seg(
    enum x86_segment seg,
    uint16_t sel,
    struct x86_emulate_ctxt *ctxt,
    struct x86_emulate_ops *ops)
{
    struct segment_register desctab, ss, segr;
    struct { uint32_t a, b; } desc;
    unsigned long val;
    uint8_t dpl, rpl, cpl;
    uint32_t new_desc_b, a_flag = 0x100;
    int rc, fault_type = EXC_GP;

    /* NULL selector? */
    if ( (sel & 0xfffc) == 0 )
    {
        if ( (seg == x86_seg_cs) || (seg == x86_seg_ss) )
            goto raise_exn;
        memset(&segr, 0, sizeof(segr));
        return ops->write_segment(seg, &segr, ctxt);
    }

    /* System segment descriptors must reside in the GDT. */
    if ( !is_x86_user_segment(seg) && (sel & 4) )
        goto raise_exn;

    if ( (rc = ops->read_segment(x86_seg_ss, &ss, ctxt)) ||
         (rc = ops->read_segment((sel & 4) ? x86_seg_ldtr : x86_seg_gdtr,
                                 &desctab, ctxt)) )
        return rc;

    /* Check against descriptor table limit. */
    if ( ((sel & 0xfff8) + 7) > desctab.limit )
        goto raise_exn;

    do {
        if ( (rc = read_ulong(x86_seg_none, desctab.base + (sel & 0xfff8),
                              &val, 4, ctxt, ops)) )
            return rc;
        desc.a = val;
        if ( (rc = read_ulong(x86_seg_none, desctab.base + (sel & 0xfff8) + 4,
                              &val, 4, ctxt, ops)) )
            return rc;
        desc.b = val;

        /* Segment present in memory? */
        if ( !(desc.b & (1u<<15)) )
        {
            fault_type = EXC_NP;
            goto raise_exn;
        }

        /* System segments must have the system flag (S) set. */
        if ( (desc.b & (1u<<12)) == (!is_x86_user_segment(seg) << 12) )
            goto raise_exn;

        dpl = (desc.b >> 13) & 3;
        rpl = sel & 3;
        cpl = ss.attr.fields.dpl;

        switch ( seg )
        {
        case x86_seg_cs:
            /* Code segment? */
            if ( !(desc.b & (1u<<11)) )
                goto raise_exn;
            /* Non-conforming segment: check DPL against RPL. */
            if ( ((desc.b & (6u<<9)) != (6u<<9)) && (dpl != rpl) )
                goto raise_exn;
            break;
        case x86_seg_ss:
            /* Writable data segment? */
            if ( (desc.b & (5u<<9)) != (1u<<9) )
                goto raise_exn;
            if ( (dpl != cpl) || (dpl != rpl) )
                goto raise_exn;
            break;
        case x86_seg_ldtr:
            /* LDT system segment? */
            if ( (desc.b & (15u<<8)) != (2u<<8) )
                goto raise_exn;
            goto skip_accessed_flag;
        case x86_seg_tr:
            /* Available TSS system segment? */
            if ( (desc.b & (15u<<8)) != (9u<<8) )
                goto raise_exn;
            a_flag = 0x200; /* busy flag */
            break;
        default:
            /* Readable code or data segment? */
            if ( (desc.b & (5u<<9)) == (4u<<9) )
                goto raise_exn;
            /* Non-conforming segment: check DPL against RPL and CPL. */
            if ( ((desc.b & (6u<<9)) != (6u<<9)) &&
                 ((dpl < cpl) || (dpl < rpl)) )
                goto raise_exn;
            break;
        }

        /* Ensure Accessed flag is set. */
        new_desc_b = desc.b | a_flag;
        rc = ((desc.b & a_flag) ? X86EMUL_OKAY :
              ops->cmpxchg(
                  x86_seg_none, desctab.base + (sel & 0xfff8) + 4,
                  &desc.b, &new_desc_b, 4, ctxt));
    } while ( rc == X86EMUL_CMPXCHG_FAILED );

    if ( rc )
        return rc;

    /* Force the Accessed flag in our local copy. */
    desc.b |= a_flag;

 skip_accessed_flag:
    segr.base = (((desc.b <<  0) & 0xff000000u) |
                 ((desc.b << 16) & 0x00ff0000u) |
                 ((desc.a >> 16) & 0x0000ffffu));
    segr.attr.bytes = (((desc.b >>  8) & 0x00ffu) |
                       ((desc.b >> 12) & 0x0f00u));
    segr.limit = (desc.b & 0x000f0000u) | (desc.a & 0x0000ffffu);
    if ( segr.attr.fields.g )
        segr.limit = (segr.limit << 12) | 0xfffu;
    segr.sel = sel;
    return ops->write_segment(seg, &segr, ctxt);

 raise_exn:
    if ( ops->inject_hw_exception == NULL )
        return X86EMUL_UNHANDLEABLE;
    if ( (rc = ops->inject_hw_exception(fault_type, sel & 0xfffc, ctxt)) )
        return rc;
    return X86EMUL_EXCEPTION;
}
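
/*
 * Descriptor unpacking example: for desc.a = 0x0000ffff and
 * desc.b = 0x00cf9300 (a flat 4GiB writable data segment), the code above
 * yields base = 0, attr.bytes = 0xf93 (present, DPL 0, writable data,
 * G and D/B set) and limit = 0xfffff, which the granularity bit expands
 * to 0xffffffff bytes.
 */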
static int
load_seg(
    enum x86_segment seg,
    uint16_t sel,
    struct x86_emulate_ctxt *ctxt,
    struct x86_emulate_ops *ops)
{
    if ( (ops->read_segment == NULL) ||
         (ops->write_segment == NULL) )
        return X86EMUL_UNHANDLEABLE;

    if ( in_protmode(ctxt, ops) )
        return protmode_load_seg(seg, sel, ctxt, ops);

    return realmode_load_seg(seg, sel, ctxt, ops);
}
void *
decode_register(
    uint8_t modrm_reg, struct cpu_user_regs *regs, int highbyte_regs)
{
    void *p;

    switch ( modrm_reg )
    {
    case  0: p = &regs->eax; break;
    case  1: p = &regs->ecx; break;
    case  2: p = &regs->edx; break;
    case  3: p = &regs->ebx; break;
    case  4: p = (highbyte_regs ?
                  ((unsigned char *)&regs->eax + 1) :
                  (unsigned char *)&regs->esp); break;
    case  5: p = (highbyte_regs ?
                  ((unsigned char *)&regs->ecx + 1) :
                  (unsigned char *)&regs->ebp); break;
    case  6: p = (highbyte_regs ?
                  ((unsigned char *)&regs->edx + 1) :
                  (unsigned char *)&regs->esi); break;
    case  7: p = (highbyte_regs ?
                  ((unsigned char *)&regs->ebx + 1) :
                  (unsigned char *)&regs->edi); break;
#if defined(__x86_64__)
    case  8: p = &regs->r8;  break;
    case  9: p = &regs->r9;  break;
    case 10: p = &regs->r10; break;
    case 11: p = &regs->r11; break;
    case 12: p = &regs->r12; break;
    case 13: p = &regs->r13; break;
    case 14: p = &regs->r14; break;
    case 15: p = &regs->r15; break;
#endif
    default: p = NULL; break;
    }

    return p;
}
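
/*
 * Example: for a ByteOp instruction with no REX prefix, highbyte_regs is
 * set and encoding 4 selects the legacy high-byte register %ah (byte 1 of
 * %eax) rather than %esp.
 */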
#define decode_segment_failed x86_seg_tr
enum x86_segment
decode_segment(
    uint8_t modrm_reg)
{
    switch ( modrm_reg )
    {
    case 0: return x86_seg_es;
    case 1: return x86_seg_cs;
    case 2: return x86_seg_ss;
    case 3: return x86_seg_ds;
    case 4: return x86_seg_fs;
    case 5: return x86_seg_gs;
    default: break;
    }
    return decode_segment_failed;
}
int
x86_emulate(
    struct x86_emulate_ctxt *ctxt,
    struct x86_emulate_ops *ops)
{
    /* Shadow copy of register state. Committed on successful emulation. */
    struct cpu_user_regs _regs = *ctxt->regs;

    uint8_t b, d, sib, sib_index, sib_base, twobyte = 0, rex_prefix = 0;
    uint8_t modrm = 0, modrm_mod = 0, modrm_reg = 0, modrm_rm = 0;
    unsigned int op_bytes, def_op_bytes, ad_bytes, def_ad_bytes;
#define REPE_PREFIX 1
#define REPNE_PREFIX 2
    unsigned int lock_prefix = 0, rep_prefix = 0;
    int override_seg = -1, rc = X86EMUL_OKAY;
    struct operand src, dst;

    /*
     * Data operand effective address (usually computed from ModRM).
     * Default is a memory operand relative to segment DS.
     */
    struct operand ea = { .type = OP_MEM };
    ea.mem.seg = x86_seg_ds; /* gcc may reject anon union initializer */

    ctxt->retire.byte = 0;

    op_bytes = def_op_bytes = ad_bytes = def_ad_bytes = ctxt->addr_size/8;
    if ( op_bytes == 8 )
    {
        op_bytes = def_op_bytes = 4;
#ifndef __x86_64__
        return X86EMUL_UNHANDLEABLE;
#endif
    }
    /* Prefix bytes. */
    for ( ; ; )
    {
        switch ( b = insn_fetch_type(uint8_t) )
        {
        case 0x66: /* operand-size override */
            op_bytes = def_op_bytes ^ 6;
            break;
        case 0x67: /* address-size override */
            ad_bytes = def_ad_bytes ^ (mode_64bit() ? 12 : 6);
            break;
        case 0x2e: /* CS override */
            override_seg = x86_seg_cs;
            break;
        case 0x3e: /* DS override */
            override_seg = x86_seg_ds;
            break;
        case 0x26: /* ES override */
            override_seg = x86_seg_es;
            break;
        case 0x64: /* FS override */
            override_seg = x86_seg_fs;
            break;
        case 0x65: /* GS override */
            override_seg = x86_seg_gs;
            break;
        case 0x36: /* SS override */
            override_seg = x86_seg_ss;
            break;
        case 0xf0: /* LOCK */
            lock_prefix = 1;
            break;
        case 0xf2: /* REPNE/REPNZ */
            rep_prefix = REPNE_PREFIX;
            break;
        case 0xf3: /* REP/REPE/REPZ */
            rep_prefix = REPE_PREFIX;
            break;
        case 0x40 ... 0x4f: /* REX */
            if ( !mode_64bit() )
                goto done_prefixes;
            rex_prefix = b;
            continue;
        default:
            goto done_prefixes;
        }

        /* Any legacy prefix after a REX prefix nullifies its effect. */
        rex_prefix = 0;
    }
 done_prefixes:

    if ( rex_prefix & 8 ) /* REX.W */
        op_bytes = 8;
    /* Opcode byte(s). */
    d = opcode_table[b];
    if ( d == 0 )
    {
        /* Two-byte opcode? */
        if ( b == 0x0f )
        {
            twobyte = 1;
            b = insn_fetch_type(uint8_t);
            d = twobyte_table[b];
        }

        /* Unrecognised? */
        if ( d == 0 )
            goto cannot_emulate;
    }

    /* Lock prefix is allowed only on RMW instructions. */
    generate_exception_if((d & Mov) && lock_prefix, EXC_GP, 0);
    /* ModRM and SIB bytes. */
    if ( d & ModRM )
    {
        modrm = insn_fetch_type(uint8_t);
        modrm_mod = (modrm & 0xc0) >> 6;
        modrm_reg = ((rex_prefix & 4) << 1) | ((modrm & 0x38) >> 3);
        modrm_rm  = modrm & 0x07;

        if ( modrm_mod == 3 )
        {
            modrm_rm |= (rex_prefix & 1) << 3;
            ea.type = OP_REG;
            ea.reg  = decode_register(
                modrm_rm, &_regs, (d & ByteOp) && (rex_prefix == 0));
        }
        else if ( ad_bytes == 2 )
        {
            /* 16-bit ModR/M decode. */
            switch ( modrm_rm )
            {
            case 0:
                ea.mem.off = _regs.ebx + _regs.esi;
                break;
            case 1:
                ea.mem.off = _regs.ebx + _regs.edi;
                break;
            case 2:
                ea.mem.seg = x86_seg_ss;
                ea.mem.off = _regs.ebp + _regs.esi;
                break;
            case 3:
                ea.mem.seg = x86_seg_ss;
                ea.mem.off = _regs.ebp + _regs.edi;
                break;
            case 4:
                ea.mem.off = _regs.esi;
                break;
            case 5:
                ea.mem.off = _regs.edi;
                break;
            case 6:
                if ( modrm_mod == 0 )
                    break;
                ea.mem.seg = x86_seg_ss;
                ea.mem.off = _regs.ebp;
                break;
            case 7:
                ea.mem.off = _regs.ebx;
                break;
            }
            switch ( modrm_mod )
            {
            case 0:
                if ( modrm_rm == 6 )
                    ea.mem.off = insn_fetch_type(int16_t);
                break;
            case 1:
                ea.mem.off += insn_fetch_type(int8_t);
                break;
            case 2:
                ea.mem.off += insn_fetch_type(int16_t);
                break;
            }
            ea.mem.off = truncate_ea(ea.mem.off);
        }
        else
        {
            /* 32/64-bit ModR/M decode. */
            if ( modrm_rm == 4 )
            {
                sib = insn_fetch_type(uint8_t);
                sib_index = ((sib >> 3) & 7) | ((rex_prefix << 2) & 8);
                sib_base  = (sib & 7) | ((rex_prefix << 3) & 8);
                if ( sib_index != 4 )
                    ea.mem.off = *(long*)decode_register(sib_index, &_regs, 0);
                ea.mem.off <<= (sib >> 6) & 3;
                if ( (modrm_mod == 0) && ((sib_base & 7) == 5) )
                    ea.mem.off += insn_fetch_type(int32_t);
                else if ( sib_base == 4 )
                {
                    ea.mem.seg  = x86_seg_ss;
                    ea.mem.off += _regs.esp;
                    if ( !twobyte && (b == 0x8f) )
                        /* POP <rm> computes its EA post increment. */
                        ea.mem.off += ((mode_64bit() && (op_bytes == 4))
                                       ? 8 : op_bytes);
                }
                else if ( sib_base == 5 )
                {
                    ea.mem.seg  = x86_seg_ss;
                    ea.mem.off += _regs.ebp;
                }
                else
                    ea.mem.off += *(long*)decode_register(sib_base, &_regs, 0);
            }
            else
            {
                modrm_rm |= (rex_prefix & 1) << 3;
                ea.mem.off = *(long *)decode_register(modrm_rm, &_regs, 0);
                if ( (modrm_rm == 5) && (modrm_mod != 0) )
                    ea.mem.seg = x86_seg_ss;
            }
            switch ( modrm_mod )
            {
            case 0:
                if ( (modrm_rm & 7) != 5 )
                    break;
                ea.mem.off = insn_fetch_type(int32_t);
                if ( !mode_64bit() )
                    break;
                /* Relative to RIP of next instruction. Argh! */
                ea.mem.off += _regs.eip;
                if ( (d & SrcMask) == SrcImm )
                    ea.mem.off += (d & ByteOp) ? 1 :
                        ((op_bytes == 8) ? 4 : op_bytes);
                else if ( (d & SrcMask) == SrcImmByte )
                    ea.mem.off += 1;
                else if ( !twobyte && ((b & 0xfe) == 0xf6) &&
                          ((modrm_reg & 7) <= 1) )
                    /* Special case in Grp3: test has immediate operand. */
                    ea.mem.off += (d & ByteOp) ? 1
                        : ((op_bytes == 8) ? 4 : op_bytes);
                else if ( twobyte && ((b & 0xf7) == 0xa4) )
                    /* SHLD/SHRD with immediate byte third operand. */
                    ea.mem.off++;
                break;
            case 1:
                ea.mem.off += insn_fetch_type(int8_t);
                break;
            case 2:
                ea.mem.off += insn_fetch_type(int32_t);
                break;
            }
            ea.mem.off = truncate_ea(ea.mem.off);
        }
    }

    if ( override_seg != -1 )
        ea.mem.seg = override_seg;
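
    /*
     * Decode example: for "01 44 24 08" (add %eax,0x8(%esp)) the ModRM byte
     * 0x44 has mod=1, reg=0, rm=4, so a SIB byte follows; SIB 0x24 selects
     * base %esp with no index, and the disp8 of 8 is then added, giving an
     * effective address of ss:%esp+8.
     */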
    /* Decode and fetch the source operand: register, memory or immediate. */
    switch ( d & SrcMask )
    {
    case SrcNone: /* case SrcImplicit: */
        src.type = OP_NONE;
        break;
    case SrcReg:
        src.type = OP_REG;
        if ( d & ByteOp )
        {
            src.reg = decode_register(modrm_reg, &_regs, (rex_prefix == 0));
            src.val = *(uint8_t *)src.reg;
            src.bytes = 1;
        }
        else
        {
            src.reg = decode_register(modrm_reg, &_regs, 0);
            switch ( (src.bytes = op_bytes) )
            {
            case 2: src.val = *(uint16_t *)src.reg; break;
            case 4: src.val = *(uint32_t *)src.reg; break;
            case 8: src.val = *(uint64_t *)src.reg; break;
            }
        }
        break;
    case SrcMem16:
        ea.bytes = 2;
        goto srcmem_common;
    case SrcMem:
        ea.bytes = (d & ByteOp) ? 1 : op_bytes;
    srcmem_common:
        src = ea;
        if ( src.type == OP_REG )
        {
            switch ( src.bytes )
            {
            case 1: src.val = *(uint8_t  *)src.reg; break;
            case 2: src.val = *(uint16_t *)src.reg; break;
            case 4: src.val = *(uint32_t *)src.reg; break;
            case 8: src.val = *(uint64_t *)src.reg; break;
            }
        }
        else if ( (rc = read_ulong(src.mem.seg, src.mem.off,
                                   &src.val, src.bytes, ctxt, ops)) )
            goto done;
        break;
    case SrcImm:
        src.type  = OP_IMM;
        src.bytes = (d & ByteOp) ? 1 : op_bytes;
        if ( src.bytes == 8 ) src.bytes = 4;
        /* NB. Immediates are sign-extended as necessary. */
        switch ( src.bytes )
        {
        case 1: src.val = insn_fetch_type(int8_t);  break;
        case 2: src.val = insn_fetch_type(int16_t); break;
        case 4: src.val = insn_fetch_type(int32_t); break;
        }
        break;
    case SrcImmByte:
        src.type  = OP_IMM;
        src.bytes = 1;
        src.val   = insn_fetch_type(int8_t);
        break;
    }
    /* Decode and fetch the destination operand: register or memory. */
    switch ( d & DstMask )
    {
    case DstNone: /* case DstImplicit: */
        /*
         * The only implicit-operands instructions allowed a LOCK prefix are
         * CMPXCHG{8,16}B, MOV CRn, MOV DRn.
         */
        generate_exception_if(
            lock_prefix &&
            ((b < 0x20) || (b > 0x23)) && /* MOV CRn/DRn */
            (b != 0xc7),                  /* CMPXCHG{8,16}B */
            EXC_GP, 0);
        dst.type = OP_NONE;
        break;

    case DstReg:
        generate_exception_if(lock_prefix, EXC_GP, 0);
        dst.type = OP_REG;
        if ( d & ByteOp )
        {
            dst.reg = decode_register(modrm_reg, &_regs, (rex_prefix == 0));
            dst.val = *(uint8_t *)dst.reg;
            dst.bytes = 1;
        }
        else
        {
            dst.reg = decode_register(modrm_reg, &_regs, 0);
            switch ( (dst.bytes = op_bytes) )
            {
            case 2: dst.val = *(uint16_t *)dst.reg; break;
            case 4: dst.val = *(uint32_t *)dst.reg; break;
            case 8: dst.val = *(uint64_t *)dst.reg; break;
            }
        }
        break;
    case DstBitBase:
        if ( ((d & SrcMask) == SrcImmByte) || (ea.type == OP_REG) )
        {
            src.val &= (op_bytes << 3) - 1;
        }
        else
        {
            /*
             * EA       += BitOffset DIV op_bytes*8
             * BitOffset = BitOffset MOD op_bytes*8
             * DIV truncates towards negative infinity.
             * MOD always produces a positive result.
             */
            if ( op_bytes == 2 )
                src.val = (int16_t)src.val;
            else if ( op_bytes == 4 )
                src.val = (int32_t)src.val;
            if ( (long)src.val < 0 )
            {
                unsigned long byte_offset;
                byte_offset = op_bytes + (((-src.val-1) >> 3) & ~(op_bytes-1));
                ea.mem.off -= byte_offset;
                src.val = (byte_offset << 3) + src.val;
            }
            else
            {
                ea.mem.off += (src.val >> 3) & ~(op_bytes - 1);
                src.val &= (op_bytes << 3) - 1;
            }
        }
        /* Becomes a normal DstMem operation from here on. */
        d = (d & ~DstMask) | DstMem;
    case DstMem:
        ea.bytes = (d & ByteOp) ? 1 : op_bytes;
        dst = ea;
        if ( dst.type == OP_REG )
        {
            generate_exception_if(lock_prefix, EXC_GP, 0);
            switch ( dst.bytes )
            {
            case 1: dst.val = *(uint8_t  *)dst.reg; break;
            case 2: dst.val = *(uint16_t *)dst.reg; break;
            case 4: dst.val = *(uint32_t *)dst.reg; break;
            case 8: dst.val = *(uint64_t *)dst.reg; break;
            }
        }
        else if ( !(d & Mov) ) /* optimisation - avoid slow emulated read */
        {
            if ( (rc = read_ulong(dst.mem.seg, dst.mem.off,
                                  &dst.val, dst.bytes, ctxt, ops)) )
                goto done;
            dst.orig_val = dst.val;
        }
        break;
    }

    if ( twobyte )
        goto twobyte_insn;
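
    /*
     * DstBitBase worked example: "bt %ax,(%di)" with op_bytes == 2 and
     * %ax == -1 selects bit -1 of the bit string; the code above computes
     * byte_offset == 2 and src.val == 15, i.e. the top bit of the word
     * immediately below the original effective address.
     */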
    switch ( b )
    {
    case 0x00 ... 0x05: add: /* add */
        emulate_2op_SrcV("add", src, dst, _regs.eflags);
        break;

    case 0x08 ... 0x0d: or:  /* or */
        emulate_2op_SrcV("or", src, dst, _regs.eflags);
        break;

    case 0x10 ... 0x15: adc: /* adc */
        emulate_2op_SrcV("adc", src, dst, _regs.eflags);
        break;

    case 0x18 ... 0x1d: sbb: /* sbb */
        emulate_2op_SrcV("sbb", src, dst, _regs.eflags);
        break;

    case 0x20 ... 0x25: and: /* and */
        emulate_2op_SrcV("and", src, dst, _regs.eflags);
        break;

    case 0x28 ... 0x2d: sub: /* sub */
        emulate_2op_SrcV("sub", src, dst, _regs.eflags);
        break;

    case 0x30 ... 0x35: xor: /* xor */
        emulate_2op_SrcV("xor", src, dst, _regs.eflags);
        break;

    case 0x38 ... 0x3d: cmp: /* cmp */
        emulate_2op_SrcV("cmp", src, dst, _regs.eflags);
        dst.type = OP_NONE;
        break;
    case 0x06: /* push %%es */ {
        struct segment_register reg;
        src.val = x86_seg_es;
    push_seg:
        fail_if(ops->read_segment == NULL);
        if ( (rc = ops->read_segment(src.val, &reg, ctxt)) != 0 )
            return rc;
        /* 64-bit mode: PUSH defaults to a 64-bit operand. */
        if ( mode_64bit() && (op_bytes == 4) )
            op_bytes = 8;
        if ( (rc = ops->write(x86_seg_ss, sp_pre_dec(op_bytes),
                              &reg.sel, op_bytes, ctxt)) != 0 )
            goto done;
        break;
    }

    case 0x07: /* pop %%es */
        src.val = x86_seg_es;
    pop_seg:
        fail_if(ops->write_segment == NULL);
        /* 64-bit mode: POP defaults to a 64-bit operand. */
        if ( mode_64bit() && (op_bytes == 4) )
            op_bytes = 8;
        if ( (rc = read_ulong(x86_seg_ss, sp_post_inc(op_bytes),
                              &dst.val, op_bytes, ctxt, ops)) != 0 )
            goto done;
        if ( (rc = load_seg(src.val, (uint16_t)dst.val, ctxt, ops)) != 0 )
            return rc;
        break;

    case 0x0e: /* push %%cs */
        src.val = x86_seg_cs;
        goto push_seg;

    case 0x16: /* push %%ss */
        src.val = x86_seg_ss;
        goto push_seg;

    case 0x17: /* pop %%ss */
        src.val = x86_seg_ss;
        ctxt->retire.flags.mov_ss = 1;
        goto pop_seg;

    case 0x1e: /* push %%ds */
        src.val = x86_seg_ds;
        goto push_seg;

    case 0x1f: /* pop %%ds */
        src.val = x86_seg_ds;
        goto pop_seg;
    case 0x27: /* daa */ {
        uint8_t al = _regs.eax;
        unsigned long eflags = _regs.eflags;
        generate_exception_if(mode_64bit(), EXC_UD, -1);
        _regs.eflags &= ~(EFLG_CF|EFLG_AF);
        if ( ((al & 0x0f) > 9) || (eflags & EFLG_AF) )
        {
            *(uint8_t *)&_regs.eax += 6;
            _regs.eflags |= EFLG_AF;
        }
        if ( (al > 0x99) || (eflags & EFLG_CF) )
        {
            *(uint8_t *)&_regs.eax += 0x60;
            _regs.eflags |= EFLG_CF;
        }
        _regs.eflags &= ~(EFLG_SF|EFLG_ZF|EFLG_PF);
        _regs.eflags |= ((uint8_t)_regs.eax == 0) ? EFLG_ZF : 0;
        _regs.eflags |= (( int8_t)_regs.eax <  0) ? EFLG_SF : 0;
        _regs.eflags |= even_parity(_regs.eax) ? EFLG_PF : 0;
        break;
    }

    case 0x2f: /* das */ {
        uint8_t al = _regs.eax;
        unsigned long eflags = _regs.eflags;
        generate_exception_if(mode_64bit(), EXC_UD, -1);
        _regs.eflags &= ~(EFLG_CF|EFLG_AF);
        if ( ((al & 0x0f) > 9) || (eflags & EFLG_AF) )
        {
            _regs.eflags |= EFLG_AF;
            if ( (al < 6) || (eflags & EFLG_CF) )
                _regs.eflags |= EFLG_CF;
            *(uint8_t *)&_regs.eax -= 6;
        }
        if ( (al > 0x99) || (eflags & EFLG_CF) )
        {
            *(uint8_t *)&_regs.eax -= 0x60;
            _regs.eflags |= EFLG_CF;
        }
        _regs.eflags &= ~(EFLG_SF|EFLG_ZF|EFLG_PF);
        _regs.eflags |= ((uint8_t)_regs.eax == 0) ? EFLG_ZF : 0;
        _regs.eflags |= (( int8_t)_regs.eax <  0) ? EFLG_SF : 0;
        _regs.eflags |= even_parity(_regs.eax) ? EFLG_PF : 0;
        break;
    }

    case 0x37: /* aaa */
    case 0x3f: /* aas */
        generate_exception_if(mode_64bit(), EXC_UD, -1);
        _regs.eflags &= ~EFLG_CF;
        if ( (((uint8_t)_regs.eax & 0x0f) > 9) || (_regs.eflags & EFLG_AF) )
        {
            ((uint8_t *)&_regs.eax)[0] += (b == 0x37) ? 6 : -6;
            ((uint8_t *)&_regs.eax)[1] += (b == 0x37) ? 1 : -1;
            _regs.eflags |= EFLG_CF | EFLG_AF;
        }
        ((uint8_t *)&_regs.eax)[0] &= 0x0f;
        break;
    case 0x40 ... 0x4f: /* inc/dec reg */
        dst.type  = OP_REG;
        dst.reg   = decode_register(b & 7, &_regs, 0);
        dst.bytes = op_bytes;
        dst.val   = *dst.reg;
        if ( b & 8 )
            emulate_1op("dec", dst, _regs.eflags);
        else
            emulate_1op("inc", dst, _regs.eflags);
        break;

    case 0x50 ... 0x57: /* push reg */
        src.val = *(unsigned long *)decode_register(
            (b & 7) | ((rex_prefix & 1) << 3), &_regs, 0);
        goto push;

    case 0x58 ... 0x5f: /* pop reg */
        dst.type  = OP_REG;
        dst.reg   = decode_register(
            (b & 7) | ((rex_prefix & 1) << 3), &_regs, 0);
        dst.bytes = op_bytes;
        if ( mode_64bit() && (dst.bytes == 4) )
            dst.bytes = 8;
        if ( (rc = read_ulong(x86_seg_ss, sp_post_inc(dst.bytes),
                              &dst.val, dst.bytes, ctxt, ops)) != 0 )
            goto done;
        break;
    case 0x60: /* pusha */ {
        int i;
        unsigned long regs[] = {
            _regs.eax, _regs.ecx, _regs.edx, _regs.ebx,
            _regs.esp, _regs.ebp, _regs.esi, _regs.edi };
        generate_exception_if(mode_64bit(), EXC_UD, -1);
        for ( i = 0; i < 8; i++ )
            if ( (rc = ops->write(x86_seg_ss, sp_pre_dec(op_bytes),
                                  &regs[i], op_bytes, ctxt)) != 0 )
                goto done;
        break;
    }

    case 0x61: /* popa */ {
        int i;
        unsigned long dummy_esp, *regs[] = {
            (unsigned long *)&_regs.edi, (unsigned long *)&_regs.esi,
            (unsigned long *)&_regs.ebp, (unsigned long *)&dummy_esp,
            (unsigned long *)&_regs.ebx, (unsigned long *)&_regs.edx,
            (unsigned long *)&_regs.ecx, (unsigned long *)&_regs.eax };
        generate_exception_if(mode_64bit(), EXC_UD, -1);
        for ( i = 0; i < 8; i++ )
        {
            if ( (rc = read_ulong(x86_seg_ss, sp_post_inc(op_bytes),
                                  &dst.val, op_bytes, ctxt, ops)) != 0 )
                goto done;
            switch ( op_bytes )
            {
            case 1: *(uint8_t  *)regs[i] = (uint8_t)dst.val; break;
            case 2: *(uint16_t *)regs[i] = (uint16_t)dst.val; break;
            case 4: *regs[i] = (uint32_t)dst.val; break; /* 64b: zero-ext */
            case 8: *regs[i] = dst.val; break;
            }
        }
        break;
    }

    case 0x62: /* bound */ {
        unsigned long src_val2;
        int lb, ub, idx;
        generate_exception_if(mode_64bit() || (src.type != OP_MEM),
                              EXC_UD, -1);
        if ( (rc = read_ulong(src.mem.seg, src.mem.off + op_bytes,
                              &src_val2, op_bytes, ctxt, ops)) )
            goto done;
        ub  = (op_bytes == 2) ? (int16_t)src_val2 : (int32_t)src_val2;
        lb  = (op_bytes == 2) ? (int16_t)src.val  : (int32_t)src.val;
        idx = (op_bytes == 2) ? (int16_t)dst.val  : (int32_t)dst.val;
        generate_exception_if((idx < lb) || (idx > ub), EXC_BR, -1);
        dst.type = OP_NONE;
        break;
    }
    case 0x63: /* movsxd (x86/64) / arpl (x86/32) */
        if ( mode_64bit() )
        {
            /* movsxd */
            if ( src.type == OP_REG )
                src.val = *(int32_t *)src.reg;
            else if ( (rc = read_ulong(src.mem.seg, src.mem.off,
                                       &src.val, 4, ctxt, ops)) )
                goto done;
            dst.val = (int32_t)src.val;
        }
        else
        {
            /* arpl */
            uint16_t src_val = dst.val;
            dst = src;
            _regs.eflags &= ~EFLG_ZF;
            _regs.eflags |= ((src_val & 3) > (dst.val & 3)) ? EFLG_ZF : 0;
            if ( _regs.eflags & EFLG_ZF )
                dst.val = (dst.val & ~3) | (src_val & 3);
            else
                dst.type = OP_NONE;
            generate_exception_if(!in_protmode(ctxt, ops), EXC_UD, -1);
        }
        break;
    case 0x68: /* push imm{16,32,64} */
        src.val = ((op_bytes == 2)
                   ? (int32_t)insn_fetch_type(int16_t)
                   : insn_fetch_type(int32_t));
        goto push;

    case 0x69: /* imul imm16/32 */
    case 0x6b: /* imul imm8 */ {
        unsigned long src1; /* ModR/M source operand */
        if ( ea.type == OP_REG )
            src1 = *ea.reg;
        else if ( (rc = read_ulong(ea.mem.seg, ea.mem.off,
                                   &src1, op_bytes, ctxt, ops)) )
            goto done;
        _regs.eflags &= ~(EFLG_OF|EFLG_CF);
        switch ( dst.bytes )
        {
        case 2:
            dst.val = ((uint32_t)(int16_t)src.val *
                       (uint32_t)(int16_t)src1);
            if ( (int16_t)dst.val != (uint32_t)dst.val )
                _regs.eflags |= EFLG_OF|EFLG_CF;
            break;
#ifdef __x86_64__
        case 4:
            dst.val = ((uint64_t)(int32_t)src.val *
                       (uint64_t)(int32_t)src1);
            if ( (int32_t)dst.val != dst.val )
                _regs.eflags |= EFLG_OF|EFLG_CF;
            break;
#endif
        default: {
            unsigned long m[2] = { src.val, src1 };
            if ( imul_dbl(m) )
                _regs.eflags |= EFLG_OF|EFLG_CF;
            dst.val = m[0];
            break;
        }
        }
        break;
    }

    case 0x6a: /* push imm8 */
        src.val = insn_fetch_type(int8_t);
    push:
        d |= Mov; /* force writeback */
        dst.type  = OP_MEM;
        dst.bytes = op_bytes;
        if ( mode_64bit() && (dst.bytes == 4) )
            dst.bytes = 8;
        dst.val = src.val;
        dst.mem.seg = x86_seg_ss;
        dst.mem.off = sp_pre_dec(dst.bytes);
        break;
    case 0x6c ... 0x6d: /* ins %dx,%es:%edi */ {
        unsigned long nr_reps = get_rep_prefix();
        unsigned int port = (uint16_t)_regs.edx;
        dst.bytes = !(b & 1) ? 1 : (op_bytes == 8) ? 4 : op_bytes;
        dst.mem.seg = x86_seg_es;
        dst.mem.off = truncate_ea_and_reps(_regs.edi, nr_reps, dst.bytes);
        if ( (rc = ioport_access_check(port, dst.bytes, ctxt, ops)) != 0 )
            goto done;
        if ( (nr_reps > 1) && (ops->rep_ins != NULL) &&
             ((rc = ops->rep_ins(port, dst.mem.seg, dst.mem.off, dst.bytes,
                                 &nr_reps, ctxt)) != X86EMUL_UNHANDLEABLE) )
        {
            if ( rc != 0 )
                goto done;
        }
        else
        {
            fail_if(ops->read_io == NULL);
            if ( (rc = ops->read_io(port, dst.bytes, &dst.val, ctxt)) != 0 )
                goto done;
            dst.type = OP_MEM;
            nr_reps = 1;
        }
        register_address_increment(
            _regs.edi,
            nr_reps * ((_regs.eflags & EFLG_DF) ? -dst.bytes : dst.bytes));
        put_rep_prefix(nr_reps);
        break;
    }

    case 0x6e ... 0x6f: /* outs %esi,%dx */ {
        unsigned long nr_reps = get_rep_prefix();
        unsigned int port = (uint16_t)_regs.edx;
        dst.bytes = !(b & 1) ? 1 : (op_bytes == 8) ? 4 : op_bytes;
        ea.mem.off = truncate_ea_and_reps(_regs.esi, nr_reps, dst.bytes);
        if ( (rc = ioport_access_check(port, dst.bytes, ctxt, ops)) != 0 )
            goto done;
        if ( (nr_reps > 1) && (ops->rep_outs != NULL) &&
             ((rc = ops->rep_outs(ea.mem.seg, ea.mem.off, port, dst.bytes,
                                  &nr_reps, ctxt)) != X86EMUL_UNHANDLEABLE) )
        {
            if ( rc != 0 )
                goto done;
        }
        else
        {
            if ( (rc = read_ulong(ea.mem.seg, truncate_ea(_regs.esi),
                                  &dst.val, dst.bytes, ctxt, ops)) != 0 )
                goto done;
            fail_if(ops->write_io == NULL);
            if ( (rc = ops->write_io(port, dst.bytes, dst.val, ctxt)) != 0 )
                goto done;
            nr_reps = 1;
        }
        register_address_increment(
            _regs.esi,
            nr_reps * ((_regs.eflags & EFLG_DF) ? -dst.bytes : dst.bytes));
        put_rep_prefix(nr_reps);
        break;
    }
1950 case 0x70 ... 0x7f: /* jcc (short) */ {
1951 int rel = insn_fetch_type(int8_t);
1952 if ( test_cc(b, _regs.eflags) )
1953 jmp_rel(rel);
1954 break;
1957 case 0x82: /* Grp1 (x86/32 only) */
1958 generate_exception_if(mode_64bit(), EXC_UD, -1);
1959 case 0x80: case 0x81: case 0x83: /* Grp1 */
1960 switch ( modrm_reg & 7 )
1962 case 0: goto add;
1963 case 1: goto or;
1964 case 2: goto adc;
1965 case 3: goto sbb;
1966 case 4: goto and;
1967 case 5: goto sub;
1968 case 6: goto xor;
1969 case 7: goto cmp;
1971 break;
1973 case 0xa8 ... 0xa9: /* test imm,%%eax */
1974 case 0x84 ... 0x85: test: /* test */
1975 emulate_2op_SrcV("test", src, dst, _regs.eflags);
1976 dst.type = OP_NONE;
1977 break;
1979 case 0x86 ... 0x87: xchg: /* xchg */
1980 /* Write back the register source. */
1981 switch ( dst.bytes )
1983 case 1: *(uint8_t *)src.reg = (uint8_t)dst.val; break;
1984 case 2: *(uint16_t *)src.reg = (uint16_t)dst.val; break;
1985 case 4: *src.reg = (uint32_t)dst.val; break; /* 64b reg: zero-extend */
1986 case 8: *src.reg = dst.val; break;
1988 /* Write back the memory destination with implicit LOCK prefix. */
1989 dst.val = src.val;
1990 lock_prefix = 1;
1991 break;
1993 case 0xc6 ... 0xc7: /* mov (sole member of Grp11) */
1994 generate_exception_if((modrm_reg & 7) != 0, EXC_UD, -1);
1995 case 0x88 ... 0x8b: /* mov */
1996 dst.val = src.val;
1997 break;
1999 case 0x8c: /* mov Sreg,r/m */ {
2000 struct segment_register reg;
2001 enum x86_segment seg = decode_segment(modrm_reg);
2002 generate_exception_if(seg == decode_segment_failed, EXC_UD, -1);
2003 fail_if(ops->read_segment == NULL);
2004 if ( (rc = ops->read_segment(seg, &reg, ctxt)) != 0 )
2005 goto done;
2006 dst.val = reg.sel;
2007 if ( dst.type == OP_MEM )
2008 dst.bytes = 2;
2009 break;
2012 case 0x8e: /* mov r/m,Sreg */ {
2013 enum x86_segment seg = decode_segment(modrm_reg);
2014 generate_exception_if(seg == decode_segment_failed, EXC_UD, -1);
2015 if ( (rc = load_seg(seg, (uint16_t)src.val, ctxt, ops)) != 0 )
2016 goto done;
2017 if ( seg == x86_seg_ss )
2018 ctxt->retire.flags.mov_ss = 1;
2019 dst.type = OP_NONE;
2020 break;
2023 case 0x8d: /* lea */
2024 dst.val = ea.mem.off;
2025 break;
2027 case 0x8f: /* pop (sole member of Grp1a) */
2028 generate_exception_if((modrm_reg & 7) != 0, EXC_UD, -1);
2029 /* 64-bit mode: POP defaults to a 64-bit operand. */
2030 if ( mode_64bit() && (dst.bytes == 4) )
2031 dst.bytes = 8;
2032 if ( (rc = read_ulong(x86_seg_ss, sp_post_inc(dst.bytes),
2033 &dst.val, dst.bytes, ctxt, ops)) != 0 )
2034 goto done;
2035 break;
2037 case 0x90: /* nop / xchg %%r8,%%rax */
2038 if ( !(rex_prefix & 1) )
2039 break; /* nop */
2041 case 0x91 ... 0x97: /* xchg reg,%%rax */
2042 src.type = dst.type = OP_REG;
2043 src.bytes = dst.bytes = op_bytes;
2044 src.reg = (unsigned long *)&_regs.eax;
2045 src.val = *src.reg;
2046 dst.reg = decode_register(
2047 (b & 7) | ((rex_prefix & 1) << 3), &_regs, 0);
2048 dst.val = *dst.reg;
2049 goto xchg;
2051 case 0x98: /* cbw/cwde/cdqe */
2052 switch ( op_bytes )
2054 case 2: *(int16_t *)&_regs.eax = (int8_t)_regs.eax; break; /* cbw */
2055 case 4: _regs.eax = (uint32_t)(int16_t)_regs.eax; break; /* cwde */
2056 case 8: _regs.eax = (int32_t)_regs.eax; break; /* cdqe */
2058 break;
2060 case 0x99: /* cwd/cdq/cqo */
2061 switch ( op_bytes )
2063 case 2:
2064 *(int16_t *)&_regs.edx = ((int16_t)_regs.eax < 0) ? -1 : 0;
2065 break;
2066 case 4:
2067 _regs.edx = (uint32_t)(((int32_t)_regs.eax < 0) ? -1 : 0);
2068 break;
2069 case 8:
2070 _regs.edx = ((int64_t)_regs.eax < 0) ? -1 : 0;
2071 break;
2073 break;
2075 case 0x9a: /* call (far, absolute) */ {
2076 struct segment_register reg;
2077 uint16_t sel;
2078 uint32_t eip;
2080 fail_if(ops->read_segment == NULL);
2081 generate_exception_if(mode_64bit(), EXC_UD, -1);
2083 eip = insn_fetch_bytes(op_bytes);
2084 sel = insn_fetch_type(uint16_t);
2086 if ( (rc = ops->read_segment(x86_seg_cs, &reg, ctxt)) ||
2087 (rc = ops->write(x86_seg_ss, sp_pre_dec(op_bytes),
2088 &reg.sel, op_bytes, ctxt)) ||
2089 (rc = ops->write(x86_seg_ss, sp_pre_dec(op_bytes),
2090 &_regs.eip, op_bytes, ctxt)) )
2091 goto done;
2093 if ( (rc = load_seg(x86_seg_cs, sel, ctxt, ops)) != 0 )
2094 goto done;
2095 _regs.eip = eip;
2096 break;
2099 case 0x9b: /* wait/fwait */
2100 emulate_fpu_insn("fwait");
2101 break;
2103 case 0x9c: /* pushf */
2104 src.val = _regs.eflags;
2105 goto push;
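/*
 * POPF cannot change every flag: IOPL is writable only at CPL0, IF only
 * with I/O privilege, and VM/VIF/VIP never come from the stack. The mask
 * below collects the bits that must be preserved from the old EFLAGS.
 */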
2107 case 0x9d: /* popf */ {
2108 uint32_t mask = EFLG_VIP | EFLG_VIF | EFLG_VM;
2109 if ( !mode_ring0() )
2110 mask |= EFLG_IOPL;
2111 if ( !mode_iopl() )
2112 mask |= EFLG_IF;
2113 /* 64-bit mode: POP defaults to a 64-bit operand. */
2114 if ( mode_64bit() && (op_bytes == 4) )
2115 op_bytes = 8;
2116 if ( (rc = read_ulong(x86_seg_ss, sp_post_inc(op_bytes),
2117 &dst.val, op_bytes, ctxt, ops)) != 0 )
2118 goto done;
2119 if ( op_bytes == 2 )
2120 dst.val = (uint16_t)dst.val | (_regs.eflags & 0xffff0000u);
2121 dst.val &= 0x257fd5;
2122 _regs.eflags &= mask;
2123 _regs.eflags |= (uint32_t)(dst.val & ~mask) | 0x02;
2124 break;
2127 case 0x9e: /* sahf */
2128 *(uint8_t *)&_regs.eflags = (((uint8_t *)&_regs.eax)[1] & 0xd7) | 0x02;
2129 break;
2131 case 0x9f: /* lahf */
2132 ((uint8_t *)&_regs.eax)[1] = (_regs.eflags & 0xd7) | 0x02;
2133 break;
2135 case 0xa0 ... 0xa1: /* mov mem.offs,{%al,%ax,%eax,%rax} */
2136 /* Source EA is not encoded via ModRM. */
2137 dst.type = OP_REG;
2138 dst.reg = (unsigned long *)&_regs.eax;
2139 dst.bytes = (d & ByteOp) ? 1 : op_bytes;
2140 if ( (rc = read_ulong(ea.mem.seg, insn_fetch_bytes(ad_bytes),
2141 &dst.val, dst.bytes, ctxt, ops)) != 0 )
2142 goto done;
2143 break;
2145 case 0xa2 ... 0xa3: /* mov {%al,%ax,%eax,%rax},mem.offs */
2146 /* Destination EA is not encoded via ModRM. */
2147 dst.type = OP_MEM;
2148 dst.mem.seg = ea.mem.seg;
2149 dst.mem.off = insn_fetch_bytes(ad_bytes);
2150 dst.bytes = (d & ByteOp) ? 1 : op_bytes;
2151 dst.val = (unsigned long)_regs.eax;
2152 break;
2154 case 0xa4 ... 0xa5: /* movs */ {
2155 unsigned long nr_reps = get_rep_prefix();
2156 dst.bytes = (d & ByteOp) ? 1 : op_bytes;
2157 dst.mem.seg = x86_seg_es;
2158 dst.mem.off = truncate_ea_and_reps(_regs.edi, nr_reps, dst.bytes);
2159 if ( (nr_reps > 1) && (ops->rep_movs != NULL) &&
2160 ((rc = ops->rep_movs(ea.mem.seg, truncate_ea(_regs.esi),
2161 dst.mem.seg, dst.mem.off, dst.bytes,
2162 &nr_reps, ctxt)) != X86EMUL_UNHANDLEABLE) )
2164 if ( rc != 0 )
2165 goto done;
2167 else
2169 if ( (rc = read_ulong(ea.mem.seg, truncate_ea(_regs.esi),
2170 &dst.val, dst.bytes, ctxt, ops)) != 0 )
2171 goto done;
2172 dst.type = OP_MEM;
2173 nr_reps = 1;
2175 register_address_increment(
2176 _regs.esi,
2177 nr_reps * ((_regs.eflags & EFLG_DF) ? -dst.bytes : dst.bytes));
2178 register_address_increment(
2179 _regs.edi,
2180 nr_reps * ((_regs.eflags & EFLG_DF) ? -dst.bytes : dst.bytes));
2181 put_rep_prefix(nr_reps);
2182 break;
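/*
 * CMPS (and SCAS below) emulate one iteration per invocation: while eCX
 * remains non-zero, put_rep_prefix() arranges for the instruction to be
 * re-executed, and the REPE/REPNE test then overrides this by restoring
 * the post-instruction eIP once the ZF condition says the loop is done.
 */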
2185 case 0xa6 ... 0xa7: /* cmps */ {
2186 unsigned long next_eip = _regs.eip;
2187 get_rep_prefix();
2188 src.bytes = dst.bytes = (d & ByteOp) ? 1 : op_bytes;
2189 if ( (rc = read_ulong(ea.mem.seg, truncate_ea(_regs.esi),
2190 &dst.val, dst.bytes, ctxt, ops)) ||
2191 (rc = read_ulong(x86_seg_es, truncate_ea(_regs.edi),
2192 &src.val, src.bytes, ctxt, ops)) )
2193 goto done;
2194 register_address_increment(
2195 _regs.esi, (_regs.eflags & EFLG_DF) ? -dst.bytes : dst.bytes);
2196 register_address_increment(
2197 _regs.edi, (_regs.eflags & EFLG_DF) ? -src.bytes : src.bytes);
2198 put_rep_prefix(1);
2199 /* cmp: dst - src ==> src=*%%edi,dst=*%%esi ==> *%%esi - *%%edi */
2200 emulate_2op_SrcV("cmp", src, dst, _regs.eflags);
2201 if ( ((rep_prefix == REPE_PREFIX) && !(_regs.eflags & EFLG_ZF)) ||
2202 ((rep_prefix == REPNE_PREFIX) && (_regs.eflags & EFLG_ZF)) )
2203 _regs.eip = next_eip;
2204 break;
2207 case 0xaa ... 0xab: /* stos */ {
2208 /* unsigned long max_reps = */get_rep_prefix();
2209 dst.type = OP_MEM;
2210 dst.bytes = (d & ByteOp) ? 1 : op_bytes;
2211 dst.mem.seg = x86_seg_es;
2212 dst.mem.off = truncate_ea(_regs.edi);
2213 dst.val = _regs.eax;
2214 register_address_increment(
2215 _regs.edi, (_regs.eflags & EFLG_DF) ? -dst.bytes : dst.bytes);
2216 put_rep_prefix(1);
2217 break;
2220 case 0xac ... 0xad: /* lods */ {
2221 /* unsigned long max_reps = */get_rep_prefix();
2222 dst.type = OP_REG;
2223 dst.bytes = (d & ByteOp) ? 1 : op_bytes;
2224 dst.reg = (unsigned long *)&_regs.eax;
2225 if ( (rc = read_ulong(ea.mem.seg, truncate_ea(_regs.esi),
2226 &dst.val, dst.bytes, ctxt, ops)) != 0 )
2227 goto done;
2228 register_address_increment(
2229 _regs.esi, (_regs.eflags & EFLG_DF) ? -dst.bytes : dst.bytes);
2230 put_rep_prefix(1);
2231 break;
2234 case 0xae ... 0xaf: /* scas */ {
2235 unsigned long next_eip = _regs.eip;
2236 get_rep_prefix();
2237 src.bytes = dst.bytes = (d & ByteOp) ? 1 : op_bytes;
2238 dst.val = _regs.eax;
2239 if ( (rc = read_ulong(x86_seg_es, truncate_ea(_regs.edi),
2240 &src.val, src.bytes, ctxt, ops)) != 0 )
2241 goto done;
2242 register_address_increment(
2243 _regs.edi, (_regs.eflags & EFLG_DF) ? -src.bytes : src.bytes);
2244 put_rep_prefix(1);
2245 /* cmp: dst - src ==> src=*%%edi,dst=%%eax ==> %%eax - *%%edi */
2246 emulate_2op_SrcV("cmp", src, dst, _regs.eflags);
2247 if ( ((rep_prefix == REPE_PREFIX) && !(_regs.eflags & EFLG_ZF)) ||
2248 ((rep_prefix == REPNE_PREFIX) && (_regs.eflags & EFLG_ZF)) )
2249 _regs.eip = next_eip;
2250 break;
2253 case 0xb0 ... 0xb7: /* mov imm8,r8 */
2254 dst.reg = decode_register(
2255 (b & 7) | ((rex_prefix & 1) << 3), &_regs, (rex_prefix == 0));
2256 dst.val = src.val;
2257 break;
2259 case 0xb8 ... 0xbf: /* mov imm{16,32,64},r{16,32,64} */
2260 if ( dst.bytes == 8 ) /* Fetch more bytes to obtain imm64 */
2261 src.val = ((uint32_t)src.val |
2262 ((uint64_t)insn_fetch_type(uint32_t) << 32));
2263 dst.reg = decode_register(
2264 (b & 7) | ((rex_prefix & 1) << 3), &_regs, 0);
2265 dst.val = src.val;
2266 break;
2268 case 0xc0 ... 0xc1: grp2: /* Grp2 */
2269 switch ( modrm_reg & 7 )
2271 case 0: /* rol */
2272 emulate_2op_SrcB("rol", src, dst, _regs.eflags);
2273 break;
2274 case 1: /* ror */
2275 emulate_2op_SrcB("ror", src, dst, _regs.eflags);
2276 break;
2277 case 2: /* rcl */
2278 emulate_2op_SrcB("rcl", src, dst, _regs.eflags);
2279 break;
2280 case 3: /* rcr */
2281 emulate_2op_SrcB("rcr", src, dst, _regs.eflags);
2282 break;
2283 case 4: /* sal/shl */
2284 case 6: /* sal/shl */
2285 emulate_2op_SrcB("sal", src, dst, _regs.eflags);
2286 break;
2287 case 5: /* shr */
2288 emulate_2op_SrcB("shr", src, dst, _regs.eflags);
2289 break;
2290 case 7: /* sar */
2291 emulate_2op_SrcB("sar", src, dst, _regs.eflags);
2292 break;
2294 break;
2296 case 0xc2: /* ret imm16 (near) */
2297 case 0xc3: /* ret (near) */ {
2298 int offset = (b == 0xc2) ? insn_fetch_type(uint16_t) : 0;
2299 op_bytes = mode_64bit() ? 8 : op_bytes;
2300 if ( (rc = read_ulong(x86_seg_ss, sp_post_inc(op_bytes + offset),
2301 &dst.val, op_bytes, ctxt, ops)) != 0 )
2302 goto done;
2303 _regs.eip = dst.val;
2304 break;
2307 case 0xc4: /* les */ {
2308 unsigned long sel;
2309 dst.val = x86_seg_es;
2310 les: /* dst.val identifies the segment */
2311 generate_exception_if(src.type != OP_MEM, EXC_UD, -1);
2312 if ( (rc = read_ulong(src.mem.seg, src.mem.off + src.bytes,
2313 &sel, 2, ctxt, ops)) != 0 )
2314 goto done;
2315 if ( (rc = load_seg(dst.val, (uint16_t)sel, ctxt, ops)) != 0 )
2316 goto done;
2317 dst.val = src.val;
2318 break;
2321 case 0xc5: /* lds */
2322 dst.val = x86_seg_ds;
2323 goto les;
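/*
 * ENTER size,depth: push rBP, remember the new frame pointer, copy up to
 * depth-1 outer frame pointers from the old frame, push the new frame
 * pointer itself, and finally allocate size bytes of locals. The final
 * register writeback (dst) moves the new frame pointer into rBP.
 */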
2325 case 0xc8: /* enter imm16,imm8 */ {
2326 uint16_t size = insn_fetch_type(uint16_t);
2327 uint8_t depth = insn_fetch_type(uint8_t) & 31;
2328 int i;
2330 dst.type = OP_REG;
2331 dst.bytes = (mode_64bit() && (op_bytes == 4)) ? 8 : op_bytes;
2332 dst.reg = (unsigned long *)&_regs.ebp;
2333 if ( (rc = ops->write(x86_seg_ss, sp_pre_dec(dst.bytes),
2334 &_regs.ebp, dst.bytes, ctxt)) )
2335 goto done;
2336 dst.val = _regs.esp;
2338 if ( depth > 0 )
2340 for ( i = 1; i < depth; i++ )
2342 unsigned long ebp, temp_data;
2343 ebp = truncate_word(_regs.ebp - i*dst.bytes, ctxt->sp_size/8);
2344 if ( (rc = read_ulong(x86_seg_ss, ebp,
2345 &temp_data, dst.bytes, ctxt, ops)) ||
2346 (rc = ops->write(x86_seg_ss, sp_pre_dec(dst.bytes),
2347 &temp_data, dst.bytes, ctxt)) )
2348 goto done;
2350 if ( (rc = ops->write(x86_seg_ss, sp_pre_dec(dst.bytes),
2351 &dst.val, dst.bytes, ctxt)) )
2352 goto done;
2355 sp_pre_dec(size);
2356 break;
2359 case 0xc9: /* leave */
2360 /* First writeback, to %%esp. */
2361 dst.type = OP_REG;
2362 dst.bytes = (mode_64bit() && (op_bytes == 4)) ? 8 : op_bytes;
2363 dst.reg = (unsigned long *)&_regs.esp;
2364 dst.val = _regs.ebp;
2366 /* Flush first writeback, since there is a second. */
2367 switch ( dst.bytes )
2369 case 1: *(uint8_t *)dst.reg = (uint8_t)dst.val; break;
2370 case 2: *(uint16_t *)dst.reg = (uint16_t)dst.val; break;
2371 case 4: *dst.reg = (uint32_t)dst.val; break; /* 64b: zero-ext */
2372 case 8: *dst.reg = dst.val; break;
2375 /* Second writeback, to %%ebp. */
2376 dst.reg = (unsigned long *)&_regs.ebp;
2377 if ( (rc = read_ulong(x86_seg_ss, sp_post_inc(dst.bytes),
2378 &dst.val, dst.bytes, ctxt, ops)) )
2379 goto done;
2380 break;
2382 case 0xca: /* ret imm16 (far) */
2383 case 0xcb: /* ret (far) */ {
2384 int offset = (b == 0xca) ? insn_fetch_type(uint16_t) : 0;
2385 op_bytes = mode_64bit() ? 8 : op_bytes;
2386 if ( (rc = read_ulong(x86_seg_ss, sp_post_inc(op_bytes),
2387 &dst.val, op_bytes, ctxt, ops)) ||
2388 (rc = read_ulong(x86_seg_ss, sp_post_inc(op_bytes + offset),
2389 &src.val, op_bytes, ctxt, ops)) ||
2390 (rc = load_seg(x86_seg_cs, (uint16_t)src.val, ctxt, ops)) )
2391 goto done;
2392 _regs.eip = dst.val;
2393 break;
2396 case 0xcc: /* int3 */
2397 src.val = EXC_BP;
2398 goto swint;
2400 case 0xcd: /* int imm8 */
2401 src.val = insn_fetch_type(uint8_t);
2402 swint:
2403 fail_if(ops->inject_sw_interrupt == NULL);
2404 rc = ops->inject_sw_interrupt(src.val, _regs.eip - ctxt->regs->eip,
2405 ctxt) ? : X86EMUL_EXCEPTION;
2406 goto done;
2408 case 0xce: /* into */
2409 generate_exception_if(mode_64bit(), EXC_UD, -1);
2410 if ( !(_regs.eflags & EFLG_OF) )
2411 break;
2412 src.val = EXC_OF;
2413 goto swint;
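/*
 * Only real-mode IRET is emulated here (note the in_realmode() check):
 * pop eIP, CS and EFLAGS, applying the same IOPL/IF/VM masking rules as
 * POPF above.
 */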
2415 case 0xcf: /* iret */ {
2416 unsigned long cs, eip, eflags;
2417 uint32_t mask = EFLG_VIP | EFLG_VIF | EFLG_VM;
2418 if ( !mode_ring0() )
2419 mask |= EFLG_IOPL;
2420 if ( !mode_iopl() )
2421 mask |= EFLG_IF;
2422 fail_if(!in_realmode(ctxt, ops));
2423 if ( (rc = read_ulong(x86_seg_ss, sp_post_inc(op_bytes),
2424 &eip, op_bytes, ctxt, ops)) ||
2425 (rc = read_ulong(x86_seg_ss, sp_post_inc(op_bytes),
2426 &cs, op_bytes, ctxt, ops)) ||
2427 (rc = read_ulong(x86_seg_ss, sp_post_inc(op_bytes),
2428 &eflags, op_bytes, ctxt, ops)) )
2429 goto done;
2430 if ( op_bytes == 2 )
2431 eflags = (uint16_t)eflags | (_regs.eflags & 0xffff0000u);
2432 eflags &= 0x257fd5;
2433 _regs.eflags &= mask;
2434 _regs.eflags |= (uint32_t)(eflags & ~mask) | 0x02;
2435 _regs.eip = eip;
2436 if ( (rc = load_seg(x86_seg_cs, (uint16_t)cs, ctxt, ops)) != 0 )
2437 goto done;
2438 break;
2441 case 0xd0 ... 0xd1: /* Grp2 */
2442 src.val = 1;
2443 goto grp2;
2445 case 0xd2 ... 0xd3: /* Grp2 */
2446 src.val = _regs.ecx;
2447 goto grp2;
2449 case 0xd4: /* aam */ {
2450 unsigned int base = insn_fetch_type(uint8_t);
2451 uint8_t al = _regs.eax;
2452 generate_exception_if(mode_64bit(), EXC_UD, -1);
2453 generate_exception_if(base == 0, EXC_DE, -1);
2454 *(uint16_t *)&_regs.eax = ((al / base) << 8) | (al % base);
2455 _regs.eflags &= ~(EFLG_SF|EFLG_ZF|EFLG_PF);
2456 _regs.eflags |= ((uint8_t)_regs.eax == 0) ? EFLG_ZF : 0;
2457 _regs.eflags |= (( int8_t)_regs.eax < 0) ? EFLG_SF : 0;
2458 _regs.eflags |= even_parity(_regs.eax) ? EFLG_PF : 0;
2459 break;
2462 case 0xd5: /* aad */ {
2463 unsigned int base = insn_fetch_type(uint8_t);
2464 uint16_t ax = _regs.eax;
2465 generate_exception_if(mode_64bit(), EXC_UD, -1);
2466 *(uint16_t *)&_regs.eax = (uint8_t)(ax + ((ax >> 8) * base));
2467 _regs.eflags &= ~(EFLG_SF|EFLG_ZF|EFLG_PF);
2468 _regs.eflags |= ((uint8_t)_regs.eax == 0) ? EFLG_ZF : 0;
2469 _regs.eflags |= (( int8_t)_regs.eax < 0) ? EFLG_SF : 0;
2470 _regs.eflags |= even_parity(_regs.eax) ? EFLG_PF : 0;
2471 break;
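/* SALC is undocumented: it sets AL to 0xff if CF is set, else to 0x00. */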
2474 case 0xd6: /* salc */
2475 generate_exception_if(mode_64bit(), EXC_UD, -1);
2476 *(uint8_t *)&_regs.eax = (_regs.eflags & EFLG_CF) ? 0xff : 0x00;
2477 break;
2479 case 0xd7: /* xlat */ {
2480 unsigned long al = (uint8_t)_regs.eax;
2481 if ( (rc = read_ulong(ea.mem.seg, truncate_ea(_regs.ebx + al),
2482 &al, 1, ctxt, ops)) != 0 )
2483 goto done;
2484 *(uint8_t *)&_regs.eax = al;
2485 break;
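/*
 * FPU opcodes 0xd8-0xdf: register-only forms (ModRM byte >= 0xc0) are
 * bounced through emulate_fpu_insn_stub(), which executes the original
 * instruction on the host FPU; memory forms instead read/write guest
 * memory via the ops-> hooks and use the memsrc/memdst macros, with gas
 * size suffixes (s/l/t for FP operands, none/l/ll for 16/32/64-bit
 * integer operands).
 */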
2488 case 0xd8: /* FPU 0xd8 */
2489 switch ( modrm )
2491 case 0xc0 ... 0xc7: /* fadd %stN,%stN */
2492 case 0xc8 ... 0xcf: /* fmul %stN,%stN */
2493 case 0xd0 ... 0xd7: /* fcom %stN,%stN */
2494 case 0xd8 ... 0xdf: /* fcomp %stN,%stN */
2495 case 0xe0 ... 0xe7: /* fsub %stN,%stN */
2496 case 0xe8 ... 0xef: /* fsubr %stN,%stN */
2497 case 0xf0 ... 0xf7: /* fdiv %stN,%stN */
2498 case 0xf8 ... 0xff: /* fdivr %stN,%stN */
2499 emulate_fpu_insn_stub(0xd8, modrm);
2500 break;
2501 default:
2502 fail_if(modrm >= 0xc0);
2503 ea.bytes = 4;
2504 src = ea;
2505 if ( (rc = ops->read(src.mem.seg, src.mem.off, &src.val,
2506 src.bytes, ctxt)) != 0 )
2507 goto done;
2508 switch ( modrm_reg & 7 )
2510 case 0: /* fadd */
2511 emulate_fpu_insn_memsrc("fadds", src.val);
2512 break;
2513 case 1: /* fmul */
2514 emulate_fpu_insn_memsrc("fmuls", src.val);
2515 break;
2516 case 2: /* fcom */
2517 emulate_fpu_insn_memsrc("fcoms", src.val);
2518 break;
2519 case 3: /* fcomp */
2520 emulate_fpu_insn_memsrc("fcomps", src.val);
2521 break;
2522 case 4: /* fsub */
2523 emulate_fpu_insn_memsrc("fsubs", src.val);
2524 break;
2525 case 5: /* fsubr */
2526 emulate_fpu_insn_memsrc("fsubrs", src.val);
2527 break;
2528 case 6: /* fdiv */
2529 emulate_fpu_insn_memsrc("fdivs", src.val);
2530 break;
2531 case 7: /* fdivr */
2532 emulate_fpu_insn_memsrc("fdivrs", src.val);
2533 break;
2534 default:
2535 goto cannot_emulate;
2538 break;
2540 case 0xd9: /* FPU 0xd9 */
2541 switch ( modrm )
2543 case 0xc0 ... 0xc7: /* fld %stN */
2544 case 0xc8 ... 0xcf: /* fxch %stN */
2545 case 0xd0: /* fnop */
2546 case 0xe0: /* fchs */
2547 case 0xe1: /* fabs */
2548 case 0xe4: /* ftst */
2549 case 0xe5: /* fxam */
2550 case 0xe8: /* fld1 */
2551 case 0xe9: /* fldl2t */
2552 case 0xea: /* fldl2e */
2553 case 0xeb: /* fldpi */
2554 case 0xec: /* fldlg2 */
2555 case 0xed: /* fldln2 */
2556 case 0xee: /* fldz */
2557 case 0xf0: /* f2xm1 */
2558 case 0xf1: /* fyl2x */
2559 case 0xf2: /* fptan */
2560 case 0xf3: /* fpatan */
2561 case 0xf4: /* fxtract */
2562 case 0xf5: /* fprem1 */
2563 case 0xf6: /* fdecstp */
2564 case 0xf7: /* fincstp */
2565 case 0xf8: /* fprem */
2566 case 0xf9: /* fyl2xp1 */
2567 case 0xfa: /* fsqrt */
2568 case 0xfb: /* fsincos */
2569 case 0xfc: /* frndint */
2570 case 0xfd: /* fscale */
2571 case 0xfe: /* fsin */
2572 case 0xff: /* fcos */
2573 emulate_fpu_insn_stub(0xd9, modrm);
2574 break;
2575 default:
2576 fail_if(modrm >= 0xc0);
2577 switch ( modrm_reg & 7 )
2579 case 0: /* fld m32fp */
2580 ea.bytes = 4;
2581 src = ea;
2582 if ( (rc = ops->read(ea.mem.seg, ea.mem.off, &src.val,
2583 src.bytes, ctxt)) != 0 )
2584 goto done;
2585 emulate_fpu_insn_memsrc("flds", src.val);
2586 break;
2587 case 2: /* fst m32fp */
2588 ea.bytes = 4;
2589 dst = ea;
2590 dst.type = OP_MEM;
2591 emulate_fpu_insn_memdst("fsts", dst.val);
2592 break;
2593 case 3: /* fstp m32fp */
2594 ea.bytes = 4;
2595 dst = ea;
2596 dst.type = OP_MEM;
2597 emulate_fpu_insn_memdst("fstps", dst.val);
2598 break;
2599 /* case 4: fldenv - TODO */
2600 case 5: /* fldcw m2byte */
2601 ea.bytes = 2;
2602 src = ea;
2603 if ( (rc = ops->read(src.mem.seg, src.mem.off, &src.val,
2604 src.bytes, ctxt)) != 0 )
2605 goto done;
2606 emulate_fpu_insn_memsrc("fldcw", src.val);
2607 break;
2608 /* case 6: fstenv - TODO */
2609 case 7: /* fnstcw m2byte */
2610 ea.bytes = 2;
2611 dst = ea;
2612 dst.type = OP_MEM;
2613 emulate_fpu_insn_memdst("fnstcw", dst.val);
2614 break;
2615 default:
2616 goto cannot_emulate;
2619 break;
2621 case 0xda: /* FPU 0xda */
2622 switch ( modrm )
2624 case 0xc0 ... 0xc7: /* fcmovb %stN */
2625 case 0xc8 ... 0xcf: /* fcmove %stN */
2626 case 0xd0 ... 0xd7: /* fcmovbe %stN */
2627 case 0xd8 ... 0xdf: /* fcmovu %stN */
2628 case 0xe9: /* fucompp */
2629 emulate_fpu_insn_stub(0xda, modrm);
2630 break;
2631 default:
2632 fail_if(modrm >= 0xc0);
2633 ea.bytes = 4;
2634 src = ea;
2635 if ( (rc = ops->read(src.mem.seg, src.mem.off, &src.val,
2636 src.bytes, ctxt)) != 0 )
2637 goto done;
2638 switch ( modrm_reg & 7 )
2639 {
2640 case 0: /* fiadd m32i */
2641 emulate_fpu_insn_memsrc("fiaddl", src.val);
2642 break;
2643 case 1: /* fimul m32i */
2644 emulate_fpu_insn_memsrc("fimull", src.val);
2645 break;
2646 case 2: /* ficom m32i */
2647 emulate_fpu_insn_memsrc("ficoml", src.val);
2648 break;
2649 case 3: /* ficomp m32i */
2650 emulate_fpu_insn_memsrc("ficompl", src.val);
2651 break;
2652 case 4: /* fisub m32i */
2653 emulate_fpu_insn_memsrc("fisubl", src.val);
2654 break;
2655 case 5: /* fisubr m32i */
2656 emulate_fpu_insn_memsrc("fisubrl", src.val);
2657 break;
2658 case 6: /* fidiv m32i */
2659 emulate_fpu_insn_memsrc("fidivl", src.val);
2660 break;
2661 case 7: /* fidivr m32i */
2662 emulate_fpu_insn_memsrc("fidivrl", src.val);
2663 break;
2664 default:
2665 goto cannot_emulate;
2668 break;
2670 case 0xdb: /* FPU 0xdb */
2671 switch ( modrm )
2673 case 0xc0 ... 0xc7: /* fcmovnb %stN */
2674 case 0xc8 ... 0xcf: /* fcmovne %stN */
2675 case 0xd0 ... 0xd7: /* fcmovnbe %stN */
2676 case 0xd8 ... 0xdf: /* fcmovnu %stN */
2677 emulate_fpu_insn_stub(0xdb, modrm);
2678 break;
2679 case 0xe2: /* fnclex */
2680 emulate_fpu_insn("fnclex");
2681 break;
2682 case 0xe3: /* fninit */
2683 emulate_fpu_insn("fninit");
2684 break;
2685 case 0xe4: /* fsetpm - 287 only, ignored by 387 */
2686 break;
2687 case 0xe8 ... 0xef: /* fucomi %stN */
2688 case 0xf0 ... 0xf7: /* fcomi %stN */
2689 emulate_fpu_insn_stub(0xdb, modrm);
2690 break;
2691 default:
2692 fail_if(modrm >= 0xc0);
2693 switch ( modrm_reg & 7 )
2695 case 0: /* fild m32i */
2696 ea.bytes = 4;
2697 src = ea;
2698 if ( (rc = ops->read(src.mem.seg, src.mem.off, &src.val,
2699 src.bytes, ctxt)) != 0 )
2700 goto done;
2701 emulate_fpu_insn_memsrc("fildl", src.val);
2702 break;
2703 case 1: /* fisttp m32i */
2704 ea.bytes = 4;
2705 dst = ea;
2706 dst.type = OP_MEM;
2707 emulate_fpu_insn_memdst("fisttpl", dst.val);
2708 break;
2709 case 2: /* fist m32i */
2710 ea.bytes = 4;
2711 dst = ea;
2712 dst.type = OP_MEM;
2713 emulate_fpu_insn_memdst("fistl", dst.val);
2714 break;
2715 case 3: /* fistp m32i */
2716 ea.bytes = 4;
2717 dst = ea;
2718 dst.type = OP_MEM;
2719 emulate_fpu_insn_memdst("fistpl", dst.val);
2720 break;
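/*
 * NB: the 80-bit forms below squeeze a 10-byte operand through
 * src.val/dst.val, which is only an unsigned long, so they appear to
 * transfer no more than the low bytes correctly; a faithful
 * implementation would need a wider buffer.
 */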
2721 case 5: /* fld m80fp */
2722 ea.bytes = 10;
2723 src = ea;
2724 if ( (rc = ops->read(src.mem.seg, src.mem.off,
2725 &src.val, src.bytes, ctxt)) != 0 )
2726 goto done;
2727 emulate_fpu_insn_memdst("fldt", src.val);
2728 break;
2729 case 7: /* fstp m80fp */
2730 ea.bytes = 10;
2731 dst = ea;
2732 dst.type = OP_MEM;
2733 emulate_fpu_insn_memdst("fstpt", dst.val);
2734 break;
2735 default:
2736 goto cannot_emulate;
2739 break;
2741 case 0xdc: /* FPU 0xdc */
2742 switch ( modrm )
2744 case 0xc0 ... 0xc7: /* fadd %stN */
2745 case 0xc8 ... 0xcf: /* fmul %stN */
2746 case 0xe0 ... 0xe7: /* fsubr %stN */
2747 case 0xe8 ... 0xef: /* fsub %stN */
2748 case 0xf0 ... 0xf7: /* fdivr %stN */
2749 case 0xf8 ... 0xff: /* fdiv %stN */
2750 emulate_fpu_insn_stub(0xdc, modrm);
2751 break;
2752 default:
2753 fail_if(modrm >= 0xc0);
2754 ea.bytes = 8;
2755 src = ea;
2756 if ( (rc = ops->read(src.mem.seg, src.mem.off, &src.val,
2757 src.bytes, ctxt)) != 0 )
2758 goto done;
2759 switch ( modrm_reg & 7 )
2761 case 0: /* fadd m64fp */
2762 emulate_fpu_insn_memsrc("faddl", src.val);
2763 break;
2764 case 1: /* fmul m64fp */
2765 emulate_fpu_insn_memsrc("fmull", src.val);
2766 break;
2767 case 2: /* fcom m64fp */
2768 emulate_fpu_insn_memsrc("fcoml", src.val);
2769 break;
2770 case 3: /* fcomp m64fp */
2771 emulate_fpu_insn_memsrc("fcompl", src.val);
2772 break;
2773 case 4: /* fsub m64fp */
2774 emulate_fpu_insn_memsrc("fsubl", src.val);
2775 break;
2776 case 5: /* fsubr m64fp */
2777 emulate_fpu_insn_memsrc("fsubrl", src.val);
2778 break;
2779 case 6: /* fdiv m64fp */
2780 emulate_fpu_insn_memsrc("fdivl", src.val);
2781 break;
2782 case 7: /* fdivr m64fp */
2783 emulate_fpu_insn_memsrc("fdivrl", src.val);
2784 break;
2787 break;
2789 case 0xdd: /* FPU 0xdd */
2790 switch ( modrm )
2792 case 0xc0 ... 0xc7: /* ffree %stN */
2793 case 0xd0 ... 0xd7: /* fst %stN */
2794 case 0xd8 ... 0xdf: /* fstp %stN */
2795 case 0xe0 ... 0xe7: /* fucom %stN */
2796 case 0xe8 ... 0xef: /* fucomp %stN */
2797 emulate_fpu_insn_stub(0xdd, modrm);
2798 break;
2799 default:
2800 fail_if(modrm >= 0xc0);
2801 switch ( modrm_reg & 7 )
2803 case 0: /* fld m64fp */
2804 ea.bytes = 8;
2805 src = ea;
2806 if ( (rc = ops->read(src.mem.seg, src.mem.off, &src.val,
2807 src.bytes, ctxt)) != 0 )
2808 goto done;
2809 emulate_fpu_insn_memsrc("fldl", src.val);
2810 break;
2811 case 1: /* fisttp m64i */
2812 ea.bytes = 8;
2813 dst = ea;
2814 dst.type = OP_MEM;
2815 emulate_fpu_insn_memdst("fisttpll", dst.val);
2816 break;
2817 case 2: /* fst m64fp */
2818 ea.bytes = 8;
2819 dst = ea;
2820 dst.type = OP_MEM;
2821 emulate_fpu_insn_memsrc("fstl", dst.val);
2822 break;
2823 case 3: /* fstp m64fp */
2824 ea.bytes = 8;
2825 dst = ea;
2826 dst.type = OP_MEM;
2827 emulate_fpu_insn_memdst("fstpl", dst.val);
2828 break;
2829 case 7: /* fnstsw m2byte */
2830 ea.bytes = 2;
2831 dst = ea;
2832 dst.type = OP_MEM;
2833 emulate_fpu_insn_memdst("fnstsw", dst.val);
2834 break;
2835 default:
2836 goto cannot_emulate;
2839 break;
2841 case 0xde: /* FPU 0xde */
2842 switch ( modrm )
2844 case 0xc0 ... 0xc7: /* faddp %stN */
2845 case 0xc8 ... 0xcf: /* fmulp %stN */
2846 case 0xd9: /* fcompp */
2847 case 0xe0 ... 0xe7: /* fsubrp %stN */
2848 case 0xe8 ... 0xef: /* fsubp %stN */
2849 case 0xf0 ... 0xf7: /* fdivrp %stN */
2850 case 0xf8 ... 0xff: /* fdivp %stN */
2851 emulate_fpu_insn_stub(0xde, modrm);
2852 break;
2853 default:
2854 fail_if(modrm >= 0xc0);
2855 ea.bytes = 2;
2856 src = ea;
2857 if ( (rc = ops->read(src.mem.seg, src.mem.off, &src.val,
2858 src.bytes, ctxt)) != 0 )
2859 goto done;
2860 switch ( modrm_reg & 7 )
2862 case 0: /* fiadd m16i */
2863 emulate_fpu_insn_memsrc("fiadd", src.val);
2864 break;
2865 case 1: /* fimul m16i */
2866 emulate_fpu_insn_memsrc("fimul", src.val);
2867 break;
2868 case 2: /* ficom m16i */
2869 emulate_fpu_insn_memsrc("ficom", src.val);
2870 break;
2871 case 3: /* ficomp m16i */
2872 emulate_fpu_insn_memsrc("ficomp", src.val);
2873 break;
2874 case 4: /* fisub m16i */
2875 emulate_fpu_insn_memsrc("fisub", src.val);
2876 break;
2877 case 5: /* fisubr m16i */
2878 emulate_fpu_insn_memsrc("fisubr", src.val);
2879 break;
2880 case 6: /* fidiv m16i */
2881 emulate_fpu_insn_memsrc("fidiv", src.val);
2882 break;
2883 case 7: /* fidivr m16i */
2884 emulate_fpu_insn_memsrc("fidivr", src.val);
2885 break;
2886 default:
2887 goto cannot_emulate;
2890 break;
2892 case 0xdf: /* FPU 0xdf */
2893 switch ( modrm )
2895 case 0xe0:
2896 /* fnstsw %ax */
2897 dst.bytes = 2;
2898 dst.type = OP_REG;
2899 dst.reg = (unsigned long *)&_regs.eax;
2900 emulate_fpu_insn_memdst("fnstsw", dst.val);
2901 break;
3902 case 0xe8 ... 0xef: /* fucomip %stN */
3903 case 0xf0 ... 0xf7: /* fcomip %stN */
2904 emulate_fpu_insn_stub(0xdf, modrm);
2905 break;
2906 default:
2907 fail_if(modrm >= 0xc0);
2908 switch ( modrm_reg & 7 )
2910 case 0: /* fild m16i */
2911 ea.bytes = 2;
2912 src = ea;
2913 if ( (rc = ops->read(src.mem.seg, src.mem.off, &src.val,
2914 src.bytes, ctxt)) != 0 )
2915 goto done;
2916 emulate_fpu_insn_memsrc("fild", src.val);
2917 break;
2918 case 1: /* fisttp m16i */
2919 ea.bytes = 2;
2920 dst = ea;
2921 dst.type = OP_MEM;
2922 emulate_fpu_insn_memdst("fisttp", dst.val);
2923 break;
2924 case 2: /* fist m16i */
2925 ea.bytes = 2;
2926 dst = ea;
2927 dst.type = OP_MEM;
2928 emulate_fpu_insn_memdst("fist", dst.val);
2929 break;
2930 case 3: /* fistp m16i */
2931 ea.bytes = 2;
2932 dst = ea;
2933 dst.type = OP_MEM;
2934 emulate_fpu_insn_memdst("fistp", dst.val);
2935 break;
2936 case 4: /* fbld m80dec */
2937 ea.bytes = 10;
2938 src = ea;
2939 if ( (rc = ops->read(src.mem.seg, src.mem.off,
2940 &src.val, src.bytes, ctxt)) != 0 )
2941 goto done;
2942 emulate_fpu_insn_memsrc("fbld", src.val);
2943 break;
2944 case 5: /* fild m64i */
2945 ea.bytes = 8;
2946 src = ea;
2947 if ( (rc = ops->read(src.mem.seg, src.mem.off, &src.val,
2948 src.bytes, ctxt)) != 0 )
2949 goto done;
2950 emulate_fpu_insn_memsrc("fildll", src.val);
2951 break;
2952 case 6: /* fbstp packed bcd */
2953 ea.bytes = 10;
2954 dst = ea;
2955 dst.type = OP_MEM;
2956 emulate_fpu_insn_memdst("fbstp", dst.val);
2957 break;
2958 case 7: /* fistp m64i */
2959 ea.bytes = 8;
2960 dst = ea;
2961 dst.type = OP_MEM;
2962 emulate_fpu_insn_memdst("fistpll", dst.val);
2963 break;
2964 default:
2965 goto cannot_emulate;
2968 break;
2970 case 0xe0 ... 0xe2: /* loop{,z,nz} */ {
2971 int rel = insn_fetch_type(int8_t);
2972 int do_jmp = !(_regs.eflags & EFLG_ZF); /* loopnz */
2973 if ( b == 0xe1 )
2974 do_jmp = !do_jmp; /* loopz */
2975 else if ( b == 0xe2 )
2976 do_jmp = 1; /* loop */
2977 switch ( ad_bytes )
2979 case 2:
2980 do_jmp &= --(*(uint16_t *)&_regs.ecx) != 0;
2981 break;
2982 case 4:
2983 do_jmp &= --(*(uint32_t *)&_regs.ecx) != 0;
2984 _regs.ecx = (uint32_t)_regs.ecx; /* zero extend in x86/64 mode */
2985 break;
2986 default: /* case 8: */
2987 do_jmp &= --_regs.ecx != 0;
2988 break;
2990 if ( do_jmp )
2991 jmp_rel(rel);
2992 break;
2995 case 0xe3: /* jcxz/jecxz (short) */ {
2996 int rel = insn_fetch_type(int8_t);
2997 if ( (ad_bytes == 2) ? !(uint16_t)_regs.ecx :
2998 (ad_bytes == 4) ? !(uint32_t)_regs.ecx : !_regs.ecx )
2999 jmp_rel(rel);
3000 break;
3003 case 0xe4: /* in imm8,%al */
3004 case 0xe5: /* in imm8,%eax */
3005 case 0xe6: /* out %al,imm8 */
3006 case 0xe7: /* out %eax,imm8 */
3007 case 0xec: /* in %dx,%al */
3008 case 0xed: /* in %dx,%eax */
3009 case 0xee: /* out %al,%dx */
3010 case 0xef: /* out %eax,%dx */ {
3011 unsigned int port = ((b < 0xe8)
3012 ? insn_fetch_type(uint8_t)
3013 : (uint16_t)_regs.edx);
3014 op_bytes = !(b & 1) ? 1 : (op_bytes == 8) ? 4 : op_bytes;
3015 if ( (rc = ioport_access_check(port, op_bytes, ctxt, ops)) != 0 )
3016 goto done;
3017 if ( b & 2 )
3019 /* out */
3020 fail_if(ops->write_io == NULL);
3021 rc = ops->write_io(port, op_bytes, _regs.eax, ctxt);
3023 else
3025 /* in */
3026 dst.type = OP_REG;
3027 dst.bytes = op_bytes;
3028 dst.reg = (unsigned long *)&_regs.eax;
3029 fail_if(ops->read_io == NULL);
3030 rc = ops->read_io(port, dst.bytes, &dst.val, ctxt);
3032 if ( rc != 0 )
3033 goto done;
3034 break;
3037 case 0xe8: /* call (near) */ {
3038 int rel = (((op_bytes == 2) && !mode_64bit())
3039 ? (int32_t)insn_fetch_type(int16_t)
3040 : insn_fetch_type(int32_t));
3041 op_bytes = mode_64bit() ? 8 : op_bytes;
3042 src.val = _regs.eip;
3043 jmp_rel(rel);
3044 goto push;
3047 case 0xe9: /* jmp (near) */ {
3048 int rel = (((op_bytes == 2) && !mode_64bit())
3049 ? (int32_t)insn_fetch_type(int16_t)
3050 : insn_fetch_type(int32_t));
3051 jmp_rel(rel);
3052 break;
3055 case 0xea: /* jmp (far, absolute) */ {
3056 uint16_t sel;
3057 uint32_t eip;
3058 generate_exception_if(mode_64bit(), EXC_UD, -1);
3059 eip = insn_fetch_bytes(op_bytes);
3060 sel = insn_fetch_type(uint16_t);
3061 if ( (rc = load_seg(x86_seg_cs, sel, ctxt, ops)) != 0 )
3062 goto done;
3063 _regs.eip = eip;
3064 break;
3067 case 0xeb: /* jmp (short) */ {
3068 int rel = insn_fetch_type(int8_t);
3069 jmp_rel(rel);
3070 break;
3073 case 0xf1: /* int1 (icebp) */
3074 src.val = EXC_DB;
3075 goto swint;
3077 case 0xf4: /* hlt */
3078 ctxt->retire.flags.hlt = 1;
3079 break;
3081 case 0xf5: /* cmc */
3082 _regs.eflags ^= EFLG_CF;
3083 break;
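/*
 * Grp3 widening multiply/divide: the 8- and 16-bit cases (and the 32-bit
 * case on 64-bit builds) are computed inline in a wider type, while the
 * full-register cases fall back to the mul_dbl/imul_dbl/div_dbl/idiv_dbl
 * helpers operating on a pair of unsigned longs. The divide paths raise
 * #DE on a zero divisor or an out-of-range quotient.
 */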
3085 case 0xf6 ... 0xf7: /* Grp3 */
3086 switch ( modrm_reg & 7 )
3088 case 0 ... 1: /* test */
3089 /* Special case in Grp3: test has an immediate source operand. */
3090 src.type = OP_IMM;
3091 src.bytes = (d & ByteOp) ? 1 : op_bytes;
3092 if ( src.bytes == 8 ) src.bytes = 4;
3093 switch ( src.bytes )
3095 case 1: src.val = insn_fetch_type(int8_t); break;
3096 case 2: src.val = insn_fetch_type(int16_t); break;
3097 case 4: src.val = insn_fetch_type(int32_t); break;
3099 goto test;
3100 case 2: /* not */
3101 dst.val = ~dst.val;
3102 break;
3103 case 3: /* neg */
3104 emulate_1op("neg", dst, _regs.eflags);
3105 break;
3106 case 4: /* mul */
3107 src = dst;
3108 dst.type = OP_REG;
3109 dst.reg = (unsigned long *)&_regs.eax;
3110 dst.val = *dst.reg;
3111 _regs.eflags &= ~(EFLG_OF|EFLG_CF);
3112 switch ( src.bytes )
3114 case 1:
3115 dst.val = (uint8_t)dst.val;
3116 dst.val *= src.val;
3117 if ( (uint8_t)dst.val != (uint16_t)dst.val )
3118 _regs.eflags |= EFLG_OF|EFLG_CF;
3119 dst.bytes = 2;
3120 break;
3121 case 2:
3122 dst.val = (uint16_t)dst.val;
3123 dst.val *= src.val;
3124 if ( (uint16_t)dst.val != (uint32_t)dst.val )
3125 _regs.eflags |= EFLG_OF|EFLG_CF;
3126 *(uint16_t *)&_regs.edx = dst.val >> 16;
3127 break;
3128 #ifdef __x86_64__
3129 case 4:
3130 dst.val = (uint32_t)dst.val;
3131 dst.val *= src.val;
3132 if ( (uint32_t)dst.val != dst.val )
3133 _regs.eflags |= EFLG_OF|EFLG_CF;
3134 _regs.edx = (uint32_t)(dst.val >> 32);
3135 break;
3136 #endif
3137 default: {
3138 unsigned long m[2] = { src.val, dst.val };
3139 if ( mul_dbl(m) )
3140 _regs.eflags |= EFLG_OF|EFLG_CF;
3141 _regs.edx = m[1];
3142 dst.val = m[0];
3143 break;
3146 break;
3147 case 5: /* imul */
3148 src = dst;
3149 dst.type = OP_REG;
3150 dst.reg = (unsigned long *)&_regs.eax;
3151 dst.val = *dst.reg;
3152 _regs.eflags &= ~(EFLG_OF|EFLG_CF);
3153 switch ( src.bytes )
3155 case 1:
3156 dst.val = ((uint16_t)(int8_t)src.val *
3157 (uint16_t)(int8_t)dst.val);
3158 if ( (int8_t)dst.val != (uint16_t)dst.val )
3159 _regs.eflags |= EFLG_OF|EFLG_CF;
3160 dst.bytes = 2;
3161 break;
3162 case 2:
3163 dst.val = ((uint32_t)(int16_t)src.val *
3164 (uint32_t)(int16_t)dst.val);
3165 if ( (int16_t)dst.val != (uint32_t)dst.val )
3166 _regs.eflags |= EFLG_OF|EFLG_CF;
3167 *(uint16_t *)&_regs.edx = dst.val >> 16;
3168 break;
3169 #ifdef __x86_64__
3170 case 4:
3171 dst.val = ((uint64_t)(int32_t)src.val *
3172 (uint64_t)(int32_t)dst.val);
3173 if ( (int32_t)dst.val != dst.val )
3174 _regs.eflags |= EFLG_OF|EFLG_CF;
3175 _regs.edx = (uint32_t)(dst.val >> 32);
3176 break;
3177 #endif
3178 default: {
3179 unsigned long m[2] = { src.val, dst.val };
3180 if ( imul_dbl(m) )
3181 _regs.eflags |= EFLG_OF|EFLG_CF;
3182 _regs.edx = m[1];
3183 dst.val = m[0];
3184 break;
3187 break;
3188 case 6: /* div */ {
3189 unsigned long u[2], v;
3190 src = dst;
3191 dst.type = OP_REG;
3192 dst.reg = (unsigned long *)&_regs.eax;
3193 switch ( src.bytes )
3195 case 1:
3196 u[0] = (uint16_t)_regs.eax;
3197 u[1] = 0;
3198 v = (uint8_t)src.val;
3199 generate_exception_if(
3200 div_dbl(u, v) || ((uint8_t)u[0] != (uint16_t)u[0]),
3201 EXC_DE, -1);
3202 dst.val = (uint8_t)u[0];
3203 ((uint8_t *)&_regs.eax)[1] = u[1];
3204 break;
3205 case 2:
3206 u[0] = ((uint32_t)_regs.edx << 16) | (uint16_t)_regs.eax;
3207 u[1] = 0;
3208 v = (uint16_t)src.val;
3209 generate_exception_if(
3210 div_dbl(u, v) || ((uint16_t)u[0] != (uint32_t)u[0]),
3211 EXC_DE, -1);
3212 dst.val = (uint16_t)u[0];
3213 *(uint16_t *)&_regs.edx = u[1];
3214 break;
3215 #ifdef __x86_64__
3216 case 4:
3217 u[0] = (_regs.edx << 32) | (uint32_t)_regs.eax;
3218 u[1] = 0;
3219 v = (uint32_t)src.val;
3220 generate_exception_if(
3221 div_dbl(u, v) || ((uint32_t)u[0] != u[0]),
3222 EXC_DE, -1);
3223 dst.val = (uint32_t)u[0];
3224 _regs.edx = (uint32_t)u[1];
3225 break;
3226 #endif
3227 default:
3228 u[0] = _regs.eax;
3229 u[1] = _regs.edx;
3230 v = src.val;
3231 generate_exception_if(div_dbl(u, v), EXC_DE, -1);
3232 dst.val = u[0];
3233 _regs.edx = u[1];
3234 break;
3236 break;
3238 case 7: /* idiv */ {
3239 unsigned long u[2], v;
3240 src = dst;
3241 dst.type = OP_REG;
3242 dst.reg = (unsigned long *)&_regs.eax;
3243 switch ( src.bytes )
3245 case 1:
3246 u[0] = (int16_t)_regs.eax;
3247 u[1] = ((long)u[0] < 0) ? ~0UL : 0UL;
3248 v = (int8_t)src.val;
3249 generate_exception_if(
3250 idiv_dbl(u, v) || ((int8_t)u[0] != (int16_t)u[0]),
3251 EXC_DE, -1);
3252 dst.val = (int8_t)u[0];
3253 ((int8_t *)&_regs.eax)[1] = u[1];
3254 break;
3255 case 2:
3256 u[0] = (int32_t)((_regs.edx << 16) | (uint16_t)_regs.eax);
3257 u[1] = ((long)u[0] < 0) ? ~0UL : 0UL;
3258 v = (int16_t)src.val;
3259 generate_exception_if(
3260 idiv_dbl(u, v) || ((int16_t)u[0] != (int32_t)u[0]),
3261 EXC_DE, -1);
3262 dst.val = (int16_t)u[0];
3263 *(int16_t *)&_regs.edx = u[1];
3264 break;
3265 #ifdef __x86_64__
3266 case 4:
3267 u[0] = (_regs.edx << 32) | (uint32_t)_regs.eax;
3268 u[1] = ((long)u[0] < 0) ? ~0UL : 0UL;
3269 v = (int32_t)src.val;
3270 generate_exception_if(
3271 idiv_dbl(u, v) || ((int32_t)u[0] != u[0]),
3272 EXC_DE, -1);
3273 dst.val = (int32_t)u[0];
3274 _regs.edx = (uint32_t)u[1];
3275 break;
3276 #endif
3277 default:
3278 u[0] = _regs.eax;
3279 u[1] = _regs.edx;
3280 v = src.val;
3281 generate_exception_if(idiv_dbl(u, v), EXC_DE, -1);
3282 dst.val = u[0];
3283 _regs.edx = u[1];
3284 break;
3286 break;
3288 default:
3289 goto cannot_emulate;
3291 break;
3293 case 0xf8: /* clc */
3294 _regs.eflags &= ~EFLG_CF;
3295 break;
3297 case 0xf9: /* stc */
3298 _regs.eflags |= EFLG_CF;
3299 break;
3301 case 0xfa: /* cli */
3302 generate_exception_if(!mode_iopl(), EXC_GP, 0);
3303 _regs.eflags &= ~EFLG_IF;
3304 break;
3306 case 0xfb: /* sti */
3307 generate_exception_if(!mode_iopl(), EXC_GP, 0);
3308 if ( !(_regs.eflags & EFLG_IF) )
3310 _regs.eflags |= EFLG_IF;
3311 ctxt->retire.flags.sti = 1;
3313 break;
3315 case 0xfc: /* cld */
3316 _regs.eflags &= ~EFLG_DF;
3317 break;
3319 case 0xfd: /* std */
3320 _regs.eflags |= EFLG_DF;
3321 break;
3323 case 0xfe: /* Grp4 */
3324 generate_exception_if((modrm_reg & 7) >= 2, EXC_UD, -1);
3325 case 0xff: /* Grp5 */
3326 switch ( modrm_reg & 7 )
3328 case 0: /* inc */
3329 emulate_1op("inc", dst, _regs.eflags);
3330 break;
3331 case 1: /* dec */
3332 emulate_1op("dec", dst, _regs.eflags);
3333 break;
3334 case 2: /* call (near) */
3335 case 4: /* jmp (near) */
3336 if ( (dst.bytes != 8) && mode_64bit() )
3338 dst.bytes = op_bytes = 8;
3339 if ( dst.type == OP_REG )
3340 dst.val = *dst.reg;
3341 else if ( (rc = read_ulong(dst.mem.seg, dst.mem.off,
3342 &dst.val, 8, ctxt, ops)) != 0 )
3343 goto done;
3345 src.val = _regs.eip;
3346 _regs.eip = dst.val;
3347 if ( (modrm_reg & 7) == 2 )
3348 goto push; /* call */
3349 dst.type = OP_NONE;
3350 break;
3351 case 3: /* call (far, absolute indirect) */
3352 case 5: /* jmp (far, absolute indirect) */ {
3353 unsigned long sel;
3355 generate_exception_if(dst.type != OP_MEM, EXC_UD, -1);
3357 if ( (rc = read_ulong(dst.mem.seg, dst.mem.off+dst.bytes,
3358 &sel, 2, ctxt, ops)) )
3359 goto done;
3361 if ( (modrm_reg & 7) == 3 ) /* call */
3363 struct segment_register reg;
3364 fail_if(ops->read_segment == NULL);
3365 if ( (rc = ops->read_segment(x86_seg_cs, &reg, ctxt)) ||
3366 (rc = ops->write(x86_seg_ss, sp_pre_dec(op_bytes),
3367 &reg.sel, op_bytes, ctxt)) ||
3368 (rc = ops->write(x86_seg_ss, sp_pre_dec(op_bytes),
3369 &_regs.eip, op_bytes, ctxt)) )
3370 goto done;
3373 if ( (rc = load_seg(x86_seg_cs, sel, ctxt, ops)) != 0 )
3374 goto done;
3375 _regs.eip = dst.val;
3377 dst.type = OP_NONE;
3378 break;
3380 case 6: /* push */
3381 /* 64-bit mode: PUSH defaults to a 64-bit operand. */
3382 if ( mode_64bit() && (dst.bytes == 4) )
3384 dst.bytes = 8;
3385 if ( dst.type == OP_REG )
3386 dst.val = *dst.reg;
3387 else if ( (rc = read_ulong(dst.mem.seg, dst.mem.off,
3388 &dst.val, 8, ctxt, ops)) != 0 )
3389 goto done;
3391 if ( (rc = ops->write(x86_seg_ss, sp_pre_dec(dst.bytes),
3392 &dst.val, dst.bytes, ctxt)) != 0 )
3393 goto done;
3394 dst.type = OP_NONE;
3395 break;
3396 case 7:
3397 generate_exception_if(1, EXC_UD, -1);
3398 default:
3399 goto cannot_emulate;
3401 break;
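/*
 * Writeback: register destinations follow the usual x86 rule that 32-bit
 * writes zero-extend into a 64-bit register; memory destinations are
 * skipped when the value is unchanged (unless writeback is forced), and
 * LOCKed updates are committed atomically via ops->cmpxchg().
 */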
3404 writeback:
3405 switch ( dst.type )
3407 case OP_REG:
3408 /* The 4-byte case *is* correct: in 64-bit mode we zero-extend. */
3409 switch ( dst.bytes )
3411 case 1: *(uint8_t *)dst.reg = (uint8_t)dst.val; break;
3412 case 2: *(uint16_t *)dst.reg = (uint16_t)dst.val; break;
3413 case 4: *dst.reg = (uint32_t)dst.val; break; /* 64b: zero-ext */
3414 case 8: *dst.reg = dst.val; break;
3416 break;
3417 case OP_MEM:
3418 if ( !(d & Mov) && (dst.orig_val == dst.val) &&
3419 !ctxt->force_writeback )
3420 /* nothing to do */;
3421 else if ( lock_prefix )
3422 rc = ops->cmpxchg(
3423 dst.mem.seg, dst.mem.off, &dst.orig_val,
3424 &dst.val, dst.bytes, ctxt);
3425 else
3426 rc = ops->write(
3427 dst.mem.seg, dst.mem.off, &dst.val, dst.bytes, ctxt);
3428 if ( rc != 0 )
3429 goto done;
3430 default:
3431 break;
3434 /* Inject #DB if single-step tracing was enabled at instruction start. */
3435 if ( (ctxt->regs->eflags & EFLG_TF) && (rc == X86EMUL_OKAY) &&
3436 (ops->inject_hw_exception != NULL) )
3437 rc = ops->inject_hw_exception(EXC_DB, -1, ctxt) ? : X86EMUL_EXCEPTION;
3439 /* Commit shadow register state. */
3440 _regs.eflags &= ~EFLG_RF;
3441 *ctxt->regs = _regs;
3443 done:
3444 return rc;
3446 twobyte_insn:
3447 switch ( b )
3449 case 0x00: /* Grp6 */
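/*
 * Only LLDT (reg field 2) and LTR (reg field 3) of Grp6 are handled;
 * both require protected mode and CPL0.
 */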
3450 fail_if((modrm_reg & 6) != 2);
3451 generate_exception_if(!in_protmode(ctxt, ops), EXC_UD, -1);
3452 generate_exception_if(!mode_ring0(), EXC_GP, 0);
3453 if ( (rc = load_seg((modrm_reg & 1) ? x86_seg_tr : x86_seg_ldtr,
3454 src.val, ctxt, ops)) != 0 )
3455 goto done;
3456 break;
3458 case 0x01: /* Grp7 */ {
3459 struct segment_register reg;
3460 unsigned long base, limit, cr0, cr0w;
3462 if ( modrm == 0xdf ) /* invlpga */
3464 generate_exception_if(!in_protmode(ctxt, ops), EXC_UD, -1);
3465 generate_exception_if(!mode_ring0(), EXC_GP, 0);
3466 fail_if(ops->invlpg == NULL);
3467 if ( (rc = ops->invlpg(x86_seg_none, truncate_ea(_regs.eax),
3468 ctxt)) )
3469 goto done;
3470 break;
3473 switch ( modrm_reg & 7 )
3475 case 0: /* sgdt */
3476 case 1: /* sidt */
3477 generate_exception_if(ea.type != OP_MEM, EXC_UD, -1);
3478 fail_if(ops->read_segment == NULL);
3479 if ( (rc = ops->read_segment((modrm_reg & 1) ?
3480 x86_seg_idtr : x86_seg_gdtr,
3481 &reg, ctxt)) )
3482 goto done;
3483 if ( op_bytes == 2 )
3484 reg.base &= 0xffffff;
3485 if ( (rc = ops->write(ea.mem.seg, ea.mem.off+0,
3486 &reg.limit, 2, ctxt)) ||
3487 (rc = ops->write(ea.mem.seg, ea.mem.off+2,
3488 &reg.base, mode_64bit() ? 8 : 4, ctxt)) )
3489 goto done;
3490 break;
3491 case 2: /* lgdt */
3492 case 3: /* lidt */
3493 generate_exception_if(ea.type != OP_MEM, EXC_UD, -1);
3494 fail_if(ops->write_segment == NULL);
3495 memset(&reg, 0, sizeof(reg));
3496 if ( (rc = read_ulong(ea.mem.seg, ea.mem.off+0,
3497 &limit, 2, ctxt, ops)) ||
3498 (rc = read_ulong(ea.mem.seg, ea.mem.off+2,
3499 &base, mode_64bit() ? 8 : 4, ctxt, ops)) )
3500 goto done;
3501 reg.base = base;
3502 reg.limit = limit;
3503 if ( op_bytes == 2 )
3504 reg.base &= 0xffffff;
3505 if ( (rc = ops->write_segment((modrm_reg & 1) ?
3506 x86_seg_idtr : x86_seg_gdtr,
3507 &reg, ctxt)) )
3508 goto done;
3509 break;
3510 case 4: /* smsw */
3511 if ( ea.type == OP_MEM )
3512 ea.bytes = 2;
3513 dst = ea;
3514 fail_if(ops->read_cr == NULL);
3515 if ( (rc = ops->read_cr(0, &dst.val, ctxt)) )
3516 goto done;
3517 d |= Mov; /* force writeback */
3518 break;
3519 case 6: /* lmsw */
3520 fail_if(ops->read_cr == NULL);
3521 fail_if(ops->write_cr == NULL);
3522 if ( (rc = ops->read_cr(0, &cr0, ctxt)) )
3523 goto done;
3524 if ( ea.type == OP_REG )
3525 cr0w = *ea.reg;
3526 else if ( (rc = read_ulong(ea.mem.seg, ea.mem.off,
3527 &cr0w, 2, ctxt, ops)) )
3528 goto done;
3529 /* LMSW can: (1) set bits 0-3; (2) clear bits 1-3. */
3530 cr0 = (cr0 & ~0xe) | (cr0w & 0xf);
3531 if ( (rc = ops->write_cr(0, cr0, ctxt)) )
3532 goto done;
3533 break;
3534 case 7: /* invlpg */
3535 generate_exception_if(!mode_ring0(), EXC_GP, 0);
3536 generate_exception_if(ea.type != OP_MEM, EXC_UD, -1);
3537 fail_if(ops->invlpg == NULL);
3538 if ( (rc = ops->invlpg(ea.mem.seg, ea.mem.off, ctxt)) )
3539 goto done;
3540 break;
3541 default:
3542 goto cannot_emulate;
3544 break;
3547 case 0x06: /* clts */
3548 generate_exception_if(!mode_ring0(), EXC_GP, 0);
3549 fail_if((ops->read_cr == NULL) || (ops->write_cr == NULL));
3550 if ( (rc = ops->read_cr(0, &dst.val, ctxt)) ||
3551 (rc = ops->write_cr(0, dst.val&~8, ctxt)) )
3552 goto done;
3553 break;
3555 case 0x08: /* invd */
3556 case 0x09: /* wbinvd */
3557 generate_exception_if(!mode_ring0(), EXC_GP, 0);
3558 fail_if(ops->wbinvd == NULL);
3559 if ( (rc = ops->wbinvd(ctxt)) != 0 )
3560 goto done;
3561 break;
3563 case 0x0d: /* GrpP (prefetch) */
3564 case 0x18: /* Grp16 (prefetch/nop) */
3565 case 0x19 ... 0x1f: /* nop (amd-defined) */
3566 break;
3568 case 0x20: /* mov cr,reg */
3569 case 0x21: /* mov dr,reg */
3570 case 0x22: /* mov reg,cr */
3571 case 0x23: /* mov reg,dr */
3572 generate_exception_if(ea.type != OP_REG, EXC_UD, -1);
3573 generate_exception_if(!mode_ring0(), EXC_GP, 0);
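/* A LOCK prefix selects CR8, AMD's alternative encoding of mov %%cr8. */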
3574 modrm_reg |= lock_prefix << 3;
3575 if ( b & 2 )
3577 /* Write to CR/DR. */
3578 src.val = *(unsigned long *)decode_register(modrm_rm, &_regs, 0);
3579 if ( !mode_64bit() )
3580 src.val = (uint32_t)src.val;
3581 rc = ((b & 1)
3582 ? (ops->write_dr
3583 ? ops->write_dr(modrm_reg, src.val, ctxt)
3584 : X86EMUL_UNHANDLEABLE)
3585 : (ops->write_cr
3586 ? ops->write_cr(modrm_reg, src.val, ctxt)
3587 : X86EMUL_UNHANDLEABLE));
3589 else
3591 /* Read from CR/DR. */
3592 dst.type = OP_REG;
3593 dst.bytes = mode_64bit() ? 8 : 4;
3594 dst.reg = decode_register(modrm_rm, &_regs, 0);
3595 rc = ((b & 1)
3596 ? (ops->read_dr
3597 ? ops->read_dr(modrm_reg, &dst.val, ctxt)
3598 : X86EMUL_UNHANDLEABLE)
3599 : (ops->read_cr
3600 ? ops->read_cr(modrm_reg, &dst.val, ctxt)
3601 : X86EMUL_UNHANDLEABLE));
3603 if ( rc != 0 )
3604 goto done;
3605 break;
3607 case 0x30: /* wrmsr */ {
3608 uint64_t val = ((uint64_t)_regs.edx << 32) | (uint32_t)_regs.eax;
3609 generate_exception_if(!mode_ring0(), EXC_GP, 0);
3610 fail_if(ops->write_msr == NULL);
3611 if ( (rc = ops->write_msr((uint32_t)_regs.ecx, val, ctxt)) != 0 )
3612 goto done;
3613 break;
3616 case 0x31: /* rdtsc */ {
3617 unsigned long cr4;
3618 uint64_t val;
3619 fail_if(ops->read_cr == NULL);
3620 if ( (rc = ops->read_cr(4, &cr4, ctxt)) )
3621 goto done;
3622 generate_exception_if((cr4 & CR4_TSD) && !mode_ring0(), EXC_GP, 0);
3623 fail_if(ops->read_msr == NULL);
3624 if ( (rc = ops->read_msr(MSR_TSC, &val, ctxt)) != 0 )
3625 goto done;
3626 _regs.edx = (uint32_t)(val >> 32);
3627 _regs.eax = (uint32_t)(val >> 0);
3628 break;
3631 case 0x32: /* rdmsr */ {
3632 uint64_t val;
3633 generate_exception_if(!mode_ring0(), EXC_GP, 0);
3634 fail_if(ops->read_msr == NULL);
3635 if ( (rc = ops->read_msr((uint32_t)_regs.ecx, &val, ctxt)) != 0 )
3636 goto done;
3637 _regs.edx = (uint32_t)(val >> 32);
3638 _regs.eax = (uint32_t)(val >> 0);
3639 break;
3642 case 0x40 ... 0x4f: /* cmovcc */
3643 dst.val = src.val;
3644 if ( !test_cc(b, _regs.eflags) )
3645 dst.type = OP_NONE;
3646 break;
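/*
 * The MMX moves below run on the host: a 4-byte stub (0f 6f/7f ModRM,
 * then ret) is assembled, with the stub's mod/rm bits cleared for memory
 * operands so that it references (%%eax), which is pointed at a local
 * 64-bit buffer before the call.
 */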
3648 case 0x6f: /* movq mm/m64,mm */ {
3649 uint8_t stub[] = { 0x0f, 0x6f, modrm, 0xc3 };
3650 struct fpu_insn_ctxt fic = { .insn_bytes = sizeof(stub)-1 };
3651 uint64_t val;
3652 if ( ea.type == OP_MEM )
3654 unsigned long lval, hval;
3655 if ( (rc = read_ulong(ea.mem.seg, ea.mem.off+0,
3656 &lval, 4, ctxt, ops)) ||
3657 (rc = read_ulong(ea.mem.seg, ea.mem.off+4,
3658 &hval, 4, ctxt, ops)) )
3659 goto done;
3660 val = ((uint64_t)hval << 32) | (uint32_t)lval;
3661 stub[2] = modrm & 0x38; /* movq (%eax),%mmN */
3663 get_fpu(X86EMUL_FPU_mmx, &fic);
3664 asm volatile ( "call *%0" : : "r" (stub), "a" (&val) : "memory" );
3665 put_fpu(&fic);
3666 break;
3669 case 0x7f: /* movq mm,mm/m64 */ {
3670 uint8_t stub[] = { 0x0f, 0x7f, modrm, 0xc3 };
3671 struct fpu_insn_ctxt fic = { .insn_bytes = sizeof(stub)-1 };
3672 uint64_t val;
3673 if ( ea.type == OP_MEM )
3674 stub[2] = modrm & 0x38; /* movq %mmN,(%eax) */
3675 get_fpu(X86EMUL_FPU_mmx, &fic);
3676 asm volatile ( "call *%0" : : "r" (stub), "a" (&val) : "memory" );
3677 put_fpu(&fic);
3678 if ( ea.type == OP_MEM )
3680 unsigned long lval = (uint32_t)val, hval = (uint32_t)(val >> 32);
3681 if ( (rc = ops->write(ea.mem.seg, ea.mem.off+0, &lval, 4, ctxt)) ||
3682 (rc = ops->write(ea.mem.seg, ea.mem.off+4, &hval, 4, ctxt)) )
3683 goto done;
3685 break;
3688 case 0x80 ... 0x8f: /* jcc (near) */ {
3689 int rel = (((op_bytes == 2) && !mode_64bit())
3690 ? (int32_t)insn_fetch_type(int16_t)
3691 : insn_fetch_type(int32_t));
3692 if ( test_cc(b, _regs.eflags) )
3693 jmp_rel(rel);
3694 break;
3697 case 0x90 ... 0x9f: /* setcc */
3698 dst.val = test_cc(b, _regs.eflags);
3699 break;
3701 case 0xa0: /* push %%fs */
3702 src.val = x86_seg_fs;
3703 goto push_seg;
3705 case 0xa1: /* pop %%fs */
3706 src.val = x86_seg_fs;
3707 goto pop_seg;
3709 case 0xa2: /* cpuid */ {
3710 unsigned int eax = _regs.eax, ebx = _regs.ebx;
3711 unsigned int ecx = _regs.ecx, edx = _regs.edx;
3712 fail_if(ops->cpuid == NULL);
3713 if ( (rc = ops->cpuid(&eax, &ebx, &ecx, &edx, ctxt)) != 0 )
3714 goto done;
3715 _regs.eax = eax; _regs.ebx = ebx;
3716 _regs.ecx = ecx; _regs.edx = edx;
3717 break;
3720 case 0xa8: /* push %%gs */
3721 src.val = x86_seg_gs;
3722 goto push_seg;
3724 case 0xa9: /* pop %%gs */
3725 src.val = x86_seg_gs;
3726 goto pop_seg;
3728 case 0xb0 ... 0xb1: /* cmpxchg */
3729 /* Save real source value, then compare EAX against destination. */
3730 src.orig_val = src.val;
3731 src.val = _regs.eax;
3732 emulate_2op_SrcV("cmp", src, dst, _regs.eflags);
3733 if ( _regs.eflags & EFLG_ZF )
3735 /* Success: write back to memory. */
3736 dst.val = src.orig_val;
3738 else
3740 /* Failure: write the value we saw to EAX. */
3741 dst.type = OP_REG;
3742 dst.reg = (unsigned long *)&_regs.eax;
3744 break;
3746 case 0xa3: bt: /* bt */
3747 emulate_2op_SrcV_nobyte("bt", src, dst, _regs.eflags);
3748 dst.type = OP_NONE;
3749 break;
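/*
 * SHLD/SHRD shift a double-width value composed of destination and
 * source. CF is the last bit shifted out of the original destination;
 * OF (architecturally defined only for 1-bit shifts) is set here on a
 * sign change of the destination.
 */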
3751 case 0xa4: /* shld imm8,r,r/m */
3752 case 0xa5: /* shld %%cl,r,r/m */
3753 case 0xac: /* shrd imm8,r,r/m */
3754 case 0xad: /* shrd %%cl,r,r/m */ {
3755 uint8_t shift, width = dst.bytes << 3;
3756 shift = (b & 1) ? (uint8_t)_regs.ecx : insn_fetch_type(uint8_t);
3757 if ( (shift &= width - 1) == 0 )
3758 break;
3759 dst.orig_val = truncate_word(dst.val, dst.bytes);
3760 dst.val = ((shift == width) ? src.val :
3761 (b & 8) ?
3762 /* shrd */
3763 ((dst.orig_val >> shift) |
3764 truncate_word(src.val << (width - shift), dst.bytes)) :
3765 /* shld */
3766 ((dst.orig_val << shift) |
3767 ((src.val >> (width - shift)) & ((1ull << shift) - 1))));
3768 dst.val = truncate_word(dst.val, dst.bytes);
3769 _regs.eflags &= ~(EFLG_OF|EFLG_SF|EFLG_ZF|EFLG_PF|EFLG_CF);
3770 if ( (dst.orig_val >> ((b & 8) ? (shift - 1) : (width - shift))) & 1 )
3771 _regs.eflags |= EFLG_CF;
3772 if ( ((dst.val ^ dst.orig_val) >> (width - 1)) & 1 )
3773 _regs.eflags |= EFLG_OF;
3774 _regs.eflags |= ((dst.val >> (width - 1)) & 1) ? EFLG_SF : 0;
3775 _regs.eflags |= (dst.val == 0) ? EFLG_ZF : 0;
3776 _regs.eflags |= even_parity(dst.val) ? EFLG_PF : 0;
3777 break;
3780 case 0xb3: btr: /* btr */
3781 emulate_2op_SrcV_nobyte("btr", src, dst, _regs.eflags);
3782 break;
3784 case 0xab: bts: /* bts */
3785 emulate_2op_SrcV_nobyte("bts", src, dst, _regs.eflags);
3786 break;
3788 case 0xaf: /* imul */
3789 _regs.eflags &= ~(EFLG_OF|EFLG_CF);
3790 switch ( dst.bytes )
3792 case 2:
3793 dst.val = ((uint32_t)(int16_t)src.val *
3794 (uint32_t)(int16_t)dst.val);
3795 if ( (int16_t)dst.val != (uint32_t)dst.val )
3796 _regs.eflags |= EFLG_OF|EFLG_CF;
3797 break;
3798 #ifdef __x86_64__
3799 case 4:
3800 dst.val = ((uint64_t)(int32_t)src.val *
3801 (uint64_t)(int32_t)dst.val);
3802 if ( (int32_t)dst.val != dst.val )
3803 _regs.eflags |= EFLG_OF|EFLG_CF;
3804 break;
3805 #endif
3806 default: {
3807 unsigned long m[2] = { src.val, dst.val };
3808 if ( imul_dbl(m) )
3809 _regs.eflags |= EFLG_OF|EFLG_CF;
3810 dst.val = m[0];
3811 break;
3814 break;
3816 case 0xb2: /* lss */
3817 dst.val = x86_seg_ss;
3818 goto les;
3820 case 0xb4: /* lfs */
3821 dst.val = x86_seg_fs;
3822 goto les;
3824 case 0xb5: /* lgs */
3825 dst.val = x86_seg_gs;
3826 goto les;
3828 case 0xb6: /* movzx rm8,r{16,32,64} */
3829 /* Recompute DstReg as we may have decoded AH/BH/CH/DH. */
3830 dst.reg = decode_register(modrm_reg, &_regs, 0);
3831 dst.bytes = op_bytes;
3832 dst.val = (uint8_t)src.val;
3833 break;
3835 case 0xbc: /* bsf */ {
3836 int zf;
3837 asm ( "bsf %2,%0; setz %b1"
3838 : "=r" (dst.val), "=q" (zf)
3839 : "r" (src.val), "1" (0) );
3840 _regs.eflags &= ~EFLG_ZF;
3841 if ( zf )
3843 _regs.eflags |= EFLG_ZF;
3844 dst.type = OP_NONE;
3846 break;
3849 case 0xbd: /* bsr */ {
3850 int zf;
3851 asm ( "bsr %2,%0; setz %b1"
3852 : "=r" (dst.val), "=q" (zf)
3853 : "r" (src.val), "1" (0) );
3854 _regs.eflags &= ~EFLG_ZF;
3855 if ( zf )
3857 _regs.eflags |= EFLG_ZF;
3858 dst.type = OP_NONE;
3860 break;
3863 case 0xb7: /* movzx rm16,r{16,32,64} */
3864 dst.val = (uint16_t)src.val;
3865 break;
3867 case 0xbb: btc: /* btc */
3868 emulate_2op_SrcV_nobyte("btc", src, dst, _regs.eflags);
3869 break;
3871 case 0xba: /* Grp8 */
3872 switch ( modrm_reg & 7 )
3874 case 4: goto bt;
3875 case 5: goto bts;
3876 case 6: goto btr;
3877 case 7: goto btc;
3878 default: generate_exception_if(1, EXC_UD, -1);
3880 break;
3882 case 0xbe: /* movsx rm8,r{16,32,64} */
3883 /* Recompute DstReg as we may have decoded AH/BH/CH/DH. */
3884 dst.reg = decode_register(modrm_reg, &_regs, 0);
3885 dst.bytes = op_bytes;
3886 dst.val = (int8_t)src.val;
3887 break;
3889 case 0xbf: /* movsx rm16,r{16,32,64} */
3890 dst.val = (int16_t)src.val;
3891 break;
3893 case 0xc0 ... 0xc1: /* xadd */
3894 /* Write back the register source. */
3895 switch ( dst.bytes )
3897 case 1: *(uint8_t *)src.reg = (uint8_t)dst.val; break;
3898 case 2: *(uint16_t *)src.reg = (uint16_t)dst.val; break;
3899 case 4: *src.reg = (uint32_t)dst.val; break; /* 64b reg: zero-extend */
3900 case 8: *src.reg = dst.val; break;
3902 goto add;
3904 case 0xc3: /* movnti */
3905 /* Ignore the non-temporal hint for now. */
3906 generate_exception_if(dst.bytes <= 2, EXC_UD, -1);
3907 dst.val = src.val;
3908 break;
3910 case 0xc7: /* Grp9 (cmpxchg8b/cmpxchg16b) */ {
3911 unsigned long old[2], exp[2], new[2];
3912 unsigned int i;
3914 generate_exception_if((modrm_reg & 7) != 1, EXC_UD, -1);
3915 generate_exception_if(ea.type != OP_MEM, EXC_UD, -1);
3916 op_bytes *= 2;
3918 /* Get actual old value. */
3919 for ( i = 0; i < (op_bytes/sizeof(long)); i++ )
3920 if ( (rc = read_ulong(ea.mem.seg, ea.mem.off + i*sizeof(long),
3921 &old[i], sizeof(long), ctxt, ops)) != 0 )
3922 goto done;
3924 /* Get expected and proposed values. */
3925 if ( op_bytes == 8 )
3927 ((uint32_t *)exp)[0] = _regs.eax; ((uint32_t *)exp)[1] = _regs.edx;
3928 ((uint32_t *)new)[0] = _regs.ebx; ((uint32_t *)new)[1] = _regs.ecx;
3930 else
3932 exp[0] = _regs.eax; exp[1] = _regs.edx;
3933 new[0] = _regs.ebx; new[1] = _regs.ecx;
3936 if ( memcmp(old, exp, op_bytes) )
3938 /* Expected != actual: store actual to rDX:rAX and clear ZF. */
3939 _regs.eax = (op_bytes == 8) ? ((uint32_t *)old)[0] : old[0];
3940 _regs.edx = (op_bytes == 8) ? ((uint32_t *)old)[1] : old[1];
3941 _regs.eflags &= ~EFLG_ZF;
3943 else
3945 /* Expected == actual: attempt atomic cmpxchg and set ZF. */
3946 if ( (rc = ops->cmpxchg(ea.mem.seg, ea.mem.off, old,
3947 new, op_bytes, ctxt)) != 0 )
3948 goto done;
3949 _regs.eflags |= EFLG_ZF;
3951 break;
3954 case 0xc8 ... 0xcf: /* bswap */
3955 dst.type = OP_REG;
3956 dst.reg = decode_register(
3957 (b & 7) | ((rex_prefix & 1) << 3), &_regs, 0);
3958 switch ( dst.bytes = op_bytes )
3960 default: /* case 2: */
3961 /* Undefined behaviour. Writes zero on all tested CPUs. */
3962 dst.val = 0;
3963 break;
3964 case 4:
3965 #ifdef __x86_64__
3966 asm ( "bswap %k0" : "=r" (dst.val) : "0" (*dst.reg) );
3967 break;
3968 case 8:
3969 #endif
3970 asm ( "bswap %0" : "=r" (dst.val) : "0" (*dst.reg) );
3971 break;
3973 break;
3975 goto writeback;
3977 cannot_emulate:
3978 return X86EMUL_UNHANDLEABLE;