ia64/xen-unstable

view xen/arch/x86/x86_emulate/x86_emulate.c @ 18339:6e3c97f43f9c

x86_emulate: Do not request emulation of REP instructions beyond the
point at which the index register (SI/DI) wraps. This can cause a
discontinuity in the address range accessed by the repeated
instruction.

Signed-off-by: Keir Fraser <keir.fraser@citrix.com>
author Keir Fraser <keir.fraser@citrix.com>
date Tue Aug 19 15:56:31 2008 +0100 (2008-08-19)
parents 2e4ecfc83460
children 8d993552673a
line source
1 /******************************************************************************
2 * x86_emulate.c
3 *
4 * Generic x86 (32-bit and 64-bit) instruction decoder and emulator.
5 *
6 * Copyright (c) 2005-2007 Keir Fraser
7 * Copyright (c) 2005-2007 XenSource Inc.
8 *
9 * This program is free software; you can redistribute it and/or modify
10 * it under the terms of the GNU General Public License as published by
11 * the Free Software Foundation; either version 2 of the License, or
12 * (at your option) any later version.
13 *
14 * This program is distributed in the hope that it will be useful,
15 * but WITHOUT ANY WARRANTY; without even the implied warranty of
16 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
17 * GNU General Public License for more details.
18 *
19 * You should have received a copy of the GNU General Public License
20 * along with this program; if not, write to the Free Software
21 * Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
22 */
24 /* Operand sizes: 8-bit operands or specified/overridden size. */
25 #define ByteOp (1<<0) /* 8-bit operands. */
26 /* Destination operand type. */
27 #define DstBitBase (0<<1) /* Memory operand, bit string. */
28 #define ImplicitOps (1<<1) /* Implicit in opcode. No generic decode. */
29 #define DstReg (2<<1) /* Register operand. */
30 #define DstMem (3<<1) /* Memory operand. */
31 #define DstMask (3<<1)
32 /* Source operand type. */
33 #define SrcNone (0<<3) /* No source operand. */
34 #define SrcImplicit (0<<3) /* Source operand is implicit in the opcode. */
35 #define SrcReg (1<<3) /* Register operand. */
36 #define SrcMem (2<<3) /* Memory operand. */
37 #define SrcMem16 (3<<3) /* Memory operand (16-bit). */
38 #define SrcImm (4<<3) /* Immediate operand. */
39 #define SrcImmByte (5<<3) /* 8-bit sign-extended immediate operand. */
40 #define SrcMask (7<<3)
41 /* Generic ModRM decode. */
42 #define ModRM (1<<6)
43 /* Destination is only written; never read. */
44 #define Mov (1<<7)
46 static uint8_t opcode_table[256] = {
47 /* 0x00 - 0x07 */
48 ByteOp|DstMem|SrcReg|ModRM, DstMem|SrcReg|ModRM,
49 ByteOp|DstReg|SrcMem|ModRM, DstReg|SrcMem|ModRM,
50 ByteOp|DstReg|SrcImm, DstReg|SrcImm, ImplicitOps, ImplicitOps,
51 /* 0x08 - 0x0F */
52 ByteOp|DstMem|SrcReg|ModRM, DstMem|SrcReg|ModRM,
53 ByteOp|DstReg|SrcMem|ModRM, DstReg|SrcMem|ModRM,
54 ByteOp|DstReg|SrcImm, DstReg|SrcImm, ImplicitOps, 0,
55 /* 0x10 - 0x17 */
56 ByteOp|DstMem|SrcReg|ModRM, DstMem|SrcReg|ModRM,
57 ByteOp|DstReg|SrcMem|ModRM, DstReg|SrcMem|ModRM,
58 ByteOp|DstReg|SrcImm, DstReg|SrcImm, ImplicitOps, ImplicitOps,
59 /* 0x18 - 0x1F */
60 ByteOp|DstMem|SrcReg|ModRM, DstMem|SrcReg|ModRM,
61 ByteOp|DstReg|SrcMem|ModRM, DstReg|SrcMem|ModRM,
62 ByteOp|DstReg|SrcImm, DstReg|SrcImm, ImplicitOps, ImplicitOps,
63 /* 0x20 - 0x27 */
64 ByteOp|DstMem|SrcReg|ModRM, DstMem|SrcReg|ModRM,
65 ByteOp|DstReg|SrcMem|ModRM, DstReg|SrcMem|ModRM,
66 ByteOp|DstReg|SrcImm, DstReg|SrcImm, 0, ImplicitOps,
67 /* 0x28 - 0x2F */
68 ByteOp|DstMem|SrcReg|ModRM, DstMem|SrcReg|ModRM,
69 ByteOp|DstReg|SrcMem|ModRM, DstReg|SrcMem|ModRM,
70 ByteOp|DstReg|SrcImm, DstReg|SrcImm, 0, ImplicitOps,
71 /* 0x30 - 0x37 */
72 ByteOp|DstMem|SrcReg|ModRM, DstMem|SrcReg|ModRM,
73 ByteOp|DstReg|SrcMem|ModRM, DstReg|SrcMem|ModRM,
74 ByteOp|DstReg|SrcImm, DstReg|SrcImm, 0, ImplicitOps,
75 /* 0x38 - 0x3F */
76 ByteOp|DstMem|SrcReg|ModRM, DstMem|SrcReg|ModRM,
77 ByteOp|DstReg|SrcMem|ModRM, DstReg|SrcMem|ModRM,
78 ByteOp|DstReg|SrcImm, DstReg|SrcImm, 0, ImplicitOps,
79 /* 0x40 - 0x4F */
80 ImplicitOps, ImplicitOps, ImplicitOps, ImplicitOps,
81 ImplicitOps, ImplicitOps, ImplicitOps, ImplicitOps,
82 ImplicitOps, ImplicitOps, ImplicitOps, ImplicitOps,
83 ImplicitOps, ImplicitOps, ImplicitOps, ImplicitOps,
84 /* 0x50 - 0x5F */
85 ImplicitOps|Mov, ImplicitOps|Mov, ImplicitOps|Mov, ImplicitOps|Mov,
86 ImplicitOps|Mov, ImplicitOps|Mov, ImplicitOps|Mov, ImplicitOps|Mov,
87 ImplicitOps|Mov, ImplicitOps|Mov, ImplicitOps|Mov, ImplicitOps|Mov,
88 ImplicitOps|Mov, ImplicitOps|Mov, ImplicitOps|Mov, ImplicitOps|Mov,
89 /* 0x60 - 0x67 */
90 ImplicitOps, ImplicitOps, DstReg|SrcMem|ModRM, DstReg|SrcMem16|ModRM|Mov,
91 0, 0, 0, 0,
92 /* 0x68 - 0x6F */
93 ImplicitOps|Mov, DstReg|SrcImm|ModRM|Mov,
94 ImplicitOps|Mov, DstReg|SrcImmByte|ModRM|Mov,
95 ImplicitOps|Mov, ImplicitOps|Mov, ImplicitOps|Mov, ImplicitOps|Mov,
96 /* 0x70 - 0x77 */
97 ImplicitOps, ImplicitOps, ImplicitOps, ImplicitOps,
98 ImplicitOps, ImplicitOps, ImplicitOps, ImplicitOps,
99 /* 0x78 - 0x7F */
100 ImplicitOps, ImplicitOps, ImplicitOps, ImplicitOps,
101 ImplicitOps, ImplicitOps, ImplicitOps, ImplicitOps,
102 /* 0x80 - 0x87 */
103 ByteOp|DstMem|SrcImm|ModRM, DstMem|SrcImm|ModRM,
104 ByteOp|DstMem|SrcImm|ModRM, DstMem|SrcImmByte|ModRM,
105 ByteOp|DstMem|SrcReg|ModRM, DstMem|SrcReg|ModRM,
106 ByteOp|DstMem|SrcReg|ModRM, DstMem|SrcReg|ModRM,
107 /* 0x88 - 0x8F */
108 ByteOp|DstMem|SrcReg|ModRM|Mov, DstMem|SrcReg|ModRM|Mov,
109 ByteOp|DstReg|SrcMem|ModRM|Mov, DstReg|SrcMem|ModRM|Mov,
110 DstMem|SrcReg|ModRM|Mov, DstReg|SrcNone|ModRM,
111 DstReg|SrcMem|ModRM|Mov, DstMem|SrcNone|ModRM|Mov,
112 /* 0x90 - 0x97 */
113 ImplicitOps, ImplicitOps, ImplicitOps, ImplicitOps,
114 ImplicitOps, ImplicitOps, ImplicitOps, ImplicitOps,
115 /* 0x98 - 0x9F */
116 ImplicitOps, ImplicitOps, ImplicitOps, ImplicitOps,
117 ImplicitOps, ImplicitOps, ImplicitOps, ImplicitOps,
118 /* 0xA0 - 0xA7 */
119 ByteOp|ImplicitOps|Mov, ImplicitOps|Mov,
120 ByteOp|ImplicitOps|Mov, ImplicitOps|Mov,
121 ByteOp|ImplicitOps|Mov, ImplicitOps|Mov,
122 ByteOp|ImplicitOps, ImplicitOps,
123 /* 0xA8 - 0xAF */
124 ByteOp|DstReg|SrcImm, DstReg|SrcImm,
125 ByteOp|ImplicitOps|Mov, ImplicitOps|Mov,
126 ByteOp|ImplicitOps|Mov, ImplicitOps|Mov,
127 ByteOp|ImplicitOps, ImplicitOps,
128 /* 0xB0 - 0xB7 */
129 ByteOp|DstReg|SrcImm|Mov, ByteOp|DstReg|SrcImm|Mov,
130 ByteOp|DstReg|SrcImm|Mov, ByteOp|DstReg|SrcImm|Mov,
131 ByteOp|DstReg|SrcImm|Mov, ByteOp|DstReg|SrcImm|Mov,
132 ByteOp|DstReg|SrcImm|Mov, ByteOp|DstReg|SrcImm|Mov,
133 /* 0xB8 - 0xBF */
134 DstReg|SrcImm|Mov, DstReg|SrcImm|Mov, DstReg|SrcImm|Mov, DstReg|SrcImm|Mov,
135 DstReg|SrcImm|Mov, DstReg|SrcImm|Mov, DstReg|SrcImm|Mov, DstReg|SrcImm|Mov,
136 /* 0xC0 - 0xC7 */
137 ByteOp|DstMem|SrcImm|ModRM, DstMem|SrcImmByte|ModRM,
138 ImplicitOps, ImplicitOps,
139 DstReg|SrcMem|ModRM|Mov, DstReg|SrcMem|ModRM|Mov,
140 ByteOp|DstMem|SrcImm|ModRM|Mov, DstMem|SrcImm|ModRM|Mov,
141 /* 0xC8 - 0xCF */
142 ImplicitOps, ImplicitOps, ImplicitOps, ImplicitOps,
143 ImplicitOps, ImplicitOps, ImplicitOps, ImplicitOps,
144 /* 0xD0 - 0xD7 */
145 ByteOp|DstMem|SrcImplicit|ModRM, DstMem|SrcImplicit|ModRM,
146 ByteOp|DstMem|SrcImplicit|ModRM, DstMem|SrcImplicit|ModRM,
147 ImplicitOps, ImplicitOps, ImplicitOps, ImplicitOps,
148 /* 0xD8 - 0xDF */
149 ImplicitOps|ModRM|Mov, ImplicitOps|ModRM|Mov,
150 ImplicitOps|ModRM|Mov, ImplicitOps|ModRM|Mov,
151 ImplicitOps|ModRM|Mov, ImplicitOps|ModRM|Mov,
152 ImplicitOps|ModRM|Mov, ImplicitOps|ModRM|Mov,
153 /* 0xE0 - 0xE7 */
154 ImplicitOps, ImplicitOps, ImplicitOps, ImplicitOps,
155 ImplicitOps, ImplicitOps, ImplicitOps, ImplicitOps,
156 /* 0xE8 - 0xEF */
157 ImplicitOps, ImplicitOps, ImplicitOps, ImplicitOps,
158 ImplicitOps, ImplicitOps, ImplicitOps, ImplicitOps,
159 /* 0xF0 - 0xF7 */
160 0, ImplicitOps, 0, 0,
161 ImplicitOps, ImplicitOps,
162 ByteOp|DstMem|SrcNone|ModRM, DstMem|SrcNone|ModRM,
163 /* 0xF8 - 0xFF */
164 ImplicitOps, ImplicitOps, ImplicitOps, ImplicitOps,
165 ImplicitOps, ImplicitOps, ByteOp|DstMem|SrcNone|ModRM, DstMem|SrcNone|ModRM
166 };
168 static uint8_t twobyte_table[256] = {
169 /* 0x00 - 0x07 */
170 0, ImplicitOps|ModRM, 0, 0, 0, 0, ImplicitOps, 0,
171 /* 0x08 - 0x0F */
172 ImplicitOps, ImplicitOps, 0, 0, 0, ImplicitOps|ModRM, 0, 0,
173 /* 0x10 - 0x17 */
174 0, 0, 0, 0, 0, 0, 0, 0,
175 /* 0x18 - 0x1F */
176 ImplicitOps|ModRM, ImplicitOps|ModRM, ImplicitOps|ModRM, ImplicitOps|ModRM,
177 ImplicitOps|ModRM, ImplicitOps|ModRM, ImplicitOps|ModRM, ImplicitOps|ModRM,
178 /* 0x20 - 0x27 */
179 ImplicitOps|ModRM, ImplicitOps|ModRM, ImplicitOps|ModRM, ImplicitOps|ModRM,
180 0, 0, 0, 0,
181 /* 0x28 - 0x2F */
182 0, 0, 0, 0, 0, 0, 0, 0,
183 /* 0x30 - 0x37 */
184 ImplicitOps, ImplicitOps, ImplicitOps, 0, 0, 0, 0, 0,
185 /* 0x38 - 0x3F */
186 0, 0, 0, 0, 0, 0, 0, 0,
187 /* 0x40 - 0x47 */
188 DstReg|SrcMem|ModRM|Mov, DstReg|SrcMem|ModRM|Mov,
189 DstReg|SrcMem|ModRM|Mov, DstReg|SrcMem|ModRM|Mov,
190 DstReg|SrcMem|ModRM|Mov, DstReg|SrcMem|ModRM|Mov,
191 DstReg|SrcMem|ModRM|Mov, DstReg|SrcMem|ModRM|Mov,
192 /* 0x48 - 0x4F */
193 DstReg|SrcMem|ModRM|Mov, DstReg|SrcMem|ModRM|Mov,
194 DstReg|SrcMem|ModRM|Mov, DstReg|SrcMem|ModRM|Mov,
195 DstReg|SrcMem|ModRM|Mov, DstReg|SrcMem|ModRM|Mov,
196 DstReg|SrcMem|ModRM|Mov, DstReg|SrcMem|ModRM|Mov,
197 /* 0x50 - 0x5F */
198 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
199 /* 0x60 - 0x6F */
200 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, ImplicitOps|ModRM,
201 /* 0x70 - 0x7F */
202 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, ImplicitOps|ModRM,
203 /* 0x80 - 0x87 */
204 ImplicitOps, ImplicitOps, ImplicitOps, ImplicitOps,
205 ImplicitOps, ImplicitOps, ImplicitOps, ImplicitOps,
206 /* 0x88 - 0x8F */
207 ImplicitOps, ImplicitOps, ImplicitOps, ImplicitOps,
208 ImplicitOps, ImplicitOps, ImplicitOps, ImplicitOps,
209 /* 0x90 - 0x97 */
210 ByteOp|DstMem|SrcNone|ModRM|Mov, ByteOp|DstMem|SrcNone|ModRM|Mov,
211 ByteOp|DstMem|SrcNone|ModRM|Mov, ByteOp|DstMem|SrcNone|ModRM|Mov,
212 ByteOp|DstMem|SrcNone|ModRM|Mov, ByteOp|DstMem|SrcNone|ModRM|Mov,
213 ByteOp|DstMem|SrcNone|ModRM|Mov, ByteOp|DstMem|SrcNone|ModRM|Mov,
214 /* 0x98 - 0x9F */
215 ByteOp|DstMem|SrcNone|ModRM|Mov, ByteOp|DstMem|SrcNone|ModRM|Mov,
216 ByteOp|DstMem|SrcNone|ModRM|Mov, ByteOp|DstMem|SrcNone|ModRM|Mov,
217 ByteOp|DstMem|SrcNone|ModRM|Mov, ByteOp|DstMem|SrcNone|ModRM|Mov,
218 ByteOp|DstMem|SrcNone|ModRM|Mov, ByteOp|DstMem|SrcNone|ModRM|Mov,
219 /* 0xA0 - 0xA7 */
220 ImplicitOps, ImplicitOps, ImplicitOps, DstBitBase|SrcReg|ModRM,
221 DstMem|SrcReg|ModRM, DstMem|SrcReg|ModRM, 0, 0,
222 /* 0xA8 - 0xAF */
223 ImplicitOps, ImplicitOps, 0, DstBitBase|SrcReg|ModRM,
224 DstMem|SrcReg|ModRM, DstMem|SrcReg|ModRM, 0, DstReg|SrcMem|ModRM,
225 /* 0xB0 - 0xB7 */
226 ByteOp|DstMem|SrcReg|ModRM, DstMem|SrcReg|ModRM,
227 DstReg|SrcMem|ModRM|Mov, DstBitBase|SrcReg|ModRM,
228 DstReg|SrcMem|ModRM|Mov, DstReg|SrcMem|ModRM|Mov,
229 ByteOp|DstReg|SrcMem|ModRM|Mov, DstReg|SrcMem16|ModRM|Mov,
230 /* 0xB8 - 0xBF */
231 0, 0, DstBitBase|SrcImmByte|ModRM, DstBitBase|SrcReg|ModRM,
232 DstReg|SrcMem|ModRM, DstReg|SrcMem|ModRM,
233 ByteOp|DstReg|SrcMem|ModRM|Mov, DstReg|SrcMem16|ModRM|Mov,
234 /* 0xC0 - 0xC7 */
235 ByteOp|DstMem|SrcReg|ModRM, DstMem|SrcReg|ModRM, 0, 0,
236 0, 0, 0, ImplicitOps|ModRM,
237 /* 0xC8 - 0xCF */
238 ImplicitOps, ImplicitOps, ImplicitOps, ImplicitOps,
239 ImplicitOps, ImplicitOps, ImplicitOps, ImplicitOps,
240 /* 0xD0 - 0xDF */
241 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
242 /* 0xE0 - 0xEF */
243 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
244 /* 0xF0 - 0xFF */
245 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0
246 };
248 /* Type, address-of, and value of an instruction's operand. */
249 struct operand {
/* Operand class: register, memory, immediate, or absent. */
250 enum { OP_REG, OP_MEM, OP_IMM, OP_NONE } type;
/* Operand width in bytes (1, 2, 4 or 8). */
251 unsigned int bytes;
253 /* Up to 128-byte operand value, addressable as ulong or uint32_t[]. */
/* NOTE(review): the union below is 16 bytes (128 bits); "128-byte" above looks like a typo for "128-bit". */
254 union {
255 unsigned long val;
256 uint32_t bigval[4];
257 };
259 /* Up to 128-byte operand value, addressable as ulong or uint32_t[]. */
/* Same layout as above, but holding the operand's original (pre-execution) value. */
260 union {
261 unsigned long orig_val;
262 uint32_t orig_bigval[4];
263 };
/* Location of the operand, keyed by 'type'. */
265 union {
266 /* OP_REG: Pointer to register field. */
267 unsigned long *reg;
268 /* OP_MEM: Segment and offset. */
269 struct {
270 enum x86_segment seg;
271 unsigned long off;
272 } mem;
273 };
274 };
276 /* MSRs. */
277 #define MSR_TSC 0x10
279 /* Control register flags. */
280 #define CR0_PE (1<<0)
281 #define CR4_TSD (1<<2)
283 /* EFLAGS bit definitions. */
284 #define EFLG_VIP (1<<20)
285 #define EFLG_VIF (1<<19)
286 #define EFLG_AC (1<<18)
287 #define EFLG_VM (1<<17)
288 #define EFLG_RF (1<<16)
289 #define EFLG_NT (1<<14)
290 #define EFLG_IOPL (3<<12)
291 #define EFLG_OF (1<<11)
292 #define EFLG_DF (1<<10)
293 #define EFLG_IF (1<<9)
294 #define EFLG_TF (1<<8)
295 #define EFLG_SF (1<<7)
296 #define EFLG_ZF (1<<6)
297 #define EFLG_AF (1<<4)
298 #define EFLG_PF (1<<2)
299 #define EFLG_CF (1<<0)
301 /* Exception definitions. */
302 #define EXC_DE 0
303 #define EXC_DB 1
304 #define EXC_BP 3
305 #define EXC_OF 4
306 #define EXC_BR 5
307 #define EXC_UD 6
308 #define EXC_TS 10
309 #define EXC_NP 11
310 #define EXC_SS 12
311 #define EXC_GP 13
312 #define EXC_PF 14
313 #define EXC_MF 16
315 /*
316 * Instruction emulation:
317 * Most instructions are emulated directly via a fragment of inline assembly
318 * code. This allows us to save/restore EFLAGS and thus very easily pick up
319 * any modified flags.
320 */
322 #if defined(__x86_64__)
323 #define _LO32 "k" /* force 32-bit operand */
324 #define _STK "%%rsp" /* stack pointer */
325 #define _BYTES_PER_LONG "8"
326 #elif defined(__i386__)
327 #define _LO32 "" /* force 32-bit operand */
328 #define _STK "%%esp" /* stack pointer */
329 #define _BYTES_PER_LONG "4"
330 #endif
332 /*
333 * These EFLAGS bits are restored from saved value during emulation, and
334 * any changes are written back to the saved value after emulation.
335 */
336 #define EFLAGS_MASK (EFLG_OF|EFLG_SF|EFLG_ZF|EFLG_AF|EFLG_PF|EFLG_CF)
338 /* Before executing instruction: restore necessary bits in EFLAGS. */
339 #define _PRE_EFLAGS(_sav, _msk, _tmp) \
340 /* EFLAGS = (_sav & _msk) | (EFLAGS & ~_msk); _sav &= ~_msk; */ \
341 "movl %"_sav",%"_LO32 _tmp"; " \
342 "push %"_tmp"; " \
343 "push %"_tmp"; " \
344 "movl %"_msk",%"_LO32 _tmp"; " \
345 "andl %"_LO32 _tmp",("_STK"); " \
346 "pushf; " \
347 "notl %"_LO32 _tmp"; " \
348 "andl %"_LO32 _tmp",("_STK"); " \
349 "andl %"_LO32 _tmp",2*"_BYTES_PER_LONG"("_STK"); " \
350 "pop %"_tmp"; " \
351 "orl %"_LO32 _tmp",("_STK"); " \
352 "popf; " \
353 "pop %"_sav"; "
355 /* After executing instruction: write-back necessary bits in EFLAGS. */
356 #define _POST_EFLAGS(_sav, _msk, _tmp) \
357 /* _sav |= EFLAGS & _msk; */ \
358 "pushf; " \
359 "pop %"_tmp"; " \
360 "andl %"_msk",%"_LO32 _tmp"; " \
361 "orl %"_LO32 _tmp",%"_sav"; "
363 /* Raw emulation: instruction has two explicit operands. */
364 #define __emulate_2op_nobyte(_op,_src,_dst,_eflags,_wx,_wy,_lx,_ly,_qx,_qy)\
365 do{ unsigned long _tmp; \
366 switch ( (_dst).bytes ) \
367 { \
368 case 2: \
369 asm volatile ( \
370 _PRE_EFLAGS("0","4","2") \
371 _op"w %"_wx"3,%1; " \
372 _POST_EFLAGS("0","4","2") \
373 : "=m" (_eflags), "=m" ((_dst).val), "=&r" (_tmp) \
374 : _wy ((_src).val), "i" (EFLAGS_MASK), \
375 "m" (_eflags), "m" ((_dst).val) ); \
376 break; \
377 case 4: \
378 asm volatile ( \
379 _PRE_EFLAGS("0","4","2") \
380 _op"l %"_lx"3,%1; " \
381 _POST_EFLAGS("0","4","2") \
382 : "=m" (_eflags), "=m" ((_dst).val), "=&r" (_tmp) \
383 : _ly ((_src).val), "i" (EFLAGS_MASK), \
384 "m" (_eflags), "m" ((_dst).val) ); \
385 break; \
386 case 8: \
387 __emulate_2op_8byte(_op, _src, _dst, _eflags, _qx, _qy); \
388 break; \
389 } \
390 } while (0)
391 #define __emulate_2op(_op,_src,_dst,_eflags,_bx,_by,_wx,_wy,_lx,_ly,_qx,_qy)\
392 do{ unsigned long _tmp; \
393 switch ( (_dst).bytes ) \
394 { \
395 case 1: \
396 asm volatile ( \
397 _PRE_EFLAGS("0","4","2") \
398 _op"b %"_bx"3,%1; " \
399 _POST_EFLAGS("0","4","2") \
400 : "=m" (_eflags), "=m" ((_dst).val), "=&r" (_tmp) \
401 : _by ((_src).val), "i" (EFLAGS_MASK), \
402 "m" (_eflags), "m" ((_dst).val) ); \
403 break; \
404 default: \
405 __emulate_2op_nobyte(_op,_src,_dst,_eflags,_wx,_wy,_lx,_ly,_qx,_qy);\
406 break; \
407 } \
408 } while (0)
409 /* Source operand is byte-sized and may be restricted to just %cl. */
410 #define emulate_2op_SrcB(_op, _src, _dst, _eflags) \
411 __emulate_2op(_op, _src, _dst, _eflags, \
412 "b", "c", "b", "c", "b", "c", "b", "c")
413 /* Source operand is byte, word, long or quad sized. */
414 #define emulate_2op_SrcV(_op, _src, _dst, _eflags) \
415 __emulate_2op(_op, _src, _dst, _eflags, \
416 "b", "q", "w", "r", _LO32, "r", "", "r")
417 /* Source operand is word, long or quad sized. */
418 #define emulate_2op_SrcV_nobyte(_op, _src, _dst, _eflags) \
419 __emulate_2op_nobyte(_op, _src, _dst, _eflags, \
420 "w", "r", _LO32, "r", "", "r")
422 /* Instruction has only one explicit operand (no source operand). */
423 #define emulate_1op(_op,_dst,_eflags) \
424 do{ unsigned long _tmp; \
425 switch ( (_dst).bytes ) \
426 { \
427 case 1: \
428 asm volatile ( \
429 _PRE_EFLAGS("0","3","2") \
430 _op"b %1; " \
431 _POST_EFLAGS("0","3","2") \
432 : "=m" (_eflags), "=m" ((_dst).val), "=&r" (_tmp) \
433 : "i" (EFLAGS_MASK), "m" (_eflags), "m" ((_dst).val) ); \
434 break; \
435 case 2: \
436 asm volatile ( \
437 _PRE_EFLAGS("0","3","2") \
438 _op"w %1; " \
439 _POST_EFLAGS("0","3","2") \
440 : "=m" (_eflags), "=m" ((_dst).val), "=&r" (_tmp) \
441 : "i" (EFLAGS_MASK), "m" (_eflags), "m" ((_dst).val) ); \
442 break; \
443 case 4: \
444 asm volatile ( \
445 _PRE_EFLAGS("0","3","2") \
446 _op"l %1; " \
447 _POST_EFLAGS("0","3","2") \
448 : "=m" (_eflags), "=m" ((_dst).val), "=&r" (_tmp) \
449 : "i" (EFLAGS_MASK), "m" (_eflags), "m" ((_dst).val) ); \
450 break; \
451 case 8: \
452 __emulate_1op_8byte(_op, _dst, _eflags); \
453 break; \
454 } \
455 } while (0)
457 /* Emulate an instruction with quadword operands (x86/64 only). */
458 #if defined(__x86_64__)
459 #define __emulate_2op_8byte(_op, _src, _dst, _eflags, _qx, _qy) \
460 do{ asm volatile ( \
461 _PRE_EFLAGS("0","4","2") \
462 _op"q %"_qx"3,%1; " \
463 _POST_EFLAGS("0","4","2") \
464 : "=m" (_eflags), "=m" ((_dst).val), "=&r" (_tmp) \
465 : _qy ((_src).val), "i" (EFLAGS_MASK), \
466 "m" (_eflags), "m" ((_dst).val) ); \
467 } while (0)
468 #define __emulate_1op_8byte(_op, _dst, _eflags) \
469 do{ asm volatile ( \
470 _PRE_EFLAGS("0","3","2") \
471 _op"q %1; " \
472 _POST_EFLAGS("0","3","2") \
473 : "=m" (_eflags), "=m" ((_dst).val), "=&r" (_tmp) \
474 : "i" (EFLAGS_MASK), "m" (_eflags), "m" ((_dst).val) ); \
475 } while (0)
476 #elif defined(__i386__)
477 #define __emulate_2op_8byte(_op, _src, _dst, _eflags, _qx, _qy)
478 #define __emulate_1op_8byte(_op, _dst, _eflags)
479 #endif /* __i386__ */
481 /* Fetch next part of the instruction being emulated. */
482 #define insn_fetch_bytes(_size) \
483 ({ unsigned long _x = 0, _eip = _regs.eip; \
484 if ( !mode_64bit() ) _eip = (uint32_t)_eip; /* ignore upper dword */ \
485 _regs.eip += (_size); /* real hardware doesn't truncate */ \
486 generate_exception_if((uint8_t)(_regs.eip - ctxt->regs->eip) > 15, \
487 EXC_GP, 0); \
488 rc = ops->insn_fetch(x86_seg_cs, _eip, &_x, (_size), ctxt); \
489 if ( rc ) goto done; \
490 _x; \
491 })
492 #define insn_fetch_type(_type) ((_type)insn_fetch_bytes(sizeof(_type)))
/*
 * Mask an effective address down to 'byte_width'-byte precision.
 * A full-word width passes the value through unchanged (shifting by the
 * full word size would be undefined behaviour, hence the special case).
 */
#define truncate_word(ea, byte_width)                                   \
({  unsigned long _tw_ea = (ea);                                        \
    unsigned int _tw_width = (byte_width);                              \
    (_tw_width == sizeof(unsigned long))                                \
        ? _tw_ea                                                        \
        : (_tw_ea & ((1UL << (_tw_width << 3)) - 1));                   \
})
/* Truncate to the current effective address size. */
#define truncate_ea(ea) truncate_word((ea), ad_bytes)
502 #define mode_64bit() (def_ad_bytes == 8)
504 #define fail_if(p) \
505 do { \
506 rc = (p) ? X86EMUL_UNHANDLEABLE : X86EMUL_OKAY; \
507 if ( rc ) goto done; \
508 } while (0)
510 #define generate_exception_if(p, e, ec) \
511 ({ if ( (p) ) { \
512 fail_if(ops->inject_hw_exception == NULL); \
513 rc = ops->inject_hw_exception(e, ec, ctxt) ? : X86EMUL_EXCEPTION; \
514 goto done; \
515 } \
516 })
/*
 * Does the given byte contain an even number of 1 bits? Mirrors EFLAGS.PF,
 * which per SDM Vol. 1 Sec. 3.4.3.1 reflects parity of the result's least
 * significant byte only.
 */
static int even_parity(uint8_t v)
{
    uint8_t p;

    /* TEST sets PF from the byte's parity; SETP extracts PF as 0/1. */
    __asm__ ( "test %b1,%b1; setp %b0" : "=a" (p) : "0" (v) );

    return p;
}
528 /* Update address held in a register, based on addressing mode. */
529 #define _register_address_increment(reg, inc, byte_width) \
530 do { \
531 int _inc = (inc); /* signed type ensures sign extension to long */ \
532 unsigned int _width = (byte_width); \
533 if ( _width == sizeof(unsigned long) ) \
534 (reg) += _inc; \
535 else if ( mode_64bit() ) \
536 (reg) = ((reg) + _inc) & ((1UL << (_width << 3)) - 1); \
537 else \
538 (reg) = ((reg) & ~((1UL << (_width << 3)) - 1)) | \
539 (((reg) + _inc) & ((1UL << (_width << 3)) - 1)); \
540 } while (0)
541 #define register_address_increment(reg, inc) \
542 _register_address_increment((reg), (inc), ad_bytes)
544 #define sp_pre_dec(dec) ({ \
545 _register_address_increment(_regs.esp, -(dec), ctxt->sp_size/8); \
546 truncate_word(_regs.esp, ctxt->sp_size/8); \
547 })
548 #define sp_post_inc(inc) ({ \
549 unsigned long __esp = truncate_word(_regs.esp, ctxt->sp_size/8); \
550 _register_address_increment(_regs.esp, (inc), ctxt->sp_size/8); \
551 __esp; \
552 })
554 #define jmp_rel(rel) \
555 do { \
556 int _rel = (int)(rel); \
557 _regs.eip += _rel; \
558 if ( !mode_64bit() ) \
559 _regs.eip = ((op_bytes == 2) \
560 ? (uint16_t)_regs.eip : (uint32_t)_regs.eip); \
561 } while (0)
563 struct fpu_insn_ctxt {
564 uint8_t insn_bytes;
565 uint8_t exn_raised;
566 };
568 static void fpu_handle_exception(void *_fic, struct cpu_user_regs *regs)
569 {
570 struct fpu_insn_ctxt *fic = _fic;
571 fic->exn_raised = 1;
572 regs->eip += fic->insn_bytes;
573 }
575 #define get_fpu(_type, _fic) \
576 do{ (_fic)->exn_raised = 0; \
577 fail_if(ops->get_fpu == NULL); \
578 rc = ops->get_fpu(fpu_handle_exception, _fic, _type, ctxt); \
579 if ( rc ) goto done; \
580 } while (0)
581 #define put_fpu(_fic) \
582 do{ \
583 if ( ops->put_fpu != NULL ) \
584 ops->put_fpu(ctxt); \
585 generate_exception_if((_fic)->exn_raised, EXC_MF, -1); \
586 } while (0)
588 #define emulate_fpu_insn(_op) \
589 do{ struct fpu_insn_ctxt fic; \
590 get_fpu(X86EMUL_FPU_fpu, &fic); \
591 asm volatile ( \
592 "movb $2f-1f,%0 \n" \
593 "1: " _op " \n" \
594 "2: \n" \
595 : "=m" (fic.insn_bytes) : : "memory" ); \
596 put_fpu(&fic); \
597 } while (0)
599 #define emulate_fpu_insn_memdst(_op, _arg) \
600 do{ struct fpu_insn_ctxt fic; \
601 get_fpu(X86EMUL_FPU_fpu, &fic); \
602 asm volatile ( \
603 "movb $2f-1f,%0 \n" \
604 "1: " _op " %1 \n" \
605 "2: \n" \
606 : "=m" (fic.insn_bytes), "=m" (_arg) \
607 : : "memory" ); \
608 put_fpu(&fic); \
609 } while (0)
611 #define emulate_fpu_insn_memsrc(_op, _arg) \
612 do{ struct fpu_insn_ctxt fic; \
613 get_fpu(X86EMUL_FPU_fpu, &fic); \
614 asm volatile ( \
615 "movb $2f-1f,%0 \n" \
616 "1: " _op " %1 \n" \
617 "2: \n" \
618 : "=m" (fic.insn_bytes) \
619 : "m" (_arg) : "memory" ); \
620 put_fpu(&fic); \
621 } while (0)
623 #define emulate_fpu_insn_stub(_bytes...) \
624 do{ uint8_t stub[] = { _bytes, 0xc3 }; \
625 struct fpu_insn_ctxt fic = { .insn_bytes = sizeof(stub)-1 }; \
626 get_fpu(X86EMUL_FPU_fpu, &fic); \
627 (*(void(*)(void))stub)(); \
628 put_fpu(&fic); \
629 } while (0)
631 static unsigned long __get_rep_prefix(
632 struct cpu_user_regs *int_regs,
633 struct cpu_user_regs *ext_regs,
634 int ad_bytes)
635 {
636 unsigned long ecx = ((ad_bytes == 2) ? (uint16_t)int_regs->ecx :
637 (ad_bytes == 4) ? (uint32_t)int_regs->ecx :
638 int_regs->ecx);
640 /* Skip the instruction if no repetitions are required. */
641 if ( ecx == 0 )
642 ext_regs->eip = int_regs->eip;
644 return ecx;
645 }
647 #define get_rep_prefix() ({ \
648 unsigned long max_reps = 1; \
649 if ( rep_prefix ) \
650 max_reps = __get_rep_prefix(&_regs, ctxt->regs, ad_bytes); \
651 if ( max_reps == 0 ) \
652 goto done; \
653 max_reps; \
654 })
656 static void __put_rep_prefix(
657 struct cpu_user_regs *int_regs,
658 struct cpu_user_regs *ext_regs,
659 int ad_bytes,
660 unsigned long reps_completed)
661 {
662 unsigned long ecx = ((ad_bytes == 2) ? (uint16_t)int_regs->ecx :
663 (ad_bytes == 4) ? (uint32_t)int_regs->ecx :
664 int_regs->ecx);
666 /* Reduce counter appropriately, and repeat instruction if non-zero. */
667 ecx -= reps_completed;
668 if ( ecx != 0 )
669 int_regs->eip = ext_regs->eip;
671 if ( ad_bytes == 2 )
672 *(uint16_t *)&int_regs->ecx = ecx;
673 else if ( ad_bytes == 4 )
674 int_regs->ecx = (uint32_t)ecx;
675 else
676 int_regs->ecx = ecx;
677 }
679 #define put_rep_prefix(reps_completed) ({ \
680 if ( rep_prefix ) \
681 __put_rep_prefix(&_regs, ctxt->regs, ad_bytes, reps_completed); \
682 })
/*
 * Clip maximum repetitions so that the index register only just wraps:
 * letting a REP run past the wrap point would access a discontiguous
 * address range. Evaluates to the truncated effective address and clips
 * 'reps' in place. Relies on 'ctxt' and 'ad_bytes' at the expansion site.
 *
 * Fix: the DF test must use EFLG_DF — 'EF_DF' is not defined anywhere in
 * this file (all EFLAGS masks use the EFLG_ prefix; see EFLG_DF above).
 */
#define truncate_ea_and_reps(ea, reps, bytes_per_rep) ({                  \
    /* Bytes from 'ea' to the wrap point; direction depends on EFLAGS.DF. */ \
    unsigned long __todo = (ctxt->regs->eflags & EFLG_DF) ? (ea) : ~(ea); \
    __todo = truncate_word(__todo, ad_bytes);                             \
    __todo = (__todo / (bytes_per_rep)) + 1;                              \
    (reps) = (__todo < (reps)) ? __todo : (reps);                         \
    truncate_word((ea), ad_bytes);                                        \
})
693 /* Compatibility function: read guest memory, zero-extend result to a ulong. */
694 static int read_ulong(
695 enum x86_segment seg,
696 unsigned long offset,
697 unsigned long *val,
698 unsigned int bytes,
699 struct x86_emulate_ctxt *ctxt,
700 struct x86_emulate_ops *ops)
701 {
702 *val = 0;
703 return ops->read(seg, offset, val, bytes, ctxt);
704 }
/*
 * Unsigned multiplication with double-word result.
 * IN:  Multiplicand=m[0], Multiplier=m[1]
 * OUT: Return CF/OF (overflow status); Result=m[1]:m[0]
 */
static int mul_dbl(unsigned long m[2])
{
    int overflow;

    /* MUL leaves the product in the d:a register pair; SETO captures OF. */
    __asm__ ( "mul %4; seto %b2"
              : "=a" (m[0]), "=d" (m[1]), "=q" (overflow)
              : "0" (m[0]), "1" (m[1]), "2" (0) );

    return overflow;
}
/*
 * Signed multiplication with double-word result.
 * IN:  Multiplicand=m[0], Multiplier=m[1]
 * OUT: Return CF/OF (overflow status); Result=m[1]:m[0]
 */
static int imul_dbl(unsigned long m[2])
{
    int overflow;

    /* IMUL leaves the product in the d:a register pair; SETO captures OF. */
    __asm__ ( "imul %4; seto %b2"
              : "=a" (m[0]), "=d" (m[1]), "=q" (overflow)
              : "0" (m[0]), "1" (m[1]), "2" (0) );

    return overflow;
}
/*
 * Unsigned division of double-word dividend.
 * IN:  Dividend=u[1]:u[0], Divisor=v
 * OUT: Return 1: #DE
 *      Return 0: Quotient=u[0], Remainder=u[1]
 */
static int div_dbl(unsigned long u[2], unsigned long v)
{
    /*
     * DIV faults (#DE) on a zero divisor, or when the quotient does not
     * fit in one word (i.e. the dividend's high word >= divisor). Reject
     * both up front instead of executing the instruction.
     */
    if ( (v == 0) || (u[1] >= v) )
        return 1;

    __asm__ ( "div %4"
              : "=a" (u[0]), "=d" (u[1])
              : "0" (u[0]), "1" (u[1]), "r" (v) );

    return 0;
}
750 /*
751 * Signed division of double-word dividend.
752 * IN: Dividend=u[1]:u[0], Divisor=v
753 * OUT: Return 1: #DE
754 * Return 0: Quotient=u[0], Remainder=u[1]
755 * NB. We don't use idiv directly as it's moderately hard to work out
756 * ahead of time whether it will #DE, which we cannot allow to happen.
757 */
758 static int idiv_dbl(unsigned long u[2], unsigned long v)
759 {
/* Record both signs, then reduce to unsigned division via div_dbl(). */
760 int negu = (long)u[1] < 0, negv = (long)v < 0;
762 /* u = abs(u) */
/* Two's-complement negate of the double word: invert the high word and
 * negate the low word, carrying into the high word when the low is 0. */
763 if ( negu )
764 {
765 u[1] = ~u[1];
766 if ( (u[0] = -u[0]) == 0 )
767 u[1]++;
768 }
770 /* abs(u) / abs(v) */
771 if ( div_dbl(u, negv ? -v : v) )
772 return 1;
774 /* Remainder has same sign as dividend. It cannot overflow. */
775 if ( negu )
776 u[1] = -u[1];
778 /* Quotient is overflowed if sign bit is set. */
/* Quotient sign is sign(u) XOR sign(v): negate when signs differ. An
 * already-negative magnitude means overflow, except the one value whose
 * negation is itself the most-negative representable quotient. */
779 if ( negu ^ negv )
780 {
781 if ( (long)u[0] >= 0 )
782 u[0] = -u[0];
783 else if ( (u[0] << 1) != 0 ) /* == 0x80...0 is okay */
784 return 1;
785 }
/* Same signs: a positive quotient is required, so a set sign bit is overflow. */
786 else if ( (long)u[0] < 0 )
787 return 1;
789 return 0;
790 }
792 static int
793 test_cc(
794 unsigned int condition, unsigned int flags)
795 {
796 int rc = 0;
798 switch ( (condition & 15) >> 1 )
799 {
800 case 0: /* o */
801 rc |= (flags & EFLG_OF);
802 break;
803 case 1: /* b/c/nae */
804 rc |= (flags & EFLG_CF);
805 break;
806 case 2: /* z/e */
807 rc |= (flags & EFLG_ZF);
808 break;
809 case 3: /* be/na */
810 rc |= (flags & (EFLG_CF|EFLG_ZF));
811 break;
812 case 4: /* s */
813 rc |= (flags & EFLG_SF);
814 break;
815 case 5: /* p/pe */
816 rc |= (flags & EFLG_PF);
817 break;
818 case 7: /* le/ng */
819 rc |= (flags & EFLG_ZF);
820 /* fall through */
821 case 6: /* l/nge */
822 rc |= (!(flags & EFLG_SF) != !(flags & EFLG_OF));
823 break;
824 }
826 /* Odd condition identifiers (lsb == 1) have inverted sense. */
827 return (!!rc ^ (condition & 1));
828 }
830 static int
831 get_cpl(
832 struct x86_emulate_ctxt *ctxt,
833 struct x86_emulate_ops *ops)
834 {
835 struct segment_register reg;
837 if ( ctxt->regs->eflags & EFLG_VM )
838 return 3;
840 if ( (ops->read_segment == NULL) ||
841 ops->read_segment(x86_seg_ss, &reg, ctxt) )
842 return -1;
844 return reg.attr.fields.dpl;
845 }
847 static int
848 _mode_iopl(
849 struct x86_emulate_ctxt *ctxt,
850 struct x86_emulate_ops *ops)
851 {
852 int cpl = get_cpl(ctxt, ops);
853 if ( cpl == -1 )
854 return -1;
855 return (cpl <= ((ctxt->regs->eflags >> 12) & 3));
856 }
858 #define mode_ring0() ({ \
859 int _cpl = get_cpl(ctxt, ops); \
860 fail_if(_cpl < 0); \
861 (_cpl == 0); \
862 })
863 #define mode_iopl() ({ \
864 int _iopl = _mode_iopl(ctxt, ops); \
865 fail_if(_iopl < 0); \
866 _iopl; \
867 })
/*
 * Check whether an access to 'bytes' I/O ports starting at 'first_port'
 * is permitted by the TSS I/O permission bitmap. Returns X86EMUL_OKAY if
 * allowed; otherwise injects #GP(0) (failing if no injection callback).
 */
869 static int ioport_access_check(
870 unsigned int first_port,
871 unsigned int bytes,
872 struct x86_emulate_ctxt *ctxt,
873 struct x86_emulate_ops *ops)
874 {
875 unsigned long iobmp;
876 struct segment_register tr;
877 int rc = X86EMUL_OKAY;
/* Outside virtual-8086 mode, sufficient IOPL grants access with no bitmap check. */
879 if ( !(ctxt->regs->eflags & EFLG_VM) && mode_iopl() )
880 return X86EMUL_OKAY;
882 fail_if(ops->read_segment == NULL);
883 if ( (rc = ops->read_segment(x86_seg_tr, &tr, ctxt)) != 0 )
884 return rc;
886 /* Ensure that the TSS is valid and has an io-bitmap-offset field. */
/* Type mask admits 0x9 (available) and 0xb (busy) TSS descriptor types. */
887 if ( !tr.attr.fields.p ||
888 ((tr.attr.fields.type & 0xd) != 0x9) ||
889 (tr.limit < 0x67) )
890 goto raise_exception;
/* The 16-bit I/O-bitmap offset lives at byte 0x66 of the TSS. */
892 if ( (rc = read_ulong(x86_seg_none, tr.base + 0x66,
893 &iobmp, 2, ctxt, ops)) )
894 return rc;
896 /* Ensure TSS includes two bytes including byte containing first port. */
897 iobmp += first_port / 8;
898 if ( tr.limit <= iobmp )
899 goto raise_exception;
/* Any set bit covering the accessed port range denies the access. */
901 if ( (rc = read_ulong(x86_seg_none, tr.base + iobmp,
902 &iobmp, 2, ctxt, ops)) )
903 return rc;
904 if ( (iobmp & (((1<<bytes)-1) << (first_port&7))) != 0 )
905 goto raise_exception;
/* 'done' is the jump target required by the fail_if() macro above. */
907 done:
908 return rc;
910 raise_exception:
911 fail_if(ops->inject_hw_exception == NULL);
912 return ops->inject_hw_exception(EXC_GP, 0, ctxt) ? : X86EMUL_EXCEPTION;
913 }
915 static int
916 in_realmode(
917 struct x86_emulate_ctxt *ctxt,
918 struct x86_emulate_ops *ops)
919 {
920 unsigned long cr0;
921 int rc;
923 if ( ops->read_cr == NULL )
924 return 0;
926 rc = ops->read_cr(0, &cr0, ctxt);
927 return (!rc && !(cr0 & CR0_PE));
928 }
930 static int
931 in_protmode(
932 struct x86_emulate_ctxt *ctxt,
933 struct x86_emulate_ops *ops)
934 {
935 return !(in_realmode(ctxt, ops) || (ctxt->regs->eflags & EFLG_VM));
936 }
938 static int
939 realmode_load_seg(
940 enum x86_segment seg,
941 uint16_t sel,
942 struct x86_emulate_ctxt *ctxt,
943 struct x86_emulate_ops *ops)
944 {
945 struct segment_register reg;
946 int rc;
948 if ( (rc = ops->read_segment(seg, &reg, ctxt)) != 0 )
949 return rc;
951 reg.sel = sel;
952 reg.base = (uint32_t)sel << 4;
954 return ops->write_segment(seg, &reg, ctxt);
955 }
957 static int
958 protmode_load_seg(
959 enum x86_segment seg,
960 uint16_t sel,
961 struct x86_emulate_ctxt *ctxt,
962 struct x86_emulate_ops *ops)
963 {
964 struct segment_register desctab, ss, segr;
965 struct { uint32_t a, b; } desc;
966 unsigned long val;
967 uint8_t dpl, rpl, cpl;
968 uint32_t new_desc_b;
969 int rc, fault_type = EXC_TS;
971 /* NULL selector? */
972 if ( (sel & 0xfffc) == 0 )
973 {
974 if ( (seg == x86_seg_cs) || (seg == x86_seg_ss) )
975 goto raise_exn;
976 memset(&segr, 0, sizeof(segr));
977 return ops->write_segment(seg, &segr, ctxt);
978 }
980 /* LDT descriptor must be in the GDT. */
981 if ( (seg == x86_seg_ldtr) && (sel & 4) )
982 goto raise_exn;
984 if ( (rc = ops->read_segment(x86_seg_ss, &ss, ctxt)) ||
985 (rc = ops->read_segment((sel & 4) ? x86_seg_ldtr : x86_seg_gdtr,
986 &desctab, ctxt)) )
987 return rc;
989 /* Check against descriptor table limit. */
990 if ( ((sel & 0xfff8) + 7) > desctab.limit )
991 goto raise_exn;
993 do {
994 if ( (rc = read_ulong(x86_seg_none, desctab.base + (sel & 0xfff8),
995 &val, 4, ctxt, ops)) )
996 return rc;
997 desc.a = val;
998 if ( (rc = read_ulong(x86_seg_none, desctab.base + (sel & 0xfff8) + 4,
999 &val, 4, ctxt, ops)) )
1000 return rc;
1001 desc.b = val;
1003 /* Segment present in memory? */
1004 if ( !(desc.b & (1u<<15)) )
1006 fault_type = EXC_NP;
1007 goto raise_exn;
1010 /* LDT descriptor is a system segment. All others are code/data. */
1011 if ( (desc.b & (1u<<12)) == ((seg == x86_seg_ldtr) << 12) )
1012 goto raise_exn;
1014 dpl = (desc.b >> 13) & 3;
1015 rpl = sel & 3;
1016 cpl = ss.attr.fields.dpl;
1018 switch ( seg )
1020 case x86_seg_cs:
1021 /* Code segment? */
1022 if ( !(desc.b & (1u<<11)) )
1023 goto raise_exn;
1024 /* Non-conforming segment: check DPL against RPL. */
1025 if ( ((desc.b & (6u<<9)) != (6u<<9)) && (dpl != rpl) )
1026 goto raise_exn;
1027 break;
1028 case x86_seg_ss:
1029 /* Writable data segment? */
1030 if ( (desc.b & (5u<<9)) != (1u<<9) )
1031 goto raise_exn;
1032 if ( (dpl != cpl) || (dpl != rpl) )
1033 goto raise_exn;
1034 break;
1035 case x86_seg_ldtr:
1036 /* LDT system segment? */
1037 if ( (desc.b & (15u<<8)) != (2u<<8) )
1038 goto raise_exn;
1039 goto skip_accessed_flag;
1040 default:
1041 /* Readable code or data segment? */
1042 if ( (desc.b & (5u<<9)) == (4u<<9) )
1043 goto raise_exn;
1044 /* Non-conforming segment: check DPL against RPL and CPL. */
1045 if ( ((desc.b & (6u<<9)) != (6u<<9)) &&
1046 ((dpl < cpl) || (dpl < rpl)) )
1047 goto raise_exn;
1048 break;
1051 /* Ensure Accessed flag is set. */
1052 new_desc_b = desc.b | 0x100;
1053 rc = ((desc.b & 0x100) ? X86EMUL_OKAY :
1054 ops->cmpxchg(
1055 x86_seg_none, desctab.base + (sel & 0xfff8) + 4,
1056 &desc.b, &new_desc_b, 4, ctxt));
1057 } while ( rc == X86EMUL_CMPXCHG_FAILED );
1059 if ( rc )
1060 return rc;
1062 /* Force the Accessed flag in our local copy. */
1063 desc.b |= 0x100;
1065 skip_accessed_flag:
1066 segr.base = (((desc.b << 0) & 0xff000000u) |
1067 ((desc.b << 16) & 0x00ff0000u) |
1068 ((desc.a >> 16) & 0x0000ffffu));
1069 segr.attr.bytes = (((desc.b >> 8) & 0x00ffu) |
1070 ((desc.b >> 12) & 0x0f00u));
1071 segr.limit = (desc.b & 0x000f0000u) | (desc.a & 0x0000ffffu);
1072 if ( segr.attr.fields.g )
1073 segr.limit = (segr.limit << 12) | 0xfffu;
1074 segr.sel = sel;
1075 return ops->write_segment(seg, &segr, ctxt);
1077 raise_exn:
1078 if ( ops->inject_hw_exception == NULL )
1079 return X86EMUL_UNHANDLEABLE;
1080 if ( (rc = ops->inject_hw_exception(fault_type, sel & 0xfffc, ctxt)) )
1081 return rc;
1082 return X86EMUL_EXCEPTION;
1085 static int
1086 load_seg(
1087 enum x86_segment seg,
1088 uint16_t sel,
1089 struct x86_emulate_ctxt *ctxt,
1090 struct x86_emulate_ops *ops)
1092 if ( (ops->read_segment == NULL) ||
1093 (ops->write_segment == NULL) )
1094 return X86EMUL_UNHANDLEABLE;
1096 if ( in_protmode(ctxt, ops) )
1097 return protmode_load_seg(seg, sel, ctxt, ops);
1099 return realmode_load_seg(seg, sel, ctxt, ops);
1102 void *
1103 decode_register(
1104 uint8_t modrm_reg, struct cpu_user_regs *regs, int highbyte_regs)
1106 void *p;
1108 switch ( modrm_reg )
1110 case 0: p = &regs->eax; break;
1111 case 1: p = &regs->ecx; break;
1112 case 2: p = &regs->edx; break;
1113 case 3: p = &regs->ebx; break;
1114 case 4: p = (highbyte_regs ?
1115 ((unsigned char *)&regs->eax + 1) :
1116 (unsigned char *)&regs->esp); break;
1117 case 5: p = (highbyte_regs ?
1118 ((unsigned char *)&regs->ecx + 1) :
1119 (unsigned char *)&regs->ebp); break;
1120 case 6: p = (highbyte_regs ?
1121 ((unsigned char *)&regs->edx + 1) :
1122 (unsigned char *)&regs->esi); break;
1123 case 7: p = (highbyte_regs ?
1124 ((unsigned char *)&regs->ebx + 1) :
1125 (unsigned char *)&regs->edi); break;
1126 #if defined(__x86_64__)
1127 case 8: p = &regs->r8; break;
1128 case 9: p = &regs->r9; break;
1129 case 10: p = &regs->r10; break;
1130 case 11: p = &regs->r11; break;
1131 case 12: p = &regs->r12; break;
1132 case 13: p = &regs->r13; break;
1133 case 14: p = &regs->r14; break;
1134 case 15: p = &regs->r15; break;
1135 #endif
1136 default: p = NULL; break;
1139 return p;
1142 #define decode_segment_failed x86_seg_tr
1143 enum x86_segment
1144 decode_segment(
1145 uint8_t modrm_reg)
1147 switch ( modrm_reg )
1149 case 0: return x86_seg_es;
1150 case 1: return x86_seg_cs;
1151 case 2: return x86_seg_ss;
1152 case 3: return x86_seg_ds;
1153 case 4: return x86_seg_fs;
1154 case 5: return x86_seg_gs;
1155 default: break;
1157 return decode_segment_failed;
1160 int
1161 x86_emulate(
1162 struct x86_emulate_ctxt *ctxt,
1163 struct x86_emulate_ops *ops)
1165 /* Shadow copy of register state. Committed on successful emulation. */
1166 struct cpu_user_regs _regs = *ctxt->regs;
1168 uint8_t b, d, sib, sib_index, sib_base, twobyte = 0, rex_prefix = 0;
1169 uint8_t modrm = 0, modrm_mod = 0, modrm_reg = 0, modrm_rm = 0;
1170 unsigned int op_bytes, def_op_bytes, ad_bytes, def_ad_bytes;
1171 #define REPE_PREFIX 1
1172 #define REPNE_PREFIX 2
1173 unsigned int lock_prefix = 0, rep_prefix = 0;
1174 int override_seg = -1, rc = X86EMUL_OKAY;
1175 struct operand src, dst;
1177 /* Data operand effective address (usually computed from ModRM). */
1178 struct operand ea;
1180 /* Default is a memory operand relative to segment DS. */
1181 ea.type = OP_MEM;
1182 ea.mem.seg = x86_seg_ds;
1183 ea.mem.off = 0;
1185 ctxt->retire.byte = 0;
1187 op_bytes = def_op_bytes = ad_bytes = def_ad_bytes = ctxt->addr_size/8;
1188 if ( op_bytes == 8 )
1190 op_bytes = def_op_bytes = 4;
1191 #ifndef __x86_64__
1192 return X86EMUL_UNHANDLEABLE;
1193 #endif
1196 /* Prefix bytes. */
1197 for ( ; ; )
1199 switch ( b = insn_fetch_type(uint8_t) )
1201 case 0x66: /* operand-size override */
1202 op_bytes = def_op_bytes ^ 6;
1203 break;
1204 case 0x67: /* address-size override */
1205 ad_bytes = def_ad_bytes ^ (mode_64bit() ? 12 : 6);
1206 break;
1207 case 0x2e: /* CS override */
1208 override_seg = x86_seg_cs;
1209 break;
1210 case 0x3e: /* DS override */
1211 override_seg = x86_seg_ds;
1212 break;
1213 case 0x26: /* ES override */
1214 override_seg = x86_seg_es;
1215 break;
1216 case 0x64: /* FS override */
1217 override_seg = x86_seg_fs;
1218 break;
1219 case 0x65: /* GS override */
1220 override_seg = x86_seg_gs;
1221 break;
1222 case 0x36: /* SS override */
1223 override_seg = x86_seg_ss;
1224 break;
1225 case 0xf0: /* LOCK */
1226 lock_prefix = 1;
1227 break;
1228 case 0xf2: /* REPNE/REPNZ */
1229 rep_prefix = REPNE_PREFIX;
1230 break;
1231 case 0xf3: /* REP/REPE/REPZ */
1232 rep_prefix = REPE_PREFIX;
1233 break;
1234 case 0x40 ... 0x4f: /* REX */
1235 if ( !mode_64bit() )
1236 goto done_prefixes;
1237 rex_prefix = b;
1238 continue;
1239 default:
1240 goto done_prefixes;
1243 /* Any legacy prefix after a REX prefix nullifies its effect. */
1244 rex_prefix = 0;
1246 done_prefixes:
1248 if ( rex_prefix & 8 ) /* REX.W */
1249 op_bytes = 8;
1251 /* Opcode byte(s). */
1252 d = opcode_table[b];
1253 if ( d == 0 )
1255 /* Two-byte opcode? */
1256 if ( b == 0x0f )
1258 twobyte = 1;
1259 b = insn_fetch_type(uint8_t);
1260 d = twobyte_table[b];
1263 /* Unrecognised? */
1264 if ( d == 0 )
1265 goto cannot_emulate;
1268 /* Lock prefix is allowed only on RMW instructions. */
1269 generate_exception_if((d & Mov) && lock_prefix, EXC_GP, 0);
1271 /* ModRM and SIB bytes. */
1272 if ( d & ModRM )
1274 modrm = insn_fetch_type(uint8_t);
1275 modrm_mod = (modrm & 0xc0) >> 6;
1276 modrm_reg = ((rex_prefix & 4) << 1) | ((modrm & 0x38) >> 3);
1277 modrm_rm = modrm & 0x07;
1279 if ( modrm_mod == 3 )
1281 modrm_rm |= (rex_prefix & 1) << 3;
1282 ea.type = OP_REG;
1283 ea.reg = decode_register(
1284 modrm_rm, &_regs, (d & ByteOp) && (rex_prefix == 0));
1286 else if ( ad_bytes == 2 )
1288 /* 16-bit ModR/M decode. */
1289 switch ( modrm_rm )
1291 case 0:
1292 ea.mem.off = _regs.ebx + _regs.esi;
1293 break;
1294 case 1:
1295 ea.mem.off = _regs.ebx + _regs.edi;
1296 break;
1297 case 2:
1298 ea.mem.seg = x86_seg_ss;
1299 ea.mem.off = _regs.ebp + _regs.esi;
1300 break;
1301 case 3:
1302 ea.mem.seg = x86_seg_ss;
1303 ea.mem.off = _regs.ebp + _regs.edi;
1304 break;
1305 case 4:
1306 ea.mem.off = _regs.esi;
1307 break;
1308 case 5:
1309 ea.mem.off = _regs.edi;
1310 break;
1311 case 6:
1312 if ( modrm_mod == 0 )
1313 break;
1314 ea.mem.seg = x86_seg_ss;
1315 ea.mem.off = _regs.ebp;
1316 break;
1317 case 7:
1318 ea.mem.off = _regs.ebx;
1319 break;
1321 switch ( modrm_mod )
1323 case 0:
1324 if ( modrm_rm == 6 )
1325 ea.mem.off = insn_fetch_type(int16_t);
1326 break;
1327 case 1:
1328 ea.mem.off += insn_fetch_type(int8_t);
1329 break;
1330 case 2:
1331 ea.mem.off += insn_fetch_type(int16_t);
1332 break;
1334 ea.mem.off = truncate_ea(ea.mem.off);
1336 else
1338 /* 32/64-bit ModR/M decode. */
1339 if ( modrm_rm == 4 )
1341 sib = insn_fetch_type(uint8_t);
1342 sib_index = ((sib >> 3) & 7) | ((rex_prefix << 2) & 8);
1343 sib_base = (sib & 7) | ((rex_prefix << 3) & 8);
1344 if ( sib_index != 4 )
1345 ea.mem.off = *(long*)decode_register(sib_index, &_regs, 0);
1346 ea.mem.off <<= (sib >> 6) & 3;
1347 if ( (modrm_mod == 0) && ((sib_base & 7) == 5) )
1348 ea.mem.off += insn_fetch_type(int32_t);
1349 else if ( sib_base == 4 )
1351 ea.mem.seg = x86_seg_ss;
1352 ea.mem.off += _regs.esp;
1353 if ( !twobyte && (b == 0x8f) )
1354 /* POP <rm> computes its EA post increment. */
1355 ea.mem.off += ((mode_64bit() && (op_bytes == 4))
1356 ? 8 : op_bytes);
1358 else if ( sib_base == 5 )
1360 ea.mem.seg = x86_seg_ss;
1361 ea.mem.off += _regs.ebp;
1363 else
1364 ea.mem.off += *(long*)decode_register(sib_base, &_regs, 0);
1366 else
1368 modrm_rm |= (rex_prefix & 1) << 3;
1369 ea.mem.off = *(long *)decode_register(modrm_rm, &_regs, 0);
1370 if ( (modrm_rm == 5) && (modrm_mod != 0) )
1371 ea.mem.seg = x86_seg_ss;
1373 switch ( modrm_mod )
1375 case 0:
1376 if ( (modrm_rm & 7) != 5 )
1377 break;
1378 ea.mem.off = insn_fetch_type(int32_t);
1379 if ( !mode_64bit() )
1380 break;
1381 /* Relative to RIP of next instruction. Argh! */
1382 ea.mem.off += _regs.eip;
1383 if ( (d & SrcMask) == SrcImm )
1384 ea.mem.off += (d & ByteOp) ? 1 :
1385 ((op_bytes == 8) ? 4 : op_bytes);
1386 else if ( (d & SrcMask) == SrcImmByte )
1387 ea.mem.off += 1;
1388 else if ( !twobyte && ((b & 0xfe) == 0xf6) &&
1389 ((modrm_reg & 7) <= 1) )
1390 /* Special case in Grp3: test has immediate operand. */
1391 ea.mem.off += (d & ByteOp) ? 1
1392 : ((op_bytes == 8) ? 4 : op_bytes);
1393 else if ( twobyte && ((b & 0xf7) == 0xa4) )
1394 /* SHLD/SHRD with immediate byte third operand. */
1395 ea.mem.off++;
1396 break;
1397 case 1:
1398 ea.mem.off += insn_fetch_type(int8_t);
1399 break;
1400 case 2:
1401 ea.mem.off += insn_fetch_type(int32_t);
1402 break;
1404 ea.mem.off = truncate_ea(ea.mem.off);
1408 if ( override_seg != -1 )
1409 ea.mem.seg = override_seg;
1411 /* Special instructions do their own operand decoding. */
1412 if ( (d & DstMask) == ImplicitOps )
1413 goto special_insn;
1415 /* Decode and fetch the source operand: register, memory or immediate. */
1416 switch ( d & SrcMask )
1418 case SrcNone:
1419 break;
1420 case SrcReg:
1421 src.type = OP_REG;
1422 if ( d & ByteOp )
1424 src.reg = decode_register(modrm_reg, &_regs, (rex_prefix == 0));
1425 src.val = *(uint8_t *)src.reg;
1426 src.bytes = 1;
1428 else
1430 src.reg = decode_register(modrm_reg, &_regs, 0);
1431 switch ( (src.bytes = op_bytes) )
1433 case 2: src.val = *(uint16_t *)src.reg; break;
1434 case 4: src.val = *(uint32_t *)src.reg; break;
1435 case 8: src.val = *(uint64_t *)src.reg; break;
1438 break;
1439 case SrcMem16:
1440 ea.bytes = 2;
1441 goto srcmem_common;
1442 case SrcMem:
1443 ea.bytes = (d & ByteOp) ? 1 : op_bytes;
1444 srcmem_common:
1445 src = ea;
1446 if ( src.type == OP_REG )
1448 switch ( src.bytes )
1450 case 1: src.val = *(uint8_t *)src.reg; break;
1451 case 2: src.val = *(uint16_t *)src.reg; break;
1452 case 4: src.val = *(uint32_t *)src.reg; break;
1453 case 8: src.val = *(uint64_t *)src.reg; break;
1456 else if ( (rc = read_ulong(src.mem.seg, src.mem.off,
1457 &src.val, src.bytes, ctxt, ops)) )
1458 goto done;
1459 break;
1460 case SrcImm:
1461 src.type = OP_IMM;
1462 src.bytes = (d & ByteOp) ? 1 : op_bytes;
1463 if ( src.bytes == 8 ) src.bytes = 4;
1464 /* NB. Immediates are sign-extended as necessary. */
1465 switch ( src.bytes )
1467 case 1: src.val = insn_fetch_type(int8_t); break;
1468 case 2: src.val = insn_fetch_type(int16_t); break;
1469 case 4: src.val = insn_fetch_type(int32_t); break;
1471 break;
1472 case SrcImmByte:
1473 src.type = OP_IMM;
1474 src.bytes = 1;
1475 src.val = insn_fetch_type(int8_t);
1476 break;
1479 /* Decode and fetch the destination operand: register or memory. */
1480 switch ( d & DstMask )
1482 case DstReg:
1483 dst.type = OP_REG;
1484 if ( d & ByteOp )
1486 dst.reg = decode_register(modrm_reg, &_regs, (rex_prefix == 0));
1487 dst.val = *(uint8_t *)dst.reg;
1488 dst.bytes = 1;
1490 else
1492 dst.reg = decode_register(modrm_reg, &_regs, 0);
1493 switch ( (dst.bytes = op_bytes) )
1495 case 2: dst.val = *(uint16_t *)dst.reg; break;
1496 case 4: dst.val = *(uint32_t *)dst.reg; break;
1497 case 8: dst.val = *(uint64_t *)dst.reg; break;
1500 break;
1501 case DstBitBase:
1502 if ( ((d & SrcMask) == SrcImmByte) || (ea.type == OP_REG) )
1504 src.val &= (op_bytes << 3) - 1;
1506 else
1508 /*
1509 * EA += BitOffset DIV op_bytes*8
1510 * BitOffset = BitOffset MOD op_bytes*8
1511 * DIV truncates towards negative infinity.
1512 * MOD always produces a positive result.
1513 */
1514 if ( op_bytes == 2 )
1515 src.val = (int16_t)src.val;
1516 else if ( op_bytes == 4 )
1517 src.val = (int32_t)src.val;
1518 if ( (long)src.val < 0 )
1520 unsigned long byte_offset;
1521 byte_offset = op_bytes + (((-src.val-1) >> 3) & ~(op_bytes-1));
1522 ea.mem.off -= byte_offset;
1523 src.val = (byte_offset << 3) + src.val;
1525 else
1527 ea.mem.off += (src.val >> 3) & ~(op_bytes - 1);
1528 src.val &= (op_bytes << 3) - 1;
1531 /* Becomes a normal DstMem operation from here on. */
1532 d = (d & ~DstMask) | DstMem;
1533 case DstMem:
1534 ea.bytes = (d & ByteOp) ? 1 : op_bytes;
1535 dst = ea;
1536 if ( dst.type == OP_REG )
1538 switch ( dst.bytes )
1540 case 1: dst.val = *(uint8_t *)dst.reg; break;
1541 case 2: dst.val = *(uint16_t *)dst.reg; break;
1542 case 4: dst.val = *(uint32_t *)dst.reg; break;
1543 case 8: dst.val = *(uint64_t *)dst.reg; break;
1546 else if ( !(d & Mov) ) /* optimisation - avoid slow emulated read */
1548 if ( (rc = read_ulong(dst.mem.seg, dst.mem.off,
1549 &dst.val, dst.bytes, ctxt, ops)) )
1550 goto done;
1551 dst.orig_val = dst.val;
1553 break;
1556 /* LOCK prefix allowed only on instructions with memory destination. */
1557 generate_exception_if(lock_prefix && (dst.type != OP_MEM), EXC_GP, 0);
1559 if ( twobyte )
1560 goto twobyte_insn;
1562 switch ( b )
1564 case 0x04 ... 0x05: /* add imm,%%eax */
1565 dst.reg = (unsigned long *)&_regs.eax;
1566 dst.val = _regs.eax;
1567 case 0x00 ... 0x03: add: /* add */
1568 emulate_2op_SrcV("add", src, dst, _regs.eflags);
1569 break;
1571 case 0x0c ... 0x0d: /* or imm,%%eax */
1572 dst.reg = (unsigned long *)&_regs.eax;
1573 dst.val = _regs.eax;
1574 case 0x08 ... 0x0b: or: /* or */
1575 emulate_2op_SrcV("or", src, dst, _regs.eflags);
1576 break;
1578 case 0x14 ... 0x15: /* adc imm,%%eax */
1579 dst.reg = (unsigned long *)&_regs.eax;
1580 dst.val = _regs.eax;
1581 case 0x10 ... 0x13: adc: /* adc */
1582 emulate_2op_SrcV("adc", src, dst, _regs.eflags);
1583 break;
1585 case 0x1c ... 0x1d: /* sbb imm,%%eax */
1586 dst.reg = (unsigned long *)&_regs.eax;
1587 dst.val = _regs.eax;
1588 case 0x18 ... 0x1b: sbb: /* sbb */
1589 emulate_2op_SrcV("sbb", src, dst, _regs.eflags);
1590 break;
1592 case 0x24 ... 0x25: /* and imm,%%eax */
1593 dst.reg = (unsigned long *)&_regs.eax;
1594 dst.val = _regs.eax;
1595 case 0x20 ... 0x23: and: /* and */
1596 emulate_2op_SrcV("and", src, dst, _regs.eflags);
1597 break;
1599 case 0x2c ... 0x2d: /* sub imm,%%eax */
1600 dst.reg = (unsigned long *)&_regs.eax;
1601 dst.val = _regs.eax;
1602 case 0x28 ... 0x2b: sub: /* sub */
1603 emulate_2op_SrcV("sub", src, dst, _regs.eflags);
1604 break;
1606 case 0x34 ... 0x35: /* xor imm,%%eax */
1607 dst.reg = (unsigned long *)&_regs.eax;
1608 dst.val = _regs.eax;
1609 case 0x30 ... 0x33: xor: /* xor */
1610 emulate_2op_SrcV("xor", src, dst, _regs.eflags);
1611 break;
1613 case 0x3c ... 0x3d: /* cmp imm,%%eax */
1614 dst.reg = (unsigned long *)&_regs.eax;
1615 dst.val = _regs.eax;
1616 case 0x38 ... 0x3b: cmp: /* cmp */
1617 emulate_2op_SrcV("cmp", src, dst, _regs.eflags);
1618 dst.type = OP_NONE;
1619 break;
1621 case 0x62: /* bound */ {
1622 unsigned long src_val2;
1623 int lb, ub, idx;
1624 generate_exception_if(mode_64bit() || (src.type != OP_MEM),
1625 EXC_UD, -1);
1626 if ( (rc = read_ulong(src.mem.seg, src.mem.off + op_bytes,
1627 &src_val2, op_bytes, ctxt, ops)) )
1628 goto done;
1629 ub = (op_bytes == 2) ? (int16_t)src_val2 : (int32_t)src_val2;
1630 lb = (op_bytes == 2) ? (int16_t)src.val : (int32_t)src.val;
1631 idx = (op_bytes == 2) ? (int16_t)dst.val : (int32_t)dst.val;
1632 generate_exception_if((idx < lb) || (idx > ub), EXC_BR, -1);
1633 dst.type = OP_NONE;
1634 break;
1637 case 0x63: /* movsxd (x86/64) / arpl (x86/32) */
1638 if ( mode_64bit() )
1640 /* movsxd */
1641 if ( src.type == OP_REG )
1642 src.val = *(int32_t *)src.reg;
1643 else if ( (rc = read_ulong(src.mem.seg, src.mem.off,
1644 &src.val, 4, ctxt, ops)) )
1645 goto done;
1646 dst.val = (int32_t)src.val;
1648 else
1650 /* arpl */
1651 uint16_t src_val = dst.val;
1652 dst = src;
1653 _regs.eflags &= ~EFLG_ZF;
1654 _regs.eflags |= ((src_val & 3) > (dst.val & 3)) ? EFLG_ZF : 0;
1655 if ( _regs.eflags & EFLG_ZF )
1656 dst.val = (dst.val & ~3) | (src_val & 3);
1657 else
1658 dst.type = OP_NONE;
1659 generate_exception_if(!in_protmode(ctxt, ops), EXC_UD, -1);
1661 break;
1663 case 0x69: /* imul imm16/32 */
1664 case 0x6b: /* imul imm8 */ {
1665 unsigned long src1; /* ModR/M source operand */
1666 if ( ea.type == OP_REG )
1667 src1 = *ea.reg;
1668 else if ( (rc = read_ulong(ea.mem.seg, ea.mem.off,
1669 &src1, op_bytes, ctxt, ops)) )
1670 goto done;
1671 _regs.eflags &= ~(EFLG_OF|EFLG_CF);
1672 switch ( dst.bytes )
1674 case 2:
1675 dst.val = ((uint32_t)(int16_t)src.val *
1676 (uint32_t)(int16_t)src1);
1677 if ( (int16_t)dst.val != (uint32_t)dst.val )
1678 _regs.eflags |= EFLG_OF|EFLG_CF;
1679 break;
1680 #ifdef __x86_64__
1681 case 4:
1682 dst.val = ((uint64_t)(int32_t)src.val *
1683 (uint64_t)(int32_t)src1);
1684 if ( (int32_t)dst.val != dst.val )
1685 _regs.eflags |= EFLG_OF|EFLG_CF;
1686 break;
1687 #endif
1688 default: {
1689 unsigned long m[2] = { src.val, src1 };
1690 if ( imul_dbl(m) )
1691 _regs.eflags |= EFLG_OF|EFLG_CF;
1692 dst.val = m[0];
1693 break;
1696 break;
1699 case 0x82: /* Grp1 (x86/32 only) */
1700 generate_exception_if(mode_64bit(), EXC_UD, -1);
1701 case 0x80: case 0x81: case 0x83: /* Grp1 */
1702 switch ( modrm_reg & 7 )
1704 case 0: goto add;
1705 case 1: goto or;
1706 case 2: goto adc;
1707 case 3: goto sbb;
1708 case 4: goto and;
1709 case 5: goto sub;
1710 case 6: goto xor;
1711 case 7: goto cmp;
1713 break;
1715 case 0xa8 ... 0xa9: /* test imm,%%eax */
1716 dst.reg = (unsigned long *)&_regs.eax;
1717 dst.val = _regs.eax;
1718 case 0x84 ... 0x85: test: /* test */
1719 emulate_2op_SrcV("test", src, dst, _regs.eflags);
1720 dst.type = OP_NONE;
1721 break;
1723 case 0x86 ... 0x87: xchg: /* xchg */
1724 /* Write back the register source. */
1725 switch ( dst.bytes )
1727 case 1: *(uint8_t *)src.reg = (uint8_t)dst.val; break;
1728 case 2: *(uint16_t *)src.reg = (uint16_t)dst.val; break;
1729 case 4: *src.reg = (uint32_t)dst.val; break; /* 64b reg: zero-extend */
1730 case 8: *src.reg = dst.val; break;
1732 /* Write back the memory destination with implicit LOCK prefix. */
1733 dst.val = src.val;
1734 lock_prefix = 1;
1735 break;
1737 case 0xc6 ... 0xc7: /* mov (sole member of Grp11) */
1738 generate_exception_if((modrm_reg & 7) != 0, EXC_UD, -1);
1739 case 0x88 ... 0x8b: /* mov */
1740 dst.val = src.val;
1741 break;
1743 case 0x8c: /* mov Sreg,r/m */ {
1744 struct segment_register reg;
1745 enum x86_segment seg = decode_segment(modrm_reg);
1746 generate_exception_if(seg == decode_segment_failed, EXC_UD, -1);
1747 fail_if(ops->read_segment == NULL);
1748 if ( (rc = ops->read_segment(seg, &reg, ctxt)) != 0 )
1749 goto done;
1750 dst.val = reg.sel;
1751 if ( dst.type == OP_MEM )
1752 dst.bytes = 2;
1753 break;
1756 case 0x8e: /* mov r/m,Sreg */ {
1757 enum x86_segment seg = decode_segment(modrm_reg);
1758 generate_exception_if(seg == decode_segment_failed, EXC_UD, -1);
1759 if ( (rc = load_seg(seg, (uint16_t)src.val, ctxt, ops)) != 0 )
1760 goto done;
1761 if ( seg == x86_seg_ss )
1762 ctxt->retire.flags.mov_ss = 1;
1763 dst.type = OP_NONE;
1764 break;
1767 case 0x8d: /* lea */
1768 dst.val = ea.mem.off;
1769 break;
1771 case 0x8f: /* pop (sole member of Grp1a) */
1772 generate_exception_if((modrm_reg & 7) != 0, EXC_UD, -1);
1773 /* 64-bit mode: POP defaults to a 64-bit operand. */
1774 if ( mode_64bit() && (dst.bytes == 4) )
1775 dst.bytes = 8;
1776 if ( (rc = read_ulong(x86_seg_ss, sp_post_inc(dst.bytes),
1777 &dst.val, dst.bytes, ctxt, ops)) != 0 )
1778 goto done;
1779 break;
1781 case 0xb0 ... 0xb7: /* mov imm8,r8 */
1782 dst.reg = decode_register(
1783 (b & 7) | ((rex_prefix & 1) << 3), &_regs, (rex_prefix == 0));
1784 dst.val = src.val;
1785 break;
1787 case 0xb8 ... 0xbf: /* mov imm{16,32,64},r{16,32,64} */
1788 if ( dst.bytes == 8 ) /* Fetch more bytes to obtain imm64 */
1789 src.val = ((uint32_t)src.val |
1790 ((uint64_t)insn_fetch_type(uint32_t) << 32));
1791 dst.reg = decode_register(
1792 (b & 7) | ((rex_prefix & 1) << 3), &_regs, 0);
1793 dst.val = src.val;
1794 break;
1796 case 0xc0 ... 0xc1: grp2: /* Grp2 */
1797 switch ( modrm_reg & 7 )
1799 case 0: /* rol */
1800 emulate_2op_SrcB("rol", src, dst, _regs.eflags);
1801 break;
1802 case 1: /* ror */
1803 emulate_2op_SrcB("ror", src, dst, _regs.eflags);
1804 break;
1805 case 2: /* rcl */
1806 emulate_2op_SrcB("rcl", src, dst, _regs.eflags);
1807 break;
1808 case 3: /* rcr */
1809 emulate_2op_SrcB("rcr", src, dst, _regs.eflags);
1810 break;
1811 case 4: /* sal/shl */
1812 case 6: /* sal/shl */
1813 emulate_2op_SrcB("sal", src, dst, _regs.eflags);
1814 break;
1815 case 5: /* shr */
1816 emulate_2op_SrcB("shr", src, dst, _regs.eflags);
1817 break;
1818 case 7: /* sar */
1819 emulate_2op_SrcB("sar", src, dst, _regs.eflags);
1820 break;
1822 break;
1824 case 0xc4: /* les */ {
1825 unsigned long sel;
1826 dst.val = x86_seg_es;
1827 les: /* dst.val identifies the segment */
1828 generate_exception_if(src.type != OP_MEM, EXC_UD, -1);
1829 if ( (rc = read_ulong(src.mem.seg, src.mem.off + src.bytes,
1830 &sel, 2, ctxt, ops)) != 0 )
1831 goto done;
1832 if ( (rc = load_seg(dst.val, (uint16_t)sel, ctxt, ops)) != 0 )
1833 goto done;
1834 dst.val = src.val;
1835 break;
1838 case 0xc5: /* lds */
1839 dst.val = x86_seg_ds;
1840 goto les;
1842 case 0xd0 ... 0xd1: /* Grp2 */
1843 src.val = 1;
1844 goto grp2;
1846 case 0xd2 ... 0xd3: /* Grp2 */
1847 src.val = _regs.ecx;
1848 goto grp2;
1850 case 0xf6 ... 0xf7: /* Grp3 */
1851 switch ( modrm_reg & 7 )
1853 case 0 ... 1: /* test */
1854 /* Special case in Grp3: test has an immediate source operand. */
1855 src.type = OP_IMM;
1856 src.bytes = (d & ByteOp) ? 1 : op_bytes;
1857 if ( src.bytes == 8 ) src.bytes = 4;
1858 switch ( src.bytes )
1860 case 1: src.val = insn_fetch_type(int8_t); break;
1861 case 2: src.val = insn_fetch_type(int16_t); break;
1862 case 4: src.val = insn_fetch_type(int32_t); break;
1864 goto test;
1865 case 2: /* not */
1866 dst.val = ~dst.val;
1867 break;
1868 case 3: /* neg */
1869 emulate_1op("neg", dst, _regs.eflags);
1870 break;
1871 case 4: /* mul */
1872 src = dst;
1873 dst.type = OP_REG;
1874 dst.reg = (unsigned long *)&_regs.eax;
1875 dst.val = *dst.reg;
1876 _regs.eflags &= ~(EFLG_OF|EFLG_CF);
1877 switch ( src.bytes )
1879 case 1:
1880 dst.val = (uint8_t)dst.val;
1881 dst.val *= src.val;
1882 if ( (uint8_t)dst.val != (uint16_t)dst.val )
1883 _regs.eflags |= EFLG_OF|EFLG_CF;
1884 dst.bytes = 2;
1885 break;
1886 case 2:
1887 dst.val = (uint16_t)dst.val;
1888 dst.val *= src.val;
1889 if ( (uint16_t)dst.val != (uint32_t)dst.val )
1890 _regs.eflags |= EFLG_OF|EFLG_CF;
1891 *(uint16_t *)&_regs.edx = dst.val >> 16;
1892 break;
1893 #ifdef __x86_64__
1894 case 4:
1895 dst.val = (uint32_t)dst.val;
1896 dst.val *= src.val;
1897 if ( (uint32_t)dst.val != dst.val )
1898 _regs.eflags |= EFLG_OF|EFLG_CF;
1899 _regs.edx = (uint32_t)(dst.val >> 32);
1900 break;
1901 #endif
1902 default: {
1903 unsigned long m[2] = { src.val, dst.val };
1904 if ( mul_dbl(m) )
1905 _regs.eflags |= EFLG_OF|EFLG_CF;
1906 _regs.edx = m[1];
1907 dst.val = m[0];
1908 break;
1911 break;
1912 case 5: /* imul */
1913 src = dst;
1914 dst.type = OP_REG;
1915 dst.reg = (unsigned long *)&_regs.eax;
1916 dst.val = *dst.reg;
1917 _regs.eflags &= ~(EFLG_OF|EFLG_CF);
1918 switch ( src.bytes )
1920 case 1:
1921 dst.val = ((uint16_t)(int8_t)src.val *
1922 (uint16_t)(int8_t)dst.val);
1923 if ( (int8_t)dst.val != (uint16_t)dst.val )
1924 _regs.eflags |= EFLG_OF|EFLG_CF;
1925 dst.bytes = 2;
1926 break;
1927 case 2:
1928 dst.val = ((uint32_t)(int16_t)src.val *
1929 (uint32_t)(int16_t)dst.val);
1930 if ( (int16_t)dst.val != (uint32_t)dst.val )
1931 _regs.eflags |= EFLG_OF|EFLG_CF;
1932 *(uint16_t *)&_regs.edx = dst.val >> 16;
1933 break;
1934 #ifdef __x86_64__
1935 case 4:
1936 dst.val = ((uint64_t)(int32_t)src.val *
1937 (uint64_t)(int32_t)dst.val);
1938 if ( (int32_t)dst.val != dst.val )
1939 _regs.eflags |= EFLG_OF|EFLG_CF;
1940 _regs.edx = (uint32_t)(dst.val >> 32);
1941 break;
1942 #endif
1943 default: {
1944 unsigned long m[2] = { src.val, dst.val };
1945 if ( imul_dbl(m) )
1946 _regs.eflags |= EFLG_OF|EFLG_CF;
1947 _regs.edx = m[1];
1948 dst.val = m[0];
1949 break;
1952 break;
1953 case 6: /* div */ {
1954 unsigned long u[2], v;
1955 src = dst;
1956 dst.type = OP_REG;
1957 dst.reg = (unsigned long *)&_regs.eax;
1958 switch ( src.bytes )
1960 case 1:
1961 u[0] = (uint16_t)_regs.eax;
1962 u[1] = 0;
1963 v = (uint8_t)src.val;
1964 generate_exception_if(
1965 div_dbl(u, v) || ((uint8_t)u[0] != (uint16_t)u[0]),
1966 EXC_DE, -1);
1967 dst.val = (uint8_t)u[0];
1968 ((uint8_t *)&_regs.eax)[1] = u[1];
1969 break;
1970 case 2:
1971 u[0] = ((uint32_t)_regs.edx << 16) | (uint16_t)_regs.eax;
1972 u[1] = 0;
1973 v = (uint16_t)src.val;
1974 generate_exception_if(
1975 div_dbl(u, v) || ((uint16_t)u[0] != (uint32_t)u[0]),
1976 EXC_DE, -1);
1977 dst.val = (uint16_t)u[0];
1978 *(uint16_t *)&_regs.edx = u[1];
1979 break;
1980 #ifdef __x86_64__
1981 case 4:
1982 u[0] = (_regs.edx << 32) | (uint32_t)_regs.eax;
1983 u[1] = 0;
1984 v = (uint32_t)src.val;
1985 generate_exception_if(
1986 div_dbl(u, v) || ((uint32_t)u[0] != u[0]),
1987 EXC_DE, -1);
1988 dst.val = (uint32_t)u[0];
1989 _regs.edx = (uint32_t)u[1];
1990 break;
1991 #endif
1992 default:
1993 u[0] = _regs.eax;
1994 u[1] = _regs.edx;
1995 v = src.val;
1996 generate_exception_if(div_dbl(u, v), EXC_DE, -1);
1997 dst.val = u[0];
1998 _regs.edx = u[1];
1999 break;
2001 break;
2003 case 7: /* idiv */ {
2004 unsigned long u[2], v;
2005 src = dst;
2006 dst.type = OP_REG;
2007 dst.reg = (unsigned long *)&_regs.eax;
2008 switch ( src.bytes )
2010 case 1:
2011 u[0] = (int16_t)_regs.eax;
2012 u[1] = ((long)u[0] < 0) ? ~0UL : 0UL;
2013 v = (int8_t)src.val;
2014 generate_exception_if(
2015 idiv_dbl(u, v) || ((int8_t)u[0] != (int16_t)u[0]),
2016 EXC_DE, -1);
2017 dst.val = (int8_t)u[0];
2018 ((int8_t *)&_regs.eax)[1] = u[1];
2019 break;
2020 case 2:
2021 u[0] = (int32_t)((_regs.edx << 16) | (uint16_t)_regs.eax);
2022 u[1] = ((long)u[0] < 0) ? ~0UL : 0UL;
2023 v = (int16_t)src.val;
2024 generate_exception_if(
2025 idiv_dbl(u, v) || ((int16_t)u[0] != (int32_t)u[0]),
2026 EXC_DE, -1);
2027 dst.val = (int16_t)u[0];
2028 *(int16_t *)&_regs.edx = u[1];
2029 break;
2030 #ifdef __x86_64__
2031 case 4:
2032 u[0] = (_regs.edx << 32) | (uint32_t)_regs.eax;
2033 u[1] = ((long)u[0] < 0) ? ~0UL : 0UL;
2034 v = (int32_t)src.val;
2035 generate_exception_if(
2036 idiv_dbl(u, v) || ((int32_t)u[0] != u[0]),
2037 EXC_DE, -1);
2038 dst.val = (int32_t)u[0];
2039 _regs.edx = (uint32_t)u[1];
2040 break;
2041 #endif
2042 default:
2043 u[0] = _regs.eax;
2044 u[1] = _regs.edx;
2045 v = src.val;
2046 generate_exception_if(idiv_dbl(u, v), EXC_DE, -1);
2047 dst.val = u[0];
2048 _regs.edx = u[1];
2049 break;
2051 break;
2053 default:
2054 goto cannot_emulate;
2056 break;
2058 case 0xfe: /* Grp4 */
2059 generate_exception_if((modrm_reg & 7) >= 2, EXC_UD, -1);
2060 case 0xff: /* Grp5 */
2061 switch ( modrm_reg & 7 )
2063 case 0: /* inc */
2064 emulate_1op("inc", dst, _regs.eflags);
2065 break;
2066 case 1: /* dec */
2067 emulate_1op("dec", dst, _regs.eflags);
2068 break;
2069 case 2: /* call (near) */
2070 case 4: /* jmp (near) */
2071 if ( (dst.bytes != 8) && mode_64bit() )
2073 dst.bytes = op_bytes = 8;
2074 if ( dst.type == OP_REG )
2075 dst.val = *dst.reg;
2076 else if ( (rc = read_ulong(dst.mem.seg, dst.mem.off,
2077 &dst.val, 8, ctxt, ops)) != 0 )
2078 goto done;
2080 src.val = _regs.eip;
2081 _regs.eip = dst.val;
2082 if ( (modrm_reg & 7) == 2 )
2083 goto push; /* call */
2084 dst.type = OP_NONE;
2085 break;
2086 case 3: /* call (far, absolute indirect) */
2087 case 5: /* jmp (far, absolute indirect) */ {
2088 unsigned long sel;
2090 generate_exception_if(dst.type != OP_MEM, EXC_UD, -1);
2092 if ( (rc = read_ulong(dst.mem.seg, dst.mem.off+dst.bytes,
2093 &sel, 2, ctxt, ops)) )
2094 goto done;
2096 if ( (modrm_reg & 7) == 3 ) /* call */
2098 struct segment_register reg;
2099 fail_if(ops->read_segment == NULL);
2100 if ( (rc = ops->read_segment(x86_seg_cs, &reg, ctxt)) ||
2101 (rc = ops->write(x86_seg_ss, sp_pre_dec(op_bytes),
2102 &reg.sel, op_bytes, ctxt)) ||
2103 (rc = ops->write(x86_seg_ss, sp_pre_dec(op_bytes),
2104 &_regs.eip, op_bytes, ctxt)) )
2105 goto done;
2108 if ( (rc = load_seg(x86_seg_cs, sel, ctxt, ops)) != 0 )
2109 goto done;
2110 _regs.eip = dst.val;
2112 dst.type = OP_NONE;
2113 break;
2115 case 6: /* push */
2116 /* 64-bit mode: PUSH defaults to a 64-bit operand. */
2117 if ( mode_64bit() && (dst.bytes == 4) )
2119 dst.bytes = 8;
2120 if ( dst.type == OP_REG )
2121 dst.val = *dst.reg;
2122 else if ( (rc = read_ulong(dst.mem.seg, dst.mem.off,
2123 &dst.val, 8, ctxt, ops)) != 0 )
2124 goto done;
2126 if ( (rc = ops->write(x86_seg_ss, sp_pre_dec(dst.bytes),
2127 &dst.val, dst.bytes, ctxt)) != 0 )
2128 goto done;
2129 dst.type = OP_NONE;
2130 break;
2131 case 7:
2132 generate_exception_if(1, EXC_UD, -1);
2133 default:
2134 goto cannot_emulate;
2136 break;
2139 writeback:
2140 switch ( dst.type )
2142 case OP_REG:
2143 /* The 4-byte case *is* correct: in 64-bit mode we zero-extend. */
2144 switch ( dst.bytes )
2146 case 1: *(uint8_t *)dst.reg = (uint8_t)dst.val; break;
2147 case 2: *(uint16_t *)dst.reg = (uint16_t)dst.val; break;
2148 case 4: *dst.reg = (uint32_t)dst.val; break; /* 64b: zero-ext */
2149 case 8: *dst.reg = dst.val; break;
2151 break;
2152 case OP_MEM:
2153 if ( !(d & Mov) && (dst.orig_val == dst.val) &&
2154 !ctxt->force_writeback )
2155 /* nothing to do */;
2156 else if ( lock_prefix )
2157 rc = ops->cmpxchg(
2158 dst.mem.seg, dst.mem.off, &dst.orig_val,
2159 &dst.val, dst.bytes, ctxt);
2160 else
2161 rc = ops->write(
2162 dst.mem.seg, dst.mem.off, &dst.val, dst.bytes, ctxt);
2163 if ( rc != 0 )
2164 goto done;
2165 default:
2166 break;
2169 /* Inject #DB if single-step tracing was enabled at instruction start. */
2170 if ( (ctxt->regs->eflags & EFLG_TF) && (rc == X86EMUL_OKAY) &&
2171 (ops->inject_hw_exception != NULL) )
2172 rc = ops->inject_hw_exception(EXC_DB, -1, ctxt) ? : X86EMUL_EXCEPTION;
2174 /* Commit shadow register state. */
2175 _regs.eflags &= ~EFLG_RF;
2176 *ctxt->regs = _regs;
2178 done:
2179 return rc;
2181 special_insn:
2182 dst.type = OP_NONE;
2184 /*
2185 * The only implicit-operands instructions allowed a LOCK prefix are
2186 * CMPXCHG{8,16}B, MOV CRn, MOV DRn.
2187 */
2188 generate_exception_if(lock_prefix &&
2189 ((b < 0x20) || (b > 0x23)) && /* MOV CRn/DRn */
2190 (b != 0xc7), /* CMPXCHG{8,16}B */
2191 EXC_GP, 0);
2193 if ( twobyte )
2194 goto twobyte_special_insn;
2196 switch ( b )
2198 case 0x06: /* push %%es */ {
2199 struct segment_register reg;
2200 src.val = x86_seg_es;
2201 push_seg:
2202 fail_if(ops->read_segment == NULL);
2203 if ( (rc = ops->read_segment(src.val, &reg, ctxt)) != 0 )
2204 return rc;
2205 /* 64-bit mode: PUSH defaults to a 64-bit operand. */
2206 if ( mode_64bit() && (op_bytes == 4) )
2207 op_bytes = 8;
2208 if ( (rc = ops->write(x86_seg_ss, sp_pre_dec(op_bytes),
2209 &reg.sel, op_bytes, ctxt)) != 0 )
2210 goto done;
2211 break;
2214 case 0x07: /* pop %%es */
2215 src.val = x86_seg_es;
2216 pop_seg:
2217 fail_if(ops->write_segment == NULL);
2218 /* 64-bit mode: POP defaults to a 64-bit operand. */
2219 if ( mode_64bit() && (op_bytes == 4) )
2220 op_bytes = 8;
2221 if ( (rc = read_ulong(x86_seg_ss, sp_post_inc(op_bytes),
2222 &dst.val, op_bytes, ctxt, ops)) != 0 )
2223 goto done;
2224 if ( (rc = load_seg(src.val, (uint16_t)dst.val, ctxt, ops)) != 0 )
2225 return rc;
2226 break;
2228 case 0x0e: /* push %%cs */
2229 src.val = x86_seg_cs;
2230 goto push_seg;
2232 case 0x16: /* push %%ss */
2233 src.val = x86_seg_ss;
2234 goto push_seg;
2236 case 0x17: /* pop %%ss */
2237 src.val = x86_seg_ss;
2238 ctxt->retire.flags.mov_ss = 1;
2239 goto pop_seg;
2241 case 0x1e: /* push %%ds */
2242 src.val = x86_seg_ds;
2243 goto push_seg;
2245 case 0x1f: /* pop %%ds */
2246 src.val = x86_seg_ds;
2247 goto pop_seg;
2249 case 0x27: /* daa */ {
2250 uint8_t al = _regs.eax;
2251 unsigned long eflags = _regs.eflags;
2252 generate_exception_if(mode_64bit(), EXC_UD, -1);
2253 _regs.eflags &= ~(EFLG_CF|EFLG_AF);
2254 if ( ((al & 0x0f) > 9) || (eflags & EFLG_AF) )
2256 *(uint8_t *)&_regs.eax += 6;
2257 _regs.eflags |= EFLG_AF;
2259 if ( (al > 0x99) || (eflags & EFLG_CF) )
2261 *(uint8_t *)&_regs.eax += 0x60;
2262 _regs.eflags |= EFLG_CF;
2264 _regs.eflags &= ~(EFLG_SF|EFLG_ZF|EFLG_PF);
2265 _regs.eflags |= ((uint8_t)_regs.eax == 0) ? EFLG_ZF : 0;
2266 _regs.eflags |= (( int8_t)_regs.eax < 0) ? EFLG_SF : 0;
2267 _regs.eflags |= even_parity(_regs.eax) ? EFLG_PF : 0;
2268 break;
2271 case 0x2f: /* das */ {
2272 uint8_t al = _regs.eax;
2273 unsigned long eflags = _regs.eflags;
2274 generate_exception_if(mode_64bit(), EXC_UD, -1);
2275 _regs.eflags &= ~(EFLG_CF|EFLG_AF);
2276 if ( ((al & 0x0f) > 9) || (eflags & EFLG_AF) )
2278 _regs.eflags |= EFLG_AF;
2279 if ( (al < 6) || (eflags & EFLG_CF) )
2280 _regs.eflags |= EFLG_CF;
2281 *(uint8_t *)&_regs.eax -= 6;
2283 if ( (al > 0x99) || (eflags & EFLG_CF) )
2285 *(uint8_t *)&_regs.eax -= 0x60;
2286 _regs.eflags |= EFLG_CF;
2288 _regs.eflags &= ~(EFLG_SF|EFLG_ZF|EFLG_PF);
2289 _regs.eflags |= ((uint8_t)_regs.eax == 0) ? EFLG_ZF : 0;
2290 _regs.eflags |= (( int8_t)_regs.eax < 0) ? EFLG_SF : 0;
2291 _regs.eflags |= even_parity(_regs.eax) ? EFLG_PF : 0;
2292 break;
2295 case 0x37: /* aaa */
2296 case 0x3f: /* aas */
2297 generate_exception_if(mode_64bit(), EXC_UD, -1);
2298 _regs.eflags &= ~EFLG_CF;
2299 if ( ((uint8_t)_regs.eax > 9) || (_regs.eflags & EFLG_AF) )
2301 ((uint8_t *)&_regs.eax)[0] += (b == 0x37) ? 6 : -6;
2302 ((uint8_t *)&_regs.eax)[1] += (b == 0x37) ? 1 : -1;
2303 _regs.eflags |= EFLG_CF | EFLG_AF;
2305 ((uint8_t *)&_regs.eax)[0] &= 0x0f;
2306 break;
2308 case 0x40 ... 0x4f: /* inc/dec reg */
2309 dst.type = OP_REG;
2310 dst.reg = decode_register(b & 7, &_regs, 0);
2311 dst.bytes = op_bytes;
2312 dst.val = *dst.reg;
2313 if ( b & 8 )
2314 emulate_1op("dec", dst, _regs.eflags);
2315 else
2316 emulate_1op("inc", dst, _regs.eflags);
2317 break;
2319 case 0x50 ... 0x57: /* push reg */
2320 src.val = *(unsigned long *)decode_register(
2321 (b & 7) | ((rex_prefix & 1) << 3), &_regs, 0);
2322 goto push;
2324 case 0x58 ... 0x5f: /* pop reg */
2325 dst.type = OP_REG;
2326 dst.reg = decode_register(
2327 (b & 7) | ((rex_prefix & 1) << 3), &_regs, 0);
2328 dst.bytes = op_bytes;
2329 if ( mode_64bit() && (dst.bytes == 4) )
2330 dst.bytes = 8;
2331 if ( (rc = read_ulong(x86_seg_ss, sp_post_inc(dst.bytes),
2332 &dst.val, dst.bytes, ctxt, ops)) != 0 )
2333 goto done;
2334 break;
2336 case 0x60: /* pusha */ {
2337 int i;
2338 unsigned long regs[] = {
2339 _regs.eax, _regs.ecx, _regs.edx, _regs.ebx,
2340 _regs.esp, _regs.ebp, _regs.esi, _regs.edi };
2341 generate_exception_if(mode_64bit(), EXC_UD, -1);
2342 for ( i = 0; i < 8; i++ )
2343 if ( (rc = ops->write(x86_seg_ss, sp_pre_dec(op_bytes),
2344 &regs[i], op_bytes, ctxt)) != 0 )
2345 goto done;
2346 break;
2349 case 0x61: /* popa */ {
2350 int i;
2351 unsigned long dummy_esp, *regs[] = {
2352 (unsigned long *)&_regs.edi, (unsigned long *)&_regs.esi,
2353 (unsigned long *)&_regs.ebp, (unsigned long *)&dummy_esp,
2354 (unsigned long *)&_regs.ebx, (unsigned long *)&_regs.edx,
2355 (unsigned long *)&_regs.ecx, (unsigned long *)&_regs.eax };
2356 generate_exception_if(mode_64bit(), EXC_UD, -1);
2357 for ( i = 0; i < 8; i++ )
2359 if ( (rc = read_ulong(x86_seg_ss, sp_post_inc(op_bytes),
2360 &dst.val, op_bytes, ctxt, ops)) != 0 )
2361 goto done;
2362 switch ( op_bytes )
2364 case 1: *(uint8_t *)regs[i] = (uint8_t)dst.val; break;
2365 case 2: *(uint16_t *)regs[i] = (uint16_t)dst.val; break;
2366 case 4: *regs[i] = (uint32_t)dst.val; break; /* 64b: zero-ext */
2367 case 8: *regs[i] = dst.val; break;
2370 break;
2373 case 0x68: /* push imm{16,32,64} */
2374 src.val = ((op_bytes == 2)
2375 ? (int32_t)insn_fetch_type(int16_t)
2376 : insn_fetch_type(int32_t));
2377 goto push;
2379 case 0x6a: /* push imm8 */
2380 src.val = insn_fetch_type(int8_t);
2381 push:
2382 d |= Mov; /* force writeback */
2383 dst.type = OP_MEM;
2384 dst.bytes = op_bytes;
2385 if ( mode_64bit() && (dst.bytes == 4) )
2386 dst.bytes = 8;
2387 dst.val = src.val;
2388 dst.mem.seg = x86_seg_ss;
2389 dst.mem.off = sp_pre_dec(dst.bytes);
2390 break;
2392 case 0x6c ... 0x6d: /* ins %dx,%es:%edi */ {
2393 unsigned long nr_reps = get_rep_prefix();
2394 unsigned int port = (uint16_t)_regs.edx;
2395 dst.bytes = !(b & 1) ? 1 : (op_bytes == 8) ? 4 : op_bytes;
2396 dst.mem.seg = x86_seg_es;
2397 dst.mem.off = truncate_ea_and_reps(_regs.edi, nr_reps, dst.bytes);
2398 if ( (rc = ioport_access_check(port, dst.bytes, ctxt, ops)) != 0 )
2399 goto done;
2400 if ( (nr_reps > 1) && (ops->rep_ins != NULL) &&
2401 ((rc = ops->rep_ins(port, dst.mem.seg, dst.mem.off, dst.bytes,
2402 &nr_reps, ctxt)) != X86EMUL_UNHANDLEABLE) )
2404 if ( rc != 0 )
2405 goto done;
2407 else
2409 fail_if(ops->read_io == NULL);
2410 if ( (rc = ops->read_io(port, dst.bytes, &dst.val, ctxt)) != 0 )
2411 goto done;
2412 dst.type = OP_MEM;
2413 nr_reps = 1;
2415 register_address_increment(
2416 _regs.edi,
2417 nr_reps * ((_regs.eflags & EFLG_DF) ? -dst.bytes : dst.bytes));
2418 put_rep_prefix(nr_reps);
2419 break;
2422 case 0x6e ... 0x6f: /* outs %esi,%dx */ {
2423 unsigned long nr_reps = get_rep_prefix();
2424 unsigned int port = (uint16_t)_regs.edx;
2425 dst.bytes = !(b & 1) ? 1 : (op_bytes == 8) ? 4 : op_bytes;
2426 ea.mem.off = truncate_ea_and_reps(_regs.esi, nr_reps, dst.bytes);
2427 if ( (rc = ioport_access_check(port, dst.bytes, ctxt, ops)) != 0 )
2428 goto done;
2429 if ( (nr_reps > 1) && (ops->rep_outs != NULL) &&
2430 ((rc = ops->rep_outs(ea.mem.seg, ea.mem.off, port, dst.bytes,
2431 &nr_reps, ctxt)) != X86EMUL_UNHANDLEABLE) )
2433 if ( rc != 0 )
2434 goto done;
2436 else
2438 if ( (rc = read_ulong(ea.mem.seg, truncate_ea(_regs.esi),
2439 &dst.val, dst.bytes, ctxt, ops)) != 0 )
2440 goto done;
2441 fail_if(ops->write_io == NULL);
2442 if ( (rc = ops->write_io(port, dst.bytes, dst.val, ctxt)) != 0 )
2443 goto done;
2444 nr_reps = 1;
2446 register_address_increment(
2447 _regs.esi,
2448 nr_reps * ((_regs.eflags & EFLG_DF) ? -dst.bytes : dst.bytes));
2449 put_rep_prefix(nr_reps);
2450 break;
2453 case 0x70 ... 0x7f: /* jcc (short) */ {
2454 int rel = insn_fetch_type(int8_t);
2455 if ( test_cc(b, _regs.eflags) )
2456 jmp_rel(rel);
2457 break;
2460 case 0x90: /* nop / xchg %%r8,%%rax */
2461 if ( !(rex_prefix & 1) )
2462 break; /* nop */
2464 case 0x91 ... 0x97: /* xchg reg,%%rax */
2465 src.type = dst.type = OP_REG;
2466 src.bytes = dst.bytes = op_bytes;
2467 src.reg = (unsigned long *)&_regs.eax;
2468 src.val = *src.reg;
2469 dst.reg = decode_register(
2470 (b & 7) | ((rex_prefix & 1) << 3), &_regs, 0);
2471 dst.val = *dst.reg;
2472 goto xchg;
2474 case 0x98: /* cbw/cwde/cdqe */
2475 switch ( op_bytes )
2477 case 2: *(int16_t *)&_regs.eax = (int8_t)_regs.eax; break; /* cbw */
2478 case 4: _regs.eax = (uint32_t)(int16_t)_regs.eax; break; /* cwde */
2479 case 8: _regs.eax = (int32_t)_regs.eax; break; /* cdqe */
2481 break;
2483 case 0x99: /* cwd/cdq/cqo */
2484 switch ( op_bytes )
2486 case 2:
2487 *(int16_t *)&_regs.edx = ((int16_t)_regs.eax < 0) ? -1 : 0;
2488 break;
2489 case 4:
2490 _regs.edx = (uint32_t)(((int32_t)_regs.eax < 0) ? -1 : 0);
2491 break;
2492 case 8:
2493 _regs.edx = (_regs.eax < 0) ? -1 : 0;
2494 break;
2496 break;
2498 case 0x9a: /* call (far, absolute) */ {
2499 struct segment_register reg;
2500 uint16_t sel;
2501 uint32_t eip;
2503 fail_if(ops->read_segment == NULL);
2504 generate_exception_if(mode_64bit(), EXC_UD, -1);
2506 eip = insn_fetch_bytes(op_bytes);
2507 sel = insn_fetch_type(uint16_t);
2509 if ( (rc = ops->read_segment(x86_seg_cs, &reg, ctxt)) ||
2510 (rc = ops->write(x86_seg_ss, sp_pre_dec(op_bytes),
2511 &reg.sel, op_bytes, ctxt)) ||
2512 (rc = ops->write(x86_seg_ss, sp_pre_dec(op_bytes),
2513 &_regs.eip, op_bytes, ctxt)) )
2514 goto done;
2516 if ( (rc = load_seg(x86_seg_cs, sel, ctxt, ops)) != 0 )
2517 goto done;
2518 _regs.eip = eip;
2519 break;
2522 case 0x9b: /* wait/fwait */
2523 emulate_fpu_insn("fwait");
2524 break;
2526 case 0x9c: /* pushf */
2527 src.val = _regs.eflags;
2528 goto push;
2530 case 0x9d: /* popf */ {
2531 uint32_t mask = EFLG_VIP | EFLG_VIF | EFLG_VM;
2532 if ( !mode_ring0() )
2533 mask |= EFLG_IOPL;
2534 if ( !mode_iopl() )
2535 mask |= EFLG_IF;
2536 /* 64-bit mode: POP defaults to a 64-bit operand. */
2537 if ( mode_64bit() && (op_bytes == 4) )
2538 op_bytes = 8;
2539 if ( (rc = read_ulong(x86_seg_ss, sp_post_inc(op_bytes),
2540 &dst.val, op_bytes, ctxt, ops)) != 0 )
2541 goto done;
2542 if ( op_bytes == 2 )
2543 dst.val = (uint16_t)dst.val | (_regs.eflags & 0xffff0000u);
2544 dst.val &= 0x257fd5;
2545 _regs.eflags &= mask;
2546 _regs.eflags |= (uint32_t)(dst.val & ~mask) | 0x02;
2547 break;
2550 case 0x9e: /* sahf */
2551 *(uint8_t *)&_regs.eflags = (((uint8_t *)&_regs.eax)[1] & 0xd7) | 0x02;
2552 break;
2554 case 0x9f: /* lahf */
2555 ((uint8_t *)&_regs.eax)[1] = (_regs.eflags & 0xd7) | 0x02;
2556 break;
2558 case 0xa0 ... 0xa1: /* mov mem.offs,{%al,%ax,%eax,%rax} */
2559 /* Source EA is not encoded via ModRM. */
2560 dst.type = OP_REG;
2561 dst.reg = (unsigned long *)&_regs.eax;
2562 dst.bytes = (d & ByteOp) ? 1 : op_bytes;
2563 if ( (rc = read_ulong(ea.mem.seg, insn_fetch_bytes(ad_bytes),
2564 &dst.val, dst.bytes, ctxt, ops)) != 0 )
2565 goto done;
2566 break;
2568 case 0xa2 ... 0xa3: /* mov {%al,%ax,%eax,%rax},mem.offs */
2569 /* Destination EA is not encoded via ModRM. */
2570 dst.type = OP_MEM;
2571 dst.mem.seg = ea.mem.seg;
2572 dst.mem.off = insn_fetch_bytes(ad_bytes);
2573 dst.bytes = (d & ByteOp) ? 1 : op_bytes;
2574 dst.val = (unsigned long)_regs.eax;
2575 break;
2577 case 0xa4 ... 0xa5: /* movs */ {
2578 unsigned long nr_reps = get_rep_prefix();
2579 dst.bytes = (d & ByteOp) ? 1 : op_bytes;
2580 dst.mem.seg = x86_seg_es;
2581 dst.mem.off = truncate_ea_and_reps(_regs.edi, nr_reps, dst.bytes);
2582 if ( (nr_reps > 1) && (ops->rep_movs != NULL) &&
2583 ((rc = ops->rep_movs(ea.mem.seg, truncate_ea(_regs.esi),
2584 dst.mem.seg, dst.mem.off, dst.bytes,
2585 &nr_reps, ctxt)) != X86EMUL_UNHANDLEABLE) )
2587 if ( rc != 0 )
2588 goto done;
2590 else
2592 if ( (rc = read_ulong(ea.mem.seg, truncate_ea(_regs.esi),
2593 &dst.val, dst.bytes, ctxt, ops)) != 0 )
2594 goto done;
2595 dst.type = OP_MEM;
2596 nr_reps = 1;
2598 register_address_increment(
2599 _regs.esi,
2600 nr_reps * ((_regs.eflags & EFLG_DF) ? -dst.bytes : dst.bytes));
2601 register_address_increment(
2602 _regs.edi,
2603 nr_reps * ((_regs.eflags & EFLG_DF) ? -dst.bytes : dst.bytes));
2604 put_rep_prefix(nr_reps);
2605 break;
2608 case 0xa6 ... 0xa7: /* cmps */ {
2609 unsigned long next_eip = _regs.eip;
2610 get_rep_prefix();
2611 src.bytes = dst.bytes = (d & ByteOp) ? 1 : op_bytes;
2612 if ( (rc = read_ulong(ea.mem.seg, truncate_ea(_regs.esi),
2613 &dst.val, dst.bytes, ctxt, ops)) ||
2614 (rc = read_ulong(x86_seg_es, truncate_ea(_regs.edi),
2615 &src.val, src.bytes, ctxt, ops)) )
2616 goto done;
2617 register_address_increment(
2618 _regs.esi, (_regs.eflags & EFLG_DF) ? -dst.bytes : dst.bytes);
2619 register_address_increment(
2620 _regs.edi, (_regs.eflags & EFLG_DF) ? -src.bytes : src.bytes);
2621 put_rep_prefix(1);
2622 /* cmp: dst - src ==> src=*%%edi,dst=*%%esi ==> *%%esi - *%%edi */
2623 emulate_2op_SrcV("cmp", src, dst, _regs.eflags);
2624 if ( ((rep_prefix == REPE_PREFIX) && !(_regs.eflags & EFLG_ZF)) ||
2625 ((rep_prefix == REPNE_PREFIX) && (_regs.eflags & EFLG_ZF)) )
2626 _regs.eip = next_eip;
2627 break;
2630 case 0xaa ... 0xab: /* stos */ {
2631 /* unsigned long max_reps = */get_rep_prefix();
2632 dst.type = OP_MEM;
2633 dst.bytes = (d & ByteOp) ? 1 : op_bytes;
2634 dst.mem.seg = x86_seg_es;
2635 dst.mem.off = truncate_ea(_regs.edi);
2636 dst.val = _regs.eax;
2637 register_address_increment(
2638 _regs.edi, (_regs.eflags & EFLG_DF) ? -dst.bytes : dst.bytes);
2639 put_rep_prefix(1);
2640 break;
2643 case 0xac ... 0xad: /* lods */ {
2644 /* unsigned long max_reps = */get_rep_prefix();
2645 dst.type = OP_REG;
2646 dst.bytes = (d & ByteOp) ? 1 : op_bytes;
2647 dst.reg = (unsigned long *)&_regs.eax;
2648 if ( (rc = read_ulong(ea.mem.seg, truncate_ea(_regs.esi),
2649 &dst.val, dst.bytes, ctxt, ops)) != 0 )
2650 goto done;
2651 register_address_increment(
2652 _regs.esi, (_regs.eflags & EFLG_DF) ? -dst.bytes : dst.bytes);
2653 put_rep_prefix(1);
2654 break;
2657 case 0xae ... 0xaf: /* scas */ {
2658 unsigned long next_eip = _regs.eip;
2659 get_rep_prefix();
2660 src.bytes = dst.bytes = (d & ByteOp) ? 1 : op_bytes;
2661 dst.val = _regs.eax;
2662 if ( (rc = read_ulong(x86_seg_es, truncate_ea(_regs.edi),
2663 &src.val, src.bytes, ctxt, ops)) != 0 )
2664 goto done;
2665 register_address_increment(
2666 _regs.edi, (_regs.eflags & EFLG_DF) ? -src.bytes : src.bytes);
2667 put_rep_prefix(1);
2668 /* cmp: dst - src ==> src=*%%edi,dst=%%eax ==> %%eax - *%%edi */
2669 emulate_2op_SrcV("cmp", src, dst, _regs.eflags);
2670 if ( ((rep_prefix == REPE_PREFIX) && !(_regs.eflags & EFLG_ZF)) ||
2671 ((rep_prefix == REPNE_PREFIX) && (_regs.eflags & EFLG_ZF)) )
2672 _regs.eip = next_eip;
2673 break;
2676 case 0xc2: /* ret imm16 (near) */
2677 case 0xc3: /* ret (near) */ {
2678 int offset = (b == 0xc2) ? insn_fetch_type(uint16_t) : 0;
2679 op_bytes = mode_64bit() ? 8 : op_bytes;
2680 if ( (rc = read_ulong(x86_seg_ss, sp_post_inc(op_bytes + offset),
2681 &dst.val, op_bytes, ctxt, ops)) != 0 )
2682 goto done;
2683 _regs.eip = dst.val;
2684 break;
2687 case 0xc8: /* enter imm16,imm8 */ {
2688 uint16_t size = insn_fetch_type(uint16_t);
2689 uint8_t depth = insn_fetch_type(uint8_t) & 31;
2690 int i;
2692 dst.type = OP_REG;
2693 dst.bytes = (mode_64bit() && (op_bytes == 4)) ? 8 : op_bytes;
2694 dst.reg = (unsigned long *)&_regs.ebp;
2695 if ( (rc = ops->write(x86_seg_ss, sp_pre_dec(dst.bytes),
2696 &_regs.ebp, dst.bytes, ctxt)) )
2697 goto done;
2698 dst.val = _regs.esp;
2700 if ( depth > 0 )
2702 for ( i = 1; i < depth; i++ )
2704 unsigned long ebp, temp_data;
2705 ebp = truncate_word(_regs.ebp - i*dst.bytes, ctxt->sp_size/8);
2706 if ( (rc = read_ulong(x86_seg_ss, ebp,
2707 &temp_data, dst.bytes, ctxt, ops)) ||
2708 (rc = ops->write(x86_seg_ss, sp_pre_dec(dst.bytes),
2709 &temp_data, dst.bytes, ctxt)) )
2710 goto done;
2712 if ( (rc = ops->write(x86_seg_ss, sp_pre_dec(dst.bytes),
2713 &dst.val, dst.bytes, ctxt)) )
2714 goto done;
2717 sp_pre_dec(size);
2718 break;
2721 case 0xc9: /* leave */
2722 /* First writeback, to %%esp. */
2723 dst.type = OP_REG;
2724 dst.bytes = (mode_64bit() && (op_bytes == 4)) ? 8 : op_bytes;
2725 dst.reg = (unsigned long *)&_regs.esp;
2726 dst.val = _regs.ebp;
2728 /* Flush first writeback, since there is a second. */
2729 switch ( dst.bytes )
2731 case 1: *(uint8_t *)dst.reg = (uint8_t)dst.val; break;
2732 case 2: *(uint16_t *)dst.reg = (uint16_t)dst.val; break;
2733 case 4: *dst.reg = (uint32_t)dst.val; break; /* 64b: zero-ext */
2734 case 8: *dst.reg = dst.val; break;
2737 /* Second writeback, to %%ebp. */
2738 dst.reg = (unsigned long *)&_regs.ebp;
2739 if ( (rc = read_ulong(x86_seg_ss, sp_post_inc(dst.bytes),
2740 &dst.val, dst.bytes, ctxt, ops)) )
2741 goto done;
2742 break;
2744 case 0xca: /* ret imm16 (far) */
2745 case 0xcb: /* ret (far) */ {
2746 int offset = (b == 0xca) ? insn_fetch_type(uint16_t) : 0;
2747 op_bytes = mode_64bit() ? 8 : op_bytes;
2748 if ( (rc = read_ulong(x86_seg_ss, sp_post_inc(op_bytes),
2749 &dst.val, op_bytes, ctxt, ops)) ||
2750 (rc = read_ulong(x86_seg_ss, sp_post_inc(op_bytes + offset),
2751 &src.val, op_bytes, ctxt, ops)) ||
2752 (rc = load_seg(x86_seg_cs, (uint16_t)src.val, ctxt, ops)) )
2753 goto done;
2754 _regs.eip = dst.val;
2755 break;
2758 case 0xcc: /* int3 */
2759 src.val = EXC_BP;
2760 goto swint;
2762 case 0xcd: /* int imm8 */
2763 src.val = insn_fetch_type(uint8_t);
2764 swint:
2765 fail_if(ops->inject_sw_interrupt == NULL);
2766 rc = ops->inject_sw_interrupt(src.val, _regs.eip - ctxt->regs->eip,
2767 ctxt) ? : X86EMUL_EXCEPTION;
2768 goto done;
2770 case 0xce: /* into */
2771 generate_exception_if(mode_64bit(), EXC_UD, -1);
2772 if ( !(_regs.eflags & EFLG_OF) )
2773 break;
2774 src.val = EXC_OF;
2775 goto swint;
2777 case 0xcf: /* iret */ {
2778 unsigned long cs, eip, eflags;
2779 uint32_t mask = EFLG_VIP | EFLG_VIF | EFLG_VM;
2780 if ( !mode_ring0() )
2781 mask |= EFLG_IOPL;
2782 if ( !mode_iopl() )
2783 mask |= EFLG_IF;
2784 fail_if(!in_realmode(ctxt, ops));
2785 if ( (rc = read_ulong(x86_seg_ss, sp_post_inc(op_bytes),
2786 &eip, op_bytes, ctxt, ops)) ||
2787 (rc = read_ulong(x86_seg_ss, sp_post_inc(op_bytes),
2788 &cs, op_bytes, ctxt, ops)) ||
2789 (rc = read_ulong(x86_seg_ss, sp_post_inc(op_bytes),
2790 &eflags, op_bytes, ctxt, ops)) )
2791 goto done;
2792 if ( op_bytes == 2 )
2793 eflags = (uint16_t)eflags | (_regs.eflags & 0xffff0000u);
2794 eflags &= 0x257fd5;
2795 _regs.eflags &= mask;
2796 _regs.eflags |= (uint32_t)(eflags & ~mask) | 0x02;
2797 _regs.eip = eip;
2798 if ( (rc = load_seg(x86_seg_cs, (uint16_t)cs, ctxt, ops)) != 0 )
2799 goto done;
2800 break;
2803 case 0xd4: /* aam */ {
2804 unsigned int base = insn_fetch_type(uint8_t);
2805 uint8_t al = _regs.eax;
2806 generate_exception_if(mode_64bit(), EXC_UD, -1);
2807 generate_exception_if(base == 0, EXC_DE, -1);
2808 *(uint16_t *)&_regs.eax = ((al / base) << 8) | (al % base);
2809 _regs.eflags &= ~(EFLG_SF|EFLG_ZF|EFLG_PF);
2810 _regs.eflags |= ((uint8_t)_regs.eax == 0) ? EFLG_ZF : 0;
2811 _regs.eflags |= (( int8_t)_regs.eax < 0) ? EFLG_SF : 0;
2812 _regs.eflags |= even_parity(_regs.eax) ? EFLG_PF : 0;
2813 break;
2816 case 0xd5: /* aad */ {
2817 unsigned int base = insn_fetch_type(uint8_t);
2818 uint16_t ax = _regs.eax;
2819 generate_exception_if(mode_64bit(), EXC_UD, -1);
2820 *(uint16_t *)&_regs.eax = (uint8_t)(ax + ((ax >> 8) * base));
2821 _regs.eflags &= ~(EFLG_SF|EFLG_ZF|EFLG_PF);
2822 _regs.eflags |= ((uint8_t)_regs.eax == 0) ? EFLG_ZF : 0;
2823 _regs.eflags |= (( int8_t)_regs.eax < 0) ? EFLG_SF : 0;
2824 _regs.eflags |= even_parity(_regs.eax) ? EFLG_PF : 0;
2825 break;
2828 case 0xd6: /* salc */
2829 generate_exception_if(mode_64bit(), EXC_UD, -1);
2830 *(uint8_t *)&_regs.eax = (_regs.eflags & EFLG_CF) ? 0xff : 0x00;
2831 break;
2833 case 0xd7: /* xlat */ {
2834 unsigned long al = (uint8_t)_regs.eax;
2835 if ( (rc = read_ulong(ea.mem.seg, truncate_ea(_regs.ebx + al),
2836 &al, 1, ctxt, ops)) != 0 )
2837 goto done;
2838 *(uint8_t *)&_regs.eax = al;
2839 break;
2842 case 0xd8: /* FPU 0xd8 */
2843 switch ( modrm )
2845 case 0xc0 ... 0xc7: /* fadd %stN,%stN */
2846 case 0xc8 ... 0xcf: /* fmul %stN,%stN */
2847 case 0xd0 ... 0xd7: /* fcom %stN,%stN */
2848 case 0xd8 ... 0xdf: /* fcomp %stN,%stN */
2849 case 0xe0 ... 0xe7: /* fsub %stN,%stN */
2850 case 0xe8 ... 0xef: /* fsubr %stN,%stN */
2851 case 0xf0 ... 0xf7: /* fdiv %stN,%stN */
2852 case 0xf8 ... 0xff: /* fdivr %stN,%stN */
2853 emulate_fpu_insn_stub(0xd8, modrm);
2854 break;
2855 default:
2856 fail_if(modrm >= 0xc0);
2857 ea.bytes = 4;
2858 src = ea;
2859 if ( (rc = ops->read(src.mem.seg, src.mem.off, &src.val,
2860 src.bytes, ctxt)) != 0 )
2861 goto done;
2862 switch ( modrm_reg & 7 )
2864 case 0: /* fadd */
2865 emulate_fpu_insn_memsrc("fadds", src.val);
2866 break;
2867 case 1: /* fmul */
2868 emulate_fpu_insn_memsrc("fmuls", src.val);
2869 break;
2870 case 2: /* fcom */
2871 emulate_fpu_insn_memsrc("fcoms", src.val);
2872 break;
2873 case 3: /* fcomp */
2874 emulate_fpu_insn_memsrc("fcomps", src.val);
2875 break;
2876 case 4: /* fsub */
2877 emulate_fpu_insn_memsrc("fsubs", src.val);
2878 break;
2879 case 5: /* fsubr */
2880 emulate_fpu_insn_memsrc("fsubrs", src.val);
2881 break;
2882 case 6: /* fdiv */
2883 emulate_fpu_insn_memsrc("fdivs", src.val);
2884 break;
2885 case 7: /* fdivr */
2886 emulate_fpu_insn_memsrc("fdivrs", src.val);
2887 break;
2888 default:
2889 goto cannot_emulate;
2892 break;
2894 case 0xd9: /* FPU 0xd9 */
2895 switch ( modrm )
2897 case 0xc0 ... 0xc7: /* fld %stN */
2898 case 0xc8 ... 0xcf: /* fxch %stN */
2899 case 0xd0: /* fnop */
2900 case 0xe0: /* fchs */
2901 case 0xe1: /* fabs */
2902 case 0xe4: /* ftst */
2903 case 0xe5: /* fxam */
2904 case 0xe8: /* fld1 */
2905 case 0xe9: /* fldl2t */
2906 case 0xea: /* fldl2e */
2907 case 0xeb: /* fldpi */
2908 case 0xec: /* fldlg2 */
2909 case 0xed: /* fldln2 */
2910 case 0xee: /* fldz */
2911 case 0xf0: /* f2xm1 */
2912 case 0xf1: /* fyl2x */
2913 case 0xf2: /* fptan */
2914 case 0xf3: /* fpatan */
2915 case 0xf4: /* fxtract */
2916 case 0xf5: /* fprem1 */
2917 case 0xf6: /* fdecstp */
2918 case 0xf7: /* fincstp */
2919 case 0xf8: /* fprem */
2920 case 0xf9: /* fyl2xp1 */
2921 case 0xfa: /* fsqrt */
2922 case 0xfb: /* fsincos */
2923 case 0xfc: /* frndint */
2924 case 0xfd: /* fscale */
2925 case 0xfe: /* fsin */
2926 case 0xff: /* fcos */
2927 emulate_fpu_insn_stub(0xd9, modrm);
2928 break;
2929 default:
2930 fail_if(modrm >= 0xc0);
2931 switch ( modrm_reg & 7 )
2933 case 0: /* fld m32fp */
2934 ea.bytes = 4;
2935 src = ea;
2936 if ( (rc = ops->read(ea.mem.seg, ea.mem.off, &src.val,
2937 src.bytes, ctxt)) != 0 )
2938 goto done;
2939 emulate_fpu_insn_memsrc("flds", src.val);
2940 break;
2941 case 2: /* fstp m32fp */
2942 ea.bytes = 4;
2943 dst = ea;
2944 dst.type = OP_MEM;
2945 emulate_fpu_insn_memdst("fsts", dst.val);
2946 break;
2947 case 3: /* fstp m32fp */
2948 ea.bytes = 4;
2949 dst = ea;
2950 dst.type = OP_MEM;
2951 emulate_fpu_insn_memdst("fstps", dst.val);
2952 break;
2953 /* case 4: fldenv - TODO */
2954 case 5: /* fldcw m2byte */
2955 ea.bytes = 2;
2956 src = ea;
2957 if ( (rc = ops->read(src.mem.seg, src.mem.off, &src.val,
2958 src.bytes, ctxt)) != 0 )
2959 goto done;
2960 emulate_fpu_insn_memsrc("fldcw", src.val);
2961 break;
2962 /* case 6: fstenv - TODO */
2963 case 7: /* fnstcw m2byte */
2964 ea.bytes = 2;
2965 dst = ea;
2966 dst.type = OP_MEM;
2967 emulate_fpu_insn_memdst("fnstcw", dst.val);
2968 break;
2969 default:
2970 goto cannot_emulate;
2973 break;
2975 case 0xda: /* FPU 0xda */
2976 switch ( modrm )
2978 case 0xc0 ... 0xc7: /* fcmovb %stN */
2979 case 0xc8 ... 0xcf: /* fcmove %stN */
2980 case 0xd0 ... 0xd7: /* fcmovbe %stN */
2981 case 0xd8 ... 0xdf: /* fcmovu %stN */
2982 case 0xe9: /* fucompp */
2983 emulate_fpu_insn_stub(0xda, modrm);
2984 break;
2985 default:
2986 fail_if(modrm >= 0xc0);
2987 ea.bytes = 8;
2988 src = ea;
2989 if ( (rc = ops->read(src.mem.seg, src.mem.off, &src.val,
2990 src.bytes, ctxt)) != 0 )
2991 goto done;
2992 switch ( modrm_reg & 7 )
2994 case 0: /* fiadd m64i */
2995 emulate_fpu_insn_memsrc("fiaddl", src.val);
2996 break;
2997 case 1: /* fimul m64i */
2998 emulate_fpu_insn_memsrc("fimul", src.val);
2999 break;
3000 case 2: /* ficom m64i */
3001 emulate_fpu_insn_memsrc("ficoml", src.val);
3002 break;
3003 case 3: /* ficomp m64i */
3004 emulate_fpu_insn_memsrc("ficompl", src.val);
3005 break;
3006 case 4: /* fisub m64i */
3007 emulate_fpu_insn_memsrc("fisubl", src.val);
3008 break;
3009 case 5: /* fisubr m64i */
3010 emulate_fpu_insn_memsrc("fisubrl", src.val);
3011 break;
3012 case 6: /* fidiv m64i */
3013 emulate_fpu_insn_memsrc("fidivl", src.val);
3014 break;
3015 case 7: /* fidivr m64i */
3016 emulate_fpu_insn_memsrc("fidivrl", src.val);
3017 break;
3018 default:
3019 goto cannot_emulate;
3022 break;
3024 case 0xdb: /* FPU 0xdb */
3025 switch ( modrm )
3027 case 0xc0 ... 0xc7: /* fcmovnb %stN */
3028 case 0xc8 ... 0xcf: /* fcmovne %stN */
3029 case 0xd0 ... 0xd7: /* fcmovnbe %stN */
3030 case 0xd8 ... 0xdf: /* fcmovnu %stN */
3031 emulate_fpu_insn_stub(0xdb, modrm);
3032 break;
3033 case 0xe2: /* fnclex */
3034 emulate_fpu_insn("fnclex");
3035 break;
3036 case 0xe3: /* fninit */
3037 emulate_fpu_insn("fninit");
3038 break;
3039 case 0xe4: /* fsetpm - 287 only, ignored by 387 */
3040 break;
3041 case 0xe8 ... 0xef: /* fucomi %stN */
3042 case 0xf0 ... 0xf7: /* fcomi %stN */
3043 emulate_fpu_insn_stub(0xdb, modrm);
3044 break;
3045 default:
3046 fail_if(modrm >= 0xc0);
3047 switch ( modrm_reg & 7 )
3049 case 0: /* fild m32i */
3050 ea.bytes = 4;
3051 src = ea;
3052 if ( (rc = ops->read(src.mem.seg, src.mem.off, &src.val,
3053 src.bytes, ctxt)) != 0 )
3054 goto done;
3055 emulate_fpu_insn_memsrc("fildl", src.val);
3056 break;
3057 case 1: /* fisttp m32i */
3058 ea.bytes = 4;
3059 dst = ea;
3060 dst.type = OP_MEM;
3061 emulate_fpu_insn_memdst("fisttpl", dst.val);
3062 break;
3063 case 2: /* fist m32i */
3064 ea.bytes = 4;
3065 dst = ea;
3066 dst.type = OP_MEM;
3067 emulate_fpu_insn_memdst("fistl", dst.val);
3068 break;
3069 case 3: /* fistp m32i */
3070 ea.bytes = 4;
3071 dst = ea;
3072 dst.type = OP_MEM;
3073 emulate_fpu_insn_memdst("fistpl", dst.val);
3074 break;
3075 case 5: /* fld m80fp */
3076 ea.bytes = 10;
3077 src = ea;
3078 if ( (rc = ops->read(src.mem.seg, src.mem.off,
3079 &src.val, src.bytes, ctxt)) != 0 )
3080 goto done;
3081 emulate_fpu_insn_memdst("fldt", src.val);
3082 break;
3083 case 7: /* fstp m80fp */
3084 ea.bytes = 10;
3085 dst.type = OP_MEM;
3086 dst = ea;
3087 emulate_fpu_insn_memdst("fstpt", dst.val);
3088 break;
3089 default:
3090 goto cannot_emulate;
3093 break;
3095 case 0xdc: /* FPU 0xdc */
3096 switch ( modrm )
3098 case 0xc0 ... 0xc7: /* fadd %stN */
3099 case 0xc8 ... 0xcf: /* fmul %stN */
3100 case 0xe0 ... 0xe7: /* fsubr %stN */
3101 case 0xe8 ... 0xef: /* fsub %stN */
3102 case 0xf0 ... 0xf7: /* fdivr %stN */
3103 case 0xf8 ... 0xff: /* fdiv %stN */
3104 emulate_fpu_insn_stub(0xdc, modrm);
3105 break;
3106 default:
3107 fail_if(modrm >= 0xc0);
3108 ea.bytes = 8;
3109 src = ea;
3110 if ( (rc = ops->read(src.mem.seg, src.mem.off, &src.val,
3111 src.bytes, ctxt)) != 0 )
3112 goto done;
3113 switch ( modrm_reg & 7 )
3115 case 0: /* fadd m64fp */
3116 emulate_fpu_insn_memsrc("faddl", src.val);
3117 break;
3118 case 1: /* fmul m64fp */
3119 emulate_fpu_insn_memsrc("fmull", src.val);
3120 break;
3121 case 2: /* fcom m64fp */
3122 emulate_fpu_insn_memsrc("fcoml", src.val);
3123 break;
3124 case 3: /* fcomp m64fp */
3125 emulate_fpu_insn_memsrc("fcompl", src.val);
3126 break;
3127 case 4: /* fsub m64fp */
3128 emulate_fpu_insn_memsrc("fsubl", src.val);
3129 break;
3130 case 5: /* fsubr m64fp */
3131 emulate_fpu_insn_memsrc("fsubrl", src.val);
3132 break;
3133 case 6: /* fdiv m64fp */
3134 emulate_fpu_insn_memsrc("fdivl", src.val);
3135 break;
3136 case 7: /* fdivr m64fp */
3137 emulate_fpu_insn_memsrc("fdivrl", src.val);
3138 break;
3141 break;
3143 case 0xdd: /* FPU 0xdd */
3144 switch ( modrm )
3146 case 0xc0 ... 0xc7: /* ffree %stN */
3147 case 0xd0 ... 0xd7: /* fst %stN */
3148 case 0xd8 ... 0xdf: /* fstp %stN */
3149 case 0xe0 ... 0xe7: /* fucom %stN */
3150 case 0xe8 ... 0xef: /* fucomp %stN */
3151 emulate_fpu_insn_stub(0xdd, modrm);
3152 break;
3153 default:
3154 fail_if(modrm >= 0xc0);
3155 switch ( modrm_reg & 7 )
3157 case 0: /* fld m64fp */;
3158 ea.bytes = 8;
3159 src = ea;
3160 if ( (rc = ops->read(src.mem.seg, src.mem.off, &src.val,
3161 src.bytes, ctxt)) != 0 )
3162 goto done;
3163 emulate_fpu_insn_memsrc("fldl", src.val);
3164 break;
3165 case 1: /* fisttp m64i */
3166 ea.bytes = 8;
3167 dst = ea;
3168 dst.type = OP_MEM;
3169 emulate_fpu_insn_memdst("fisttpll", dst.val);
3170 break;
3171 case 2: /* fst m64fp */
3172 ea.bytes = 8;
3173 dst = ea;
3174 dst.type = OP_MEM;
3175 emulate_fpu_insn_memsrc("fstl", dst.val);
3176 break;
3177 case 3: /* fstp m64fp */
3178 ea.bytes = 8;
3179 dst = ea;
3180 dst.type = OP_MEM;
3181 emulate_fpu_insn_memdst("fstpl", dst.val);
3182 break;
3183 case 7: /* fnstsw m2byte */
3184 ea.bytes = 2;
3185 dst = ea;
3186 dst.type = OP_MEM;
3187 emulate_fpu_insn_memdst("fnstsw", dst.val);
3188 break;
3189 default:
3190 goto cannot_emulate;
3193 break;
3195 case 0xde: /* FPU 0xde */
3196 switch ( modrm )
3198 case 0xc0 ... 0xc7: /* faddp %stN */
3199 case 0xc8 ... 0xcf: /* fmulp %stN */
3200 case 0xd9: /* fcompp */
3201 case 0xe0 ... 0xe7: /* fsubrp %stN */
3202 case 0xe8 ... 0xef: /* fsubp %stN */
3203 case 0xf0 ... 0xf7: /* fdivrp %stN */
3204 case 0xf8 ... 0xff: /* fdivp %stN */
3205 emulate_fpu_insn_stub(0xde, modrm);
3206 break;
3207 default:
3208 fail_if(modrm >= 0xc0);
3209 ea.bytes = 2;
3210 src = ea;
3211 if ( (rc = ops->read(src.mem.seg, src.mem.off, &src.val,
3212 src.bytes, ctxt)) != 0 )
3213 goto done;
3214 switch ( modrm_reg & 7 )
3216 case 0: /* fiadd m16i */
3217 emulate_fpu_insn_memsrc("fiadd", src.val);
3218 break;
3219 case 1: /* fimul m16i */
3220 emulate_fpu_insn_memsrc("fimul", src.val);
3221 break;
3222 case 2: /* ficom m16i */
3223 emulate_fpu_insn_memsrc("ficom", src.val);
3224 break;
3225 case 3: /* ficomp m16i */
3226 emulate_fpu_insn_memsrc("ficomp", src.val);
3227 break;
3228 case 4: /* fisub m16i */
3229 emulate_fpu_insn_memsrc("fisub", src.val);
3230 break;
3231 case 5: /* fisubr m16i */
3232 emulate_fpu_insn_memsrc("fisubr", src.val);
3233 break;
3234 case 6: /* fidiv m16i */
3235 emulate_fpu_insn_memsrc("fidiv", src.val);
3236 break;
3237 case 7: /* fidivr m16i */
3238 emulate_fpu_insn_memsrc("fidivr", src.val);
3239 break;
3240 default:
3241 goto cannot_emulate;
3244 break;
3246 case 0xdf: /* FPU 0xdf */
3247 switch ( modrm )
3249 case 0xe0:
3250 /* fnstsw %ax */
        /* Store the x87 status word into the 16-bit AX register. */
3251 dst.bytes = 2;
3252 dst.type = OP_REG;
3253 dst.reg = (unsigned long *)&_regs.eax;
3254 emulate_fpu_insn_memdst("fnstsw", dst.val);
3255 break;
3256 case 0xf0 ... 0xf7: /* fcomip %stN */
3257 case 0xf8 ... 0xff: /* fucomip %stN */
3258 emulate_fpu_insn_stub(0xdf, modrm);
3259 break;
3260 default:
        /* Memory-operand forms of opcode 0xdf: integer/BCD load/store. */
3261 fail_if(modrm >= 0xc0);
3262 switch ( modrm_reg & 7 )
3264 case 0: /* fild m16i */
3265 ea.bytes = 2;
3266 src = ea;
3267 if ( (rc = ops->read(src.mem.seg, src.mem.off, &src.val,
3268 src.bytes, ctxt)) != 0 )
3269 goto done;
3270 emulate_fpu_insn_memsrc("fild", src.val);
3271 break;
3272 case 1: /* fisttp m16i */
3273 ea.bytes = 2;
3274 dst = ea;
3275 dst.type = OP_MEM;
3276 emulate_fpu_insn_memdst("fisttp", dst.val);
3277 break;
3278 case 2: /* fist m16i */
3279 ea.bytes = 2;
3280 dst = ea;
3281 dst.type = OP_MEM;
3282 emulate_fpu_insn_memdst("fist", dst.val);
3283 break;
3284 case 3: /* fistp m16i */
3285 ea.bytes = 2;
3286 dst = ea;
3287 dst.type = OP_MEM;
3288 emulate_fpu_insn_memdst("fistp", dst.val);
3289 break;
3290 case 4: /* fbld m80dec */
3291 ea.bytes = 10;
3292 dst = ea;
        /*
         * NOTE(review): unlike the fild path above, this case never
         * copies ea into src, yet it reads through src.mem/src.bytes
         * and feeds src.val to the insn -- src here still holds
         * whatever an earlier decode step left in it.  Looks like a
         * bug; confirm against later upstream x86_emulate changesets.
         */
3293 if ( (rc = ops->read(src.mem.seg, src.mem.off,
3294 &src.val, src.bytes, ctxt)) != 0 )
3295 goto done;
3296 emulate_fpu_insn_memdst("fbld", src.val);
3297 break;
3298 case 5: /* fild m64i */
3299 ea.bytes = 8;
3300 src = ea;
3301 if ( (rc = ops->read(src.mem.seg, src.mem.off, &src.val,
3302 src.bytes, ctxt)) != 0 )
3303 goto done;
        /* "fildll"/"fistpll" are the gas spellings of the m64i forms. */
3304 emulate_fpu_insn_memsrc("fildll", src.val);
3305 break;
3306 case 6: /* fbstp packed bcd */
3307 ea.bytes = 10;
3308 dst = ea;
3309 dst.type = OP_MEM;
3310 emulate_fpu_insn_memdst("fbstp", dst.val);
3311 break;
3312 case 7: /* fistp m64i */
3313 ea.bytes = 8;
3314 dst = ea;
3315 dst.type = OP_MEM;
3316 emulate_fpu_insn_memdst("fistpll", dst.val);
3317 break;
3318 default:
3319 goto cannot_emulate;
3322 break;
3324 case 0xe0 ... 0xe2: /* loop{,z,nz} */ {
3325 int rel = insn_fetch_type(int8_t);
3326 int do_jmp = !(_regs.eflags & EFLG_ZF); /* loopnz */
3327 if ( b == 0xe1 )
3328 do_jmp = !do_jmp; /* loopz */
3329 else if ( b == 0xe2 )
3330 do_jmp = 1; /* loop */
        /* The address-size attribute selects which width of rCX is
         * decremented and tested. */
3331 switch ( ad_bytes )
3333 case 2:
3334 do_jmp &= --(*(uint16_t *)&_regs.ecx) != 0;
3335 break;
3336 case 4:
3337 do_jmp &= --(*(uint32_t *)&_regs.ecx) != 0;
3338 _regs.ecx = (uint32_t)_regs.ecx; /* zero extend in x86/64 mode */
3339 break;
3340 default: /* case 8: */
3341 do_jmp &= --_regs.ecx != 0;
3342 break;
3344 if ( do_jmp )
3345 jmp_rel(rel);
3346 break;
3349 case 0xe3: /* jcxz/jecxz (short) */ {
        /* Branch if rCX (width per address size) is zero; rCX is NOT
         * decremented here, unlike the loop family above. */
3350 int rel = insn_fetch_type(int8_t);
3351 if ( (ad_bytes == 2) ? !(uint16_t)_regs.ecx :
3352 (ad_bytes == 4) ? !(uint32_t)_regs.ecx : !_regs.ecx )
3353 jmp_rel(rel);
3354 break;
3357 case 0xe4: /* in imm8,%al */
3358 case 0xe5: /* in imm8,%eax */
3359 case 0xe6: /* out %al,imm8 */
3360 case 0xe7: /* out %eax,imm8 */
3361 case 0xec: /* in %dx,%al */
3362 case 0xed: /* in %dx,%eax */
3363 case 0xee: /* out %al,%dx */
3364 case 0xef: /* out %eax,%dx */ {
        /* Port number comes from an imm8 for opcodes below 0xe8,
         * otherwise from %dx. */
3365 unsigned int port = ((b < 0xe8)
3366 ? insn_fetch_type(uint8_t)
3367 : (uint16_t)_regs.edx);
        /* Even opcodes are byte-sized; there is no 64-bit port I/O, so
         * an 8-byte operand size degrades to 4 bytes. */
3368 op_bytes = !(b & 1) ? 1 : (op_bytes == 8) ? 4 : op_bytes;
        /* Check I/O permission (IOPL / TSS bitmap) before touching the
         * port. */
3369 if ( (rc = ioport_access_check(port, op_bytes, ctxt, ops)) != 0 )
3370 goto done;
        /* Bit 1 of the opcode distinguishes out (set) from in (clear). */
3371 if ( b & 2 )
3373 /* out */
3374 fail_if(ops->write_io == NULL);
3375 rc = ops->write_io(port, op_bytes, _regs.eax, ctxt);
3377 else
3379 /* in */
3380 dst.type = OP_REG;
3381 dst.bytes = op_bytes;
3382 dst.reg = (unsigned long *)&_regs.eax;
3383 fail_if(ops->read_io == NULL);
3384 rc = ops->read_io(port, dst.bytes, &dst.val, ctxt);
3386 if ( rc != 0 )
3387 goto done;
3388 break;
3391 case 0xe8: /* call (near) */ {
3392 int rel = (((op_bytes == 2) && !mode_64bit())
3393 ? (int32_t)insn_fetch_type(int16_t)
3394 : insn_fetch_type(int32_t));
        /* In long mode the return address is always pushed as 8 bytes. */
3395 op_bytes = mode_64bit() ? 8 : op_bytes;
        /* Save the return EIP; the shared 'push' path writes src.val to
         * the stack. */
3396 src.val = _regs.eip;
3397 jmp_rel(rel);
3398 goto push;
3401 case 0xe9: /* jmp (near) */ {
3402 int rel = (((op_bytes == 2) && !mode_64bit())
3403 ? (int32_t)insn_fetch_type(int16_t)
3404 : insn_fetch_type(int32_t));
3405 jmp_rel(rel);
3406 break;
3409 case 0xea: /* jmp (far, absolute) */ {
3410 uint16_t sel;
3411 uint32_t eip;
        /* Direct far jumps are invalid in 64-bit mode: raise #UD. */
3412 generate_exception_if(mode_64bit(), EXC_UD, -1);
3413 eip = insn_fetch_bytes(op_bytes);
3414 sel = insn_fetch_type(uint16_t);
        /* Load CS with the new selector before updating EIP. */
3415 if ( (rc = load_seg(x86_seg_cs, sel, ctxt, ops)) != 0 )
3416 goto done;
3417 _regs.eip = eip;
3418 break;
3421 case 0xeb: /* jmp (short) */ {
3422 int rel = insn_fetch_type(int8_t);
3423 jmp_rel(rel);
3424 break;
3427 case 0xf1: /* int1 (icebp) */
        /* Deliver #DB through the shared software-interrupt path. */
3428 src.val = EXC_DB;
3429 goto swint;
3431 case 0xf4: /* hlt */
        /* Report the halt to the caller via the retire info rather than
         * blocking inside the emulator. */
3432 ctxt->retire.flags.hlt = 1;
3433 break;
3435 case 0xf5: /* cmc */
3436 _regs.eflags ^= EFLG_CF;
3437 break;
3439 case 0xf8: /* clc */
3440 _regs.eflags &= ~EFLG_CF;
3441 break;
3443 case 0xf9: /* stc */
3444 _regs.eflags |= EFLG_CF;
3445 break;
3447 case 0xfa: /* cli */
3448 generate_exception_if(!mode_iopl(), EXC_GP, 0);
3449 _regs.eflags &= ~EFLG_IF;
3450 break;
3452 case 0xfb: /* sti */
3453 generate_exception_if(!mode_iopl(), EXC_GP, 0);
        /* Only a 0 -> 1 transition of IF arms the one-instruction STI
         * interrupt shadow, signalled back via retire.flags.sti. */
3454 if ( !(_regs.eflags & EFLG_IF) )
3456 _regs.eflags |= EFLG_IF;
3457 ctxt->retire.flags.sti = 1;
3459 break;
3461 case 0xfc: /* cld */
3462 _regs.eflags &= ~EFLG_DF;
3463 break;
3465 case 0xfd: /* std */
3466 _regs.eflags |= EFLG_DF;
3467 break;
3469 goto writeback;
3471 twobyte_insn:
3472 switch ( b )
3474 case 0x40 ... 0x4f: /* cmovcc */
        /* The move is computed unconditionally; writeback is suppressed
         * (OP_NONE) when the condition is false. */
3475 dst.val = src.val;
3476 if ( !test_cc(b, _regs.eflags) )
3477 dst.type = OP_NONE;
3478 break;
3480 case 0x90 ... 0x9f: /* setcc */
3481 dst.val = test_cc(b, _regs.eflags);
3482 break;
3484 case 0xb0 ... 0xb1: /* cmpxchg */
3485 /* Save real source value, then compare EAX against destination. */
3486 src.orig_val = src.val;
3487 src.val = _regs.eax;
3488 emulate_2op_SrcV("cmp", src, dst, _regs.eflags);
3489 if ( _regs.eflags & EFLG_ZF )
3491 /* Success: write back to memory. */
3492 dst.val = src.orig_val;
3494 else
3496 /* Failure: write the value we saw to EAX. */
3497 dst.type = OP_REG;
3498 dst.reg = (unsigned long *)&_regs.eax;
3500 break;
3502 case 0xa3: bt: /* bt */
        /* bt only affects flags (CF); no destination writeback. */
3503 emulate_2op_SrcV_nobyte("bt", src, dst, _regs.eflags);
3504 dst.type = OP_NONE;
3505 break;
3507 case 0xa4: /* shld imm8,r,r/m */
3508 case 0xa5: /* shld %%cl,r,r/m */
3509 case 0xac: /* shrd imm8,r,r/m */
3510 case 0xad: /* shrd %%cl,r,r/m */ {
        /* Double-precision shift: the count comes from %cl (odd
         * opcodes) or an imm8, masked modulo the operand width.  A zero
         * count leaves destination and flags untouched. */
3511 uint8_t shift, width = dst.bytes << 3;
3512 shift = (b & 1) ? (uint8_t)_regs.ecx : insn_fetch_type(uint8_t);
3513 if ( (shift &= width - 1) == 0 )
3514 break;
3515 dst.orig_val = truncate_word(dst.val, dst.bytes);
        /* NOTE(review): after the masking above, shift can never equal
         * width, so the (shift == width) arm looks unreachable --
         * presumably a remnant of an earlier variant; harmless. */
3516 dst.val = ((shift == width) ? src.val :
3517 (b & 8) ?
3518 /* shrd */
3519 ((dst.orig_val >> shift) |
3520 truncate_word(src.val << (width - shift), dst.bytes)) :
3521 /* shld */
3522 ((dst.orig_val << shift) |
3523 ((src.val >> (width - shift)) & ((1ull << shift) - 1))));
3524 dst.val = truncate_word(dst.val, dst.bytes);
        /* Recompute CF (last bit shifted out), OF (sign change), and
         * the SF/ZF/PF result flags from the final value. */
3525 _regs.eflags &= ~(EFLG_OF|EFLG_SF|EFLG_ZF|EFLG_PF|EFLG_CF);
3526 if ( (dst.val >> ((b & 8) ? (shift - 1) : (width - shift))) & 1 )
3527 _regs.eflags |= EFLG_CF;
3528 if ( ((dst.val ^ dst.orig_val) >> (width - 1)) & 1 )
3529 _regs.eflags |= EFLG_OF;
3530 _regs.eflags |= ((dst.val >> (width - 1)) & 1) ? EFLG_SF : 0;
3531 _regs.eflags |= (dst.val == 0) ? EFLG_ZF : 0;
3532 _regs.eflags |= even_parity(dst.val) ? EFLG_PF : 0;
3533 break;
3536 case 0xb3: btr: /* btr */
3537 emulate_2op_SrcV_nobyte("btr", src, dst, _regs.eflags);
3538 break;
3540 case 0xab: bts: /* bts */
3541 emulate_2op_SrcV_nobyte("bts", src, dst, _regs.eflags);
3542 break;
3544 case 0xaf: /* imul */
3545 _regs.eflags &= ~(EFLG_OF|EFLG_CF);
        /* Widen both operands, multiply, and set OF/CF if the product
         * does not fit the destination width. */
3546 switch ( dst.bytes )
3548 case 2:
3549 dst.val = ((uint32_t)(int16_t)src.val *
3550 (uint32_t)(int16_t)dst.val);
3551 if ( (int16_t)dst.val != (uint32_t)dst.val )
3552 _regs.eflags |= EFLG_OF|EFLG_CF;
3553 break;
3554 #ifdef __x86_64__
3555 case 4:
3556 dst.val = ((uint64_t)(int32_t)src.val *
3557 (uint64_t)(int32_t)dst.val);
3558 if ( (int32_t)dst.val != dst.val )
3559 _regs.eflags |= EFLG_OF|EFLG_CF;
3560 break;
3561 #endif
        /* Full-width multiply: no wider type available, so use the
         * imul_dbl() helper, which reports overflow. */
3562 default: {
3563 unsigned long m[2] = { src.val, dst.val };
3564 if ( imul_dbl(m) )
3565 _regs.eflags |= EFLG_OF|EFLG_CF;
3566 dst.val = m[0];
3567 break;
3570 break;
        /* lss/lfs/lgs: stash the target segment id in dst.val and share
         * the les code path for the actual far-pointer load. */
3572 case 0xb2: /* lss */
3573 dst.val = x86_seg_ss;
3574 goto les;
3576 case 0xb4: /* lfs */
3577 dst.val = x86_seg_fs;
3578 goto les;
3580 case 0xb5: /* lgs */
3581 dst.val = x86_seg_gs;
3582 goto les;
3584 case 0xb6: /* movzx rm8,r{16,32,64} */
3585 /* Recompute DstReg as we may have decoded AH/BH/CH/DH. */
3586 dst.reg = decode_register(modrm_reg, &_regs, 0);
3587 dst.bytes = op_bytes;
3588 dst.val = (uint8_t)src.val;
3589 break;
3591 case 0xbc: /* bsf */ {
3592 int zf;
3593 asm ( "bsf %2,%0; setz %b1"
3594 : "=r" (dst.val), "=q" (zf)
3595 : "r" (src.val), "1" (0) );
        /* A zero source sets ZF and leaves the destination unwritten
         * (OP_NONE), matching hardware's "undefined" result. */
3596 _regs.eflags &= ~EFLG_ZF;
3597 if ( zf )
3599 _regs.eflags |= EFLG_ZF;
3600 dst.type = OP_NONE;
3602 break;
3605 case 0xbd: /* bsr */ {
3606 int zf;
3607 asm ( "bsr %2,%0; setz %b1"
3608 : "=r" (dst.val), "=q" (zf)
3609 : "r" (src.val), "1" (0) );
        /* Same zero-source handling as bsf above. */
3610 _regs.eflags &= ~EFLG_ZF;
3611 if ( zf )
3613 _regs.eflags |= EFLG_ZF;
3614 dst.type = OP_NONE;
3616 break;
3619 case 0xb7: /* movzx rm16,r{16,32,64} */
3620 dst.val = (uint16_t)src.val;
3621 break;
3623 case 0xbb: btc: /* btc */
3624 emulate_2op_SrcV_nobyte("btc", src, dst, _regs.eflags);
3625 break;
3627 case 0xba: /* Grp8 */
        /* Grp8: reg field selects bt/bts/btr/btc with an imm8 bit
         * offset; reg values 0-3 are undefined -> #UD. */
3628 switch ( modrm_reg & 7 )
3630 case 4: goto bt;
3631 case 5: goto bts;
3632 case 6: goto btr;
3633 case 7: goto btc;
3634 default: generate_exception_if(1, EXC_UD, -1);
3636 break;
3638 case 0xbe: /* movsx rm8,r{16,32,64} */
3639 /* Recompute DstReg as we may have decoded AH/BH/CH/DH. */
3640 dst.reg = decode_register(modrm_reg, &_regs, 0);
3641 dst.bytes = op_bytes;
3642 dst.val = (int8_t)src.val;
3643 break;
3645 case 0xbf: /* movsx rm16,r{16,32,64} */
3646 dst.val = (int16_t)src.val;
3647 break;
3649 case 0xc0 ... 0xc1: /* xadd */
3650 /* Write back the register source. */
        /* xadd: the old destination value goes into the source
         * register, then the shared 'add' path computes dst + src and
         * the arithmetic flags. */
3651 switch ( dst.bytes )
3653 case 1: *(uint8_t *)src.reg = (uint8_t)dst.val; break;
3654 case 2: *(uint16_t *)src.reg = (uint16_t)dst.val; break;
3655 case 4: *src.reg = (uint32_t)dst.val; break; /* 64b reg: zero-extend */
3656 case 8: *src.reg = dst.val; break;
3658 goto add;
3660 goto writeback;
3662 twobyte_special_insn:
3663 switch ( b )
3665 case 0x01: /* Grp7 */ {
3666 struct segment_register reg;
3667 unsigned long base, limit, cr0, cr0w;
        /* modrm 0xdf encodes invlpga (register form, address in rAX);
         * all other register forms fall through to the reg-field
         * dispatch below. */
3669 if ( modrm == 0xdf ) /* invlpga */
3671 generate_exception_if(!in_protmode(ctxt, ops), EXC_UD, -1);
3672 generate_exception_if(!mode_ring0(), EXC_GP, 0);
3673 fail_if(ops->invlpg == NULL);
3674 if ( (rc = ops->invlpg(x86_seg_none, truncate_ea(_regs.eax),
3675 ctxt)) )
3676 goto done;
3677 break;
3680 switch ( modrm_reg & 7 )
3682 case 0: /* sgdt */
3683 case 1: /* sidt */
3684 generate_exception_if(ea.type != OP_MEM, EXC_UD, -1);
3685 fail_if(ops->read_segment == NULL);
3686 if ( (rc = ops->read_segment((modrm_reg & 1) ?
3687 x86_seg_idtr : x86_seg_gdtr,
3688 &reg, ctxt)) )
3689 goto done;
        /* 16-bit operand size stores only 24 bits of base. */
3690 if ( op_bytes == 2 )
3691 reg.base &= 0xffffff;
        /* Descriptor-table memory image: 2-byte limit then the base. */
3692 if ( (rc = ops->write(ea.mem.seg, ea.mem.off+0,
3693 &reg.limit, 2, ctxt)) ||
3694 (rc = ops->write(ea.mem.seg, ea.mem.off+2,
3695 &reg.base, mode_64bit() ? 8 : 4, ctxt)) )
3696 goto done;
3697 break;
3698 case 2: /* lgdt */
3699 case 3: /* lidt */
3700 generate_exception_if(ea.type != OP_MEM, EXC_UD, -1);
3701 fail_if(ops->write_segment == NULL);
3702 memset(&reg, 0, sizeof(reg));
3703 if ( (rc = read_ulong(ea.mem.seg, ea.mem.off+0,
3704 &limit, 2, ctxt, ops)) ||
3705 (rc = read_ulong(ea.mem.seg, ea.mem.off+2,
3706 &base, mode_64bit() ? 8 : 4, ctxt, ops)) )
3707 goto done;
3708 reg.base = base;
3709 reg.limit = limit;
3710 if ( op_bytes == 2 )
3711 reg.base &= 0xffffff;
3712 if ( (rc = ops->write_segment((modrm_reg & 1) ?
3713 x86_seg_idtr : x86_seg_gdtr,
3714 &reg, ctxt)) )
3715 goto done;
3716 break;
3717 case 4: /* smsw */
        /* Store the low word of CR0; always 2 bytes to memory, but a
         * register destination keeps its decoded width. */
3718 if ( ea.type == OP_MEM )
3719 ea.bytes = 2;
3720 dst = ea;
3721 fail_if(ops->read_cr == NULL);
3722 if ( (rc = ops->read_cr(0, &dst.val, ctxt)) )
3723 goto done;
3724 d |= Mov; /* force writeback */
3725 break;
3726 case 6: /* lmsw */
3727 fail_if(ops->read_cr == NULL);
3728 fail_if(ops->write_cr == NULL);
3729 if ( (rc = ops->read_cr(0, &cr0, ctxt)) )
3730 goto done;
3731 if ( ea.type == OP_REG )
3732 cr0w = *ea.reg;
3733 else if ( (rc = read_ulong(ea.mem.seg, ea.mem.off,
3734 &cr0w, 2, ctxt, ops)) )
3735 goto done;
3736 /* LMSW can: (1) set bits 0-3; (2) clear bits 1-3. */
3737 cr0 = (cr0 & ~0xe) | (cr0w & 0xf);
3738 if ( (rc = ops->write_cr(0, cr0, ctxt)) )
3739 goto done;
3740 break;
3741 case 7: /* invlpg */
3742 generate_exception_if(!mode_ring0(), EXC_GP, 0);
3743 generate_exception_if(ea.type != OP_MEM, EXC_UD, -1);
3744 fail_if(ops->invlpg == NULL);
3745 if ( (rc = ops->invlpg(ea.mem.seg, ea.mem.off, ctxt)) )
3746 goto done;
3747 break;
3748 default:
3749 goto cannot_emulate;
3751 break;
3754 case 0x06: /* clts */
        /* Clear CR0.TS (bit 3, hence the ~8 mask); ring-0 only. */
3755 generate_exception_if(!mode_ring0(), EXC_GP, 0);
3756 fail_if((ops->read_cr == NULL) || (ops->write_cr == NULL));
3757 if ( (rc = ops->read_cr(0, &dst.val, ctxt)) ||
3758 (rc = ops->write_cr(0, dst.val&~8, ctxt)) )
3759 goto done;
3760 break;
        /* invd is handled via the same wbinvd callback. */
3762 case 0x08: /* invd */
3763 case 0x09: /* wbinvd */
3764 generate_exception_if(!mode_ring0(), EXC_GP, 0);
3765 fail_if(ops->wbinvd == NULL);
3766 if ( (rc = ops->wbinvd(ctxt)) != 0 )
3767 goto done;
3768 break;
        /* Prefetch hints and AMD multi-byte nops: nothing to do. */
3770 case 0x0d: /* GrpP (prefetch) */
3771 case 0x18: /* Grp16 (prefetch/nop) */
3772 case 0x19 ... 0x1f: /* nop (amd-defined) */
3773 break;
3775 case 0x20: /* mov cr,reg */
3776 case 0x21: /* mov dr,reg */
3777 case 0x22: /* mov reg,cr */
3778 case 0x23: /* mov reg,dr */
3779 generate_exception_if(ea.type != OP_REG, EXC_UD, -1);
3780 generate_exception_if(!mode_ring0(), EXC_GP, 0);
        /* NOTE(review): folding the LOCK prefix into the register
         * number presumably implements AMD's LOCK-prefix encoding of
         * CR8 access -- confirm against the AMD APM. */
3781 modrm_reg |= lock_prefix << 3;
        /* Bit 1 of the opcode selects write-to (set) vs read-from
         * (clear); bit 0 selects DR (set) vs CR (clear). */
3782 if ( b & 2 )
3784 /* Write to CR/DR. */
3785 src.val = *(unsigned long *)decode_register(modrm_rm, &_regs, 0);
3786 if ( !mode_64bit() )
3787 src.val = (uint32_t)src.val;
3788 rc = ((b & 1)
3789 ? (ops->write_dr
3790 ? ops->write_dr(modrm_reg, src.val, ctxt)
3791 : X86EMUL_UNHANDLEABLE)
3792 : (ops->write_cr
3793 ? ops->write_cr(modrm_reg, src.val, ctxt)
3794 : X86EMUL_UNHANDLEABLE));
3796 else
3798 /* Read from CR/DR. */
3799 dst.type = OP_REG;
3800 dst.bytes = mode_64bit() ? 8 : 4;
3801 dst.reg = decode_register(modrm_rm, &_regs, 0);
3802 rc = ((b & 1)
3803 ? (ops->read_dr
3804 ? ops->read_dr(modrm_reg, &dst.val, ctxt)
3805 : X86EMUL_UNHANDLEABLE)
3806 : (ops->read_cr
3807 ? ops->read_cr(modrm_reg, &dst.val, ctxt)
3808 : X86EMUL_UNHANDLEABLE));
3810 if ( rc != 0 )
3811 goto done;
3812 break;
3814 case 0x30: /* wrmsr */ {
        /* MSR index in ECX; value assembled from EDX:EAX. */
3815 uint64_t val = ((uint64_t)_regs.edx << 32) | (uint32_t)_regs.eax;
3816 generate_exception_if(!mode_ring0(), EXC_GP, 0);
3817 fail_if(ops->write_msr == NULL);
3818 if ( (rc = ops->write_msr((uint32_t)_regs.ecx, val, ctxt)) != 0 )
3819 goto done;
3820 break;
3823 case 0x31: /* rdtsc */ {
3824 unsigned long cr4;
3825 uint64_t val;
        /* CR4.TSD restricts rdtsc to ring 0. */
3826 fail_if(ops->read_cr == NULL);
3827 if ( (rc = ops->read_cr(4, &cr4, ctxt)) )
3828 goto done;
3829 generate_exception_if((cr4 & CR4_TSD) && !mode_ring0(), EXC_GP, 0);
3830 fail_if(ops->read_msr == NULL);
3831 if ( (rc = ops->read_msr(MSR_TSC, &val, ctxt)) != 0 )
3832 goto done;
3833 _regs.edx = (uint32_t)(val >> 32);
3834 _regs.eax = (uint32_t)(val >> 0);
3835 break;
3838 case 0x32: /* rdmsr */ {
3839 uint64_t val;
3840 generate_exception_if(!mode_ring0(), EXC_GP, 0);
3841 fail_if(ops->read_msr == NULL);
3842 if ( (rc = ops->read_msr((uint32_t)_regs.ecx, &val, ctxt)) != 0 )
3843 goto done;
        /* Result returned split across EDX:EAX. */
3844 _regs.edx = (uint32_t)(val >> 32);
3845 _regs.eax = (uint32_t)(val >> 0);
3846 break;
3849 case 0x6f: /* movq mm/m64,mm */ {
        /* Execute a 4-byte stub (the insn followed by ret).  For a
         * memory operand the modrm is rewritten to (%eax) and %eax is
         * pointed at a local 64-bit buffer holding the fetched value. */
3850 uint8_t stub[] = { 0x0f, 0x6f, modrm, 0xc3 };
3851 struct fpu_insn_ctxt fic = { .insn_bytes = sizeof(stub)-1 };
3852 uint64_t val;
3853 if ( ea.type == OP_MEM )
3855 unsigned long lval, hval;
        /* Read the 64-bit operand as two 32-bit halves. */
3856 if ( (rc = read_ulong(ea.mem.seg, ea.mem.off+0,
3857 &lval, 4, ctxt, ops)) ||
3858 (rc = read_ulong(ea.mem.seg, ea.mem.off+4,
3859 &hval, 4, ctxt, ops)) )
3860 goto done;
3861 val = ((uint64_t)hval << 32) | (uint32_t)lval;
3862 stub[2] = modrm & 0x38; /* movq (%eax),%mmN */
3864 get_fpu(X86EMUL_FPU_mmx, &fic);
3865 asm volatile ( "call *%0" : : "r" (stub), "a" (&val) : "memory" );
3866 put_fpu(&fic);
3867 break;
3870 case 0x7f: /* movq mm,mm/m64 */ {
        /* Store direction: same stub technique, then write the buffer
         * back to guest memory as two 32-bit halves. */
3871 uint8_t stub[] = { 0x0f, 0x7f, modrm, 0xc3 };
3872 struct fpu_insn_ctxt fic = { .insn_bytes = sizeof(stub)-1 };
3873 uint64_t val;
3874 if ( ea.type == OP_MEM )
3875 stub[2] = modrm & 0x38; /* movq %mmN,(%eax) */
3876 get_fpu(X86EMUL_FPU_mmx, &fic);
3877 asm volatile ( "call *%0" : : "r" (stub), "a" (&val) : "memory" );
3878 put_fpu(&fic);
3879 if ( ea.type == OP_MEM )
3881 unsigned long lval = (uint32_t)val, hval = (uint32_t)(val >> 32);
3882 if ( (rc = ops->write(ea.mem.seg, ea.mem.off+0, &lval, 4, ctxt)) ||
3883 (rc = ops->write(ea.mem.seg, ea.mem.off+4, &hval, 4, ctxt)) )
3884 goto done;
3886 break;
3889 case 0x80 ... 0x8f: /* jcc (near) */ {
3890 int rel = (((op_bytes == 2) && !mode_64bit())
3891 ? (int32_t)insn_fetch_type(int16_t)
3892 : insn_fetch_type(int32_t));
3893 if ( test_cc(b, _regs.eflags) )
3894 jmp_rel(rel);
3895 break;
        /* push/pop %fs and %gs share the generic segment push/pop
         * paths, passing the segment id in src.val. */
3898 case 0xa0: /* push %%fs */
3899 src.val = x86_seg_fs;
3900 goto push_seg;
3902 case 0xa1: /* pop %%fs */
3903 src.val = x86_seg_fs;
3904 goto pop_seg;
3906 case 0xa2: /* cpuid */ {
3907 unsigned int eax = _regs.eax, ebx = _regs.ebx;
3908 unsigned int ecx = _regs.ecx, edx = _regs.edx;
3909 fail_if(ops->cpuid == NULL);
3910 if ( (rc = ops->cpuid(&eax, &ebx, &ecx, &edx, ctxt)) != 0 )
3911 goto done;
3912 _regs.eax = eax; _regs.ebx = ebx;
3913 _regs.ecx = ecx; _regs.edx = edx;
3914 break;
3917 case 0xa8: /* push %%gs */
3918 src.val = x86_seg_gs;
3919 goto push_seg;
3921 case 0xa9: /* pop %%gs */
3922 src.val = x86_seg_gs;
3923 goto pop_seg;
3925 case 0xc7: /* Grp9 (cmpxchg8b/cmpxchg16b) */ {
3926 unsigned long old[2], exp[2], new[2];
3927 unsigned int i;
3929 generate_exception_if((modrm_reg & 7) != 1, EXC_UD, -1);
3930 generate_exception_if(ea.type != OP_MEM, EXC_UD, -1);
        /* After doubling, op_bytes is 8 (cmpxchg8b) or 16 (cmpxchg16b). */
3931 op_bytes *= 2;
3933 /* Get actual old value. */
3934 for ( i = 0; i < (op_bytes/sizeof(long)); i++ )
3935 if ( (rc = read_ulong(ea.mem.seg, ea.mem.off + i*sizeof(long),
3936 &old[i], sizeof(long), ctxt, ops)) != 0 )
3937 goto done;
3939 /* Get expected and proposed values. */
3940 if ( op_bytes == 8 )
3942 ((uint32_t *)exp)[0] = _regs.eax; ((uint32_t *)exp)[1] = _regs.edx;
3943 ((uint32_t *)new)[0] = _regs.ebx; ((uint32_t *)new)[1] = _regs.ecx;
3945 else
3947 exp[0] = _regs.eax; exp[1] = _regs.edx;
3948 new[0] = _regs.ebx; new[1] = _regs.ecx;
3951 if ( memcmp(old, exp, op_bytes) )
3953 /* Expected != actual: store actual to rDX:rAX and clear ZF. */
3954 _regs.eax = (op_bytes == 8) ? ((uint32_t *)old)[0] : old[0];
3955 _regs.edx = (op_bytes == 8) ? ((uint32_t *)old)[1] : old[1];
3956 _regs.eflags &= ~EFLG_ZF;
3958 else
3960 /* Expected == actual: attempt atomic cmpxchg and set ZF. */
3961 if ( (rc = ops->cmpxchg(ea.mem.seg, ea.mem.off, old,
3962 new, op_bytes, ctxt)) != 0 )
3963 goto done;
3964 _regs.eflags |= EFLG_ZF;
3966 break;
3969 case 0xc8 ... 0xcf: /* bswap */
3970 dst.type = OP_REG;
        /* REX.B extends the 3-bit register number encoded in the
         * opcode itself. */
3971 dst.reg = decode_register(
3972 (b & 7) | ((rex_prefix & 1) << 3), &_regs, 0);
3973 switch ( dst.bytes = op_bytes )
3975 default: /* case 2: */
3976 /* Undefined behaviour. Writes zero on all tested CPUs. */
3977 dst.val = 0;
3978 break;
3979 case 4:
3980 #ifdef __x86_64__
3981 asm ( "bswap %k0" : "=r" (dst.val) : "0" (*dst.reg) );
3982 break;
3983 case 8:
3984 #endif
3985 asm ( "bswap %0" : "=r" (dst.val) : "0" (*dst.reg) );
3986 break;
3988 break;
3990 goto writeback;
3992 cannot_emulate:
        /* Decode hit an instruction form this emulator does not
         * implement: hand the problem back to the caller. */
3993 return X86EMUL_UNHANDLEABLE;