ia64/xen-unstable

view xen/arch/x86/x86_emulate/x86_emulate.c @ 19848:5839491bbf20

[IA64] replace MAX_VCPUS with d->max_vcpus where necessary.

Don't use MAX_VCPUS; use vcpu::max_vcpus instead.
The changeset of 2f9e1348aa98 introduced max_vcpus to allow more vcpus
per guest. This patch is the ia64 counterpart.

Signed-off-by: Isaku Yamahata <yamahata@valinux.co.jp>
author Isaku Yamahata <yamahata@valinux.co.jp>
date Mon Jun 29 11:26:05 2009 +0900 (2009-06-29)
parents fe68405201d2
children
line source
1 /******************************************************************************
2 * x86_emulate.c
3 *
4 * Generic x86 (32-bit and 64-bit) instruction decoder and emulator.
5 *
6 * Copyright (c) 2005-2007 Keir Fraser
7 * Copyright (c) 2005-2007 XenSource Inc.
8 *
9 * This program is free software; you can redistribute it and/or modify
10 * it under the terms of the GNU General Public License as published by
11 * the Free Software Foundation; either version 2 of the License, or
12 * (at your option) any later version.
13 *
14 * This program is distributed in the hope that it will be useful,
15 * but WITHOUT ANY WARRANTY; without even the implied warranty of
16 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
17 * GNU General Public License for more details.
18 *
19 * You should have received a copy of the GNU General Public License
20 * along with this program; if not, write to the Free Software
21 * Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
22 */
24 /* Operand sizes: 8-bit operands or specified/overridden size. */
25 #define ByteOp (1<<0) /* 8-bit operands. */
26 /* Destination operand type. */
27 #define DstNone (0<<1) /* No destination operand. */
28 #define DstImplicit (0<<1) /* Destination operand is implicit in the opcode. */
29 #define DstBitBase (1<<1) /* Memory operand, bit string. */
30 #define DstReg (2<<1) /* Register operand. */
31 #define DstEax DstReg /* Register EAX (aka DstReg with no ModRM) */
32 #define DstMem (3<<1) /* Memory operand. */
33 #define DstMask (3<<1)
34 /* Source operand type. */
35 #define SrcInvalid (0<<3) /* Unimplemented opcode. */
36 #define SrcNone (1<<3) /* No source operand. */
37 #define SrcImplicit (1<<3) /* Source operand is implicit in the opcode. */
38 #define SrcReg (2<<3) /* Register operand. */
39 #define SrcMem (3<<3) /* Memory operand. */
40 #define SrcMem16 (4<<3) /* Memory operand (16-bit). */
41 #define SrcImm (5<<3) /* Immediate operand. */
42 #define SrcImmByte (6<<3) /* 8-bit sign-extended immediate operand. */
43 #define SrcMask (7<<3)
44 /* Generic ModRM decode. */
45 #define ModRM (1<<6)
46 /* Destination is only written; never read. */
47 #define Mov (1<<7)
48 /* All operands are implicit in the opcode. */
49 #define ImplicitOps (DstImplicit|SrcImplicit)
51 static uint8_t opcode_table[256] = {
52 /* 0x00 - 0x07 */
53 ByteOp|DstMem|SrcReg|ModRM, DstMem|SrcReg|ModRM,
54 ByteOp|DstReg|SrcMem|ModRM, DstReg|SrcMem|ModRM,
55 ByteOp|DstEax|SrcImm, DstEax|SrcImm, ImplicitOps, ImplicitOps,
56 /* 0x08 - 0x0F */
57 ByteOp|DstMem|SrcReg|ModRM, DstMem|SrcReg|ModRM,
58 ByteOp|DstReg|SrcMem|ModRM, DstReg|SrcMem|ModRM,
59 ByteOp|DstEax|SrcImm, DstEax|SrcImm, ImplicitOps, 0,
60 /* 0x10 - 0x17 */
61 ByteOp|DstMem|SrcReg|ModRM, DstMem|SrcReg|ModRM,
62 ByteOp|DstReg|SrcMem|ModRM, DstReg|SrcMem|ModRM,
63 ByteOp|DstEax|SrcImm, DstEax|SrcImm, ImplicitOps, ImplicitOps,
64 /* 0x18 - 0x1F */
65 ByteOp|DstMem|SrcReg|ModRM, DstMem|SrcReg|ModRM,
66 ByteOp|DstReg|SrcMem|ModRM, DstReg|SrcMem|ModRM,
67 ByteOp|DstEax|SrcImm, DstEax|SrcImm, ImplicitOps, ImplicitOps,
68 /* 0x20 - 0x27 */
69 ByteOp|DstMem|SrcReg|ModRM, DstMem|SrcReg|ModRM,
70 ByteOp|DstReg|SrcMem|ModRM, DstReg|SrcMem|ModRM,
71 ByteOp|DstEax|SrcImm, DstEax|SrcImm, 0, ImplicitOps,
72 /* 0x28 - 0x2F */
73 ByteOp|DstMem|SrcReg|ModRM, DstMem|SrcReg|ModRM,
74 ByteOp|DstReg|SrcMem|ModRM, DstReg|SrcMem|ModRM,
75 ByteOp|DstEax|SrcImm, DstEax|SrcImm, 0, ImplicitOps,
76 /* 0x30 - 0x37 */
77 ByteOp|DstMem|SrcReg|ModRM, DstMem|SrcReg|ModRM,
78 ByteOp|DstReg|SrcMem|ModRM, DstReg|SrcMem|ModRM,
79 ByteOp|DstEax|SrcImm, DstEax|SrcImm, 0, ImplicitOps,
80 /* 0x38 - 0x3F */
81 ByteOp|DstMem|SrcReg|ModRM, DstMem|SrcReg|ModRM,
82 ByteOp|DstReg|SrcMem|ModRM, DstReg|SrcMem|ModRM,
83 ByteOp|DstEax|SrcImm, DstEax|SrcImm, 0, ImplicitOps,
84 /* 0x40 - 0x4F */
85 ImplicitOps, ImplicitOps, ImplicitOps, ImplicitOps,
86 ImplicitOps, ImplicitOps, ImplicitOps, ImplicitOps,
87 ImplicitOps, ImplicitOps, ImplicitOps, ImplicitOps,
88 ImplicitOps, ImplicitOps, ImplicitOps, ImplicitOps,
89 /* 0x50 - 0x5F */
90 ImplicitOps|Mov, ImplicitOps|Mov, ImplicitOps|Mov, ImplicitOps|Mov,
91 ImplicitOps|Mov, ImplicitOps|Mov, ImplicitOps|Mov, ImplicitOps|Mov,
92 ImplicitOps|Mov, ImplicitOps|Mov, ImplicitOps|Mov, ImplicitOps|Mov,
93 ImplicitOps|Mov, ImplicitOps|Mov, ImplicitOps|Mov, ImplicitOps|Mov,
94 /* 0x60 - 0x67 */
95 ImplicitOps, ImplicitOps, DstReg|SrcMem|ModRM, DstReg|SrcMem16|ModRM|Mov,
96 0, 0, 0, 0,
97 /* 0x68 - 0x6F */
98 ImplicitOps|Mov, DstReg|SrcImm|ModRM|Mov,
99 ImplicitOps|Mov, DstReg|SrcImmByte|ModRM|Mov,
100 ImplicitOps|Mov, ImplicitOps|Mov, ImplicitOps|Mov, ImplicitOps|Mov,
101 /* 0x70 - 0x77 */
102 ImplicitOps, ImplicitOps, ImplicitOps, ImplicitOps,
103 ImplicitOps, ImplicitOps, ImplicitOps, ImplicitOps,
104 /* 0x78 - 0x7F */
105 ImplicitOps, ImplicitOps, ImplicitOps, ImplicitOps,
106 ImplicitOps, ImplicitOps, ImplicitOps, ImplicitOps,
107 /* 0x80 - 0x87 */
108 ByteOp|DstMem|SrcImm|ModRM, DstMem|SrcImm|ModRM,
109 ByteOp|DstMem|SrcImm|ModRM, DstMem|SrcImmByte|ModRM,
110 ByteOp|DstMem|SrcReg|ModRM, DstMem|SrcReg|ModRM,
111 ByteOp|DstMem|SrcReg|ModRM, DstMem|SrcReg|ModRM,
112 /* 0x88 - 0x8F */
113 ByteOp|DstMem|SrcReg|ModRM|Mov, DstMem|SrcReg|ModRM|Mov,
114 ByteOp|DstReg|SrcMem|ModRM|Mov, DstReg|SrcMem|ModRM|Mov,
115 DstMem|SrcReg|ModRM|Mov, DstReg|SrcNone|ModRM,
116 DstReg|SrcMem|ModRM|Mov, DstMem|SrcNone|ModRM|Mov,
117 /* 0x90 - 0x97 */
118 ImplicitOps, ImplicitOps, ImplicitOps, ImplicitOps,
119 ImplicitOps, ImplicitOps, ImplicitOps, ImplicitOps,
120 /* 0x98 - 0x9F */
121 ImplicitOps, ImplicitOps, ImplicitOps, ImplicitOps,
122 ImplicitOps, ImplicitOps, ImplicitOps, ImplicitOps,
123 /* 0xA0 - 0xA7 */
124 ByteOp|ImplicitOps|Mov, ImplicitOps|Mov,
125 ByteOp|ImplicitOps|Mov, ImplicitOps|Mov,
126 ByteOp|ImplicitOps|Mov, ImplicitOps|Mov,
127 ByteOp|ImplicitOps, ImplicitOps,
128 /* 0xA8 - 0xAF */
129 ByteOp|DstEax|SrcImm, DstEax|SrcImm,
130 ByteOp|ImplicitOps|Mov, ImplicitOps|Mov,
131 ByteOp|ImplicitOps|Mov, ImplicitOps|Mov,
132 ByteOp|ImplicitOps, ImplicitOps,
133 /* 0xB0 - 0xB7 */
134 ByteOp|DstReg|SrcImm|Mov, ByteOp|DstReg|SrcImm|Mov,
135 ByteOp|DstReg|SrcImm|Mov, ByteOp|DstReg|SrcImm|Mov,
136 ByteOp|DstReg|SrcImm|Mov, ByteOp|DstReg|SrcImm|Mov,
137 ByteOp|DstReg|SrcImm|Mov, ByteOp|DstReg|SrcImm|Mov,
138 /* 0xB8 - 0xBF */
139 DstReg|SrcImm|Mov, DstReg|SrcImm|Mov, DstReg|SrcImm|Mov, DstReg|SrcImm|Mov,
140 DstReg|SrcImm|Mov, DstReg|SrcImm|Mov, DstReg|SrcImm|Mov, DstReg|SrcImm|Mov,
141 /* 0xC0 - 0xC7 */
142 ByteOp|DstMem|SrcImm|ModRM, DstMem|SrcImmByte|ModRM,
143 ImplicitOps, ImplicitOps,
144 DstReg|SrcMem|ModRM|Mov, DstReg|SrcMem|ModRM|Mov,
145 ByteOp|DstMem|SrcImm|ModRM|Mov, DstMem|SrcImm|ModRM|Mov,
146 /* 0xC8 - 0xCF */
147 ImplicitOps, ImplicitOps, ImplicitOps, ImplicitOps,
148 ImplicitOps, ImplicitOps, ImplicitOps, ImplicitOps,
149 /* 0xD0 - 0xD7 */
150 ByteOp|DstMem|SrcImplicit|ModRM, DstMem|SrcImplicit|ModRM,
151 ByteOp|DstMem|SrcImplicit|ModRM, DstMem|SrcImplicit|ModRM,
152 ImplicitOps, ImplicitOps, ImplicitOps, ImplicitOps,
153 /* 0xD8 - 0xDF */
154 ImplicitOps|ModRM|Mov, ImplicitOps|ModRM|Mov,
155 ImplicitOps|ModRM|Mov, ImplicitOps|ModRM|Mov,
156 ImplicitOps|ModRM|Mov, ImplicitOps|ModRM|Mov,
157 ImplicitOps|ModRM|Mov, ImplicitOps|ModRM|Mov,
158 /* 0xE0 - 0xE7 */
159 ImplicitOps, ImplicitOps, ImplicitOps, ImplicitOps,
160 ImplicitOps, ImplicitOps, ImplicitOps, ImplicitOps,
161 /* 0xE8 - 0xEF */
162 ImplicitOps, ImplicitOps, ImplicitOps, ImplicitOps,
163 ImplicitOps, ImplicitOps, ImplicitOps, ImplicitOps,
164 /* 0xF0 - 0xF7 */
165 0, ImplicitOps, 0, 0,
166 ImplicitOps, ImplicitOps,
167 ByteOp|DstMem|SrcNone|ModRM, DstMem|SrcNone|ModRM,
168 /* 0xF8 - 0xFF */
169 ImplicitOps, ImplicitOps, ImplicitOps, ImplicitOps,
170 ImplicitOps, ImplicitOps, ByteOp|DstMem|SrcNone|ModRM, DstMem|SrcNone|ModRM
171 };
173 static uint8_t twobyte_table[256] = {
174 /* 0x00 - 0x07 */
175 SrcMem16|ModRM, ImplicitOps|ModRM, 0, 0, 0, ImplicitOps, ImplicitOps, 0,
176 /* 0x08 - 0x0F */
177 ImplicitOps, ImplicitOps, 0, 0, 0, ImplicitOps|ModRM, 0, 0,
178 /* 0x10 - 0x17 */
179 0, 0, 0, 0, 0, 0, 0, 0,
180 /* 0x18 - 0x1F */
181 ImplicitOps|ModRM, ImplicitOps|ModRM, ImplicitOps|ModRM, ImplicitOps|ModRM,
182 ImplicitOps|ModRM, ImplicitOps|ModRM, ImplicitOps|ModRM, ImplicitOps|ModRM,
183 /* 0x20 - 0x27 */
184 ImplicitOps|ModRM, ImplicitOps|ModRM, ImplicitOps|ModRM, ImplicitOps|ModRM,
185 0, 0, 0, 0,
186 /* 0x28 - 0x2F */
187 0, 0, 0, 0, 0, 0, 0, 0,
188 /* 0x30 - 0x37 */
189 ImplicitOps, ImplicitOps, ImplicitOps, 0,
190 ImplicitOps, ImplicitOps, 0, 0,
191 /* 0x38 - 0x3F */
192 0, 0, 0, 0, 0, 0, 0, 0,
193 /* 0x40 - 0x47 */
194 DstReg|SrcMem|ModRM|Mov, DstReg|SrcMem|ModRM|Mov,
195 DstReg|SrcMem|ModRM|Mov, DstReg|SrcMem|ModRM|Mov,
196 DstReg|SrcMem|ModRM|Mov, DstReg|SrcMem|ModRM|Mov,
197 DstReg|SrcMem|ModRM|Mov, DstReg|SrcMem|ModRM|Mov,
198 /* 0x48 - 0x4F */
199 DstReg|SrcMem|ModRM|Mov, DstReg|SrcMem|ModRM|Mov,
200 DstReg|SrcMem|ModRM|Mov, DstReg|SrcMem|ModRM|Mov,
201 DstReg|SrcMem|ModRM|Mov, DstReg|SrcMem|ModRM|Mov,
202 DstReg|SrcMem|ModRM|Mov, DstReg|SrcMem|ModRM|Mov,
203 /* 0x50 - 0x5F */
204 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
205 /* 0x60 - 0x6F */
206 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, ImplicitOps|ModRM,
207 /* 0x70 - 0x7F */
208 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, ImplicitOps|ModRM,
209 /* 0x80 - 0x87 */
210 ImplicitOps, ImplicitOps, ImplicitOps, ImplicitOps,
211 ImplicitOps, ImplicitOps, ImplicitOps, ImplicitOps,
212 /* 0x88 - 0x8F */
213 ImplicitOps, ImplicitOps, ImplicitOps, ImplicitOps,
214 ImplicitOps, ImplicitOps, ImplicitOps, ImplicitOps,
215 /* 0x90 - 0x97 */
216 ByteOp|DstMem|SrcNone|ModRM|Mov, ByteOp|DstMem|SrcNone|ModRM|Mov,
217 ByteOp|DstMem|SrcNone|ModRM|Mov, ByteOp|DstMem|SrcNone|ModRM|Mov,
218 ByteOp|DstMem|SrcNone|ModRM|Mov, ByteOp|DstMem|SrcNone|ModRM|Mov,
219 ByteOp|DstMem|SrcNone|ModRM|Mov, ByteOp|DstMem|SrcNone|ModRM|Mov,
220 /* 0x98 - 0x9F */
221 ByteOp|DstMem|SrcNone|ModRM|Mov, ByteOp|DstMem|SrcNone|ModRM|Mov,
222 ByteOp|DstMem|SrcNone|ModRM|Mov, ByteOp|DstMem|SrcNone|ModRM|Mov,
223 ByteOp|DstMem|SrcNone|ModRM|Mov, ByteOp|DstMem|SrcNone|ModRM|Mov,
224 ByteOp|DstMem|SrcNone|ModRM|Mov, ByteOp|DstMem|SrcNone|ModRM|Mov,
225 /* 0xA0 - 0xA7 */
226 ImplicitOps, ImplicitOps, ImplicitOps, DstBitBase|SrcReg|ModRM,
227 DstMem|SrcReg|ModRM, DstMem|SrcReg|ModRM, 0, 0,
228 /* 0xA8 - 0xAF */
229 ImplicitOps, ImplicitOps, 0, DstBitBase|SrcReg|ModRM,
230 DstMem|SrcReg|ModRM, DstMem|SrcReg|ModRM, 0, DstReg|SrcMem|ModRM,
231 /* 0xB0 - 0xB7 */
232 ByteOp|DstMem|SrcReg|ModRM, DstMem|SrcReg|ModRM,
233 DstReg|SrcMem|ModRM|Mov, DstBitBase|SrcReg|ModRM,
234 DstReg|SrcMem|ModRM|Mov, DstReg|SrcMem|ModRM|Mov,
235 ByteOp|DstReg|SrcMem|ModRM|Mov, DstReg|SrcMem16|ModRM|Mov,
236 /* 0xB8 - 0xBF */
237 0, 0, DstBitBase|SrcImmByte|ModRM, DstBitBase|SrcReg|ModRM,
238 DstReg|SrcMem|ModRM, DstReg|SrcMem|ModRM,
239 ByteOp|DstReg|SrcMem|ModRM|Mov, DstReg|SrcMem16|ModRM|Mov,
240 /* 0xC0 - 0xC7 */
241 ByteOp|DstMem|SrcReg|ModRM, DstMem|SrcReg|ModRM,
242 0, DstMem|SrcReg|ModRM|Mov,
243 0, 0, 0, ImplicitOps|ModRM,
244 /* 0xC8 - 0xCF */
245 ImplicitOps, ImplicitOps, ImplicitOps, ImplicitOps,
246 ImplicitOps, ImplicitOps, ImplicitOps, ImplicitOps,
247 /* 0xD0 - 0xDF */
248 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
249 /* 0xE0 - 0xEF */
250 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
251 /* 0xF0 - 0xFF */
252 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0
253 };
255 /* Type, address-of, and value of an instruction's operand. */
256 struct operand {
257 enum { OP_REG, OP_MEM, OP_IMM, OP_NONE } type;
258 unsigned int bytes;
260 /* Up to 128-byte operand value, addressable as ulong or uint32_t[]. */
261 union {
262 unsigned long val;
263 uint32_t bigval[4];
264 };
266 /* Up to 128-byte operand value, addressable as ulong or uint32_t[]. */
267 union {
268 unsigned long orig_val;
269 uint32_t orig_bigval[4];
270 };
272 union {
273 /* OP_REG: Pointer to register field. */
274 unsigned long *reg;
275 /* OP_MEM: Segment and offset. */
276 struct {
277 enum x86_segment seg;
278 unsigned long off;
279 } mem;
280 };
281 };
283 /* MSRs. */
284 #define MSR_TSC 0x00000010
285 #define MSR_SYSENTER_CS 0x00000174
286 #define MSR_SYSENTER_ESP 0x00000175
287 #define MSR_SYSENTER_EIP 0x00000176
288 #define MSR_EFER 0xc0000080
289 #define EFER_SCE (1u<<0)
290 #define EFER_LMA (1u<<10)
291 #define MSR_STAR 0xc0000081
292 #define MSR_LSTAR 0xc0000082
293 #define MSR_CSTAR 0xc0000083
294 #define MSR_FMASK 0xc0000084
296 /* Control register flags. */
297 #define CR0_PE (1<<0)
298 #define CR4_TSD (1<<2)
300 /* EFLAGS bit definitions. */
301 #define EFLG_VIP (1<<20)
302 #define EFLG_VIF (1<<19)
303 #define EFLG_AC (1<<18)
304 #define EFLG_VM (1<<17)
305 #define EFLG_RF (1<<16)
306 #define EFLG_NT (1<<14)
307 #define EFLG_IOPL (3<<12)
308 #define EFLG_OF (1<<11)
309 #define EFLG_DF (1<<10)
310 #define EFLG_IF (1<<9)
311 #define EFLG_TF (1<<8)
312 #define EFLG_SF (1<<7)
313 #define EFLG_ZF (1<<6)
314 #define EFLG_AF (1<<4)
315 #define EFLG_PF (1<<2)
316 #define EFLG_CF (1<<0)
318 /* Exception definitions. */
319 #define EXC_DE 0
320 #define EXC_DB 1
321 #define EXC_BP 3
322 #define EXC_OF 4
323 #define EXC_BR 5
324 #define EXC_UD 6
325 #define EXC_TS 10
326 #define EXC_NP 11
327 #define EXC_SS 12
328 #define EXC_GP 13
329 #define EXC_PF 14
330 #define EXC_MF 16
332 /*
333 * Instruction emulation:
334 * Most instructions are emulated directly via a fragment of inline assembly
335 * code. This allows us to save/restore EFLAGS and thus very easily pick up
336 * any modified flags.
337 */
339 #if defined(__x86_64__)
340 #define _LO32 "k" /* force 32-bit operand */
341 #define _STK "%%rsp" /* stack pointer */
342 #define _BYTES_PER_LONG "8"
343 #elif defined(__i386__)
344 #define _LO32 "" /* force 32-bit operand */
345 #define _STK "%%esp" /* stack pointer */
346 #define _BYTES_PER_LONG "4"
347 #endif
349 /*
350 * These EFLAGS bits are restored from saved value during emulation, and
351 * any changes are written back to the saved value after emulation.
352 */
353 #define EFLAGS_MASK (EFLG_OF|EFLG_SF|EFLG_ZF|EFLG_AF|EFLG_PF|EFLG_CF)
355 /* Before executing instruction: restore necessary bits in EFLAGS. */
356 #define _PRE_EFLAGS(_sav, _msk, _tmp) \
357 /* EFLAGS = (_sav & _msk) | (EFLAGS & ~_msk); _sav &= ~_msk; */ \
358 "movl %"_sav",%"_LO32 _tmp"; " \
359 "push %"_tmp"; " \
360 "push %"_tmp"; " \
361 "movl %"_msk",%"_LO32 _tmp"; " \
362 "andl %"_LO32 _tmp",("_STK"); " \
363 "pushf; " \
364 "notl %"_LO32 _tmp"; " \
365 "andl %"_LO32 _tmp",("_STK"); " \
366 "andl %"_LO32 _tmp",2*"_BYTES_PER_LONG"("_STK"); " \
367 "pop %"_tmp"; " \
368 "orl %"_LO32 _tmp",("_STK"); " \
369 "popf; " \
370 "pop %"_sav"; "
372 /* After executing instruction: write-back necessary bits in EFLAGS. */
373 #define _POST_EFLAGS(_sav, _msk, _tmp) \
374 /* _sav |= EFLAGS & _msk; */ \
375 "pushf; " \
376 "pop %"_tmp"; " \
377 "andl %"_msk",%"_LO32 _tmp"; " \
378 "orl %"_LO32 _tmp",%"_sav"; "
380 /* Raw emulation: instruction has two explicit operands. */
381 #define __emulate_2op_nobyte(_op,_src,_dst,_eflags,_wx,_wy,_lx,_ly,_qx,_qy)\
382 do{ unsigned long _tmp; \
383 switch ( (_dst).bytes ) \
384 { \
385 case 2: \
386 asm volatile ( \
387 _PRE_EFLAGS("0","4","2") \
388 _op"w %"_wx"3,%1; " \
389 _POST_EFLAGS("0","4","2") \
390 : "=m" (_eflags), "=m" ((_dst).val), "=&r" (_tmp) \
391 : _wy ((_src).val), "i" (EFLAGS_MASK), \
392 "m" (_eflags), "m" ((_dst).val) ); \
393 break; \
394 case 4: \
395 asm volatile ( \
396 _PRE_EFLAGS("0","4","2") \
397 _op"l %"_lx"3,%1; " \
398 _POST_EFLAGS("0","4","2") \
399 : "=m" (_eflags), "=m" ((_dst).val), "=&r" (_tmp) \
400 : _ly ((_src).val), "i" (EFLAGS_MASK), \
401 "m" (_eflags), "m" ((_dst).val) ); \
402 break; \
403 case 8: \
404 __emulate_2op_8byte(_op, _src, _dst, _eflags, _qx, _qy); \
405 break; \
406 } \
407 } while (0)
408 #define __emulate_2op(_op,_src,_dst,_eflags,_bx,_by,_wx,_wy,_lx,_ly,_qx,_qy)\
409 do{ unsigned long _tmp; \
410 switch ( (_dst).bytes ) \
411 { \
412 case 1: \
413 asm volatile ( \
414 _PRE_EFLAGS("0","4","2") \
415 _op"b %"_bx"3,%1; " \
416 _POST_EFLAGS("0","4","2") \
417 : "=m" (_eflags), "=m" ((_dst).val), "=&r" (_tmp) \
418 : _by ((_src).val), "i" (EFLAGS_MASK), \
419 "m" (_eflags), "m" ((_dst).val) ); \
420 break; \
421 default: \
422 __emulate_2op_nobyte(_op,_src,_dst,_eflags,_wx,_wy,_lx,_ly,_qx,_qy);\
423 break; \
424 } \
425 } while (0)
426 /* Source operand is byte-sized and may be restricted to just %cl. */
427 #define emulate_2op_SrcB(_op, _src, _dst, _eflags) \
428 __emulate_2op(_op, _src, _dst, _eflags, \
429 "b", "c", "b", "c", "b", "c", "b", "c")
430 /* Source operand is byte, word, long or quad sized. */
431 #define emulate_2op_SrcV(_op, _src, _dst, _eflags) \
432 __emulate_2op(_op, _src, _dst, _eflags, \
433 "b", "q", "w", "r", _LO32, "r", "", "r")
434 /* Source operand is word, long or quad sized. */
435 #define emulate_2op_SrcV_nobyte(_op, _src, _dst, _eflags) \
436 __emulate_2op_nobyte(_op, _src, _dst, _eflags, \
437 "w", "r", _LO32, "r", "", "r")
439 /* Instruction has only one explicit operand (no source operand). */
440 #define emulate_1op(_op,_dst,_eflags) \
441 do{ unsigned long _tmp; \
442 switch ( (_dst).bytes ) \
443 { \
444 case 1: \
445 asm volatile ( \
446 _PRE_EFLAGS("0","3","2") \
447 _op"b %1; " \
448 _POST_EFLAGS("0","3","2") \
449 : "=m" (_eflags), "=m" ((_dst).val), "=&r" (_tmp) \
450 : "i" (EFLAGS_MASK), "m" (_eflags), "m" ((_dst).val) ); \
451 break; \
452 case 2: \
453 asm volatile ( \
454 _PRE_EFLAGS("0","3","2") \
455 _op"w %1; " \
456 _POST_EFLAGS("0","3","2") \
457 : "=m" (_eflags), "=m" ((_dst).val), "=&r" (_tmp) \
458 : "i" (EFLAGS_MASK), "m" (_eflags), "m" ((_dst).val) ); \
459 break; \
460 case 4: \
461 asm volatile ( \
462 _PRE_EFLAGS("0","3","2") \
463 _op"l %1; " \
464 _POST_EFLAGS("0","3","2") \
465 : "=m" (_eflags), "=m" ((_dst).val), "=&r" (_tmp) \
466 : "i" (EFLAGS_MASK), "m" (_eflags), "m" ((_dst).val) ); \
467 break; \
468 case 8: \
469 __emulate_1op_8byte(_op, _dst, _eflags); \
470 break; \
471 } \
472 } while (0)
474 /* Emulate an instruction with quadword operands (x86/64 only). */
475 #if defined(__x86_64__)
476 #define __emulate_2op_8byte(_op, _src, _dst, _eflags, _qx, _qy) \
477 do{ asm volatile ( \
478 _PRE_EFLAGS("0","4","2") \
479 _op"q %"_qx"3,%1; " \
480 _POST_EFLAGS("0","4","2") \
481 : "=m" (_eflags), "=m" ((_dst).val), "=&r" (_tmp) \
482 : _qy ((_src).val), "i" (EFLAGS_MASK), \
483 "m" (_eflags), "m" ((_dst).val) ); \
484 } while (0)
485 #define __emulate_1op_8byte(_op, _dst, _eflags) \
486 do{ asm volatile ( \
487 _PRE_EFLAGS("0","3","2") \
488 _op"q %1; " \
489 _POST_EFLAGS("0","3","2") \
490 : "=m" (_eflags), "=m" ((_dst).val), "=&r" (_tmp) \
491 : "i" (EFLAGS_MASK), "m" (_eflags), "m" ((_dst).val) ); \
492 } while (0)
493 #elif defined(__i386__)
494 #define __emulate_2op_8byte(_op, _src, _dst, _eflags, _qx, _qy)
495 #define __emulate_1op_8byte(_op, _dst, _eflags)
496 #endif /* __i386__ */
498 /* Fetch next part of the instruction being emulated. */
499 #define insn_fetch_bytes(_size) \
500 ({ unsigned long _x = 0, _eip = _regs.eip; \
501 if ( !mode_64bit() ) _eip = (uint32_t)_eip; /* ignore upper dword */ \
502 _regs.eip += (_size); /* real hardware doesn't truncate */ \
503 generate_exception_if((uint8_t)(_regs.eip - ctxt->regs->eip) > 15, \
504 EXC_GP, 0); \
505 rc = ops->insn_fetch(x86_seg_cs, _eip, &_x, (_size), ctxt); \
506 if ( rc ) goto done; \
507 _x; \
508 })
509 #define insn_fetch_type(_type) ((_type)insn_fetch_bytes(sizeof(_type)))
511 #define truncate_word(ea, byte_width) \
512 ({ unsigned long __ea = (ea); \
513 unsigned int _width = (byte_width); \
514 ((_width == sizeof(unsigned long)) ? __ea : \
515 (__ea & ((1UL << (_width << 3)) - 1))); \
516 })
517 #define truncate_ea(ea) truncate_word((ea), ad_bytes)
519 #define mode_64bit() (def_ad_bytes == 8)
521 #define fail_if(p) \
522 do { \
523 rc = (p) ? X86EMUL_UNHANDLEABLE : X86EMUL_OKAY; \
524 if ( rc ) goto done; \
525 } while (0)
527 #define generate_exception_if(p, e, ec) \
528 ({ if ( (p) ) { \
529 fail_if(ops->inject_hw_exception == NULL); \
530 rc = ops->inject_hw_exception(e, ec, ctxt) ? : X86EMUL_EXCEPTION; \
531 goto done; \
532 } \
533 })
/*
 * Given byte has even parity (even number of 1s)? SDM Vol. 1 Sec. 3.4.3.1,
 * "Status Flags": EFLAGS.PF reflects parity of least-sig. byte of result only.
 */
static int even_parity(uint8_t v)
{
    /* Fold all eight bits down into bit 0, which becomes their XOR. */
    v ^= v >> 4;
    v ^= v >> 2;
    v ^= v >> 1;
    /* XOR of all bits is 0 exactly when the popcount is even. */
    return !(v & 1);
}
545 /* Update address held in a register, based on addressing mode. */
546 #define _register_address_increment(reg, inc, byte_width) \
547 do { \
548 int _inc = (inc); /* signed type ensures sign extension to long */ \
549 unsigned int _width = (byte_width); \
550 if ( _width == sizeof(unsigned long) ) \
551 (reg) += _inc; \
552 else if ( mode_64bit() ) \
553 (reg) = ((reg) + _inc) & ((1UL << (_width << 3)) - 1); \
554 else \
555 (reg) = ((reg) & ~((1UL << (_width << 3)) - 1)) | \
556 (((reg) + _inc) & ((1UL << (_width << 3)) - 1)); \
557 } while (0)
558 #define register_address_increment(reg, inc) \
559 _register_address_increment((reg), (inc), ad_bytes)
561 #define sp_pre_dec(dec) ({ \
562 _register_address_increment(_regs.esp, -(dec), ctxt->sp_size/8); \
563 truncate_word(_regs.esp, ctxt->sp_size/8); \
564 })
565 #define sp_post_inc(inc) ({ \
566 unsigned long __esp = truncate_word(_regs.esp, ctxt->sp_size/8); \
567 _register_address_increment(_regs.esp, (inc), ctxt->sp_size/8); \
568 __esp; \
569 })
571 #define jmp_rel(rel) \
572 do { \
573 int _rel = (int)(rel); \
574 _regs.eip += _rel; \
575 if ( !mode_64bit() ) \
576 _regs.eip = ((op_bytes == 2) \
577 ? (uint16_t)_regs.eip : (uint32_t)_regs.eip); \
578 } while (0)
580 struct fpu_insn_ctxt {
581 uint8_t insn_bytes;
582 uint8_t exn_raised;
583 };
/*
 * Callback handed to ops->get_fpu() (see the get_fpu() macro below): invoked
 * when an emulated FPU instruction raises an exception.  Records the fact in
 * the context and advances EIP past the faulting stub by the recorded
 * instruction length.
 */
static void fpu_handle_exception(void *_fic, struct cpu_user_regs *regs)
{
    struct fpu_insn_ctxt *fic = _fic;
    fic->exn_raised = 1;
    regs->eip += fic->insn_bytes;
}
592 #define get_fpu(_type, _fic) \
593 do{ (_fic)->exn_raised = 0; \
594 fail_if(ops->get_fpu == NULL); \
595 rc = ops->get_fpu(fpu_handle_exception, _fic, _type, ctxt); \
596 if ( rc ) goto done; \
597 } while (0)
598 #define put_fpu(_fic) \
599 do{ \
600 if ( ops->put_fpu != NULL ) \
601 ops->put_fpu(ctxt); \
602 generate_exception_if((_fic)->exn_raised, EXC_MF, -1); \
603 } while (0)
605 #define emulate_fpu_insn(_op) \
606 do{ struct fpu_insn_ctxt fic; \
607 get_fpu(X86EMUL_FPU_fpu, &fic); \
608 asm volatile ( \
609 "movb $2f-1f,%0 \n" \
610 "1: " _op " \n" \
611 "2: \n" \
612 : "=m" (fic.insn_bytes) : : "memory" ); \
613 put_fpu(&fic); \
614 } while (0)
616 #define emulate_fpu_insn_memdst(_op, _arg) \
617 do{ struct fpu_insn_ctxt fic; \
618 get_fpu(X86EMUL_FPU_fpu, &fic); \
619 asm volatile ( \
620 "movb $2f-1f,%0 \n" \
621 "1: " _op " %1 \n" \
622 "2: \n" \
623 : "=m" (fic.insn_bytes), "=m" (_arg) \
624 : : "memory" ); \
625 put_fpu(&fic); \
626 } while (0)
628 #define emulate_fpu_insn_memsrc(_op, _arg) \
629 do{ struct fpu_insn_ctxt fic; \
630 get_fpu(X86EMUL_FPU_fpu, &fic); \
631 asm volatile ( \
632 "movb $2f-1f,%0 \n" \
633 "1: " _op " %1 \n" \
634 "2: \n" \
635 : "=m" (fic.insn_bytes) \
636 : "m" (_arg) : "memory" ); \
637 put_fpu(&fic); \
638 } while (0)
640 #define emulate_fpu_insn_stub(_bytes...) \
641 do{ uint8_t stub[] = { _bytes, 0xc3 }; \
642 struct fpu_insn_ctxt fic = { .insn_bytes = sizeof(stub)-1 }; \
643 get_fpu(X86EMUL_FPU_fpu, &fic); \
644 (*(void(*)(void))stub)(); \
645 put_fpu(&fic); \
646 } while (0)
648 static unsigned long __get_rep_prefix(
649 struct cpu_user_regs *int_regs,
650 struct cpu_user_regs *ext_regs,
651 int ad_bytes)
652 {
653 unsigned long ecx = ((ad_bytes == 2) ? (uint16_t)int_regs->ecx :
654 (ad_bytes == 4) ? (uint32_t)int_regs->ecx :
655 int_regs->ecx);
657 /* Skip the instruction if no repetitions are required. */
658 if ( ecx == 0 )
659 ext_regs->eip = int_regs->eip;
661 return ecx;
662 }
664 #define get_rep_prefix() ({ \
665 unsigned long max_reps = 1; \
666 if ( rep_prefix ) \
667 max_reps = __get_rep_prefix(&_regs, ctxt->regs, ad_bytes); \
668 if ( max_reps == 0 ) \
669 goto done; \
670 max_reps; \
671 })
673 static void __put_rep_prefix(
674 struct cpu_user_regs *int_regs,
675 struct cpu_user_regs *ext_regs,
676 int ad_bytes,
677 unsigned long reps_completed)
678 {
679 unsigned long ecx = ((ad_bytes == 2) ? (uint16_t)int_regs->ecx :
680 (ad_bytes == 4) ? (uint32_t)int_regs->ecx :
681 int_regs->ecx);
683 /* Reduce counter appropriately, and repeat instruction if non-zero. */
684 ecx -= reps_completed;
685 if ( ecx != 0 )
686 int_regs->eip = ext_regs->eip;
688 if ( ad_bytes == 2 )
689 *(uint16_t *)&int_regs->ecx = ecx;
690 else if ( ad_bytes == 4 )
691 int_regs->ecx = (uint32_t)ecx;
692 else
693 int_regs->ecx = ecx;
694 }
696 #define put_rep_prefix(reps_completed) ({ \
697 if ( rep_prefix ) \
698 __put_rep_prefix(&_regs, ctxt->regs, ad_bytes, reps_completed); \
699 })
701 /* Clip maximum repetitions so that the index register only just wraps. */
702 #define truncate_ea_and_reps(ea, reps, bytes_per_rep) ({ \
703 unsigned long __todo = (ctxt->regs->eflags & EFLG_DF) ? (ea) : ~(ea); \
704 __todo = truncate_word(__todo, ad_bytes); \
705 __todo = (__todo / (bytes_per_rep)) + 1; \
706 (reps) = (__todo < (reps)) ? __todo : (reps); \
707 truncate_word((ea), ad_bytes); \
708 })
/* Compatibility function: read guest memory, zero-extend result to a ulong. */
static int read_ulong(
    enum x86_segment seg,
    unsigned long offset,
    unsigned long *val,
    unsigned int bytes,
    struct x86_emulate_ctxt *ctxt,
    struct x86_emulate_ops *ops)
{
    /* Clear the whole word first so a read of bytes < sizeof(long) leaves
     * the upper bytes zero (ops->read only fills 'bytes' bytes). */
    *val = 0;
    return ops->read(seg, offset, val, bytes, ctxt);
}
/*
 * Unsigned multiplication with double-word result.
 * IN:  Multiplicand=m[0], Multiplier=m[1]
 * OUT: Return CF/OF (overflow status); Result=m[1]:m[0]
 */
static int mul_dbl(unsigned long m[2])
{
    int rc;
    /* 'mul' widens rAX * operand into rDX:rAX; 'seto' captures OF, which
     * the instruction sets (together with CF) iff the high half is used. */
    asm ( "mul %4; seto %b2"
          : "=a" (m[0]), "=d" (m[1]), "=q" (rc)
          : "0" (m[0]), "1" (m[1]), "2" (0) );
    return rc;
}
/*
 * Signed multiplication with double-word result.
 * IN:  Multiplicand=m[0], Multiplier=m[1]
 * OUT: Return CF/OF (overflow status); Result=m[1]:m[0]
 */
static int imul_dbl(unsigned long m[2])
{
    int rc;
    /* One-operand 'imul' widens rAX * operand into rDX:rAX; 'seto'
     * captures OF, set iff the result doesn't fit in the low half. */
    asm ( "imul %4; seto %b2"
          : "=a" (m[0]), "=d" (m[1]), "=q" (rc)
          : "0" (m[0]), "1" (m[1]), "2" (0) );
    return rc;
}
/*
 * Unsigned division of double-word dividend.
 * IN:  Dividend=u[1]:u[0], Divisor=v
 * OUT: Return 1: #DE
 *      Return 0: Quotient=u[0], Remainder=u[1]
 */
static int div_dbl(unsigned long u[2], unsigned long v)
{
    /* Pre-check the two #DE conditions of 'div' (divisor zero, quotient
     * overflow) — we must never let the real instruction fault. */
    if ( (v == 0) || (u[1] >= v) )
        return 1;
    asm ( "div %4"
          : "=a" (u[0]), "=d" (u[1])
          : "0" (u[0]), "1" (u[1]), "r" (v) );
    return 0;
}
/*
 * Signed division of double-word dividend.
 * IN:  Dividend=u[1]:u[0], Divisor=v
 * OUT: Return 1: #DE
 *      Return 0: Quotient=u[0], Remainder=u[1]
 * NB. We don't use idiv directly as it's moderately hard to work out
 * ahead of time whether it will #DE, which we cannot allow to happen.
 */
static int idiv_dbl(unsigned long u[2], unsigned long v)
{
    int negu = (long)u[1] < 0, negv = (long)v < 0;

    /* u = abs(u): two's-complement negation of the double word —
     * invert the high half, negate the low half, carry into the high
     * half when the low half was zero. */
    if ( negu )
    {
        u[1] = ~u[1];
        if ( (u[0] = -u[0]) == 0 )
            u[1]++;
    }

    /* abs(u) / abs(v) — unsigned helper also performs the #DE checks. */
    if ( div_dbl(u, negv ? -v : v) )
        return 1;

    /* Remainder has same sign as dividend. It cannot overflow. */
    if ( negu )
        u[1] = -u[1];

    /* Quotient is overflowed if sign bit is set (after sign fix-up). */
    if ( negu ^ negv )
    {
        if ( (long)u[0] >= 0 )
            u[0] = -u[0];
        else if ( (u[0] << 1) != 0 ) /* == 0x80...0 is okay */
            return 1;
    }
    else if ( (long)u[0] < 0 )
        return 1;

    return 0;
}
809 static int
810 test_cc(
811 unsigned int condition, unsigned int flags)
812 {
813 int rc = 0;
815 switch ( (condition & 15) >> 1 )
816 {
817 case 0: /* o */
818 rc |= (flags & EFLG_OF);
819 break;
820 case 1: /* b/c/nae */
821 rc |= (flags & EFLG_CF);
822 break;
823 case 2: /* z/e */
824 rc |= (flags & EFLG_ZF);
825 break;
826 case 3: /* be/na */
827 rc |= (flags & (EFLG_CF|EFLG_ZF));
828 break;
829 case 4: /* s */
830 rc |= (flags & EFLG_SF);
831 break;
832 case 5: /* p/pe */
833 rc |= (flags & EFLG_PF);
834 break;
835 case 7: /* le/ng */
836 rc |= (flags & EFLG_ZF);
837 /* fall through */
838 case 6: /* l/nge */
839 rc |= (!(flags & EFLG_SF) != !(flags & EFLG_OF));
840 break;
841 }
843 /* Odd condition identifiers (lsb == 1) have inverted sense. */
844 return (!!rc ^ (condition & 1));
845 }
/*
 * Current privilege level of the guest: always 3 in virtual-8086 mode,
 * otherwise the DPL of the current stack segment.  Returns -1 if %ss
 * cannot be read (no read_segment hook, or the hook fails).
 */
static int
get_cpl(
    struct x86_emulate_ctxt *ctxt,
    struct x86_emulate_ops *ops)
{
    struct segment_register reg;

    if ( ctxt->regs->eflags & EFLG_VM )
        return 3;

    if ( (ops->read_segment == NULL) ||
         ops->read_segment(x86_seg_ss, &reg, ctxt) )
        return -1;

    /* CPL == SS.DPL by architectural invariant. */
    return reg.attr.fields.dpl;
}
864 static int
865 _mode_iopl(
866 struct x86_emulate_ctxt *ctxt,
867 struct x86_emulate_ops *ops)
868 {
869 int cpl = get_cpl(ctxt, ops);
870 if ( cpl == -1 )
871 return -1;
872 return (cpl <= ((ctxt->regs->eflags >> 12) & 3));
873 }
875 #define mode_ring0() ({ \
876 int _cpl = get_cpl(ctxt, ops); \
877 fail_if(_cpl < 0); \
878 (_cpl == 0); \
879 })
880 #define mode_iopl() ({ \
881 int _iopl = _mode_iopl(ctxt, ops); \
882 fail_if(_iopl < 0); \
883 _iopl; \
884 })
/*
 * Check whether the guest may access I/O ports [first_port, first_port+bytes).
 * Access is allowed outright when CPL <= IOPL (outside virtual-8086 mode);
 * otherwise the TSS I/O permission bitmap is consulted and #GP(0) is
 * injected on denial.  NB. fail_if()/mode_iopl() can 'goto done' or return.
 */
static int ioport_access_check(
    unsigned int first_port,
    unsigned int bytes,
    struct x86_emulate_ctxt *ctxt,
    struct x86_emulate_ops *ops)
{
    unsigned long iobmp;
    struct segment_register tr;
    int rc = X86EMUL_OKAY;

    /* CPL <= IOPL short-circuits the bitmap check (not in VM86 mode). */
    if ( !(ctxt->regs->eflags & EFLG_VM) && mode_iopl() )
        return X86EMUL_OKAY;

    fail_if(ops->read_segment == NULL);
    if ( (rc = ops->read_segment(x86_seg_tr, &tr, ctxt)) != 0 )
        return rc;

    /* Ensure that the TSS is valid and has an io-bitmap-offset field.
     * (Type & 0xd) == 0x9 accepts 32-bit TSS, available or busy; the
     * limit must cover at least the 0x67 bytes of a 32-bit TSS. */
    if ( !tr.attr.fields.p ||
         ((tr.attr.fields.type & 0xd) != 0x9) ||
         (tr.limit < 0x67) )
        goto raise_exception;

    /* I/O-bitmap base offset is the 16-bit field at TSS offset 0x66. */
    if ( (rc = read_ulong(x86_seg_none, tr.base + 0x66,
                          &iobmp, 2, ctxt, ops)) )
        return rc;

    /* Ensure TSS includes two bytes including byte containing first port. */
    iobmp += first_port / 8;
    if ( tr.limit <= iobmp )
        goto raise_exception;

    /* Each set bit in the bitmap denies access to the corresponding port. */
    if ( (rc = read_ulong(x86_seg_none, tr.base + iobmp,
                          &iobmp, 2, ctxt, ops)) )
        return rc;
    if ( (iobmp & (((1<<bytes)-1) << (first_port&7))) != 0 )
        goto raise_exception;

 done:
    return rc;

 raise_exception:
    fail_if(ops->inject_hw_exception == NULL);
    return ops->inject_hw_exception(EXC_GP, 0, ctxt) ? : X86EMUL_EXCEPTION;
}
932 static int
933 in_realmode(
934 struct x86_emulate_ctxt *ctxt,
935 struct x86_emulate_ops *ops)
936 {
937 unsigned long cr0;
938 int rc;
940 if ( ops->read_cr == NULL )
941 return 0;
943 rc = ops->read_cr(0, &cr0, ctxt);
944 return (!rc && !(cr0 & CR0_PE));
945 }
947 static int
948 in_protmode(
949 struct x86_emulate_ctxt *ctxt,
950 struct x86_emulate_ops *ops)
951 {
952 return !(in_realmode(ctxt, ops) || (ctxt->regs->eflags & EFLG_VM));
953 }
955 static int
956 in_longmode(
957 struct x86_emulate_ctxt *ctxt,
958 struct x86_emulate_ops *ops)
959 {
960 uint64_t efer;
962 if (ops->read_msr == NULL)
963 return -1;
965 ops->read_msr(MSR_EFER, &efer, ctxt);
966 return !!(efer & EFER_LMA);
967 }
969 static int
970 realmode_load_seg(
971 enum x86_segment seg,
972 uint16_t sel,
973 struct x86_emulate_ctxt *ctxt,
974 struct x86_emulate_ops *ops)
975 {
976 struct segment_register reg;
977 int rc;
979 if ( (rc = ops->read_segment(seg, &reg, ctxt)) != 0 )
980 return rc;
982 reg.sel = sel;
983 reg.base = (uint32_t)sel << 4;
985 return ops->write_segment(seg, &reg, ctxt);
986 }
/*
 * Load segment register @seg with selector @sel using protected-mode
 * semantics: fetch and validate the descriptor from the GDT/LDT, perform
 * the per-segment-type privilege checks, set the Accessed (or TSS Busy)
 * bit atomically, and commit the unpacked register state. Raises #GP or
 * #NP (with @sel as error code) on validation failure.
 *
 * NOTE(review): brace-only lines appear to have been dropped from this
 * extract; the indentation below reflects the intended block structure --
 * verify against the pristine file before building.
 */
static int
protmode_load_seg(
    enum x86_segment seg,
    uint16_t sel,
    struct x86_emulate_ctxt *ctxt,
    struct x86_emulate_ops *ops)
{
    struct segment_register desctab, ss, segr;
    struct { uint32_t a, b; } desc;
    unsigned long val;
    uint8_t dpl, rpl, cpl;
    uint32_t new_desc_b, a_flag = 0x100;
    int rc, fault_type = EXC_GP;

    /* NULL selector? Loading CS/SS with NULL faults; others are nulled. */
    if ( (sel & 0xfffc) == 0 )
        if ( (seg == x86_seg_cs) || (seg == x86_seg_ss) )
            goto raise_exn;
        memset(&segr, 0, sizeof(segr));
        return ops->write_segment(seg, &segr, ctxt);

    /* System segment descriptors must reside in the GDT. */
    if ( !is_x86_user_segment(seg) && (sel & 4) )
        goto raise_exn;

    if ( (rc = ops->read_segment(x86_seg_ss, &ss, ctxt)) ||
         (rc = ops->read_segment((sel & 4) ? x86_seg_ldtr : x86_seg_gdtr,
                                 &desctab, ctxt)) )
        return rc;

    /* Check against descriptor table limit. */
    if ( ((sel & 0xfff8) + 7) > desctab.limit )
        goto raise_exn;

    /* Read the 8-byte descriptor; retry if the Accessed-bit update races. */
    do {
        if ( (rc = read_ulong(x86_seg_none, desctab.base + (sel & 0xfff8),
                              &val, 4, ctxt, ops)) )
            return rc;
        desc.a = val;
        if ( (rc = read_ulong(x86_seg_none, desctab.base + (sel & 0xfff8) + 4,
                              &val, 4, ctxt, ops)) )
            return rc;
        desc.b = val;

        /* Segment present in memory? If not, raise #NP rather than #GP. */
        if ( !(desc.b & (1u<<15)) )
            fault_type = EXC_NP;
            goto raise_exn;

        /* The S flag must match the segment class (S clear => system). */
        if ( (desc.b & (1u<<12)) == (!is_x86_user_segment(seg) << 12) )
            goto raise_exn;

        dpl = (desc.b >> 13) & 3;
        rpl = sel & 3;
        cpl = ss.attr.fields.dpl;

        switch ( seg )
        case x86_seg_cs:
            /* Code segment? */
            if ( !(desc.b & (1u<<11)) )
                goto raise_exn;
            /* Non-conforming segment: check DPL against RPL. */
            if ( ((desc.b & (6u<<9)) != (6u<<9)) && (dpl != rpl) )
                goto raise_exn;
            break;
        case x86_seg_ss:
            /* Writable data segment? */
            if ( (desc.b & (5u<<9)) != (1u<<9) )
                goto raise_exn;
            if ( (dpl != cpl) || (dpl != rpl) )
                goto raise_exn;
            break;
        case x86_seg_ldtr:
            /* LDT system segment? */
            if ( (desc.b & (15u<<8)) != (2u<<8) )
                goto raise_exn;
            /* LDT descriptors have no Accessed bit to set. */
            goto skip_accessed_flag;
        case x86_seg_tr:
            /* Available TSS system segment? */
            if ( (desc.b & (15u<<8)) != (9u<<8) )
                goto raise_exn;
            a_flag = 0x200; /* busy flag */
            break;
        default:
            /* Readable code or data segment? */
            if ( (desc.b & (5u<<9)) == (4u<<9) )
                goto raise_exn;
            /* Non-conforming segment: check DPL against RPL and CPL. */
            if ( ((desc.b & (6u<<9)) != (6u<<9)) &&
                 ((dpl < cpl) || (dpl < rpl)) )
                goto raise_exn;
            break;

        /* Ensure Accessed flag is set (atomically, via cmpxchg). */
        new_desc_b = desc.b | a_flag;
        rc = ((desc.b & a_flag) ? X86EMUL_OKAY :
              ops->cmpxchg(
                  x86_seg_none, desctab.base + (sel & 0xfff8) + 4,
                  &desc.b, &new_desc_b, 4, ctxt));
    } while ( rc == X86EMUL_CMPXCHG_FAILED );

    if ( rc )
        return rc;

    /* Force the Accessed flag in our local copy. */
    desc.b |= a_flag;

 skip_accessed_flag:
    /* Unpack base, attributes and limit from the descriptor words. */
    segr.base = (((desc.b << 0) & 0xff000000u) |
                 ((desc.b << 16) & 0x00ff0000u) |
                 ((desc.a >> 16) & 0x0000ffffu));
    segr.attr.bytes = (((desc.b >> 8) & 0x00ffu) |
                       ((desc.b >> 12) & 0x0f00u));
    segr.limit = (desc.b & 0x000f0000u) | (desc.a & 0x0000ffffu);
    /* Granularity bit set: limit is in 4kB units. */
    if ( segr.attr.fields.g )
        segr.limit = (segr.limit << 12) | 0xfffu;
    segr.sel = sel;
    return ops->write_segment(seg, &segr, ctxt);

 raise_exn:
    if ( ops->inject_hw_exception == NULL )
        return X86EMUL_UNHANDLEABLE;
    if ( (rc = ops->inject_hw_exception(fault_type, sel & 0xfffc, ctxt)) )
        return rc;
    return X86EMUL_EXCEPTION;
1122 static int
1123 load_seg(
1124 enum x86_segment seg,
1125 uint16_t sel,
1126 struct x86_emulate_ctxt *ctxt,
1127 struct x86_emulate_ops *ops)
1129 if ( (ops->read_segment == NULL) ||
1130 (ops->write_segment == NULL) )
1131 return X86EMUL_UNHANDLEABLE;
1133 if ( in_protmode(ctxt, ops) )
1134 return protmode_load_seg(seg, sel, ctxt, ops);
1136 return realmode_load_seg(seg, sel, ctxt, ops);
1139 void *
1140 decode_register(
1141 uint8_t modrm_reg, struct cpu_user_regs *regs, int highbyte_regs)
1143 void *p;
1145 switch ( modrm_reg )
1147 case 0: p = &regs->eax; break;
1148 case 1: p = &regs->ecx; break;
1149 case 2: p = &regs->edx; break;
1150 case 3: p = &regs->ebx; break;
1151 case 4: p = (highbyte_regs ?
1152 ((unsigned char *)&regs->eax + 1) :
1153 (unsigned char *)&regs->esp); break;
1154 case 5: p = (highbyte_regs ?
1155 ((unsigned char *)&regs->ecx + 1) :
1156 (unsigned char *)&regs->ebp); break;
1157 case 6: p = (highbyte_regs ?
1158 ((unsigned char *)&regs->edx + 1) :
1159 (unsigned char *)&regs->esi); break;
1160 case 7: p = (highbyte_regs ?
1161 ((unsigned char *)&regs->ebx + 1) :
1162 (unsigned char *)&regs->edi); break;
1163 #if defined(__x86_64__)
1164 case 8: p = &regs->r8; break;
1165 case 9: p = &regs->r9; break;
1166 case 10: p = &regs->r10; break;
1167 case 11: p = &regs->r11; break;
1168 case 12: p = &regs->r12; break;
1169 case 13: p = &regs->r13; break;
1170 case 14: p = &regs->r14; break;
1171 case 15: p = &regs->r15; break;
1172 #endif
1173 default: p = NULL; break;
1176 return p;
1179 #define decode_segment_failed x86_seg_tr
1180 enum x86_segment
1181 decode_segment(
1182 uint8_t modrm_reg)
1184 switch ( modrm_reg )
1186 case 0: return x86_seg_es;
1187 case 1: return x86_seg_cs;
1188 case 2: return x86_seg_ss;
1189 case 3: return x86_seg_ds;
1190 case 4: return x86_seg_fs;
1191 case 5: return x86_seg_gs;
1192 default: break;
1194 return decode_segment_failed;
1197 int
1198 x86_emulate(
1199 struct x86_emulate_ctxt *ctxt,
1200 struct x86_emulate_ops *ops)
1202 /* Shadow copy of register state. Committed on successful emulation. */
1203 struct cpu_user_regs _regs = *ctxt->regs;
1205 uint8_t b, d, sib, sib_index, sib_base, twobyte = 0, rex_prefix = 0;
1206 uint8_t modrm = 0, modrm_mod = 0, modrm_reg = 0, modrm_rm = 0;
1207 unsigned int op_bytes, def_op_bytes, ad_bytes, def_ad_bytes;
1208 #define REPE_PREFIX 1
1209 #define REPNE_PREFIX 2
1210 unsigned int lock_prefix = 0, rep_prefix = 0;
1211 int override_seg = -1, rc = X86EMUL_OKAY;
1212 struct operand src, dst;
1214 /*
1215 * Data operand effective address (usually computed from ModRM).
1216 * Default is a memory operand relative to segment DS.
1217 */
1218 struct operand ea = { .type = OP_MEM };
1219 ea.mem.seg = x86_seg_ds; /* gcc may reject anon union initializer */
1221 ctxt->retire.byte = 0;
1223 op_bytes = def_op_bytes = ad_bytes = def_ad_bytes = ctxt->addr_size/8;
1224 if ( op_bytes == 8 )
1226 op_bytes = def_op_bytes = 4;
1227 #ifndef __x86_64__
1228 return X86EMUL_UNHANDLEABLE;
1229 #endif
1232 /* Prefix bytes. */
1233 for ( ; ; )
1235 switch ( b = insn_fetch_type(uint8_t) )
1237 case 0x66: /* operand-size override */
1238 op_bytes = def_op_bytes ^ 6;
1239 break;
1240 case 0x67: /* address-size override */
1241 ad_bytes = def_ad_bytes ^ (mode_64bit() ? 12 : 6);
1242 break;
1243 case 0x2e: /* CS override */
1244 override_seg = x86_seg_cs;
1245 break;
1246 case 0x3e: /* DS override */
1247 override_seg = x86_seg_ds;
1248 break;
1249 case 0x26: /* ES override */
1250 override_seg = x86_seg_es;
1251 break;
1252 case 0x64: /* FS override */
1253 override_seg = x86_seg_fs;
1254 break;
1255 case 0x65: /* GS override */
1256 override_seg = x86_seg_gs;
1257 break;
1258 case 0x36: /* SS override */
1259 override_seg = x86_seg_ss;
1260 break;
1261 case 0xf0: /* LOCK */
1262 lock_prefix = 1;
1263 break;
1264 case 0xf2: /* REPNE/REPNZ */
1265 rep_prefix = REPNE_PREFIX;
1266 break;
1267 case 0xf3: /* REP/REPE/REPZ */
1268 rep_prefix = REPE_PREFIX;
1269 break;
1270 case 0x40 ... 0x4f: /* REX */
1271 if ( !mode_64bit() )
1272 goto done_prefixes;
1273 rex_prefix = b;
1274 continue;
1275 default:
1276 goto done_prefixes;
1279 /* Any legacy prefix after a REX prefix nullifies its effect. */
1280 rex_prefix = 0;
1282 done_prefixes:
1284 if ( rex_prefix & 8 ) /* REX.W */
1285 op_bytes = 8;
1287 /* Opcode byte(s). */
1288 d = opcode_table[b];
1289 if ( d == 0 )
1291 /* Two-byte opcode? */
1292 if ( b == 0x0f )
1294 twobyte = 1;
1295 b = insn_fetch_type(uint8_t);
1296 d = twobyte_table[b];
1299 /* Unrecognised? */
1300 if ( d == 0 )
1301 goto cannot_emulate;
1304 /* Lock prefix is allowed only on RMW instructions. */
1305 generate_exception_if((d & Mov) && lock_prefix, EXC_GP, 0);
1307 /* ModRM and SIB bytes. */
1308 if ( d & ModRM )
1310 modrm = insn_fetch_type(uint8_t);
1311 modrm_mod = (modrm & 0xc0) >> 6;
1312 modrm_reg = ((rex_prefix & 4) << 1) | ((modrm & 0x38) >> 3);
1313 modrm_rm = modrm & 0x07;
1315 if ( modrm_mod == 3 )
1317 modrm_rm |= (rex_prefix & 1) << 3;
1318 ea.type = OP_REG;
1319 ea.reg = decode_register(
1320 modrm_rm, &_regs, (d & ByteOp) && (rex_prefix == 0));
1322 else if ( ad_bytes == 2 )
1324 /* 16-bit ModR/M decode. */
1325 switch ( modrm_rm )
1327 case 0:
1328 ea.mem.off = _regs.ebx + _regs.esi;
1329 break;
1330 case 1:
1331 ea.mem.off = _regs.ebx + _regs.edi;
1332 break;
1333 case 2:
1334 ea.mem.seg = x86_seg_ss;
1335 ea.mem.off = _regs.ebp + _regs.esi;
1336 break;
1337 case 3:
1338 ea.mem.seg = x86_seg_ss;
1339 ea.mem.off = _regs.ebp + _regs.edi;
1340 break;
1341 case 4:
1342 ea.mem.off = _regs.esi;
1343 break;
1344 case 5:
1345 ea.mem.off = _regs.edi;
1346 break;
1347 case 6:
1348 if ( modrm_mod == 0 )
1349 break;
1350 ea.mem.seg = x86_seg_ss;
1351 ea.mem.off = _regs.ebp;
1352 break;
1353 case 7:
1354 ea.mem.off = _regs.ebx;
1355 break;
1357 switch ( modrm_mod )
1359 case 0:
1360 if ( modrm_rm == 6 )
1361 ea.mem.off = insn_fetch_type(int16_t);
1362 break;
1363 case 1:
1364 ea.mem.off += insn_fetch_type(int8_t);
1365 break;
1366 case 2:
1367 ea.mem.off += insn_fetch_type(int16_t);
1368 break;
1370 ea.mem.off = truncate_ea(ea.mem.off);
1372 else
1374 /* 32/64-bit ModR/M decode. */
1375 if ( modrm_rm == 4 )
1377 sib = insn_fetch_type(uint8_t);
1378 sib_index = ((sib >> 3) & 7) | ((rex_prefix << 2) & 8);
1379 sib_base = (sib & 7) | ((rex_prefix << 3) & 8);
1380 if ( sib_index != 4 )
1381 ea.mem.off = *(long*)decode_register(sib_index, &_regs, 0);
1382 ea.mem.off <<= (sib >> 6) & 3;
1383 if ( (modrm_mod == 0) && ((sib_base & 7) == 5) )
1384 ea.mem.off += insn_fetch_type(int32_t);
1385 else if ( sib_base == 4 )
1387 ea.mem.seg = x86_seg_ss;
1388 ea.mem.off += _regs.esp;
1389 if ( !twobyte && (b == 0x8f) )
1390 /* POP <rm> computes its EA post increment. */
1391 ea.mem.off += ((mode_64bit() && (op_bytes == 4))
1392 ? 8 : op_bytes);
1394 else if ( sib_base == 5 )
1396 ea.mem.seg = x86_seg_ss;
1397 ea.mem.off += _regs.ebp;
1399 else
1400 ea.mem.off += *(long*)decode_register(sib_base, &_regs, 0);
1402 else
1404 modrm_rm |= (rex_prefix & 1) << 3;
1405 ea.mem.off = *(long *)decode_register(modrm_rm, &_regs, 0);
1406 if ( (modrm_rm == 5) && (modrm_mod != 0) )
1407 ea.mem.seg = x86_seg_ss;
1409 switch ( modrm_mod )
1411 case 0:
1412 if ( (modrm_rm & 7) != 5 )
1413 break;
1414 ea.mem.off = insn_fetch_type(int32_t);
1415 if ( !mode_64bit() )
1416 break;
1417 /* Relative to RIP of next instruction. Argh! */
1418 ea.mem.off += _regs.eip;
1419 if ( (d & SrcMask) == SrcImm )
1420 ea.mem.off += (d & ByteOp) ? 1 :
1421 ((op_bytes == 8) ? 4 : op_bytes);
1422 else if ( (d & SrcMask) == SrcImmByte )
1423 ea.mem.off += 1;
1424 else if ( !twobyte && ((b & 0xfe) == 0xf6) &&
1425 ((modrm_reg & 7) <= 1) )
1426 /* Special case in Grp3: test has immediate operand. */
1427 ea.mem.off += (d & ByteOp) ? 1
1428 : ((op_bytes == 8) ? 4 : op_bytes);
1429 else if ( twobyte && ((b & 0xf7) == 0xa4) )
1430 /* SHLD/SHRD with immediate byte third operand. */
1431 ea.mem.off++;
1432 break;
1433 case 1:
1434 ea.mem.off += insn_fetch_type(int8_t);
1435 break;
1436 case 2:
1437 ea.mem.off += insn_fetch_type(int32_t);
1438 break;
1440 ea.mem.off = truncate_ea(ea.mem.off);
1444 if ( override_seg != -1 )
1445 ea.mem.seg = override_seg;
1447 /* Decode and fetch the source operand: register, memory or immediate. */
1448 switch ( d & SrcMask )
1450 case SrcNone: /* case SrcImplicit: */
1451 src.type = OP_NONE;
1452 break;
1453 case SrcReg:
1454 src.type = OP_REG;
1455 if ( d & ByteOp )
1457 src.reg = decode_register(modrm_reg, &_regs, (rex_prefix == 0));
1458 src.val = *(uint8_t *)src.reg;
1459 src.bytes = 1;
1461 else
1463 src.reg = decode_register(modrm_reg, &_regs, 0);
1464 switch ( (src.bytes = op_bytes) )
1466 case 2: src.val = *(uint16_t *)src.reg; break;
1467 case 4: src.val = *(uint32_t *)src.reg; break;
1468 case 8: src.val = *(uint64_t *)src.reg; break;
1471 break;
1472 case SrcMem16:
1473 ea.bytes = 2;
1474 goto srcmem_common;
1475 case SrcMem:
1476 ea.bytes = (d & ByteOp) ? 1 : op_bytes;
1477 srcmem_common:
1478 src = ea;
1479 if ( src.type == OP_REG )
1481 switch ( src.bytes )
1483 case 1: src.val = *(uint8_t *)src.reg; break;
1484 case 2: src.val = *(uint16_t *)src.reg; break;
1485 case 4: src.val = *(uint32_t *)src.reg; break;
1486 case 8: src.val = *(uint64_t *)src.reg; break;
1489 else if ( (rc = read_ulong(src.mem.seg, src.mem.off,
1490 &src.val, src.bytes, ctxt, ops)) )
1491 goto done;
1492 break;
1493 case SrcImm:
1494 src.type = OP_IMM;
1495 src.bytes = (d & ByteOp) ? 1 : op_bytes;
1496 if ( src.bytes == 8 ) src.bytes = 4;
1497 /* NB. Immediates are sign-extended as necessary. */
1498 switch ( src.bytes )
1500 case 1: src.val = insn_fetch_type(int8_t); break;
1501 case 2: src.val = insn_fetch_type(int16_t); break;
1502 case 4: src.val = insn_fetch_type(int32_t); break;
1504 break;
1505 case SrcImmByte:
1506 src.type = OP_IMM;
1507 src.bytes = 1;
1508 src.val = insn_fetch_type(int8_t);
1509 break;
1512 /* Decode and fetch the destination operand: register or memory. */
1513 switch ( d & DstMask )
1515 case DstNone: /* case DstImplicit: */
1516 /*
1517 * The only implicit-operands instructions allowed a LOCK prefix are
1518 * CMPXCHG{8,16}B, MOV CRn, MOV DRn.
1519 */
1520 generate_exception_if(
1521 lock_prefix &&
1522 ((b < 0x20) || (b > 0x23)) && /* MOV CRn/DRn */
1523 (b != 0xc7), /* CMPXCHG{8,16}B */
1524 EXC_GP, 0);
1525 dst.type = OP_NONE;
1526 break;
1528 case DstReg:
1529 generate_exception_if(lock_prefix, EXC_GP, 0);
1530 dst.type = OP_REG;
1531 if ( d & ByteOp )
1533 dst.reg = decode_register(modrm_reg, &_regs, (rex_prefix == 0));
1534 dst.val = *(uint8_t *)dst.reg;
1535 dst.bytes = 1;
1537 else
1539 dst.reg = decode_register(modrm_reg, &_regs, 0);
1540 switch ( (dst.bytes = op_bytes) )
1542 case 2: dst.val = *(uint16_t *)dst.reg; break;
1543 case 4: dst.val = *(uint32_t *)dst.reg; break;
1544 case 8: dst.val = *(uint64_t *)dst.reg; break;
1547 break;
1548 case DstBitBase:
1549 if ( ((d & SrcMask) == SrcImmByte) || (ea.type == OP_REG) )
1551 src.val &= (op_bytes << 3) - 1;
1553 else
1555 /*
1556 * EA += BitOffset DIV op_bytes*8
1557 * BitOffset = BitOffset MOD op_bytes*8
1558 * DIV truncates towards negative infinity.
1559 * MOD always produces a positive result.
1560 */
1561 if ( op_bytes == 2 )
1562 src.val = (int16_t)src.val;
1563 else if ( op_bytes == 4 )
1564 src.val = (int32_t)src.val;
1565 if ( (long)src.val < 0 )
1567 unsigned long byte_offset;
1568 byte_offset = op_bytes + (((-src.val-1) >> 3) & ~(op_bytes-1));
1569 ea.mem.off -= byte_offset;
1570 src.val = (byte_offset << 3) + src.val;
1572 else
1574 ea.mem.off += (src.val >> 3) & ~(op_bytes - 1);
1575 src.val &= (op_bytes << 3) - 1;
1578 /* Becomes a normal DstMem operation from here on. */
1579 d = (d & ~DstMask) | DstMem;
1580 case DstMem:
1581 ea.bytes = (d & ByteOp) ? 1 : op_bytes;
1582 dst = ea;
1583 if ( dst.type == OP_REG )
1585 generate_exception_if(lock_prefix, EXC_GP, 0);
1586 switch ( dst.bytes )
1588 case 1: dst.val = *(uint8_t *)dst.reg; break;
1589 case 2: dst.val = *(uint16_t *)dst.reg; break;
1590 case 4: dst.val = *(uint32_t *)dst.reg; break;
1591 case 8: dst.val = *(uint64_t *)dst.reg; break;
1594 else if ( !(d & Mov) ) /* optimisation - avoid slow emulated read */
1596 if ( (rc = read_ulong(dst.mem.seg, dst.mem.off,
1597 &dst.val, dst.bytes, ctxt, ops)) )
1598 goto done;
1599 dst.orig_val = dst.val;
1601 break;
1604 if ( twobyte )
1605 goto twobyte_insn;
1607 switch ( b )
1609 case 0x00 ... 0x05: add: /* add */
1610 emulate_2op_SrcV("add", src, dst, _regs.eflags);
1611 break;
1613 case 0x08 ... 0x0d: or: /* or */
1614 emulate_2op_SrcV("or", src, dst, _regs.eflags);
1615 break;
1617 case 0x10 ... 0x15: adc: /* adc */
1618 emulate_2op_SrcV("adc", src, dst, _regs.eflags);
1619 break;
1621 case 0x18 ... 0x1d: sbb: /* sbb */
1622 emulate_2op_SrcV("sbb", src, dst, _regs.eflags);
1623 break;
1625 case 0x20 ... 0x25: and: /* and */
1626 emulate_2op_SrcV("and", src, dst, _regs.eflags);
1627 break;
1629 case 0x28 ... 0x2d: sub: /* sub */
1630 emulate_2op_SrcV("sub", src, dst, _regs.eflags);
1631 break;
1633 case 0x30 ... 0x35: xor: /* xor */
1634 emulate_2op_SrcV("xor", src, dst, _regs.eflags);
1635 break;
1637 case 0x38 ... 0x3d: cmp: /* cmp */
1638 emulate_2op_SrcV("cmp", src, dst, _regs.eflags);
1639 dst.type = OP_NONE;
1640 break;
1642 case 0x06: /* push %%es */ {
1643 struct segment_register reg;
1644 src.val = x86_seg_es;
1645 push_seg:
1646 fail_if(ops->read_segment == NULL);
1647 if ( (rc = ops->read_segment(src.val, &reg, ctxt)) != 0 )
1648 return rc;
1649 /* 64-bit mode: PUSH defaults to a 64-bit operand. */
1650 if ( mode_64bit() && (op_bytes == 4) )
1651 op_bytes = 8;
1652 if ( (rc = ops->write(x86_seg_ss, sp_pre_dec(op_bytes),
1653 &reg.sel, op_bytes, ctxt)) != 0 )
1654 goto done;
1655 break;
1658 case 0x07: /* pop %%es */
1659 src.val = x86_seg_es;
1660 pop_seg:
1661 fail_if(ops->write_segment == NULL);
1662 /* 64-bit mode: POP defaults to a 64-bit operand. */
1663 if ( mode_64bit() && (op_bytes == 4) )
1664 op_bytes = 8;
1665 if ( (rc = read_ulong(x86_seg_ss, sp_post_inc(op_bytes),
1666 &dst.val, op_bytes, ctxt, ops)) != 0 )
1667 goto done;
1668 if ( (rc = load_seg(src.val, (uint16_t)dst.val, ctxt, ops)) != 0 )
1669 return rc;
1670 break;
1672 case 0x0e: /* push %%cs */
1673 src.val = x86_seg_cs;
1674 goto push_seg;
1676 case 0x16: /* push %%ss */
1677 src.val = x86_seg_ss;
1678 goto push_seg;
1680 case 0x17: /* pop %%ss */
1681 src.val = x86_seg_ss;
1682 ctxt->retire.flags.mov_ss = 1;
1683 goto pop_seg;
1685 case 0x1e: /* push %%ds */
1686 src.val = x86_seg_ds;
1687 goto push_seg;
1689 case 0x1f: /* pop %%ds */
1690 src.val = x86_seg_ds;
1691 goto pop_seg;
1693 case 0x27: /* daa */ {
1694 uint8_t al = _regs.eax;
1695 unsigned long eflags = _regs.eflags;
1696 generate_exception_if(mode_64bit(), EXC_UD, -1);
1697 _regs.eflags &= ~(EFLG_CF|EFLG_AF);
1698 if ( ((al & 0x0f) > 9) || (eflags & EFLG_AF) )
1700 *(uint8_t *)&_regs.eax += 6;
1701 _regs.eflags |= EFLG_AF;
1703 if ( (al > 0x99) || (eflags & EFLG_CF) )
1705 *(uint8_t *)&_regs.eax += 0x60;
1706 _regs.eflags |= EFLG_CF;
1708 _regs.eflags &= ~(EFLG_SF|EFLG_ZF|EFLG_PF);
1709 _regs.eflags |= ((uint8_t)_regs.eax == 0) ? EFLG_ZF : 0;
1710 _regs.eflags |= (( int8_t)_regs.eax < 0) ? EFLG_SF : 0;
1711 _regs.eflags |= even_parity(_regs.eax) ? EFLG_PF : 0;
1712 break;
1715 case 0x2f: /* das */ {
1716 uint8_t al = _regs.eax;
1717 unsigned long eflags = _regs.eflags;
1718 generate_exception_if(mode_64bit(), EXC_UD, -1);
1719 _regs.eflags &= ~(EFLG_CF|EFLG_AF);
1720 if ( ((al & 0x0f) > 9) || (eflags & EFLG_AF) )
1722 _regs.eflags |= EFLG_AF;
1723 if ( (al < 6) || (eflags & EFLG_CF) )
1724 _regs.eflags |= EFLG_CF;
1725 *(uint8_t *)&_regs.eax -= 6;
1727 if ( (al > 0x99) || (eflags & EFLG_CF) )
1729 *(uint8_t *)&_regs.eax -= 0x60;
1730 _regs.eflags |= EFLG_CF;
1732 _regs.eflags &= ~(EFLG_SF|EFLG_ZF|EFLG_PF);
1733 _regs.eflags |= ((uint8_t)_regs.eax == 0) ? EFLG_ZF : 0;
1734 _regs.eflags |= (( int8_t)_regs.eax < 0) ? EFLG_SF : 0;
1735 _regs.eflags |= even_parity(_regs.eax) ? EFLG_PF : 0;
1736 break;
1739 case 0x37: /* aaa */
1740 case 0x3f: /* aas */
1741 generate_exception_if(mode_64bit(), EXC_UD, -1);
1742 _regs.eflags &= ~EFLG_CF;
1743 if ( ((uint8_t)_regs.eax > 9) || (_regs.eflags & EFLG_AF) )
1745 ((uint8_t *)&_regs.eax)[0] += (b == 0x37) ? 6 : -6;
1746 ((uint8_t *)&_regs.eax)[1] += (b == 0x37) ? 1 : -1;
1747 _regs.eflags |= EFLG_CF | EFLG_AF;
1749 ((uint8_t *)&_regs.eax)[0] &= 0x0f;
1750 break;
1752 case 0x40 ... 0x4f: /* inc/dec reg */
1753 dst.type = OP_REG;
1754 dst.reg = decode_register(b & 7, &_regs, 0);
1755 dst.bytes = op_bytes;
1756 dst.val = *dst.reg;
1757 if ( b & 8 )
1758 emulate_1op("dec", dst, _regs.eflags);
1759 else
1760 emulate_1op("inc", dst, _regs.eflags);
1761 break;
1763 case 0x50 ... 0x57: /* push reg */
1764 src.val = *(unsigned long *)decode_register(
1765 (b & 7) | ((rex_prefix & 1) << 3), &_regs, 0);
1766 goto push;
1768 case 0x58 ... 0x5f: /* pop reg */
1769 dst.type = OP_REG;
1770 dst.reg = decode_register(
1771 (b & 7) | ((rex_prefix & 1) << 3), &_regs, 0);
1772 dst.bytes = op_bytes;
1773 if ( mode_64bit() && (dst.bytes == 4) )
1774 dst.bytes = 8;
1775 if ( (rc = read_ulong(x86_seg_ss, sp_post_inc(dst.bytes),
1776 &dst.val, dst.bytes, ctxt, ops)) != 0 )
1777 goto done;
1778 break;
1780 case 0x60: /* pusha */ {
1781 int i;
1782 unsigned long regs[] = {
1783 _regs.eax, _regs.ecx, _regs.edx, _regs.ebx,
1784 _regs.esp, _regs.ebp, _regs.esi, _regs.edi };
1785 generate_exception_if(mode_64bit(), EXC_UD, -1);
1786 for ( i = 0; i < 8; i++ )
1787 if ( (rc = ops->write(x86_seg_ss, sp_pre_dec(op_bytes),
1788 &regs[i], op_bytes, ctxt)) != 0 )
1789 goto done;
1790 break;
1793 case 0x61: /* popa */ {
1794 int i;
1795 unsigned long dummy_esp, *regs[] = {
1796 (unsigned long *)&_regs.edi, (unsigned long *)&_regs.esi,
1797 (unsigned long *)&_regs.ebp, (unsigned long *)&dummy_esp,
1798 (unsigned long *)&_regs.ebx, (unsigned long *)&_regs.edx,
1799 (unsigned long *)&_regs.ecx, (unsigned long *)&_regs.eax };
1800 generate_exception_if(mode_64bit(), EXC_UD, -1);
1801 for ( i = 0; i < 8; i++ )
1803 if ( (rc = read_ulong(x86_seg_ss, sp_post_inc(op_bytes),
1804 &dst.val, op_bytes, ctxt, ops)) != 0 )
1805 goto done;
1806 switch ( op_bytes )
1808 case 1: *(uint8_t *)regs[i] = (uint8_t)dst.val; break;
1809 case 2: *(uint16_t *)regs[i] = (uint16_t)dst.val; break;
1810 case 4: *regs[i] = (uint32_t)dst.val; break; /* 64b: zero-ext */
1811 case 8: *regs[i] = dst.val; break;
1814 break;
1817 case 0x62: /* bound */ {
1818 unsigned long src_val2;
1819 int lb, ub, idx;
1820 generate_exception_if(mode_64bit() || (src.type != OP_MEM),
1821 EXC_UD, -1);
1822 if ( (rc = read_ulong(src.mem.seg, src.mem.off + op_bytes,
1823 &src_val2, op_bytes, ctxt, ops)) )
1824 goto done;
1825 ub = (op_bytes == 2) ? (int16_t)src_val2 : (int32_t)src_val2;
1826 lb = (op_bytes == 2) ? (int16_t)src.val : (int32_t)src.val;
1827 idx = (op_bytes == 2) ? (int16_t)dst.val : (int32_t)dst.val;
1828 generate_exception_if((idx < lb) || (idx > ub), EXC_BR, -1);
1829 dst.type = OP_NONE;
1830 break;
1833 case 0x63: /* movsxd (x86/64) / arpl (x86/32) */
1834 if ( mode_64bit() )
1836 /* movsxd */
1837 if ( src.type == OP_REG )
1838 src.val = *(int32_t *)src.reg;
1839 else if ( (rc = read_ulong(src.mem.seg, src.mem.off,
1840 &src.val, 4, ctxt, ops)) )
1841 goto done;
1842 dst.val = (int32_t)src.val;
1844 else
1846 /* arpl */
1847 uint16_t src_val = dst.val;
1848 dst = src;
1849 _regs.eflags &= ~EFLG_ZF;
1850 _regs.eflags |= ((src_val & 3) > (dst.val & 3)) ? EFLG_ZF : 0;
1851 if ( _regs.eflags & EFLG_ZF )
1852 dst.val = (dst.val & ~3) | (src_val & 3);
1853 else
1854 dst.type = OP_NONE;
1855 generate_exception_if(!in_protmode(ctxt, ops), EXC_UD, -1);
1857 break;
1859 case 0x68: /* push imm{16,32,64} */
1860 src.val = ((op_bytes == 2)
1861 ? (int32_t)insn_fetch_type(int16_t)
1862 : insn_fetch_type(int32_t));
1863 goto push;
1865 case 0x69: /* imul imm16/32 */
1866 case 0x6b: /* imul imm8 */ {
1867 unsigned long src1; /* ModR/M source operand */
1868 if ( ea.type == OP_REG )
1869 src1 = *ea.reg;
1870 else if ( (rc = read_ulong(ea.mem.seg, ea.mem.off,
1871 &src1, op_bytes, ctxt, ops)) )
1872 goto done;
1873 _regs.eflags &= ~(EFLG_OF|EFLG_CF);
1874 switch ( dst.bytes )
1876 case 2:
1877 dst.val = ((uint32_t)(int16_t)src.val *
1878 (uint32_t)(int16_t)src1);
1879 if ( (int16_t)dst.val != (uint32_t)dst.val )
1880 _regs.eflags |= EFLG_OF|EFLG_CF;
1881 break;
1882 #ifdef __x86_64__
1883 case 4:
1884 dst.val = ((uint64_t)(int32_t)src.val *
1885 (uint64_t)(int32_t)src1);
1886 if ( (int32_t)dst.val != dst.val )
1887 _regs.eflags |= EFLG_OF|EFLG_CF;
1888 break;
1889 #endif
1890 default: {
1891 unsigned long m[2] = { src.val, src1 };
1892 if ( imul_dbl(m) )
1893 _regs.eflags |= EFLG_OF|EFLG_CF;
1894 dst.val = m[0];
1895 break;
1898 break;
1901 case 0x6a: /* push imm8 */
1902 src.val = insn_fetch_type(int8_t);
1903 push:
1904 d |= Mov; /* force writeback */
1905 dst.type = OP_MEM;
1906 dst.bytes = op_bytes;
1907 if ( mode_64bit() && (dst.bytes == 4) )
1908 dst.bytes = 8;
1909 dst.val = src.val;
1910 dst.mem.seg = x86_seg_ss;
1911 dst.mem.off = sp_pre_dec(dst.bytes);
1912 break;
1914 case 0x6c ... 0x6d: /* ins %dx,%es:%edi */ {
1915 unsigned long nr_reps = get_rep_prefix();
1916 unsigned int port = (uint16_t)_regs.edx;
1917 dst.bytes = !(b & 1) ? 1 : (op_bytes == 8) ? 4 : op_bytes;
1918 dst.mem.seg = x86_seg_es;
1919 dst.mem.off = truncate_ea_and_reps(_regs.edi, nr_reps, dst.bytes);
1920 if ( (rc = ioport_access_check(port, dst.bytes, ctxt, ops)) != 0 )
1921 goto done;
1922 if ( (nr_reps > 1) && (ops->rep_ins != NULL) &&
1923 ((rc = ops->rep_ins(port, dst.mem.seg, dst.mem.off, dst.bytes,
1924 &nr_reps, ctxt)) != X86EMUL_UNHANDLEABLE) )
1926 if ( rc != 0 )
1927 goto done;
1929 else
1931 fail_if(ops->read_io == NULL);
1932 if ( (rc = ops->read_io(port, dst.bytes, &dst.val, ctxt)) != 0 )
1933 goto done;
1934 dst.type = OP_MEM;
1935 nr_reps = 1;
1937 register_address_increment(
1938 _regs.edi,
1939 nr_reps * ((_regs.eflags & EFLG_DF) ? -dst.bytes : dst.bytes));
1940 put_rep_prefix(nr_reps);
1941 break;
1944 case 0x6e ... 0x6f: /* outs %esi,%dx */ {
1945 unsigned long nr_reps = get_rep_prefix();
1946 unsigned int port = (uint16_t)_regs.edx;
1947 dst.bytes = !(b & 1) ? 1 : (op_bytes == 8) ? 4 : op_bytes;
1948 ea.mem.off = truncate_ea_and_reps(_regs.esi, nr_reps, dst.bytes);
1949 if ( (rc = ioport_access_check(port, dst.bytes, ctxt, ops)) != 0 )
1950 goto done;
1951 if ( (nr_reps > 1) && (ops->rep_outs != NULL) &&
1952 ((rc = ops->rep_outs(ea.mem.seg, ea.mem.off, port, dst.bytes,
1953 &nr_reps, ctxt)) != X86EMUL_UNHANDLEABLE) )
1955 if ( rc != 0 )
1956 goto done;
1958 else
1960 if ( (rc = read_ulong(ea.mem.seg, truncate_ea(_regs.esi),
1961 &dst.val, dst.bytes, ctxt, ops)) != 0 )
1962 goto done;
1963 fail_if(ops->write_io == NULL);
1964 if ( (rc = ops->write_io(port, dst.bytes, dst.val, ctxt)) != 0 )
1965 goto done;
1966 nr_reps = 1;
1968 register_address_increment(
1969 _regs.esi,
1970 nr_reps * ((_regs.eflags & EFLG_DF) ? -dst.bytes : dst.bytes));
1971 put_rep_prefix(nr_reps);
1972 break;
1975 case 0x70 ... 0x7f: /* jcc (short) */ {
1976 int rel = insn_fetch_type(int8_t);
1977 if ( test_cc(b, _regs.eflags) )
1978 jmp_rel(rel);
1979 break;
1982 case 0x82: /* Grp1 (x86/32 only) */
1983 generate_exception_if(mode_64bit(), EXC_UD, -1);
1984 case 0x80: case 0x81: case 0x83: /* Grp1 */
1985 switch ( modrm_reg & 7 )
1987 case 0: goto add;
1988 case 1: goto or;
1989 case 2: goto adc;
1990 case 3: goto sbb;
1991 case 4: goto and;
1992 case 5: goto sub;
1993 case 6: goto xor;
1994 case 7: goto cmp;
1996 break;
1998 case 0xa8 ... 0xa9: /* test imm,%%eax */
1999 case 0x84 ... 0x85: test: /* test */
2000 emulate_2op_SrcV("test", src, dst, _regs.eflags);
2001 dst.type = OP_NONE;
2002 break;
2004 case 0x86 ... 0x87: xchg: /* xchg */
2005 /* Write back the register source. */
2006 switch ( dst.bytes )
2008 case 1: *(uint8_t *)src.reg = (uint8_t)dst.val; break;
2009 case 2: *(uint16_t *)src.reg = (uint16_t)dst.val; break;
2010 case 4: *src.reg = (uint32_t)dst.val; break; /* 64b reg: zero-extend */
2011 case 8: *src.reg = dst.val; break;
2013 /* Write back the memory destination with implicit LOCK prefix. */
2014 dst.val = src.val;
2015 lock_prefix = 1;
2016 break;
2018 case 0xc6 ... 0xc7: /* mov (sole member of Grp11) */
2019 generate_exception_if((modrm_reg & 7) != 0, EXC_UD, -1);
2020 case 0x88 ... 0x8b: /* mov */
2021 dst.val = src.val;
2022 break;
2024 case 0x8c: /* mov Sreg,r/m */ {
2025 struct segment_register reg;
2026 enum x86_segment seg = decode_segment(modrm_reg);
2027 generate_exception_if(seg == decode_segment_failed, EXC_UD, -1);
2028 fail_if(ops->read_segment == NULL);
2029 if ( (rc = ops->read_segment(seg, &reg, ctxt)) != 0 )
2030 goto done;
2031 dst.val = reg.sel;
2032 if ( dst.type == OP_MEM )
2033 dst.bytes = 2;
2034 break;
2037 case 0x8e: /* mov r/m,Sreg */ {
2038 enum x86_segment seg = decode_segment(modrm_reg);
2039 generate_exception_if(seg == decode_segment_failed, EXC_UD, -1);
2040 if ( (rc = load_seg(seg, (uint16_t)src.val, ctxt, ops)) != 0 )
2041 goto done;
2042 if ( seg == x86_seg_ss )
2043 ctxt->retire.flags.mov_ss = 1;
2044 dst.type = OP_NONE;
2045 break;
2048 case 0x8d: /* lea */
2049 dst.val = ea.mem.off;
2050 break;
2052 case 0x8f: /* pop (sole member of Grp1a) */
2053 generate_exception_if((modrm_reg & 7) != 0, EXC_UD, -1);
2054 /* 64-bit mode: POP defaults to a 64-bit operand. */
2055 if ( mode_64bit() && (dst.bytes == 4) )
2056 dst.bytes = 8;
2057 if ( (rc = read_ulong(x86_seg_ss, sp_post_inc(dst.bytes),
2058 &dst.val, dst.bytes, ctxt, ops)) != 0 )
2059 goto done;
2060 break;
2062 case 0x90: /* nop / xchg %%r8,%%rax */
2063 if ( !(rex_prefix & 1) )
2064 break; /* nop */
2066 case 0x91 ... 0x97: /* xchg reg,%%rax */
2067 src.type = dst.type = OP_REG;
2068 src.bytes = dst.bytes = op_bytes;
2069 src.reg = (unsigned long *)&_regs.eax;
2070 src.val = *src.reg;
2071 dst.reg = decode_register(
2072 (b & 7) | ((rex_prefix & 1) << 3), &_regs, 0);
2073 dst.val = *dst.reg;
2074 goto xchg;
2076 case 0x98: /* cbw/cwde/cdqe */
2077 switch ( op_bytes )
2079 case 2: *(int16_t *)&_regs.eax = (int8_t)_regs.eax; break; /* cbw */
2080 case 4: _regs.eax = (uint32_t)(int16_t)_regs.eax; break; /* cwde */
2081 case 8: _regs.eax = (int32_t)_regs.eax; break; /* cdqe */
2083 break;
2085 case 0x99: /* cwd/cdq/cqo */
2086 switch ( op_bytes )
2088 case 2:
2089 *(int16_t *)&_regs.edx = ((int16_t)_regs.eax < 0) ? -1 : 0;
2090 break;
2091 case 4:
2092 _regs.edx = (uint32_t)(((int32_t)_regs.eax < 0) ? -1 : 0);
2093 break;
2094 case 8:
2095 _regs.edx = (_regs.eax < 0) ? -1 : 0;
2096 break;
2098 break;
2100 case 0x9a: /* call (far, absolute) */ {
2101 struct segment_register reg;
2102 uint16_t sel;
2103 uint32_t eip;
2105 fail_if(ops->read_segment == NULL);
2106 generate_exception_if(mode_64bit(), EXC_UD, -1);
2108 eip = insn_fetch_bytes(op_bytes);
2109 sel = insn_fetch_type(uint16_t);
2111 if ( (rc = ops->read_segment(x86_seg_cs, &reg, ctxt)) ||
2112 (rc = ops->write(x86_seg_ss, sp_pre_dec(op_bytes),
2113 &reg.sel, op_bytes, ctxt)) ||
2114 (rc = ops->write(x86_seg_ss, sp_pre_dec(op_bytes),
2115 &_regs.eip, op_bytes, ctxt)) )
2116 goto done;
2118 if ( (rc = load_seg(x86_seg_cs, sel, ctxt, ops)) != 0 )
2119 goto done;
2120 _regs.eip = eip;
2121 break;
2124 case 0x9b: /* wait/fwait */
2125 emulate_fpu_insn("fwait");
2126 break;
2128 case 0x9c: /* pushf */
2129 src.val = _regs.eflags;
2130 goto push;
/* popf: pop a value into EFLAGS, but mask off bits the current privilege
 * level is not allowed to modify (IOPL outside ring0, IF without IOPL).
 * Bit 1 (0x02) is architecturally always set. */
2132 case 0x9d: /* popf */ {
2133 uint32_t mask = EFLG_VIP | EFLG_VIF | EFLG_VM;
2134 if ( !mode_ring0() )
2135 mask |= EFLG_IOPL;
2136 if ( !mode_iopl() )
2137 mask |= EFLG_IF;
2138 /* 64-bit mode: POP defaults to a 64-bit operand. */
2139 if ( mode_64bit() && (op_bytes == 4) )
2140 op_bytes = 8;
2141 if ( (rc = read_ulong(x86_seg_ss, sp_post_inc(op_bytes),
2142 &dst.val, op_bytes, ctxt, ops)) != 0 )
2143 goto done;
2144 if ( op_bytes == 2 )
2145 dst.val = (uint16_t)dst.val | (_regs.eflags & 0xffff0000u);
/* 0x257fd5 = mask of EFLAGS bits that are writable at all. */
2146 dst.val &= 0x257fd5;
2147 _regs.eflags &= mask;
2148 _regs.eflags |= (uint32_t)(dst.val & ~mask) | 0x02;
2149 break;
/* sahf/lahf: move %ah to/from the low flag byte (SF/ZF/AF/PF/CF only,
 * hence the 0xd7 mask; bit 1 always reads as set). */
2152 case 0x9e: /* sahf */
2153 *(uint8_t *)&_regs.eflags = (((uint8_t *)&_regs.eax)[1] & 0xd7) | 0x02;
2154 break;
2156 case 0x9f: /* lahf */
2157 ((uint8_t *)&_regs.eax)[1] = (_regs.eflags & 0xd7) | 0x02;
2158 break;
/* mov with a direct memory offset (moffs) encoded in the instruction
 * stream rather than via ModRM. */
2160 case 0xa0 ... 0xa1: /* mov mem.offs,{%al,%ax,%eax,%rax} */
2161 /* Source EA is not encoded via ModRM. */
2162 dst.type = OP_REG;
2163 dst.reg = (unsigned long *)&_regs.eax;
2164 dst.bytes = (d & ByteOp) ? 1 : op_bytes;
2165 if ( (rc = read_ulong(ea.mem.seg, insn_fetch_bytes(ad_bytes),
2166 &dst.val, dst.bytes, ctxt, ops)) != 0 )
2167 goto done;
2168 break;
2170 case 0xa2 ... 0xa3: /* mov {%al,%ax,%eax,%rax},mem.offs */
2171 /* Destination EA is not encoded via ModRM. */
2172 dst.type = OP_MEM;
2173 dst.mem.seg = ea.mem.seg;
2174 dst.mem.off = insn_fetch_bytes(ad_bytes);
2175 dst.bytes = (d & ByteOp) ? 1 : op_bytes;
2176 dst.val = (unsigned long)_regs.eax;
2177 break;
/* movs: if the callback provides an accelerated rep_movs, use it for the
 * whole rep count; otherwise emulate one element and let the rep-prefix
 * machinery rewind EIP for the remaining iterations. */
2179 case 0xa4 ... 0xa5: /* movs */ {
2180 unsigned long nr_reps = get_rep_prefix();
2181 dst.bytes = (d & ByteOp) ? 1 : op_bytes;
2182 dst.mem.seg = x86_seg_es;
2183 dst.mem.off = truncate_ea_and_reps(_regs.edi, nr_reps, dst.bytes);
2184 if ( (nr_reps > 1) && (ops->rep_movs != NULL) &&
2185 ((rc = ops->rep_movs(ea.mem.seg, truncate_ea(_regs.esi),
2186 dst.mem.seg, dst.mem.off, dst.bytes,
2187 &nr_reps, ctxt)) != X86EMUL_UNHANDLEABLE) )
2189 if ( rc != 0 )
2190 goto done;
2192 else
2194 if ( (rc = read_ulong(ea.mem.seg, truncate_ea(_regs.esi),
2195 &dst.val, dst.bytes, ctxt, ops)) != 0 )
2196 goto done;
2197 dst.type = OP_MEM;
2198 nr_reps = 1;
/* Advance ESI/EDI by the number of elements moved; DF selects direction. */
2200 register_address_increment(
2201 _regs.esi,
2202 nr_reps * ((_regs.eflags & EFLG_DF) ? -dst.bytes : dst.bytes));
2203 register_address_increment(
2204 _regs.edi,
2205 nr_reps * ((_regs.eflags & EFLG_DF) ? -dst.bytes : dst.bytes));
2206 put_rep_prefix(nr_reps);
2207 break;
/* cmps: compare [ESI] with ES:[EDI], one element per emulation pass.
 * For repe/repne, EIP is restored (next_eip) while the condition holds so
 * the instruction re-executes for the next element. */
2210 case 0xa6 ... 0xa7: /* cmps */ {
2211 unsigned long next_eip = _regs.eip;
2212 get_rep_prefix();
2213 src.bytes = dst.bytes = (d & ByteOp) ? 1 : op_bytes;
2214 if ( (rc = read_ulong(ea.mem.seg, truncate_ea(_regs.esi),
2215 &dst.val, dst.bytes, ctxt, ops)) ||
2216 (rc = read_ulong(x86_seg_es, truncate_ea(_regs.edi),
2217 &src.val, src.bytes, ctxt, ops)) )
2218 goto done;
2219 register_address_increment(
2220 _regs.esi, (_regs.eflags & EFLG_DF) ? -dst.bytes : dst.bytes);
2221 register_address_increment(
2222 _regs.edi, (_regs.eflags & EFLG_DF) ? -src.bytes : src.bytes);
2223 put_rep_prefix(1);
2224 /* cmp: dst - src ==> src=*%%edi,dst=*%%esi ==> *%%esi - *%%edi */
2225 emulate_2op_SrcV("cmp", src, dst, _regs.eflags);
2226 if ( ((rep_prefix == REPE_PREFIX) && !(_regs.eflags & EFLG_ZF)) ||
2227 ((rep_prefix == REPNE_PREFIX) && (_regs.eflags & EFLG_ZF)) )
2228 _regs.eip = next_eip;
2229 break;
/* stos: store %eax to ES:[EDI], one element per pass. */
2232 case 0xaa ... 0xab: /* stos */ {
2233 /* unsigned long max_reps = */get_rep_prefix();
2234 dst.type = OP_MEM;
2235 dst.bytes = (d & ByteOp) ? 1 : op_bytes;
2236 dst.mem.seg = x86_seg_es;
2237 dst.mem.off = truncate_ea(_regs.edi);
2238 dst.val = _regs.eax;
2239 register_address_increment(
2240 _regs.edi, (_regs.eflags & EFLG_DF) ? -dst.bytes : dst.bytes);
2241 put_rep_prefix(1);
2242 break;
/* lods: load one element from [ESI] into the accumulator. */
2245 case 0xac ... 0xad: /* lods */ {
2246 /* unsigned long max_reps = */get_rep_prefix();
2247 dst.type = OP_REG;
2248 dst.bytes = (d & ByteOp) ? 1 : op_bytes;
2249 dst.reg = (unsigned long *)&_regs.eax;
2250 if ( (rc = read_ulong(ea.mem.seg, truncate_ea(_regs.esi),
2251 &dst.val, dst.bytes, ctxt, ops)) != 0 )
2252 goto done;
2253 register_address_increment(
2254 _regs.esi, (_regs.eflags & EFLG_DF) ? -dst.bytes : dst.bytes);
2255 put_rep_prefix(1);
2256 break;
/* scas: compare accumulator with ES:[EDI]; rep handling as for cmps. */
2259 case 0xae ... 0xaf: /* scas */ {
2260 unsigned long next_eip = _regs.eip;
2261 get_rep_prefix();
2262 src.bytes = dst.bytes = (d & ByteOp) ? 1 : op_bytes;
2263 dst.val = _regs.eax;
2264 if ( (rc = read_ulong(x86_seg_es, truncate_ea(_regs.edi),
2265 &src.val, src.bytes, ctxt, ops)) != 0 )
2266 goto done;
2267 register_address_increment(
2268 _regs.edi, (_regs.eflags & EFLG_DF) ? -src.bytes : src.bytes);
2269 put_rep_prefix(1);
2270 /* cmp: dst - src ==> src=*%%edi,dst=%%eax ==> %%eax - *%%edi */
2271 emulate_2op_SrcV("cmp", src, dst, _regs.eflags);
2272 if ( ((rep_prefix == REPE_PREFIX) && !(_regs.eflags & EFLG_ZF)) ||
2273 ((rep_prefix == REPNE_PREFIX) && (_regs.eflags & EFLG_ZF)) )
2274 _regs.eip = next_eip;
2275 break;
/* mov imm8,r8: low-byte registers; passing (rex_prefix == 0) as the last
 * decode_register() argument selects the legacy %ah..%bh encodings when
 * no REX prefix is present. */
2278 case 0xb0 ... 0xb7: /* mov imm8,r8 */
2279 dst.reg = decode_register(
2280 (b & 7) | ((rex_prefix & 1) << 3), &_regs, (rex_prefix == 0));
2281 dst.val = src.val;
2282 break;
/* mov imm,r: for a 64-bit operand the decoder supplied only 32 immediate
 * bits, so fetch the upper 32 here to build the full imm64. */
2284 case 0xb8 ... 0xbf: /* mov imm{16,32,64},r{16,32,64} */
2285 if ( dst.bytes == 8 ) /* Fetch more bytes to obtain imm64 */
2286 src.val = ((uint32_t)src.val |
2287 ((uint64_t)insn_fetch_type(uint32_t) << 32));
2288 dst.reg = decode_register(
2289 (b & 7) | ((rex_prefix & 1) << 3), &_regs, 0);
2290 dst.val = src.val;
2291 break;
/* Grp2 rotates/shifts; shift count is in src.val (imm8 here, or 1 / %cl
 * via the 0xd0-0xd3 arms that jump to the grp2 label). */
2293 case 0xc0 ... 0xc1: grp2: /* Grp2 */
2294 switch ( modrm_reg & 7 )
2296 case 0: /* rol */
2297 emulate_2op_SrcB("rol", src, dst, _regs.eflags);
2298 break;
2299 case 1: /* ror */
2300 emulate_2op_SrcB("ror", src, dst, _regs.eflags);
2301 break;
2302 case 2: /* rcl */
2303 emulate_2op_SrcB("rcl", src, dst, _regs.eflags);
2304 break;
2305 case 3: /* rcr */
2306 emulate_2op_SrcB("rcr", src, dst, _regs.eflags);
2307 break;
2308 case 4: /* sal/shl */
2309 case 6: /* sal/shl */
2310 emulate_2op_SrcB("sal", src, dst, _regs.eflags);
2311 break;
2312 case 5: /* shr */
2313 emulate_2op_SrcB("shr", src, dst, _regs.eflags);
2314 break;
2315 case 7: /* sar */
2316 emulate_2op_SrcB("sar", src, dst, _regs.eflags);
2317 break;
2319 break;
/* Near return: pop EIP, then discard 'offset' extra stack bytes (imm16
 * form). In 64-bit mode the operand defaults to 8 bytes. */
2321 case 0xc2: /* ret imm16 (near) */
2322 case 0xc3: /* ret (near) */ {
2323 int offset = (b == 0xc2) ? insn_fetch_type(uint16_t) : 0;
2324 op_bytes = mode_64bit() ? 8 : op_bytes;
2325 if ( (rc = read_ulong(x86_seg_ss, sp_post_inc(op_bytes + offset),
2326 &dst.val, op_bytes, ctxt, ops)) != 0 )
2327 goto done;
2328 _regs.eip = dst.val;
2329 break;
/* les/lds: load a far pointer from memory - offset into a register and
 * selector (the 2 bytes following the offset) into ES or DS. The 0x0f
 * lss/lfs/lgs forms also enter at the les label with dst.val preset. */
2332 case 0xc4: /* les */ {
2333 unsigned long sel;
2334 dst.val = x86_seg_es;
2335 les: /* dst.val identifies the segment */
2336 generate_exception_if(src.type != OP_MEM, EXC_UD, -1);
2337 if ( (rc = read_ulong(src.mem.seg, src.mem.off + src.bytes,
2338 &sel, 2, ctxt, ops)) != 0 )
2339 goto done;
2340 if ( (rc = load_seg(dst.val, (uint16_t)sel, ctxt, ops)) != 0 )
2341 goto done;
2342 dst.val = src.val;
2343 break;
2346 case 0xc5: /* lds */
2347 dst.val = x86_seg_ds;
2348 goto les;
/* enter: push %ebp, optionally copy 'depth' enclosing frame pointers
 * (nesting levels), set new %ebp, then reserve 'size' bytes of stack. */
2350 case 0xc8: /* enter imm16,imm8 */ {
2351 uint16_t size = insn_fetch_type(uint16_t);
2352 uint8_t depth = insn_fetch_type(uint8_t) & 31;
2353 int i;
2355 dst.type = OP_REG;
2356 dst.bytes = (mode_64bit() && (op_bytes == 4)) ? 8 : op_bytes;
2357 dst.reg = (unsigned long *)&_regs.ebp;
2358 if ( (rc = ops->write(x86_seg_ss, sp_pre_dec(dst.bytes),
2359 &_regs.ebp, dst.bytes, ctxt)) )
2360 goto done;
2361 dst.val = _regs.esp;
2363 if ( depth > 0 )
2365 for ( i = 1; i < depth; i++ )
2367 unsigned long ebp, temp_data;
2368 ebp = truncate_word(_regs.ebp - i*dst.bytes, ctxt->sp_size/8);
2369 if ( (rc = read_ulong(x86_seg_ss, ebp,
2370 &temp_data, dst.bytes, ctxt, ops)) ||
2371 (rc = ops->write(x86_seg_ss, sp_pre_dec(dst.bytes),
2372 &temp_data, dst.bytes, ctxt)) )
2373 goto done;
2375 if ( (rc = ops->write(x86_seg_ss, sp_pre_dec(dst.bytes),
2376 &dst.val, dst.bytes, ctxt)) )
2377 goto done;
/* Allocate the local-variable area. */
2380 sp_pre_dec(size);
2381 break;
/* leave: %esp = %ebp (written back inline below, since the generic
 * writeback slot is needed for the second write), then pop %ebp. */
2384 case 0xc9: /* leave */
2385 /* First writeback, to %%esp. */
2386 dst.type = OP_REG;
2387 dst.bytes = (mode_64bit() && (op_bytes == 4)) ? 8 : op_bytes;
2388 dst.reg = (unsigned long *)&_regs.esp;
2389 dst.val = _regs.ebp;
2391 /* Flush first writeback, since there is a second. */
2392 switch ( dst.bytes )
2394 case 1: *(uint8_t *)dst.reg = (uint8_t)dst.val; break;
2395 case 2: *(uint16_t *)dst.reg = (uint16_t)dst.val; break;
2396 case 4: *dst.reg = (uint32_t)dst.val; break; /* 64b: zero-ext */
2397 case 8: *dst.reg = dst.val; break;
2400 /* Second writeback, to %%ebp. */
2401 dst.reg = (unsigned long *)&_regs.ebp;
2402 if ( (rc = read_ulong(x86_seg_ss, sp_post_inc(dst.bytes),
2403 &dst.val, dst.bytes, ctxt, ops)) )
2404 goto done;
2405 break;
/* Far return: pop EIP then CS, load the new code segment, and discard
 * 'offset' extra bytes (imm16 form). */
2407 case 0xca: /* ret imm16 (far) */
2408 case 0xcb: /* ret (far) */ {
2409 int offset = (b == 0xca) ? insn_fetch_type(uint16_t) : 0;
2410 op_bytes = mode_64bit() ? 8 : op_bytes;
2411 if ( (rc = read_ulong(x86_seg_ss, sp_post_inc(op_bytes),
2412 &dst.val, op_bytes, ctxt, ops)) ||
2413 (rc = read_ulong(x86_seg_ss, sp_post_inc(op_bytes + offset),
2414 &src.val, op_bytes, ctxt, ops)) ||
2415 (rc = load_seg(x86_seg_cs, (uint16_t)src.val, ctxt, ops)) )
2416 goto done;
2417 _regs.eip = dst.val;
2418 break;
/* Software interrupts: all forms funnel into the swint label, which
 * delegates to the inject_sw_interrupt callback with the instruction
 * length (current EIP minus original EIP). */
2421 case 0xcc: /* int3 */
2422 src.val = EXC_BP;
2423 goto swint;
2425 case 0xcd: /* int imm8 */
2426 src.val = insn_fetch_type(uint8_t);
2427 swint:
2428 fail_if(ops->inject_sw_interrupt == NULL);
2429 rc = ops->inject_sw_interrupt(src.val, _regs.eip - ctxt->regs->eip,
2430 ctxt) ? : X86EMUL_EXCEPTION;
2431 goto done;
2433 case 0xce: /* into */
2434 generate_exception_if(mode_64bit(), EXC_UD, -1);
2435 if ( !(_regs.eflags & EFLG_OF) )
2436 break;
2437 src.val = EXC_OF;
2438 goto swint;
/* iret: real-mode only here (fail_if); pops EIP, CS, EFLAGS and applies
 * the same privilege-sensitive EFLAGS masking as popf above. */
2440 case 0xcf: /* iret */ {
2441 unsigned long cs, eip, eflags;
2442 uint32_t mask = EFLG_VIP | EFLG_VIF | EFLG_VM;
2443 if ( !mode_ring0() )
2444 mask |= EFLG_IOPL;
2445 if ( !mode_iopl() )
2446 mask |= EFLG_IF;
2447 fail_if(!in_realmode(ctxt, ops));
2448 if ( (rc = read_ulong(x86_seg_ss, sp_post_inc(op_bytes),
2449 &eip, op_bytes, ctxt, ops)) ||
2450 (rc = read_ulong(x86_seg_ss, sp_post_inc(op_bytes),
2451 &cs, op_bytes, ctxt, ops)) ||
2452 (rc = read_ulong(x86_seg_ss, sp_post_inc(op_bytes),
2453 &eflags, op_bytes, ctxt, ops)) )
2454 goto done;
2455 if ( op_bytes == 2 )
2456 eflags = (uint16_t)eflags | (_regs.eflags & 0xffff0000u);
2457 eflags &= 0x257fd5;
2458 _regs.eflags &= mask;
2459 _regs.eflags |= (uint32_t)(eflags & ~mask) | 0x02;
2460 _regs.eip = eip;
2461 if ( (rc = load_seg(x86_seg_cs, (uint16_t)cs, ctxt, ops)) != 0 )
2462 goto done;
2463 break;
/* Grp2 with implicit count: shift by 1 (0xd0/0xd1) or by %cl (0xd2/0xd3);
 * both reuse the grp2 dispatch above. */
2466 case 0xd0 ... 0xd1: /* Grp2 */
2467 src.val = 1;
2468 goto grp2;
2470 case 0xd2 ... 0xd3: /* Grp2 */
2471 src.val = _regs.ecx;
2472 goto grp2;
/* aam: %ah = %al / base, %al = %al % base; base 0 raises #DE.
 * SF/ZF/PF are set from the new %al. Invalid in 64-bit mode. */
2474 case 0xd4: /* aam */ {
2475 unsigned int base = insn_fetch_type(uint8_t);
2476 uint8_t al = _regs.eax;
2477 generate_exception_if(mode_64bit(), EXC_UD, -1);
2478 generate_exception_if(base == 0, EXC_DE, -1);
2479 *(uint16_t *)&_regs.eax = ((al / base) << 8) | (al % base);
2480 _regs.eflags &= ~(EFLG_SF|EFLG_ZF|EFLG_PF);
2481 _regs.eflags |= ((uint8_t)_regs.eax == 0) ? EFLG_ZF : 0;
2482 _regs.eflags |= (( int8_t)_regs.eax < 0) ? EFLG_SF : 0;
2483 _regs.eflags |= even_parity(_regs.eax) ? EFLG_PF : 0;
2484 break;
/* aad: %al = %al + %ah*base, %ah = 0; flags from the new %al. */
2487 case 0xd5: /* aad */ {
2488 unsigned int base = insn_fetch_type(uint8_t);
2489 uint16_t ax = _regs.eax;
2490 generate_exception_if(mode_64bit(), EXC_UD, -1);
2491 *(uint16_t *)&_regs.eax = (uint8_t)(ax + ((ax >> 8) * base));
2492 _regs.eflags &= ~(EFLG_SF|EFLG_ZF|EFLG_PF);
2493 _regs.eflags |= ((uint8_t)_regs.eax == 0) ? EFLG_ZF : 0;
2494 _regs.eflags |= (( int8_t)_regs.eax < 0) ? EFLG_SF : 0;
2495 _regs.eflags |= even_parity(_regs.eax) ? EFLG_PF : 0;
2496 break;
/* salc (undocumented): %al = CF ? 0xff : 0x00. */
2499 case 0xd6: /* salc */
2500 generate_exception_if(mode_64bit(), EXC_UD, -1);
2501 *(uint8_t *)&_regs.eax = (_regs.eflags & EFLG_CF) ? 0xff : 0x00;
2502 break;
/* xlat: %al = byte at seg:[%ebx + %al]. */
2504 case 0xd7: /* xlat */ {
2505 unsigned long al = (uint8_t)_regs.eax;
2506 if ( (rc = read_ulong(ea.mem.seg, truncate_ea(_regs.ebx + al),
2507 &al, 1, ctxt, ops)) != 0 )
2508 goto done;
2509 *(uint8_t *)&_regs.eax = al;
2510 break;
/* FPU opcode 0xd8: register forms are replayed via a stub; memory forms
 * are m32fp - read 4 bytes and feed the value to the matching AT&T
 * single-precision ('s' suffix) mnemonic. */
2513 case 0xd8: /* FPU 0xd8 */
2514 switch ( modrm )
2516 case 0xc0 ... 0xc7: /* fadd %stN,%stN */
2517 case 0xc8 ... 0xcf: /* fmul %stN,%stN */
2518 case 0xd0 ... 0xd7: /* fcom %stN,%stN */
2519 case 0xd8 ... 0xdf: /* fcomp %stN,%stN */
2520 case 0xe0 ... 0xe7: /* fsub %stN,%stN */
2521 case 0xe8 ... 0xef: /* fsubr %stN,%stN */
2522 case 0xf0 ... 0xf7: /* fdiv %stN,%stN */
2523 case 0xf8 ... 0xff: /* fdivr %stN,%stN */
2524 emulate_fpu_insn_stub(0xd8, modrm);
2525 break;
2526 default:
2527 fail_if(modrm >= 0xc0);
2528 ea.bytes = 4;
2529 src = ea;
2530 if ( (rc = ops->read(src.mem.seg, src.mem.off, &src.val,
2531 src.bytes, ctxt)) != 0 )
2532 goto done;
2533 switch ( modrm_reg & 7 )
2535 case 0: /* fadd */
2536 emulate_fpu_insn_memsrc("fadds", src.val);
2537 break;
2538 case 1: /* fmul */
2539 emulate_fpu_insn_memsrc("fmuls", src.val);
2540 break;
2541 case 2: /* fcom */
2542 emulate_fpu_insn_memsrc("fcoms", src.val);
2543 break;
2544 case 3: /* fcomp */
2545 emulate_fpu_insn_memsrc("fcomps", src.val);
2546 break;
2547 case 4: /* fsub */
2548 emulate_fpu_insn_memsrc("fsubs", src.val);
2549 break;
2550 case 5: /* fsubr */
2551 emulate_fpu_insn_memsrc("fsubrs", src.val);
2552 break;
2553 case 6: /* fdiv */
2554 emulate_fpu_insn_memsrc("fdivs", src.val);
2555 break;
2556 case 7: /* fdivr */
2557 emulate_fpu_insn_memsrc("fdivrs", src.val);
2558 break;
2559 default:
2560 goto cannot_emulate;
2563 break;
/* FPU opcode 0xd9: no-operand/register forms via stub; memory forms are
 * m32fp loads/stores plus the 2-byte control-word accessors. */
2565 case 0xd9: /* FPU 0xd9 */
2566 switch ( modrm )
2568 case 0xc0 ... 0xc7: /* fld %stN */
2569 case 0xc8 ... 0xcf: /* fxch %stN */
2570 case 0xd0: /* fnop */
2571 case 0xe0: /* fchs */
2572 case 0xe1: /* fabs */
2573 case 0xe4: /* ftst */
2574 case 0xe5: /* fxam */
2575 case 0xe8: /* fld1 */
2576 case 0xe9: /* fldl2t */
2577 case 0xea: /* fldl2e */
2578 case 0xeb: /* fldpi */
2579 case 0xec: /* fldlg2 */
2580 case 0xed: /* fldln2 */
2581 case 0xee: /* fldz */
2582 case 0xf0: /* f2xm1 */
2583 case 0xf1: /* fyl2x */
2584 case 0xf2: /* fptan */
2585 case 0xf3: /* fpatan */
2586 case 0xf4: /* fxtract */
2587 case 0xf5: /* fprem1 */
2588 case 0xf6: /* fdecstp */
2589 case 0xf7: /* fincstp */
2590 case 0xf8: /* fprem */
2591 case 0xf9: /* fyl2xp1 */
2592 case 0xfa: /* fsqrt */
2593 case 0xfb: /* fsincos */
2594 case 0xfc: /* frndint */
2595 case 0xfd: /* fscale */
2596 case 0xfe: /* fsin */
2597 case 0xff: /* fcos */
2598 emulate_fpu_insn_stub(0xd9, modrm)
2599 break;
2600 default:
2601 fail_if(modrm >= 0xc0);
2602 switch ( modrm_reg & 7 )
2604 case 0: /* fld m32fp */
2605 ea.bytes = 4;
2606 src = ea;
2607 if ( (rc = ops->read(ea.mem.seg, ea.mem.off, &src.val,
2608 src.bytes, ctxt)) != 0 )
2609 goto done;
2610 emulate_fpu_insn_memsrc("flds", src.val);
2611 break;
2612 case 2: /* fst m32fp */
2613 ea.bytes = 4;
2614 dst = ea;
2615 dst.type = OP_MEM;
2616 emulate_fpu_insn_memdst("fsts", dst.val);
2617 break;
2618 case 3: /* fstp m32fp */
2619 ea.bytes = 4;
2620 dst = ea;
2621 dst.type = OP_MEM;
2622 emulate_fpu_insn_memdst("fstps", dst.val);
2623 break;
2624 /* case 4: fldenv - TODO */
2625 case 5: /* fldcw m2byte */
2626 ea.bytes = 2;
2627 src = ea;
2628 if ( (rc = ops->read(src.mem.seg, src.mem.off, &src.val,
2629 src.bytes, ctxt)) != 0 )
2630 goto done;
2631 emulate_fpu_insn_memsrc("fldcw", src.val);
2632 break;
2633 /* case 6: fstenv - TODO */
2634 case 7: /* fnstcw m2byte */
2635 ea.bytes = 2;
2636 dst = ea;
2637 dst.type = OP_MEM;
2638 emulate_fpu_insn_memdst("fnstcw", dst.val);
2639 break;
2640 default:
2641 goto cannot_emulate;
2644 break;
/* FPU opcode 0xda: register forms (fcmov*, fucompp) via stub. Memory
 * forms are m32int per the x87 opcode map (0xde is the m16int group, and
 * the l-suffixed AT&T mnemonics used below are the 32-bit integer forms),
 * so read 4 bytes - not 8 as previously coded - and use "fimull" rather
 * than the un-suffixed 16-bit "fimul". */
2646 case 0xda: /* FPU 0xda */
2647 switch ( modrm )
2649 case 0xc0 ... 0xc7: /* fcmovb %stN */
2650 case 0xc8 ... 0xcf: /* fcmove %stN */
2651 case 0xd0 ... 0xd7: /* fcmovbe %stN */
2652 case 0xd8 ... 0xdf: /* fcmovu %stN */
2653 case 0xe9: /* fucompp */
2654 emulate_fpu_insn_stub(0xda, modrm);
2655 break;
2656 default:
2657 fail_if(modrm >= 0xc0);
2658 ea.bytes = 4;
2659 src = ea;
2660 if ( (rc = ops->read(src.mem.seg, src.mem.off, &src.val,
2661 src.bytes, ctxt)) != 0 )
2662 goto done;
2663 switch ( modrm_reg & 7 )
2665 case 0: /* fiadd m32i */
2666 emulate_fpu_insn_memsrc("fiaddl", src.val);
2667 break;
2668 case 1: /* fimul m32i */
2669 emulate_fpu_insn_memsrc("fimull", src.val);
2670 break;
2671 case 2: /* ficom m32i */
2672 emulate_fpu_insn_memsrc("ficoml", src.val);
2673 break;
2674 case 3: /* ficomp m32i */
2675 emulate_fpu_insn_memsrc("ficompl", src.val);
2676 break;
2677 case 4: /* fisub m32i */
2678 emulate_fpu_insn_memsrc("fisubl", src.val);
2679 break;
2680 case 5: /* fisubr m32i */
2681 emulate_fpu_insn_memsrc("fisubrl", src.val);
2682 break;
2683 case 6: /* fidiv m32i */
2684 emulate_fpu_insn_memsrc("fidivl", src.val);
2685 break;
2686 case 7: /* fidivr m32i */
2687 emulate_fpu_insn_memsrc("fidivrl", src.val);
2688 break;
2689 default:
2690 goto cannot_emulate;
2693 break;
/* FPU opcode 0xdb: fcmovn*/fucomi/fcomi register forms via stub, plus
 * fnclex/fninit/fsetpm specials; memory forms are m32int and m80fp. */
2695 case 0xdb: /* FPU 0xdb */
2696 switch ( modrm )
2698 case 0xc0 ... 0xc7: /* fcmovnb %stN */
2699 case 0xc8 ... 0xcf: /* fcmovne %stN */
2700 case 0xd0 ... 0xd7: /* fcmovnbe %stN */
2701 case 0xd8 ... 0xdf: /* fcmovnu %stN */
2702 emulate_fpu_insn_stub(0xdb, modrm);
2703 break;
2704 case 0xe2: /* fnclex */
2705 emulate_fpu_insn("fnclex");
2706 break;
2707 case 0xe3: /* fninit */
2708 emulate_fpu_insn("fninit");
2709 break;
2710 case 0xe4: /* fsetpm - 287 only, ignored by 387 */
2711 break;
2712 case 0xe8 ... 0xef: /* fucomi %stN */
2713 case 0xf0 ... 0xf7: /* fcomi %stN */
2714 emulate_fpu_insn_stub(0xdb, modrm);
2715 break;
2716 default:
2717 fail_if(modrm >= 0xc0);
2718 switch ( modrm_reg & 7 )
2720 case 0: /* fild m32i */
2721 ea.bytes = 4;
2722 src = ea;
2723 if ( (rc = ops->read(src.mem.seg, src.mem.off, &src.val,
2724 src.bytes, ctxt)) != 0 )
2725 goto done;
2726 emulate_fpu_insn_memsrc("fildl", src.val);
2727 break;
2728 case 1: /* fisttp m32i */
2729 ea.bytes = 4;
2730 dst = ea;
2731 dst.type = OP_MEM;
2732 emulate_fpu_insn_memdst("fisttpl", dst.val);
2733 break;
2734 case 2: /* fist m32i */
2735 ea.bytes = 4;
2736 dst = ea;
2737 dst.type = OP_MEM;
2738 emulate_fpu_insn_memdst("fistl", dst.val);
2739 break;
2740 case 3: /* fistp m32i */
2741 ea.bytes = 4;
2742 dst = ea;
2743 dst.type = OP_MEM;
2744 emulate_fpu_insn_memdst("fistpl", dst.val);
2745 break;
2746 case 5: /* fld m80fp */
2747 ea.bytes = 10;
2748 src = ea;
2749 if ( (rc = ops->read(src.mem.seg, src.mem.off,
2750 &src.val, src.bytes, ctxt)) != 0 )
2751 goto done;
/* NOTE(review): fldt is a memory *load*, yet this uses the memdst macro
 * on a src operand - looks inverted vs the memsrc pattern used by flds
 * above; confirm the macro's constraint direction tolerates this. */
2752 emulate_fpu_insn_memdst("fldt", src.val);
2753 break;
2754 case 7: /* fstp m80fp */
/* NOTE(review): dst.type is assigned before dst = ea overwrites the whole
 * struct; harmless only if ea.type is already OP_MEM - verify. */
2755 ea.bytes = 10;
2756 dst.type = OP_MEM;
2757 dst = ea;
2758 emulate_fpu_insn_memdst("fstpt", dst.val);
2759 break;
2760 default:
2761 goto cannot_emulate;
2764 break;
/* FPU opcode 0xdc: register forms via stub; memory forms are m64fp -
 * read 8 bytes and use the double-precision ('l' suffix) mnemonics. */
2766 case 0xdc: /* FPU 0xdc */
2767 switch ( modrm )
2769 case 0xc0 ... 0xc7: /* fadd %stN */
2770 case 0xc8 ... 0xcf: /* fmul %stN */
2771 case 0xe0 ... 0xe7: /* fsubr %stN */
2772 case 0xe8 ... 0xef: /* fsub %stN */
2773 case 0xf0 ... 0xf7: /* fdivr %stN */
2774 case 0xf8 ... 0xff: /* fdiv %stN */
2775 emulate_fpu_insn_stub(0xdc, modrm);
2776 break;
2777 default:
2778 fail_if(modrm >= 0xc0);
2779 ea.bytes = 8;
2780 src = ea;
2781 if ( (rc = ops->read(src.mem.seg, src.mem.off, &src.val,
2782 src.bytes, ctxt)) != 0 )
2783 goto done;
2784 switch ( modrm_reg & 7 )
2786 case 0: /* fadd m64fp */
2787 emulate_fpu_insn_memsrc("faddl", src.val);
2788 break;
2789 case 1: /* fmul m64fp */
2790 emulate_fpu_insn_memsrc("fmull", src.val);
2791 break;
2792 case 2: /* fcom m64fp */
2793 emulate_fpu_insn_memsrc("fcoml", src.val);
2794 break;
2795 case 3: /* fcomp m64fp */
2796 emulate_fpu_insn_memsrc("fcompl", src.val);
2797 break;
2798 case 4: /* fsub m64fp */
2799 emulate_fpu_insn_memsrc("fsubl", src.val);
2800 break;
2801 case 5: /* fsubr m64fp */
2802 emulate_fpu_insn_memsrc("fsubrl", src.val);
2803 break;
2804 case 6: /* fdiv m64fp */
2805 emulate_fpu_insn_memsrc("fdivl", src.val);
2806 break;
2807 case 7: /* fdivr m64fp */
2808 emulate_fpu_insn_memsrc("fdivrl", src.val);
2809 break;
2812 break;
/* FPU opcode 0xdd: register forms via stub; memory forms are m64fp plus
 * the 2-byte status word store. Fix: 'fst m64fp' previously used the
 * memsrc macro on dst.val, so the store result never reached dst.val
 * before the OP_MEM writeback - it must use memdst, exactly like the
 * sibling fstp case below. */
2814 case 0xdd: /* FPU 0xdd */
2815 switch ( modrm )
2817 case 0xc0 ... 0xc7: /* ffree %stN */
2818 case 0xd0 ... 0xd7: /* fst %stN */
2819 case 0xd8 ... 0xdf: /* fstp %stN */
2820 case 0xe0 ... 0xe7: /* fucom %stN */
2821 case 0xe8 ... 0xef: /* fucomp %stN */
2822 emulate_fpu_insn_stub(0xdd, modrm);
2823 break;
2824 default:
2825 fail_if(modrm >= 0xc0);
2826 switch ( modrm_reg & 7 )
2828 case 0: /* fld m64fp */;
2829 ea.bytes = 8;
2830 src = ea;
2831 if ( (rc = ops->read(src.mem.seg, src.mem.off, &src.val,
2832 src.bytes, ctxt)) != 0 )
2833 goto done;
2834 emulate_fpu_insn_memsrc("fldl", src.val);
2835 break;
2836 case 1: /* fisttp m64i */
2837 ea.bytes = 8;
2838 dst = ea;
2839 dst.type = OP_MEM;
2840 emulate_fpu_insn_memdst("fisttpll", dst.val);
2841 break;
2842 case 2: /* fst m64fp */
2843 ea.bytes = 8;
2844 dst = ea;
2845 dst.type = OP_MEM;
2846 emulate_fpu_insn_memdst("fstl", dst.val);
2847 break;
2848 case 3: /* fstp m64fp */
2849 ea.bytes = 8;
2850 dst = ea;
2851 dst.type = OP_MEM;
2852 emulate_fpu_insn_memdst("fstpl", dst.val);
2853 break;
2854 case 7: /* fnstsw m2byte */
2855 ea.bytes = 2;
2856 dst = ea;
2857 dst.type = OP_MEM;
2858 emulate_fpu_insn_memdst("fnstsw", dst.val);
2859 break;
2860 default:
2861 goto cannot_emulate;
2864 break;
/* FPU opcode 0xde: register pop-forms via stub; memory forms are m16int -
 * 2-byte reads, and the un-suffixed AT&T fi* mnemonics are the 16-bit
 * integer forms, so these are correct as written. */
2866 case 0xde: /* FPU 0xde */
2867 switch ( modrm )
2869 case 0xc0 ... 0xc7: /* faddp %stN */
2870 case 0xc8 ... 0xcf: /* fmulp %stN */
2871 case 0xd9: /* fcompp */
2872 case 0xe0 ... 0xe7: /* fsubrp %stN */
2873 case 0xe8 ... 0xef: /* fsubp %stN */
2874 case 0xf0 ... 0xf7: /* fdivrp %stN */
2875 case 0xf8 ... 0xff: /* fdivp %stN */
2876 emulate_fpu_insn_stub(0xde, modrm);
2877 break;
2878 default:
2879 fail_if(modrm >= 0xc0);
2880 ea.bytes = 2;
2881 src = ea;
2882 if ( (rc = ops->read(src.mem.seg, src.mem.off, &src.val,
2883 src.bytes, ctxt)) != 0 )
2884 goto done;
2885 switch ( modrm_reg & 7 )
2887 case 0: /* fiadd m16i */
2888 emulate_fpu_insn_memsrc("fiadd", src.val);
2889 break;
2890 case 1: /* fimul m16i */
2891 emulate_fpu_insn_memsrc("fimul", src.val);
2892 break;
2893 case 2: /* ficom m16i */
2894 emulate_fpu_insn_memsrc("ficom", src.val);
2895 break;
2896 case 3: /* ficomp m16i */
2897 emulate_fpu_insn_memsrc("ficomp", src.val);
2898 break;
2899 case 4: /* fisub m16i */
2900 emulate_fpu_insn_memsrc("fisub", src.val);
2901 break;
2902 case 5: /* fisubr m16i */
2903 emulate_fpu_insn_memsrc("fisubr", src.val);
2904 break;
2905 case 6: /* fidiv m16i */
2906 emulate_fpu_insn_memsrc("fidiv", src.val);
2907 break;
2908 case 7: /* fidivr m16i */
2909 emulate_fpu_insn_memsrc("fidivr", src.val);
2910 break;
2911 default:
2912 goto cannot_emulate;
2915 break;
/* FPU opcode 0xdf: fnstsw %ax and fcomip/fucomip via stub; memory forms
 * are m16int/m64int/m80bcd. Fix: 'fbld m80dec' previously assigned
 * 'dst = ea;' but then read through src.mem.seg/src.mem.off/src.bytes,
 * which were never initialized - an uninitialized-source read. fbld is a
 * memory *load*, so initialize src (not dst) from ea, matching the fld
 * m80fp case of opcode 0xdb. */
2917 case 0xdf: /* FPU 0xdf */
2918 switch ( modrm )
2920 case 0xe0:
2921 /* fnstsw %ax */
2922 dst.bytes = 2;
2923 dst.type = OP_REG;
2924 dst.reg = (unsigned long *)&_regs.eax;
2925 emulate_fpu_insn_memdst("fnstsw", dst.val);
2926 break;
2927 case 0xf0 ... 0xf7: /* fcomip %stN */
2928 case 0xf8 ... 0xff: /* fucomip %stN */
2929 emulate_fpu_insn_stub(0xdf, modrm);
2930 break;
2931 default:
2932 fail_if(modrm >= 0xc0);
2933 switch ( modrm_reg & 7 )
2935 case 0: /* fild m16i */
2936 ea.bytes = 2;
2937 src = ea;
2938 if ( (rc = ops->read(src.mem.seg, src.mem.off, &src.val,
2939 src.bytes, ctxt)) != 0 )
2940 goto done;
2941 emulate_fpu_insn_memsrc("fild", src.val);
2942 break;
2943 case 1: /* fisttp m16i */
2944 ea.bytes = 2;
2945 dst = ea;
2946 dst.type = OP_MEM;
2947 emulate_fpu_insn_memdst("fisttp", dst.val);
2948 break;
2949 case 2: /* fist m16i */
2950 ea.bytes = 2;
2951 dst = ea;
2952 dst.type = OP_MEM;
2953 emulate_fpu_insn_memdst("fist", dst.val);
2954 break;
2955 case 3: /* fistp m16i */
2956 ea.bytes = 2;
2957 dst = ea;
2958 dst.type = OP_MEM;
2959 emulate_fpu_insn_memdst("fistp", dst.val);
2960 break;
2961 case 4: /* fbld m80dec */
2962 ea.bytes = 10;
2963 src = ea;
2964 if ( (rc = ops->read(src.mem.seg, src.mem.off,
2965 &src.val, src.bytes, ctxt)) != 0 )
2966 goto done;
2967 emulate_fpu_insn_memdst("fbld", src.val);
2968 break;
2969 case 5: /* fild m64i */
2970 ea.bytes = 8;
2971 src = ea;
2972 if ( (rc = ops->read(src.mem.seg, src.mem.off, &src.val,
2973 src.bytes, ctxt)) != 0 )
2974 goto done;
2975 emulate_fpu_insn_memsrc("fildll", src.val);
2976 break;
2977 case 6: /* fbstp packed bcd */
2978 ea.bytes = 10;
2979 dst = ea;
2980 dst.type = OP_MEM;
2981 emulate_fpu_insn_memdst("fbstp", dst.val);
2982 break;
2983 case 7: /* fistp m64i */
2984 ea.bytes = 8;
2985 dst = ea;
2986 dst.type = OP_MEM;
2987 emulate_fpu_insn_memdst("fistpll", dst.val);
2988 break;
2989 default:
2990 goto cannot_emulate;
2993 break;
/* loop/loopz/loopnz: decrement the count register at the current address
 * size, then branch if the count is non-zero (and, for the z/nz forms,
 * the ZF condition holds). */
2995 case 0xe0 ... 0xe2: /* loop{,z,nz} */ {
2996 int rel = insn_fetch_type(int8_t);
2997 int do_jmp = !(_regs.eflags & EFLG_ZF); /* loopnz */
2998 if ( b == 0xe1 )
2999 do_jmp = !do_jmp; /* loopz */
3000 else if ( b == 0xe2 )
3001 do_jmp = 1; /* loop */
3002 switch ( ad_bytes )
3004 case 2:
3005 do_jmp &= --(*(uint16_t *)&_regs.ecx) != 0;
3006 break;
3007 case 4:
3008 do_jmp &= --(*(uint32_t *)&_regs.ecx) != 0;
3009 _regs.ecx = (uint32_t)_regs.ecx; /* zero extend in x86/64 mode */
3010 break;
3011 default: /* case 8: */
3012 do_jmp &= --_regs.ecx != 0;
3013 break;
3015 if ( do_jmp )
3016 jmp_rel(rel);
3017 break;
/* jcxz/jecxz/jrcxz: branch if the count register is zero. */
3020 case 0xe3: /* jcxz/jecxz (short) */ {
3021 int rel = insn_fetch_type(int8_t);
3022 if ( (ad_bytes == 2) ? !(uint16_t)_regs.ecx :
3023 (ad_bytes == 4) ? !(uint32_t)_regs.ecx : !_regs.ecx )
3024 jmp_rel(rel);
3025 break;
/* I/O port instructions: port comes from imm8 (opcodes < 0xe8) or %dx;
 * bit 0 of the opcode selects byte vs full operand size, bit 1 selects
 * out vs in. Access is gated by the TSS I/O permission check. */
3028 case 0xe4: /* in imm8,%al */
3029 case 0xe5: /* in imm8,%eax */
3030 case 0xe6: /* out %al,imm8 */
3031 case 0xe7: /* out %eax,imm8 */
3032 case 0xec: /* in %dx,%al */
3033 case 0xed: /* in %dx,%eax */
3034 case 0xee: /* out %al,%dx */
3035 case 0xef: /* out %eax,%dx */ {
3036 unsigned int port = ((b < 0xe8)
3037 ? insn_fetch_type(uint8_t)
3038 : (uint16_t)_regs.edx);
3039 op_bytes = !(b & 1) ? 1 : (op_bytes == 8) ? 4 : op_bytes;
3040 if ( (rc = ioport_access_check(port, op_bytes, ctxt, ops)) != 0 )
3041 goto done;
3042 if ( b & 2 )
3044 /* out */
3045 fail_if(ops->write_io == NULL);
3046 rc = ops->write_io(port, op_bytes, _regs.eax, ctxt);
3048 else
3050 /* in */
3051 dst.type = OP_REG;
3052 dst.bytes = op_bytes;
3053 dst.reg = (unsigned long *)&_regs.eax;
3054 fail_if(ops->read_io == NULL);
3055 rc = ops->read_io(port, dst.bytes, &dst.val, ctxt);
3057 if ( rc != 0 )
3058 goto done;
3059 break;
/* Near relative call: compute the target, then push the return address
 * via the common push path. */
3062 case 0xe8: /* call (near) */ {
3063 int rel = (((op_bytes == 2) && !mode_64bit())
3064 ? (int32_t)insn_fetch_type(int16_t)
3065 : insn_fetch_type(int32_t));
3066 op_bytes = mode_64bit() ? 8 : op_bytes;
3067 src.val = _regs.eip;
3068 jmp_rel(rel);
3069 goto push;
3072 case 0xe9: /* jmp (near) */ {
3073 int rel = (((op_bytes == 2) && !mode_64bit())
3074 ? (int32_t)insn_fetch_type(int16_t)
3075 : insn_fetch_type(int32_t));
3076 jmp_rel(rel);
3077 break;
/* Far absolute jump: load the new CS and EIP; invalid in 64-bit mode. */
3080 case 0xea: /* jmp (far, absolute) */ {
3081 uint16_t sel;
3082 uint32_t eip;
3083 generate_exception_if(mode_64bit(), EXC_UD, -1);
3084 eip = insn_fetch_bytes(op_bytes);
3085 sel = insn_fetch_type(uint16_t);
3086 if ( (rc = load_seg(x86_seg_cs, sel, ctxt, ops)) != 0 )
3087 goto done;
3088 _regs.eip = eip;
3089 break;
3092 case 0xeb: /* jmp (short) */ {
3093 int rel = insn_fetch_type(int8_t);
3094 jmp_rel(rel);
3095 break;
/* int1/icebp: undocumented single-byte debug-exception generator. */
3098 case 0xf1: /* int1 (icebp) */
3099 src.val = EXC_DB;
3100 goto swint;
/* hlt: reported to the caller via the retire flags rather than emulated. */
3102 case 0xf4: /* hlt */
3103 ctxt->retire.flags.hlt = 1;
3104 break;
3106 case 0xf5: /* cmc */
3107 _regs.eflags ^= EFLG_CF;
3108 break;
/* Grp3 (0xf6 = byte, 0xf7 = full operand size): test/not/neg plus the
 * widening multiply and divide family operating on the %edx:%eax pair. */
3110 case 0xf6 ... 0xf7: /* Grp3 */
3111 switch ( modrm_reg & 7 )
3113 case 0 ... 1: /* test */
3114 /* Special case in Grp3: test has an immediate source operand. */
3115 src.type = OP_IMM;
3116 src.bytes = (d & ByteOp) ? 1 : op_bytes;
3117 if ( src.bytes == 8 ) src.bytes = 4;
3118 switch ( src.bytes )
3120 case 1: src.val = insn_fetch_type(int8_t); break;
3121 case 2: src.val = insn_fetch_type(int16_t); break;
3122 case 4: src.val = insn_fetch_type(int32_t); break;
3124 goto test;
3125 case 2: /* not */
3126 dst.val = ~dst.val;
3127 break;
3128 case 3: /* neg */
3129 emulate_1op("neg", dst, _regs.eflags);
3130 break;
/* mul: unsigned widening multiply of accumulator by the r/m operand;
 * high half goes to %dx/%edx/%rdx, OF/CF set if the high half is
 * non-zero. */
3131 case 4: /* mul */
3132 src = dst;
3133 dst.type = OP_REG;
3134 dst.reg = (unsigned long *)&_regs.eax;
3135 dst.val = *dst.reg;
3136 _regs.eflags &= ~(EFLG_OF|EFLG_CF);
3137 switch ( src.bytes )
3139 case 1:
3140 dst.val = (uint8_t)dst.val;
3141 dst.val *= src.val;
3142 if ( (uint8_t)dst.val != (uint16_t)dst.val )
3143 _regs.eflags |= EFLG_OF|EFLG_CF;
3144 dst.bytes = 2;
3145 break;
3146 case 2:
3147 dst.val = (uint16_t)dst.val;
3148 dst.val *= src.val;
3149 if ( (uint16_t)dst.val != (uint32_t)dst.val )
3150 _regs.eflags |= EFLG_OF|EFLG_CF;
3151 *(uint16_t *)&_regs.edx = dst.val >> 16;
3152 break;
3153 #ifdef __x86_64__
3154 case 4:
3155 dst.val = (uint32_t)dst.val;
3156 dst.val *= src.val;
3157 if ( (uint32_t)dst.val != dst.val )
3158 _regs.eflags |= EFLG_OF|EFLG_CF;
3159 _regs.edx = (uint32_t)(dst.val >> 32);
3160 break;
3161 #endif
3162 default: {
/* Full-width case: use the double-precision helper. */
3163 unsigned long m[2] = { src.val, dst.val };
3164 if ( mul_dbl(m) )
3165 _regs.eflags |= EFLG_OF|EFLG_CF;
3166 _regs.edx = m[1];
3167 dst.val = m[0];
3168 break;
3171 break;
/* imul: signed widening multiply; OF/CF set when the result does not fit
 * in the low half (i.e. the high half is not a sign extension). */
3172 case 5: /* imul */
3173 src = dst;
3174 dst.type = OP_REG;
3175 dst.reg = (unsigned long *)&_regs.eax;
3176 dst.val = *dst.reg;
3177 _regs.eflags &= ~(EFLG_OF|EFLG_CF);
3178 switch ( src.bytes )
3180 case 1:
3181 dst.val = ((uint16_t)(int8_t)src.val *
3182 (uint16_t)(int8_t)dst.val);
3183 if ( (int8_t)dst.val != (uint16_t)dst.val )
3184 _regs.eflags |= EFLG_OF|EFLG_CF;
3185 dst.bytes = 2;
3186 break;
3187 case 2:
3188 dst.val = ((uint32_t)(int16_t)src.val *
3189 (uint32_t)(int16_t)dst.val);
3190 if ( (int16_t)dst.val != (uint32_t)dst.val )
3191 _regs.eflags |= EFLG_OF|EFLG_CF;
3192 *(uint16_t *)&_regs.edx = dst.val >> 16;
3193 break;
3194 #ifdef __x86_64__
3195 case 4:
3196 dst.val = ((uint64_t)(int32_t)src.val *
3197 (uint64_t)(int32_t)dst.val);
3198 if ( (int32_t)dst.val != dst.val )
3199 _regs.eflags |= EFLG_OF|EFLG_CF;
3200 _regs.edx = (uint32_t)(dst.val >> 32);
3201 break;
3202 #endif
3203 default: {
3204 unsigned long m[2] = { src.val, dst.val };
3205 if ( imul_dbl(m) )
3206 _regs.eflags |= EFLG_OF|EFLG_CF;
3207 _regs.edx = m[1];
3208 dst.val = m[0];
3209 break;
3212 break;
/* div: unsigned divide of the double-width dividend in %edx:%eax;
 * quotient to %eax, remainder to %edx. #DE on divide-by-zero or if the
 * quotient does not fit the destination width. */
3213 case 6: /* div */ {
3214 unsigned long u[2], v;
3215 src = dst;
3216 dst.type = OP_REG;
3217 dst.reg = (unsigned long *)&_regs.eax;
3218 switch ( src.bytes )
3220 case 1:
3221 u[0] = (uint16_t)_regs.eax;
3222 u[1] = 0;
3223 v = (uint8_t)src.val;
3224 generate_exception_if(
3225 div_dbl(u, v) || ((uint8_t)u[0] != (uint16_t)u[0]),
3226 EXC_DE, -1);
3227 dst.val = (uint8_t)u[0];
3228 ((uint8_t *)&_regs.eax)[1] = u[1];
3229 break;
3230 case 2:
3231 u[0] = ((uint32_t)_regs.edx << 16) | (uint16_t)_regs.eax;
3232 u[1] = 0;
3233 v = (uint16_t)src.val;
3234 generate_exception_if(
3235 div_dbl(u, v) || ((uint16_t)u[0] != (uint32_t)u[0]),
3236 EXC_DE, -1);
3237 dst.val = (uint16_t)u[0];
3238 *(uint16_t *)&_regs.edx = u[1];
3239 break;
3240 #ifdef __x86_64__
3241 case 4:
3242 u[0] = (_regs.edx << 32) | (uint32_t)_regs.eax;
3243 u[1] = 0;
3244 v = (uint32_t)src.val;
3245 generate_exception_if(
3246 div_dbl(u, v) || ((uint32_t)u[0] != u[0]),
3247 EXC_DE, -1);
3248 dst.val = (uint32_t)u[0];
3249 _regs.edx = (uint32_t)u[1];
3250 break;
3251 #endif
3252 default:
3253 u[0] = _regs.eax;
3254 u[1] = _regs.edx;
3255 v = src.val;
3256 generate_exception_if(div_dbl(u, v), EXC_DE, -1);
3257 dst.val = u[0];
3258 _regs.edx = u[1];
3259 break;
3261 break;
/* idiv: signed counterpart of div; dividend sign-extended into u[1]. */
3263 case 7: /* idiv */ {
3264 unsigned long u[2], v;
3265 src = dst;
3266 dst.type = OP_REG;
3267 dst.reg = (unsigned long *)&_regs.eax;
3268 switch ( src.bytes )
3270 case 1:
3271 u[0] = (int16_t)_regs.eax;
3272 u[1] = ((long)u[0] < 0) ? ~0UL : 0UL;
3273 v = (int8_t)src.val;
3274 generate_exception_if(
3275 idiv_dbl(u, v) || ((int8_t)u[0] != (int16_t)u[0]),
3276 EXC_DE, -1);
3277 dst.val = (int8_t)u[0];
3278 ((int8_t *)&_regs.eax)[1] = u[1];
3279 break;
3280 case 2:
3281 u[0] = (int32_t)((_regs.edx << 16) | (uint16_t)_regs.eax);
3282 u[1] = ((long)u[0] < 0) ? ~0UL : 0UL;
3283 v = (int16_t)src.val;
3284 generate_exception_if(
3285 idiv_dbl(u, v) || ((int16_t)u[0] != (int32_t)u[0]),
3286 EXC_DE, -1);
3287 dst.val = (int16_t)u[0];
3288 *(int16_t *)&_regs.edx = u[1];
3289 break;
3290 #ifdef __x86_64__
3291 case 4:
3292 u[0] = (_regs.edx << 32) | (uint32_t)_regs.eax;
3293 u[1] = ((long)u[0] < 0) ? ~0UL : 0UL;
3294 v = (int32_t)src.val;
3295 generate_exception_if(
3296 idiv_dbl(u, v) || ((int32_t)u[0] != u[0]),
3297 EXC_DE, -1);
3298 dst.val = (int32_t)u[0];
3299 _regs.edx = (uint32_t)u[1];
3300 break;
3301 #endif
3302 default:
3303 u[0] = _regs.eax;
3304 u[1] = _regs.edx;
3305 v = src.val;
3306 generate_exception_if(idiv_dbl(u, v), EXC_DE, -1);
3307 dst.val = u[0];
3308 _regs.edx = u[1];
3309 break;
3311 break;
3313 default:
3314 goto cannot_emulate;
3316 break;
3318 case 0xf8: /* clc */
3319 _regs.eflags &= ~EFLG_CF;
3320 break;
3322 case 0xf9: /* stc */
3323 _regs.eflags |= EFLG_CF;
3324 break;
3326 case 0xfa: /* cli */
3327 generate_exception_if(!mode_iopl(), EXC_GP, 0);
3328 _regs.eflags &= ~EFLG_IF;
3329 break;
3331 case 0xfb: /* sti */
3332 generate_exception_if(!mode_iopl(), EXC_GP, 0);
3333 if ( !(_regs.eflags & EFLG_IF) )
3335 _regs.eflags |= EFLG_IF;
3336 ctxt->retire.flags.sti = 1;
3338 break;
3340 case 0xfc: /* cld */
3341 _regs.eflags &= ~EFLG_DF;
3342 break;
3344 case 0xfd: /* std */
3345 _regs.eflags |= EFLG_DF;
3346 break;
3348 case 0xfe: /* Grp4 */
3349 generate_exception_if((modrm_reg & 7) >= 2, EXC_UD, -1);
3350 case 0xff: /* Grp5 */
3351 switch ( modrm_reg & 7 )
3353 case 0: /* inc */
3354 emulate_1op("inc", dst, _regs.eflags);
3355 break;
3356 case 1: /* dec */
3357 emulate_1op("dec", dst, _regs.eflags);
3358 break;
3359 case 2: /* call (near) */
3360 case 4: /* jmp (near) */
3361 if ( (dst.bytes != 8) && mode_64bit() )
3363 dst.bytes = op_bytes = 8;
3364 if ( dst.type == OP_REG )
3365 dst.val = *dst.reg;
3366 else if ( (rc = read_ulong(dst.mem.seg, dst.mem.off,
3367 &dst.val, 8, ctxt, ops)) != 0 )
3368 goto done;
3370 src.val = _regs.eip;
3371 _regs.eip = dst.val;
3372 if ( (modrm_reg & 7) == 2 )
3373 goto push; /* call */
3374 dst.type = OP_NONE;
3375 break;
3376 case 3: /* call (far, absolute indirect) */
3377 case 5: /* jmp (far, absolute indirect) */ {
3378 unsigned long sel;
3380 generate_exception_if(dst.type != OP_MEM, EXC_UD, -1);
3382 if ( (rc = read_ulong(dst.mem.seg, dst.mem.off+dst.bytes,
3383 &sel, 2, ctxt, ops)) )
3384 goto done;
3386 if ( (modrm_reg & 7) == 3 ) /* call */
3388 struct segment_register reg;
3389 fail_if(ops->read_segment == NULL);
3390 if ( (rc = ops->read_segment(x86_seg_cs, &reg, ctxt)) ||
3391 (rc = ops->write(x86_seg_ss, sp_pre_dec(op_bytes),
3392 &reg.sel, op_bytes, ctxt)) ||
3393 (rc = ops->write(x86_seg_ss, sp_pre_dec(op_bytes),
3394 &_regs.eip, op_bytes, ctxt)) )
3395 goto done;
3398 if ( (rc = load_seg(x86_seg_cs, sel, ctxt, ops)) != 0 )
3399 goto done;
3400 _regs.eip = dst.val;
3402 dst.type = OP_NONE;
3403 break;
3405 case 6: /* push */
3406 /* 64-bit mode: PUSH defaults to a 64-bit operand. */
3407 if ( mode_64bit() && (dst.bytes == 4) )
3409 dst.bytes = 8;
3410 if ( dst.type == OP_REG )
3411 dst.val = *dst.reg;
3412 else if ( (rc = read_ulong(dst.mem.seg, dst.mem.off,
3413 &dst.val, 8, ctxt, ops)) != 0 )
3414 goto done;
3416 if ( (rc = ops->write(x86_seg_ss, sp_pre_dec(dst.bytes),
3417 &dst.val, dst.bytes, ctxt)) != 0 )
3418 goto done;
3419 dst.type = OP_NONE;
3420 break;
3421 case 7:
3422 generate_exception_if(1, EXC_UD, -1);
3423 default:
3424 goto cannot_emulate;
3426 break;
3429 writeback:
3430 switch ( dst.type )
3432 case OP_REG:
3433 /* The 4-byte case *is* correct: in 64-bit mode we zero-extend. */
3434 switch ( dst.bytes )
3436 case 1: *(uint8_t *)dst.reg = (uint8_t)dst.val; break;
3437 case 2: *(uint16_t *)dst.reg = (uint16_t)dst.val; break;
3438 case 4: *dst.reg = (uint32_t)dst.val; break; /* 64b: zero-ext */
3439 case 8: *dst.reg = dst.val; break;
3441 break;
3442 case OP_MEM:
3443 if ( !(d & Mov) && (dst.orig_val == dst.val) &&
3444 !ctxt->force_writeback )
3445 /* nothing to do */;
3446 else if ( lock_prefix )
3447 rc = ops->cmpxchg(
3448 dst.mem.seg, dst.mem.off, &dst.orig_val,
3449 &dst.val, dst.bytes, ctxt);
3450 else
3451 rc = ops->write(
3452 dst.mem.seg, dst.mem.off, &dst.val, dst.bytes, ctxt);
3453 if ( rc != 0 )
3454 goto done;
3455 default:
3456 break;
3459 /* Inject #DB if single-step tracing was enabled at instruction start. */
3460 if ( (ctxt->regs->eflags & EFLG_TF) && (rc == X86EMUL_OKAY) &&
3461 (ops->inject_hw_exception != NULL) )
3462 rc = ops->inject_hw_exception(EXC_DB, -1, ctxt) ? : X86EMUL_EXCEPTION;
3464 /* Commit shadow register state. */
3465 _regs.eflags &= ~EFLG_RF;
3466 *ctxt->regs = _regs;
3468 done:
3469 return rc;
3471 twobyte_insn:
3472 switch ( b )
3474 case 0x00: /* Grp6 */
3475 fail_if((modrm_reg & 6) != 2);
3476 generate_exception_if(!in_protmode(ctxt, ops), EXC_UD, -1);
3477 generate_exception_if(!mode_ring0(), EXC_GP, 0);
3478 if ( (rc = load_seg((modrm_reg & 1) ? x86_seg_tr : x86_seg_ldtr,
3479 src.val, ctxt, ops)) != 0 )
3480 goto done;
3481 break;
3483 case 0x01: /* Grp7 */ {
3484 struct segment_register reg;
3485 unsigned long base, limit, cr0, cr0w;
3487 if ( modrm == 0xdf ) /* invlpga */
3489 generate_exception_if(!in_protmode(ctxt, ops), EXC_UD, -1);
3490 generate_exception_if(!mode_ring0(), EXC_GP, 0);
3491 fail_if(ops->invlpg == NULL);
3492 if ( (rc = ops->invlpg(x86_seg_none, truncate_ea(_regs.eax),
3493 ctxt)) )
3494 goto done;
3495 break;
3498 switch ( modrm_reg & 7 )
3500 case 0: /* sgdt */
3501 case 1: /* sidt */
3502 generate_exception_if(ea.type != OP_MEM, EXC_UD, -1);
3503 fail_if(ops->read_segment == NULL);
3504 if ( (rc = ops->read_segment((modrm_reg & 1) ?
3505 x86_seg_idtr : x86_seg_gdtr,
3506 &reg, ctxt)) )
3507 goto done;
3508 if ( op_bytes == 2 )
3509 reg.base &= 0xffffff;
3510 if ( (rc = ops->write(ea.mem.seg, ea.mem.off+0,
3511 &reg.limit, 2, ctxt)) ||
3512 (rc = ops->write(ea.mem.seg, ea.mem.off+2,
3513 &reg.base, mode_64bit() ? 8 : 4, ctxt)) )
3514 goto done;
3515 break;
3516 case 2: /* lgdt */
3517 case 3: /* lidt */
3518 generate_exception_if(ea.type != OP_MEM, EXC_UD, -1);
3519 fail_if(ops->write_segment == NULL);
3520 memset(&reg, 0, sizeof(reg));
3521 if ( (rc = read_ulong(ea.mem.seg, ea.mem.off+0,
3522 &limit, 2, ctxt, ops)) ||
3523 (rc = read_ulong(ea.mem.seg, ea.mem.off+2,
3524 &base, mode_64bit() ? 8 : 4, ctxt, ops)) )
3525 goto done;
3526 reg.base = base;
3527 reg.limit = limit;
3528 if ( op_bytes == 2 )
3529 reg.base &= 0xffffff;
3530 if ( (rc = ops->write_segment((modrm_reg & 1) ?
3531 x86_seg_idtr : x86_seg_gdtr,
3532 &reg, ctxt)) )
3533 goto done;
3534 break;
3535 case 4: /* smsw */
3536 if ( ea.type == OP_MEM )
3537 ea.bytes = 2;
3538 dst = ea;
3539 fail_if(ops->read_cr == NULL);
3540 if ( (rc = ops->read_cr(0, &dst.val, ctxt)) )
3541 goto done;
3542 d |= Mov; /* force writeback */
3543 break;
3544 case 6: /* lmsw */
3545 fail_if(ops->read_cr == NULL);
3546 fail_if(ops->write_cr == NULL);
3547 if ( (rc = ops->read_cr(0, &cr0, ctxt)) )
3548 goto done;
3549 if ( ea.type == OP_REG )
3550 cr0w = *ea.reg;
3551 else if ( (rc = read_ulong(ea.mem.seg, ea.mem.off,
3552 &cr0w, 2, ctxt, ops)) )
3553 goto done;
3554 /* LMSW can: (1) set bits 0-3; (2) clear bits 1-3. */
3555 cr0 = (cr0 & ~0xe) | (cr0w & 0xf);
3556 if ( (rc = ops->write_cr(0, cr0, ctxt)) )
3557 goto done;
3558 break;
3559 case 7: /* invlpg */
3560 generate_exception_if(!mode_ring0(), EXC_GP, 0);
3561 generate_exception_if(ea.type != OP_MEM, EXC_UD, -1);
3562 fail_if(ops->invlpg == NULL);
3563 if ( (rc = ops->invlpg(ea.mem.seg, ea.mem.off, ctxt)) )
3564 goto done;
3565 break;
3566 default:
3567 goto cannot_emulate;
3569 break;
3572 case 0x05: /* syscall */ {
3573 uint64_t msr_content;
3574 struct segment_register cs = { 0 }, ss = { 0 };
3575 int rc;
3577 fail_if(ops->read_msr == NULL);
3578 fail_if(ops->read_segment == NULL);
3579 fail_if(ops->write_segment == NULL);
3581 generate_exception_if(in_realmode(ctxt, ops), EXC_UD, 0);
3582 generate_exception_if(!in_protmode(ctxt, ops), EXC_UD, 0);
3583 generate_exception_if(lock_prefix, EXC_UD, 0);
3585 /* Inject #UD if syscall/sysret are disabled. */
3586 rc = ops->read_msr(MSR_EFER, &msr_content, ctxt);
3587 fail_if(rc != 0);
3588 generate_exception_if((msr_content & EFER_SCE) == 0, EXC_UD, 0);
3590 rc = ops->read_msr(MSR_STAR, &msr_content, ctxt);
3591 fail_if(rc != 0);
3593 msr_content >>= 32;
3594 cs.sel = (uint16_t)(msr_content & 0xfffc);
3595 ss.sel = (uint16_t)(msr_content + 8);
3597 cs.base = ss.base = 0; /* flat segment */
3598 cs.limit = ss.limit = ~0u; /* 4GB limit */
3599 cs.attr.bytes = 0xc9b; /* G+DB+P+S+Code */
3600 ss.attr.bytes = 0xc93; /* G+DB+P+S+Data */
3602 #ifdef __x86_64__
3603 if ( in_longmode(ctxt, ops) )
3605 cs.attr.fields.db = 0;
3606 cs.attr.fields.l = 1;
3608 _regs.rcx = _regs.rip;
3609 _regs.r11 = _regs.eflags & ~EFLG_RF;
3611 rc = ops->read_msr(mode_64bit() ? MSR_LSTAR : MSR_CSTAR,
3612 &msr_content, ctxt);
3613 fail_if(rc != 0);
3615 _regs.rip = msr_content;
3617 rc = ops->read_msr(MSR_FMASK, &msr_content, ctxt);
3618 fail_if(rc != 0);
3619 _regs.eflags &= ~(msr_content | EFLG_RF);
3621 else
3622 #endif
3624 rc = ops->read_msr(MSR_STAR, &msr_content, ctxt);
3625 fail_if(rc != 0);
3627 _regs.ecx = _regs.eip;
3628 _regs.eip = (uint32_t)msr_content;
3629 _regs.eflags &= ~(EFLG_VM | EFLG_IF | EFLG_RF);
3632 if ( (rc = ops->write_segment(x86_seg_cs, &cs, ctxt)) ||
3633 (rc = ops->write_segment(x86_seg_ss, &ss, ctxt)) )
3634 goto done;
3636 break;
3639 case 0x06: /* clts */
3640 generate_exception_if(!mode_ring0(), EXC_GP, 0);
3641 fail_if((ops->read_cr == NULL) || (ops->write_cr == NULL));
3642 if ( (rc = ops->read_cr(0, &dst.val, ctxt)) ||
3643 (rc = ops->write_cr(0, dst.val&~8, ctxt)) )
3644 goto done;
3645 break;
3647 case 0x08: /* invd */
3648 case 0x09: /* wbinvd */
3649 generate_exception_if(!mode_ring0(), EXC_GP, 0);
3650 fail_if(ops->wbinvd == NULL);
3651 if ( (rc = ops->wbinvd(ctxt)) != 0 )
3652 goto done;
3653 break;
3655 case 0x0d: /* GrpP (prefetch) */
3656 case 0x18: /* Grp16 (prefetch/nop) */
3657 case 0x19 ... 0x1f: /* nop (amd-defined) */
3658 break;
3660 case 0x20: /* mov cr,reg */
3661 case 0x21: /* mov dr,reg */
3662 case 0x22: /* mov reg,cr */
3663 case 0x23: /* mov reg,dr */
3664 generate_exception_if(ea.type != OP_REG, EXC_UD, -1);
3665 generate_exception_if(!mode_ring0(), EXC_GP, 0);
3666 modrm_reg |= lock_prefix << 3;
3667 if ( b & 2 )
3669 /* Write to CR/DR. */
3670 src.val = *(unsigned long *)decode_register(modrm_rm, &_regs, 0);
3671 if ( !mode_64bit() )
3672 src.val = (uint32_t)src.val;
3673 rc = ((b & 1)
3674 ? (ops->write_dr
3675 ? ops->write_dr(modrm_reg, src.val, ctxt)
3676 : X86EMUL_UNHANDLEABLE)
3677 : (ops->write_cr
3678 ? ops->write_cr(modrm_reg, src.val, ctxt)
3679 : X86EMUL_UNHANDLEABLE));
3681 else
3683 /* Read from CR/DR. */
3684 dst.type = OP_REG;
3685 dst.bytes = mode_64bit() ? 8 : 4;
3686 dst.reg = decode_register(modrm_rm, &_regs, 0);
3687 rc = ((b & 1)
3688 ? (ops->read_dr
3689 ? ops->read_dr(modrm_reg, &dst.val, ctxt)
3690 : X86EMUL_UNHANDLEABLE)
3691 : (ops->read_cr
3692 ? ops->read_cr(modrm_reg, &dst.val, ctxt)
3693 : X86EMUL_UNHANDLEABLE));
3695 if ( rc != 0 )
3696 goto done;
3697 break;
3699 case 0x30: /* wrmsr */ {
3700 uint64_t val = ((uint64_t)_regs.edx << 32) | (uint32_t)_regs.eax;
3701 generate_exception_if(!mode_ring0(), EXC_GP, 0);
3702 fail_if(ops->write_msr == NULL);
3703 if ( (rc = ops->write_msr((uint32_t)_regs.ecx, val, ctxt)) != 0 )
3704 goto done;
3705 break;
3708 case 0x31: /* rdtsc */ {
3709 unsigned long cr4;
3710 uint64_t val;
3711 fail_if(ops->read_cr == NULL);
3712 if ( (rc = ops->read_cr(4, &cr4, ctxt)) )
3713 goto done;
3714 generate_exception_if((cr4 & CR4_TSD) && !mode_ring0(), EXC_GP, 0);
3715 fail_if(ops->read_msr == NULL);
3716 if ( (rc = ops->read_msr(MSR_TSC, &val, ctxt)) != 0 )
3717 goto done;
3718 _regs.edx = (uint32_t)(val >> 32);
3719 _regs.eax = (uint32_t)(val >> 0);
3720 break;
3723 case 0x32: /* rdmsr */ {
3724 uint64_t val;
3725 generate_exception_if(!mode_ring0(), EXC_GP, 0);
3726 fail_if(ops->read_msr == NULL);
3727 if ( (rc = ops->read_msr((uint32_t)_regs.ecx, &val, ctxt)) != 0 )
3728 goto done;
3729 _regs.edx = (uint32_t)(val >> 32);
3730 _regs.eax = (uint32_t)(val >> 0);
3731 break;
3734 case 0x40 ... 0x4f: /* cmovcc */
3735 dst.val = src.val;
3736 if ( !test_cc(b, _regs.eflags) )
3737 dst.type = OP_NONE;
3738 break;
3740 case 0x34: /* sysenter */ {
3741 uint64_t msr_content;
3742 struct segment_register cs, ss;
3743 int rc;
3745 fail_if(ops->read_msr == NULL);
3746 fail_if(ops->read_segment == NULL);
3747 fail_if(ops->write_segment == NULL);
3749 generate_exception_if(mode_ring0(), EXC_GP, 0);
3750 generate_exception_if(in_realmode(ctxt, ops), EXC_GP, 0);
3751 generate_exception_if(!in_protmode(ctxt, ops), EXC_GP, 0);
3752 generate_exception_if(lock_prefix, EXC_UD, 0);
3754 rc = ops->read_msr(MSR_SYSENTER_CS, &msr_content, ctxt);
3755 fail_if(rc != 0);
3757 if ( mode_64bit() )
3758 generate_exception_if(msr_content == 0, EXC_GP, 0);
3759 else
3760 generate_exception_if((msr_content & 0xfffc) == 0, EXC_GP, 0);
3762 _regs.eflags &= ~(EFLG_VM | EFLG_IF | EFLG_RF);
3764 ops->read_segment(x86_seg_cs, &cs, ctxt);
3765 cs.sel = (uint16_t)msr_content & ~3; /* SELECTOR_RPL_MASK */
3766 cs.base = 0; /* flat segment */
3767 cs.limit = ~0u; /* 4GB limit */
3768 cs.attr.bytes = 0xc9b; /* G+DB+P+S+Code */
3770 ss.sel = cs.sel + 8;
3771 ss.base = 0; /* flat segment */
3772 ss.limit = ~0u; /* 4GB limit */
3773 ss.attr.bytes = 0xc93; /* G+DB+P+S+Data */
3775 if ( in_longmode(ctxt, ops) )
3777 cs.attr.fields.db = 0;
3778 cs.attr.fields.l = 1;
3781 rc = ops->write_segment(x86_seg_cs, &cs, ctxt);
3782 fail_if(rc != 0);
3783 rc = ops->write_segment(x86_seg_ss, &ss, ctxt);
3784 fail_if(rc != 0);
3786 rc = ops->read_msr(MSR_SYSENTER_EIP, &msr_content, ctxt);
3787 fail_if(rc != 0);
3788 _regs.eip = msr_content;
3790 rc = ops->read_msr(MSR_SYSENTER_ESP, &msr_content, ctxt);
3791 fail_if(rc != 0);
3792 _regs.esp = msr_content;
3794 break;
3797 case 0x35: /* sysexit */ {
3798 uint64_t msr_content;
3799 struct segment_register cs, ss;
3800 int user64 = !!(rex_prefix & 8); /* REX.W */
3801 int rc;
3803 fail_if(ops->read_msr == NULL);
3804 fail_if(ops->read_segment == NULL);
3805 fail_if(ops->write_segment == NULL);
3807 generate_exception_if(!mode_ring0(), EXC_GP, 0);
3808 generate_exception_if(in_realmode(ctxt, ops), EXC_GP, 0);
3809 generate_exception_if(!in_protmode(ctxt, ops), EXC_GP, 0);
3810 generate_exception_if(lock_prefix, EXC_UD, 0);
3812 rc = ops->read_msr(MSR_SYSENTER_CS, &msr_content, ctxt);
3813 fail_if(rc != 0);
3814 rc = ops->read_segment(x86_seg_cs, &cs, ctxt);
3815 fail_if(rc != 0);
3817 if ( user64 )
3819 cs.sel = (uint16_t)(msr_content + 32);
3820 ss.sel = (cs.sel + 8);
3821 generate_exception_if(msr_content == 0, EXC_GP, 0);
3823 else
3825 cs.sel = (uint16_t)(msr_content + 16);
3826 ss.sel = (uint16_t)(msr_content + 24);
3827 generate_exception_if((msr_content & 0xfffc) == 0, EXC_GP, 0);
3830 cs.sel |= 0x3; /* SELECTOR_RPL_MASK */
3831 cs.base = 0; /* flat segment */
3832 cs.limit = ~0u; /* 4GB limit */
3833 cs.attr.bytes = 0xcfb; /* G+DB+P+DPL3+S+Code */
3835 ss.sel |= 0x3; /* SELECTOR_RPL_MASK */
3836 ss.base = 0; /* flat segment */
3837 ss.limit = ~0u; /* 4GB limit */
3838 ss.attr.bytes = 0xcf3; /* G+DB+P+DPL3+S+Data */
3840 if ( user64 )
3842 cs.attr.fields.db = 0;
3843 cs.attr.fields.l = 1;
3846 rc = ops->write_segment(x86_seg_cs, &cs, ctxt);
3847 fail_if(rc != 0);
3848 rc = ops->write_segment(x86_seg_ss, &ss, ctxt);
3849 fail_if(rc != 0);
3851 _regs.eip = _regs.edx;
3852 _regs.esp = _regs.ecx;
3853 break;
3856 case 0x6f: /* movq mm/m64,mm */ {
3857 uint8_t stub[] = { 0x0f, 0x6f, modrm, 0xc3 };
3858 struct fpu_insn_ctxt fic = { .insn_bytes = sizeof(stub)-1 };
3859 uint64_t val;
3860 if ( ea.type == OP_MEM )
3862 unsigned long lval, hval;
3863 if ( (rc = read_ulong(ea.mem.seg, ea.mem.off+0,
3864 &lval, 4, ctxt, ops)) ||
3865 (rc = read_ulong(ea.mem.seg, ea.mem.off+4,
3866 &hval, 4, ctxt, ops)) )
3867 goto done;
3868 val = ((uint64_t)hval << 32) | (uint32_t)lval;
3869 stub[2] = modrm & 0x38; /* movq (%eax),%mmN */
3871 get_fpu(X86EMUL_FPU_mmx, &fic);
3872 asm volatile ( "call *%0" : : "r" (stub), "a" (&val) : "memory" );
3873 put_fpu(&fic);
3874 break;
3877 case 0x7f: /* movq mm,mm/m64 */ {
3878 uint8_t stub[] = { 0x0f, 0x7f, modrm, 0xc3 };
3879 struct fpu_insn_ctxt fic = { .insn_bytes = sizeof(stub)-1 };
3880 uint64_t val;
3881 if ( ea.type == OP_MEM )
3882 stub[2] = modrm & 0x38; /* movq %mmN,(%eax) */
3883 get_fpu(X86EMUL_FPU_mmx, &fic);
3884 asm volatile ( "call *%0" : : "r" (stub), "a" (&val) : "memory" );
3885 put_fpu(&fic);
3886 if ( ea.type == OP_MEM )
3888 unsigned long lval = (uint32_t)val, hval = (uint32_t)(val >> 32);
3889 if ( (rc = ops->write(ea.mem.seg, ea.mem.off+0, &lval, 4, ctxt)) ||
3890 (rc = ops->write(ea.mem.seg, ea.mem.off+4, &hval, 4, ctxt)) )
3891 goto done;
3893 break;
3896 case 0x80 ... 0x8f: /* jcc (near) */ {
3897 int rel = (((op_bytes == 2) && !mode_64bit())
3898 ? (int32_t)insn_fetch_type(int16_t)
3899 : insn_fetch_type(int32_t));
3900 if ( test_cc(b, _regs.eflags) )
3901 jmp_rel(rel);
3902 break;
3905 case 0x90 ... 0x9f: /* setcc */
3906 dst.val = test_cc(b, _regs.eflags);
3907 break;
3909 case 0xa0: /* push %%fs */
3910 src.val = x86_seg_fs;
3911 goto push_seg;
3913 case 0xa1: /* pop %%fs */
3914 src.val = x86_seg_fs;
3915 goto pop_seg;
3917 case 0xa2: /* cpuid */ {
3918 unsigned int eax = _regs.eax, ebx = _regs.ebx;
3919 unsigned int ecx = _regs.ecx, edx = _regs.edx;
3920 fail_if(ops->cpuid == NULL);
3921 if ( (rc = ops->cpuid(&eax, &ebx, &ecx, &edx, ctxt)) != 0 )
3922 goto done;
3923 _regs.eax = eax; _regs.ebx = ebx;
3924 _regs.ecx = ecx; _regs.edx = edx;
3925 break;
3928 case 0xa8: /* push %%gs */
3929 src.val = x86_seg_gs;
3930 goto push_seg;
3932 case 0xa9: /* pop %%gs */
3933 src.val = x86_seg_gs;
3934 goto pop_seg;
3936 case 0xb0 ... 0xb1: /* cmpxchg */
3937 /* Save real source value, then compare EAX against destination. */
3938 src.orig_val = src.val;
3939 src.val = _regs.eax;
3940 emulate_2op_SrcV("cmp", src, dst, _regs.eflags);
3941 if ( _regs.eflags & EFLG_ZF )
3943 /* Success: write back to memory. */
3944 dst.val = src.orig_val;
3946 else
3948 /* Failure: write the value we saw to EAX. */
3949 dst.type = OP_REG;
3950 dst.reg = (unsigned long *)&_regs.eax;
3952 break;
3954 case 0xa3: bt: /* bt */
3955 emulate_2op_SrcV_nobyte("bt", src, dst, _regs.eflags);
3956 dst.type = OP_NONE;
3957 break;
3959 case 0xa4: /* shld imm8,r,r/m */
3960 case 0xa5: /* shld %%cl,r,r/m */
3961 case 0xac: /* shrd imm8,r,r/m */
3962 case 0xad: /* shrd %%cl,r,r/m */ {
3963 uint8_t shift, width = dst.bytes << 3;
3964 shift = (b & 1) ? (uint8_t)_regs.ecx : insn_fetch_type(uint8_t);
3965 if ( (shift &= width - 1) == 0 )
3966 break;
3967 dst.orig_val = truncate_word(dst.val, dst.bytes);
3968 dst.val = ((shift == width) ? src.val :
3969 (b & 8) ?
3970 /* shrd */
3971 ((dst.orig_val >> shift) |
3972 truncate_word(src.val << (width - shift), dst.bytes)) :
3973 /* shld */
3974 ((dst.orig_val << shift) |
3975 ((src.val >> (width - shift)) & ((1ull << shift) - 1))));
3976 dst.val = truncate_word(dst.val, dst.bytes);
3977 _regs.eflags &= ~(EFLG_OF|EFLG_SF|EFLG_ZF|EFLG_PF|EFLG_CF);
3978 if ( (dst.val >> ((b & 8) ? (shift - 1) : (width - shift))) & 1 )
3979 _regs.eflags |= EFLG_CF;
3980 if ( ((dst.val ^ dst.orig_val) >> (width - 1)) & 1 )
3981 _regs.eflags |= EFLG_OF;
3982 _regs.eflags |= ((dst.val >> (width - 1)) & 1) ? EFLG_SF : 0;
3983 _regs.eflags |= (dst.val == 0) ? EFLG_ZF : 0;
3984 _regs.eflags |= even_parity(dst.val) ? EFLG_PF : 0;
3985 break;
3988 case 0xb3: btr: /* btr */
3989 emulate_2op_SrcV_nobyte("btr", src, dst, _regs.eflags);
3990 break;
3992 case 0xab: bts: /* bts */
3993 emulate_2op_SrcV_nobyte("bts", src, dst, _regs.eflags);
3994 break;
3996 case 0xaf: /* imul */
3997 _regs.eflags &= ~(EFLG_OF|EFLG_CF);
3998 switch ( dst.bytes )
4000 case 2:
4001 dst.val = ((uint32_t)(int16_t)src.val *
4002 (uint32_t)(int16_t)dst.val);
4003 if ( (int16_t)dst.val != (uint32_t)dst.val )
4004 _regs.eflags |= EFLG_OF|EFLG_CF;
4005 break;
4006 #ifdef __x86_64__
4007 case 4:
4008 dst.val = ((uint64_t)(int32_t)src.val *
4009 (uint64_t)(int32_t)dst.val);
4010 if ( (int32_t)dst.val != dst.val )
4011 _regs.eflags |= EFLG_OF|EFLG_CF;
4012 break;
4013 #endif
4014 default: {
4015 unsigned long m[2] = { src.val, dst.val };
4016 if ( imul_dbl(m) )
4017 _regs.eflags |= EFLG_OF|EFLG_CF;
4018 dst.val = m[0];
4019 break;
4022 break;
4024 case 0xb2: /* lss */
4025 dst.val = x86_seg_ss;
4026 goto les;
4028 case 0xb4: /* lfs */
4029 dst.val = x86_seg_fs;
4030 goto les;
4032 case 0xb5: /* lgs */
4033 dst.val = x86_seg_gs;
4034 goto les;
4036 case 0xb6: /* movzx rm8,r{16,32,64} */
4037 /* Recompute DstReg as we may have decoded AH/BH/CH/DH. */
4038 dst.reg = decode_register(modrm_reg, &_regs, 0);
4039 dst.bytes = op_bytes;
4040 dst.val = (uint8_t)src.val;
4041 break;
4043 case 0xbc: /* bsf */ {
4044 int zf;
4045 asm ( "bsf %2,%0; setz %b1"
4046 : "=r" (dst.val), "=q" (zf)
4047 : "r" (src.val), "1" (0) );
4048 _regs.eflags &= ~EFLG_ZF;
4049 if ( zf )
4051 _regs.eflags |= EFLG_ZF;
4052 dst.type = OP_NONE;
4054 break;
4057 case 0xbd: /* bsr */ {
4058 int zf;
4059 asm ( "bsr %2,%0; setz %b1"
4060 : "=r" (dst.val), "=q" (zf)
4061 : "r" (src.val), "1" (0) );
4062 _regs.eflags &= ~EFLG_ZF;
4063 if ( zf )
4065 _regs.eflags |= EFLG_ZF;
4066 dst.type = OP_NONE;
4068 break;
4071 case 0xb7: /* movzx rm16,r{16,32,64} */
4072 dst.val = (uint16_t)src.val;
4073 break;
4075 case 0xbb: btc: /* btc */
4076 emulate_2op_SrcV_nobyte("btc", src, dst, _regs.eflags);
4077 break;
4079 case 0xba: /* Grp8 */
4080 switch ( modrm_reg & 7 )
4082 case 4: goto bt;
4083 case 5: goto bts;
4084 case 6: goto btr;
4085 case 7: goto btc;
4086 default: generate_exception_if(1, EXC_UD, -1);
4088 break;
4090 case 0xbe: /* movsx rm8,r{16,32,64} */
4091 /* Recompute DstReg as we may have decoded AH/BH/CH/DH. */
4092 dst.reg = decode_register(modrm_reg, &_regs, 0);
4093 dst.bytes = op_bytes;
4094 dst.val = (int8_t)src.val;
4095 break;
4097 case 0xbf: /* movsx rm16,r{16,32,64} */
4098 dst.val = (int16_t)src.val;
4099 break;
4101 case 0xc0 ... 0xc1: /* xadd */
4102 /* Write back the register source. */
4103 switch ( dst.bytes )
4105 case 1: *(uint8_t *)src.reg = (uint8_t)dst.val; break;
4106 case 2: *(uint16_t *)src.reg = (uint16_t)dst.val; break;
4107 case 4: *src.reg = (uint32_t)dst.val; break; /* 64b reg: zero-extend */
4108 case 8: *src.reg = dst.val; break;
4110 goto add;
4112 case 0xc3: /* movnti */
4113 /* Ignore the non-temporal hint for now. */
4114 generate_exception_if(dst.bytes <= 2, EXC_UD, -1);
4115 dst.val = src.val;
4116 break;
4118 case 0xc7: /* Grp9 (cmpxchg8b/cmpxchg16b) */ {
4119 unsigned long old[2], exp[2], new[2];
4120 unsigned int i;
4122 generate_exception_if((modrm_reg & 7) != 1, EXC_UD, -1);
4123 generate_exception_if(ea.type != OP_MEM, EXC_UD, -1);
4124 op_bytes *= 2;
4126 /* Get actual old value. */
4127 for ( i = 0; i < (op_bytes/sizeof(long)); i++ )
4128 if ( (rc = read_ulong(ea.mem.seg, ea.mem.off + i*sizeof(long),
4129 &old[i], sizeof(long), ctxt, ops)) != 0 )
4130 goto done;
4132 /* Get expected and proposed values. */
4133 if ( op_bytes == 8 )
4135 ((uint32_t *)exp)[0] = _regs.eax; ((uint32_t *)exp)[1] = _regs.edx;
4136 ((uint32_t *)new)[0] = _regs.ebx; ((uint32_t *)new)[1] = _regs.ecx;
4138 else
4140 exp[0] = _regs.eax; exp[1] = _regs.edx;
4141 new[0] = _regs.ebx; new[1] = _regs.ecx;
4144 if ( memcmp(old, exp, op_bytes) )
4146 /* Expected != actual: store actual to rDX:rAX and clear ZF. */
4147 _regs.eax = (op_bytes == 8) ? ((uint32_t *)old)[0] : old[0];
4148 _regs.edx = (op_bytes == 8) ? ((uint32_t *)old)[1] : old[1];
4149 _regs.eflags &= ~EFLG_ZF;
4151 else
4153 /* Expected == actual: attempt atomic cmpxchg and set ZF. */
4154 if ( (rc = ops->cmpxchg(ea.mem.seg, ea.mem.off, old,
4155 new, op_bytes, ctxt)) != 0 )
4156 goto done;
4157 _regs.eflags |= EFLG_ZF;
4159 break;
4162 case 0xc8 ... 0xcf: /* bswap */
4163 dst.type = OP_REG;
4164 dst.reg = decode_register(
4165 (b & 7) | ((rex_prefix & 1) << 3), &_regs, 0);
4166 switch ( dst.bytes = op_bytes )
4168 default: /* case 2: */
4169 /* Undefined behaviour. Writes zero on all tested CPUs. */
4170 dst.val = 0;
4171 break;
4172 case 4:
4173 #ifdef __x86_64__
4174 asm ( "bswap %k0" : "=r" (dst.val) : "0" (*dst.reg) );
4175 break;
4176 case 8:
4177 #endif
4178 asm ( "bswap %0" : "=r" (dst.val) : "0" (*dst.reg) );
4179 break;
4181 break;
4183 goto writeback;
4185 cannot_emulate:
4186 return X86EMUL_UNHANDLEABLE;