xen/arch/x86/x86_emulate.c @ 17269:ba3356c82317 (ia64/xen-unstable)

x86_emulate: Remove the CPL and IOPL check in the I/O handling code.

The check is already carried out by the processor during VMEXIT, where
that is required.

Signed-off-by: Xu Dongxiao <dongxiao.xu@intel.com>
author Keir Fraser <keir.fraser@citrix.com>
date Wed Mar 19 14:08:02 2008 +0000 (2008-03-19)
parents f45aa9a14db4
children e7abfeee2808
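
For context, the removed guard in the I/O opcode handlers was roughly of the following shape -- a sketch reconstructed from helpers still present in the file (mode_iopl, generate_exception_if), not necessarily the exact deleted hunk:

    /* Removed: the CPU itself performs this privilege test where required. */
    generate_exception_if(!mode_iopl(), EXC_GP, 0);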
1 /******************************************************************************
2 * x86_emulate.c
3 *
4 * Generic x86 (32-bit and 64-bit) instruction decoder and emulator.
5 *
6 * Copyright (c) 2005-2007 Keir Fraser
7 * Copyright (c) 2005-2007 XenSource Inc.
8 *
9 * This program is free software; you can redistribute it and/or modify
10 * it under the terms of the GNU General Public License as published by
11 * the Free Software Foundation; either version 2 of the License, or
12 * (at your option) any later version.
13 *
14 * This program is distributed in the hope that it will be useful,
15 * but WITHOUT ANY WARRANTY; without even the implied warranty of
16 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
17 * GNU General Public License for more details.
18 *
19 * You should have received a copy of the GNU General Public License
20 * along with this program; if not, write to the Free Software
21 * Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
22 */
24 #ifndef __XEN__
25 #include <stddef.h>
26 #include <stdint.h>
27 #include <string.h>
28 #include <public/xen.h>
29 #else
30 #include <xen/config.h>
31 #include <xen/types.h>
32 #include <xen/lib.h>
33 #include <asm/regs.h>
34 #undef cmpxchg
35 #endif
36 #include <asm-x86/x86_emulate.h>
38 /* Operand sizes: 8-bit operands or specified/overridden size. */
39 #define ByteOp (1<<0) /* 8-bit operands. */
40 /* Destination operand type. */
41 #define DstBitBase (0<<1) /* Memory operand, bit string. */
42 #define ImplicitOps (1<<1) /* Implicit in opcode. No generic decode. */
43 #define DstReg (2<<1) /* Register operand. */
44 #define DstMem (3<<1) /* Memory operand. */
45 #define DstMask (3<<1)
46 /* Source operand type. */
47 #define SrcNone (0<<3) /* No source operand. */
48 #define SrcImplicit (0<<3) /* Source operand is implicit in the opcode. */
49 #define SrcReg (1<<3) /* Register operand. */
50 #define SrcMem (2<<3) /* Memory operand. */
51 #define SrcMem16 (3<<3) /* Memory operand (16-bit). */
52 #define SrcImm (4<<3) /* Immediate operand. */
53 #define SrcImmByte (5<<3) /* 8-bit sign-extended immediate operand. */
54 #define SrcMask (7<<3)
55 /* Generic ModRM decode. */
56 #define ModRM (1<<6)
57 /* Destination is only written; never read. */
58 #define Mov (1<<7)
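/*
 * Illustrative reading of an entry: opcode 0x00 (ADD r/m8,r8) appears below
 * as ByteOp|DstMem|SrcReg|ModRM -- an 8-bit operation whose destination is
 * the ModRM-specified r/m and whose source is the ModRM reg field.
 */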
60 static uint8_t opcode_table[256] = {
61 /* 0x00 - 0x07 */
62 ByteOp|DstMem|SrcReg|ModRM, DstMem|SrcReg|ModRM,
63 ByteOp|DstReg|SrcMem|ModRM, DstReg|SrcMem|ModRM,
64 ByteOp|DstReg|SrcImm, DstReg|SrcImm, ImplicitOps, ImplicitOps,
65 /* 0x08 - 0x0F */
66 ByteOp|DstMem|SrcReg|ModRM, DstMem|SrcReg|ModRM,
67 ByteOp|DstReg|SrcMem|ModRM, DstReg|SrcMem|ModRM,
68 ByteOp|DstReg|SrcImm, DstReg|SrcImm, ImplicitOps, 0,
69 /* 0x10 - 0x17 */
70 ByteOp|DstMem|SrcReg|ModRM, DstMem|SrcReg|ModRM,
71 ByteOp|DstReg|SrcMem|ModRM, DstReg|SrcMem|ModRM,
72 ByteOp|DstReg|SrcImm, DstReg|SrcImm, ImplicitOps, ImplicitOps,
73 /* 0x18 - 0x1F */
74 ByteOp|DstMem|SrcReg|ModRM, DstMem|SrcReg|ModRM,
75 ByteOp|DstReg|SrcMem|ModRM, DstReg|SrcMem|ModRM,
76 ByteOp|DstReg|SrcImm, DstReg|SrcImm, ImplicitOps, ImplicitOps,
77 /* 0x20 - 0x27 */
78 ByteOp|DstMem|SrcReg|ModRM, DstMem|SrcReg|ModRM,
79 ByteOp|DstReg|SrcMem|ModRM, DstReg|SrcMem|ModRM,
80 ByteOp|DstReg|SrcImm, DstReg|SrcImm, 0, ImplicitOps,
81 /* 0x28 - 0x2F */
82 ByteOp|DstMem|SrcReg|ModRM, DstMem|SrcReg|ModRM,
83 ByteOp|DstReg|SrcMem|ModRM, DstReg|SrcMem|ModRM,
84 ByteOp|DstReg|SrcImm, DstReg|SrcImm, 0, ImplicitOps,
85 /* 0x30 - 0x37 */
86 ByteOp|DstMem|SrcReg|ModRM, DstMem|SrcReg|ModRM,
87 ByteOp|DstReg|SrcMem|ModRM, DstReg|SrcMem|ModRM,
88 ByteOp|DstReg|SrcImm, DstReg|SrcImm, 0, ImplicitOps,
89 /* 0x38 - 0x3F */
90 ByteOp|DstMem|SrcReg|ModRM, DstMem|SrcReg|ModRM,
91 ByteOp|DstReg|SrcMem|ModRM, DstReg|SrcMem|ModRM,
92 ByteOp|DstReg|SrcImm, DstReg|SrcImm, 0, ImplicitOps,
93 /* 0x40 - 0x4F */
94 ImplicitOps, ImplicitOps, ImplicitOps, ImplicitOps,
95 ImplicitOps, ImplicitOps, ImplicitOps, ImplicitOps,
96 ImplicitOps, ImplicitOps, ImplicitOps, ImplicitOps,
97 ImplicitOps, ImplicitOps, ImplicitOps, ImplicitOps,
98 /* 0x50 - 0x5F */
99 ImplicitOps|Mov, ImplicitOps|Mov, ImplicitOps|Mov, ImplicitOps|Mov,
100 ImplicitOps|Mov, ImplicitOps|Mov, ImplicitOps|Mov, ImplicitOps|Mov,
101 ImplicitOps|Mov, ImplicitOps|Mov, ImplicitOps|Mov, ImplicitOps|Mov,
102 ImplicitOps|Mov, ImplicitOps|Mov, ImplicitOps|Mov, ImplicitOps|Mov,
103 /* 0x60 - 0x67 */
104 ImplicitOps, ImplicitOps, DstReg|SrcMem|ModRM, DstReg|SrcMem16|ModRM|Mov,
105 0, 0, 0, 0,
106 /* 0x68 - 0x6F */
107 ImplicitOps|Mov, DstReg|SrcImm|ModRM|Mov,
108 ImplicitOps|Mov, DstReg|SrcImmByte|ModRM|Mov,
109 ImplicitOps|Mov, ImplicitOps|Mov, ImplicitOps|Mov, ImplicitOps|Mov,
110 /* 0x70 - 0x77 */
111 ImplicitOps, ImplicitOps, ImplicitOps, ImplicitOps,
112 ImplicitOps, ImplicitOps, ImplicitOps, ImplicitOps,
113 /* 0x78 - 0x7F */
114 ImplicitOps, ImplicitOps, ImplicitOps, ImplicitOps,
115 ImplicitOps, ImplicitOps, ImplicitOps, ImplicitOps,
116 /* 0x80 - 0x87 */
117 ByteOp|DstMem|SrcImm|ModRM, DstMem|SrcImm|ModRM,
118 ByteOp|DstMem|SrcImm|ModRM, DstMem|SrcImmByte|ModRM,
119 ByteOp|DstMem|SrcReg|ModRM, DstMem|SrcReg|ModRM,
120 ByteOp|DstMem|SrcReg|ModRM, DstMem|SrcReg|ModRM,
121 /* 0x88 - 0x8F */
122 ByteOp|DstMem|SrcReg|ModRM|Mov, DstMem|SrcReg|ModRM|Mov,
123 ByteOp|DstReg|SrcMem|ModRM|Mov, DstReg|SrcMem|ModRM|Mov,
124 DstMem|SrcReg|ModRM|Mov, DstReg|SrcNone|ModRM,
125 DstReg|SrcMem|ModRM|Mov, DstMem|SrcNone|ModRM|Mov,
126 /* 0x90 - 0x97 */
127 ImplicitOps, ImplicitOps, ImplicitOps, ImplicitOps,
128 ImplicitOps, ImplicitOps, ImplicitOps, ImplicitOps,
129 /* 0x98 - 0x9F */
130 ImplicitOps, ImplicitOps, ImplicitOps, ImplicitOps,
131 ImplicitOps, ImplicitOps, ImplicitOps, ImplicitOps,
132 /* 0xA0 - 0xA7 */
133 ByteOp|ImplicitOps|Mov, ImplicitOps|Mov,
134 ByteOp|ImplicitOps|Mov, ImplicitOps|Mov,
135 ByteOp|ImplicitOps|Mov, ImplicitOps|Mov,
136 ByteOp|ImplicitOps, ImplicitOps,
137 /* 0xA8 - 0xAF */
138 ByteOp|DstReg|SrcImm, DstReg|SrcImm,
139 ByteOp|ImplicitOps|Mov, ImplicitOps|Mov,
140 ByteOp|ImplicitOps|Mov, ImplicitOps|Mov,
141 ByteOp|ImplicitOps, ImplicitOps,
142 /* 0xB0 - 0xB7 */
143 ByteOp|DstReg|SrcImm|Mov, ByteOp|DstReg|SrcImm|Mov,
144 ByteOp|DstReg|SrcImm|Mov, ByteOp|DstReg|SrcImm|Mov,
145 ByteOp|DstReg|SrcImm|Mov, ByteOp|DstReg|SrcImm|Mov,
146 ByteOp|DstReg|SrcImm|Mov, ByteOp|DstReg|SrcImm|Mov,
147 /* 0xB8 - 0xBF */
148 DstReg|SrcImm|Mov, DstReg|SrcImm|Mov, DstReg|SrcImm|Mov, DstReg|SrcImm|Mov,
149 DstReg|SrcImm|Mov, DstReg|SrcImm|Mov, DstReg|SrcImm|Mov, DstReg|SrcImm|Mov,
150 /* 0xC0 - 0xC7 */
151 ByteOp|DstMem|SrcImm|ModRM, DstMem|SrcImmByte|ModRM,
152 ImplicitOps, ImplicitOps,
153 DstReg|SrcMem|ModRM|Mov, DstReg|SrcMem|ModRM|Mov,
154 ByteOp|DstMem|SrcImm|ModRM|Mov, DstMem|SrcImm|ModRM|Mov,
155 /* 0xC8 - 0xCF */
156 ImplicitOps, ImplicitOps, ImplicitOps, ImplicitOps,
157 ImplicitOps, ImplicitOps, ImplicitOps, ImplicitOps,
158 /* 0xD0 - 0xD7 */
159 ByteOp|DstMem|SrcImplicit|ModRM, DstMem|SrcImplicit|ModRM,
160 ByteOp|DstMem|SrcImplicit|ModRM, DstMem|SrcImplicit|ModRM,
161 ImplicitOps, ImplicitOps, ImplicitOps, ImplicitOps,
162 /* 0xD8 - 0xDF */
163 0, ImplicitOps|ModRM|Mov, 0, ImplicitOps|ModRM|Mov,
164 0, ImplicitOps|ModRM|Mov, ImplicitOps|ModRM|Mov, ImplicitOps|ModRM|Mov,
165 /* 0xE0 - 0xE7 */
166 ImplicitOps, ImplicitOps, ImplicitOps, ImplicitOps,
167 ImplicitOps, ImplicitOps, ImplicitOps, ImplicitOps,
168 /* 0xE8 - 0xEF */
169 ImplicitOps, ImplicitOps, ImplicitOps, ImplicitOps,
170 ImplicitOps, ImplicitOps, ImplicitOps, ImplicitOps,
171 /* 0xF0 - 0xF7 */
172 0, ImplicitOps, 0, 0,
173 ImplicitOps, ImplicitOps,
174 ByteOp|DstMem|SrcNone|ModRM, DstMem|SrcNone|ModRM,
175 /* 0xF8 - 0xFF */
176 ImplicitOps, ImplicitOps, ImplicitOps, ImplicitOps,
177 ImplicitOps, ImplicitOps, ByteOp|DstMem|SrcNone|ModRM, DstMem|SrcNone|ModRM
178 };
180 static uint8_t twobyte_table[256] = {
181 /* 0x00 - 0x07 */
182 0, ImplicitOps|ModRM, 0, 0, 0, 0, ImplicitOps, 0,
183 /* 0x08 - 0x0F */
184 ImplicitOps, ImplicitOps, 0, 0, 0, ImplicitOps|ModRM, 0, 0,
185 /* 0x10 - 0x17 */
186 0, 0, 0, 0, 0, 0, 0, 0,
187 /* 0x18 - 0x1F */
188 ImplicitOps|ModRM, ImplicitOps|ModRM, ImplicitOps|ModRM, ImplicitOps|ModRM,
189 ImplicitOps|ModRM, ImplicitOps|ModRM, ImplicitOps|ModRM, ImplicitOps|ModRM,
190 /* 0x20 - 0x27 */
191 ImplicitOps|ModRM, ImplicitOps|ModRM, ImplicitOps|ModRM, ImplicitOps|ModRM,
192 0, 0, 0, 0,
193 /* 0x28 - 0x2F */
194 0, 0, 0, 0, 0, 0, 0, 0,
195 /* 0x30 - 0x37 */
196 ImplicitOps, ImplicitOps, ImplicitOps, 0, 0, 0, 0, 0,
197 /* 0x38 - 0x3F */
198 0, 0, 0, 0, 0, 0, 0, 0,
199 /* 0x40 - 0x47 */
200 DstReg|SrcMem|ModRM|Mov, DstReg|SrcMem|ModRM|Mov,
201 DstReg|SrcMem|ModRM|Mov, DstReg|SrcMem|ModRM|Mov,
202 DstReg|SrcMem|ModRM|Mov, DstReg|SrcMem|ModRM|Mov,
203 DstReg|SrcMem|ModRM|Mov, DstReg|SrcMem|ModRM|Mov,
204 /* 0x48 - 0x4F */
205 DstReg|SrcMem|ModRM|Mov, DstReg|SrcMem|ModRM|Mov,
206 DstReg|SrcMem|ModRM|Mov, DstReg|SrcMem|ModRM|Mov,
207 DstReg|SrcMem|ModRM|Mov, DstReg|SrcMem|ModRM|Mov,
208 DstReg|SrcMem|ModRM|Mov, DstReg|SrcMem|ModRM|Mov,
209 /* 0x50 - 0x5F */
210 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
211 /* 0x60 - 0x6F */
212 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
213 /* 0x70 - 0x7F */
214 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
215 /* 0x80 - 0x87 */
216 ImplicitOps, ImplicitOps, ImplicitOps, ImplicitOps,
217 ImplicitOps, ImplicitOps, ImplicitOps, ImplicitOps,
218 /* 0x88 - 0x8F */
219 ImplicitOps, ImplicitOps, ImplicitOps, ImplicitOps,
220 ImplicitOps, ImplicitOps, ImplicitOps, ImplicitOps,
221 /* 0x90 - 0x97 */
222 ByteOp|DstMem|SrcNone|ModRM|Mov, ByteOp|DstMem|SrcNone|ModRM|Mov,
223 ByteOp|DstMem|SrcNone|ModRM|Mov, ByteOp|DstMem|SrcNone|ModRM|Mov,
224 ByteOp|DstMem|SrcNone|ModRM|Mov, ByteOp|DstMem|SrcNone|ModRM|Mov,
225 ByteOp|DstMem|SrcNone|ModRM|Mov, ByteOp|DstMem|SrcNone|ModRM|Mov,
226 /* 0x98 - 0x9F */
227 ByteOp|DstMem|SrcNone|ModRM|Mov, ByteOp|DstMem|SrcNone|ModRM|Mov,
228 ByteOp|DstMem|SrcNone|ModRM|Mov, ByteOp|DstMem|SrcNone|ModRM|Mov,
229 ByteOp|DstMem|SrcNone|ModRM|Mov, ByteOp|DstMem|SrcNone|ModRM|Mov,
230 ByteOp|DstMem|SrcNone|ModRM|Mov, ByteOp|DstMem|SrcNone|ModRM|Mov,
231 /* 0xA0 - 0xA7 */
232 ImplicitOps, ImplicitOps, ImplicitOps, DstBitBase|SrcReg|ModRM,
233 DstMem|SrcReg|ModRM, DstMem|SrcReg|ModRM, 0, 0,
234 /* 0xA8 - 0xAF */
235 ImplicitOps, ImplicitOps, 0, DstBitBase|SrcReg|ModRM,
236 DstMem|SrcReg|ModRM, DstMem|SrcReg|ModRM, 0, DstReg|SrcMem|ModRM,
237 /* 0xB0 - 0xB7 */
238 ByteOp|DstMem|SrcReg|ModRM, DstMem|SrcReg|ModRM,
239 DstReg|SrcMem|ModRM|Mov, DstBitBase|SrcReg|ModRM,
240 DstReg|SrcMem|ModRM|Mov, DstReg|SrcMem|ModRM|Mov,
241 ByteOp|DstReg|SrcMem|ModRM|Mov, DstReg|SrcMem16|ModRM|Mov,
242 /* 0xB8 - 0xBF */
243 0, 0, DstBitBase|SrcImmByte|ModRM, DstBitBase|SrcReg|ModRM,
244 DstReg|SrcMem|ModRM, DstReg|SrcMem|ModRM,
245 ByteOp|DstReg|SrcMem|ModRM|Mov, DstReg|SrcMem16|ModRM|Mov,
246 /* 0xC0 - 0xC7 */
247 ByteOp|DstMem|SrcReg|ModRM, DstMem|SrcReg|ModRM, 0, 0,
248 0, 0, 0, ImplicitOps|ModRM,
249 /* 0xC8 - 0xCF */
250 ImplicitOps, ImplicitOps, ImplicitOps, ImplicitOps,
251 ImplicitOps, ImplicitOps, ImplicitOps, ImplicitOps,
252 /* 0xD0 - 0xDF */
253 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
254 /* 0xE0 - 0xEF */
255 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
256 /* 0xF0 - 0xFF */
257 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0
258 };
260 /* Type, address-of, and value of an instruction's operand. */
261 struct operand {
262 enum { OP_REG, OP_MEM, OP_IMM, OP_NONE } type;
263 unsigned int bytes;
264 unsigned long val, orig_val;
265 union {
266 /* OP_REG: Pointer to register field. */
267 unsigned long *reg;
268 /* OP_MEM: Segment and offset. */
269 struct {
270 enum x86_segment seg;
271 unsigned long off;
272 } mem;
273 };
274 };
276 /* MSRs. */
277 #define MSR_TSC 0x10
279 /* Control register flags. */
280 #define CR0_PE (1<<0)
281 #define CR4_TSD (1<<2)
283 /* EFLAGS bit definitions. */
284 #define EFLG_VIP (1<<20)
285 #define EFLG_VIF (1<<19)
286 #define EFLG_AC (1<<18)
287 #define EFLG_VM (1<<17)
288 #define EFLG_RF (1<<16)
289 #define EFLG_NT (1<<14)
290 #define EFLG_IOPL (3<<12)
291 #define EFLG_OF (1<<11)
292 #define EFLG_DF (1<<10)
293 #define EFLG_IF (1<<9)
294 #define EFLG_TF (1<<8)
295 #define EFLG_SF (1<<7)
296 #define EFLG_ZF (1<<6)
297 #define EFLG_AF (1<<4)
298 #define EFLG_PF (1<<2)
299 #define EFLG_CF (1<<0)
301 /* Exception definitions. */
302 #define EXC_DE 0
303 #define EXC_DB 1
304 #define EXC_BP 3
305 #define EXC_OF 4
306 #define EXC_BR 5
307 #define EXC_UD 6
308 #define EXC_TS 10
309 #define EXC_NP 11
310 #define EXC_SS 12
311 #define EXC_GP 13
312 #define EXC_PF 14
313 #define EXC_MF 16
315 /*
316 * Instruction emulation:
317 * Most instructions are emulated directly via a fragment of inline assembly
318 * code. This allows us to save/restore EFLAGS and thus very easily pick up
319 * any modified flags.
320 */
322 #if defined(__x86_64__)
323 #define _LO32 "k" /* force 32-bit operand */
324 #define _STK "%%rsp" /* stack pointer */
325 #define _BYTES_PER_LONG "8"
326 #elif defined(__i386__)
327 #define _LO32 "" /* force 32-bit operand */
328 #define _STK "%%esp" /* stack pointer */
329 #define _BYTES_PER_LONG "4"
330 #endif
332 /*
333 * These EFLAGS bits are restored from saved value during emulation, and
334 * any changes are written back to the saved value after emulation.
335 */
336 #define EFLAGS_MASK (EFLG_OF|EFLG_SF|EFLG_ZF|EFLG_AF|EFLG_PF|EFLG_CF)
338 /* Before executing instruction: restore necessary bits in EFLAGS. */
339 #define _PRE_EFLAGS(_sav, _msk, _tmp) \
340 /* EFLAGS = (_sav & _msk) | (EFLAGS & ~_msk); _sav &= ~_msk; */ \
341 "movl %"_sav",%"_LO32 _tmp"; " \
342 "push %"_tmp"; " \
343 "push %"_tmp"; " \
344 "movl %"_msk",%"_LO32 _tmp"; " \
345 "andl %"_LO32 _tmp",("_STK"); " \
346 "pushf; " \
347 "notl %"_LO32 _tmp"; " \
348 "andl %"_LO32 _tmp",("_STK"); " \
349 "andl %"_LO32 _tmp",2*"_BYTES_PER_LONG"("_STK"); " \
350 "pop %"_tmp"; " \
351 "orl %"_LO32 _tmp",("_STK"); " \
352 "popf; " \
353 "pop %"_sav"; "
355 /* After executing instruction: write-back necessary bits in EFLAGS. */
356 #define _POST_EFLAGS(_sav, _msk, _tmp) \
357 /* _sav |= EFLAGS & _msk; */ \
358 "pushf; " \
359 "pop %"_tmp"; " \
360 "andl %"_msk",%"_LO32 _tmp"; " \
361 "orl %"_LO32 _tmp",%"_sav"; "
363 /* Raw emulation: instruction has two explicit operands. */
364 #define __emulate_2op_nobyte(_op,_src,_dst,_eflags,_wx,_wy,_lx,_ly,_qx,_qy)\
365 do{ unsigned long _tmp; \
366 switch ( (_dst).bytes ) \
367 { \
368 case 2: \
369 asm volatile ( \
370 _PRE_EFLAGS("0","4","2") \
371 _op"w %"_wx"3,%1; " \
372 _POST_EFLAGS("0","4","2") \
373 : "=m" (_eflags), "=m" ((_dst).val), "=&r" (_tmp) \
374 : _wy ((_src).val), "i" (EFLAGS_MASK), \
375 "m" (_eflags), "m" ((_dst).val) ); \
376 break; \
377 case 4: \
378 asm volatile ( \
379 _PRE_EFLAGS("0","4","2") \
380 _op"l %"_lx"3,%1; " \
381 _POST_EFLAGS("0","4","2") \
382 : "=m" (_eflags), "=m" ((_dst).val), "=&r" (_tmp) \
383 : _ly ((_src).val), "i" (EFLAGS_MASK), \
384 "m" (_eflags), "m" ((_dst).val) ); \
385 break; \
386 case 8: \
387 __emulate_2op_8byte(_op, _src, _dst, _eflags, _qx, _qy); \
388 break; \
389 } \
390 } while (0)
391 #define __emulate_2op(_op,_src,_dst,_eflags,_bx,_by,_wx,_wy,_lx,_ly,_qx,_qy)\
392 do{ unsigned long _tmp; \
393 switch ( (_dst).bytes ) \
394 { \
395 case 1: \
396 asm volatile ( \
397 _PRE_EFLAGS("0","4","2") \
398 _op"b %"_bx"3,%1; " \
399 _POST_EFLAGS("0","4","2") \
400 : "=m" (_eflags), "=m" ((_dst).val), "=&r" (_tmp) \
401 : _by ((_src).val), "i" (EFLAGS_MASK), \
402 "m" (_eflags), "m" ((_dst).val) ); \
403 break; \
404 default: \
405 __emulate_2op_nobyte(_op,_src,_dst,_eflags,_wx,_wy,_lx,_ly,_qx,_qy);\
406 break; \
407 } \
408 } while (0)
409 /* Source operand is byte-sized and may be restricted to just %cl. */
410 #define emulate_2op_SrcB(_op, _src, _dst, _eflags) \
411 __emulate_2op(_op, _src, _dst, _eflags, \
412 "b", "c", "b", "c", "b", "c", "b", "c")
413 /* Source operand is byte, word, long or quad sized. */
414 #define emulate_2op_SrcV(_op, _src, _dst, _eflags) \
415 __emulate_2op(_op, _src, _dst, _eflags, \
416 "b", "q", "w", "r", _LO32, "r", "", "r")
417 /* Source operand is word, long or quad sized. */
418 #define emulate_2op_SrcV_nobyte(_op, _src, _dst, _eflags) \
419 __emulate_2op_nobyte(_op, _src, _dst, _eflags, \
420 "w", "r", _LO32, "r", "", "r")
422 /* Instruction has only one explicit operand (no source operand). */
423 #define emulate_1op(_op,_dst,_eflags) \
424 do{ unsigned long _tmp; \
425 switch ( (_dst).bytes ) \
426 { \
427 case 1: \
428 asm volatile ( \
429 _PRE_EFLAGS("0","3","2") \
430 _op"b %1; " \
431 _POST_EFLAGS("0","3","2") \
432 : "=m" (_eflags), "=m" ((_dst).val), "=&r" (_tmp) \
433 : "i" (EFLAGS_MASK), "m" (_eflags), "m" ((_dst).val) ); \
434 break; \
435 case 2: \
436 asm volatile ( \
437 _PRE_EFLAGS("0","3","2") \
438 _op"w %1; " \
439 _POST_EFLAGS("0","3","2") \
440 : "=m" (_eflags), "=m" ((_dst).val), "=&r" (_tmp) \
441 : "i" (EFLAGS_MASK), "m" (_eflags), "m" ((_dst).val) ); \
442 break; \
443 case 4: \
444 asm volatile ( \
445 _PRE_EFLAGS("0","3","2") \
446 _op"l %1; " \
447 _POST_EFLAGS("0","3","2") \
448 : "=m" (_eflags), "=m" ((_dst).val), "=&r" (_tmp) \
449 : "i" (EFLAGS_MASK), "m" (_eflags), "m" ((_dst).val) ); \
450 break; \
451 case 8: \
452 __emulate_1op_8byte(_op, _dst, _eflags); \
453 break; \
454 } \
455 } while (0)
457 /* Emulate an instruction with quadword operands (x86/64 only). */
458 #if defined(__x86_64__)
459 #define __emulate_2op_8byte(_op, _src, _dst, _eflags, _qx, _qy) \
460 do{ asm volatile ( \
461 _PRE_EFLAGS("0","4","2") \
462 _op"q %"_qx"3,%1; " \
463 _POST_EFLAGS("0","4","2") \
464 : "=m" (_eflags), "=m" ((_dst).val), "=&r" (_tmp) \
465 : _qy ((_src).val), "i" (EFLAGS_MASK), \
466 "m" (_eflags), "m" ((_dst).val) ); \
467 } while (0)
468 #define __emulate_1op_8byte(_op, _dst, _eflags) \
469 do{ asm volatile ( \
470 _PRE_EFLAGS("0","3","2") \
471 _op"q %1; " \
472 _POST_EFLAGS("0","3","2") \
473 : "=m" (_eflags), "=m" ((_dst).val), "=&r" (_tmp) \
474 : "i" (EFLAGS_MASK), "m" (_eflags), "m" ((_dst).val) ); \
475 } while (0)
476 #elif defined(__i386__)
477 #define __emulate_2op_8byte(_op, _src, _dst, _eflags, _qx, _qy)
478 #define __emulate_1op_8byte(_op, _dst, _eflags)
479 #endif /* __i386__ */
481 #ifdef __XEN__
482 #define __emulate_fpu_insn(_op) \
483 do{ int _exn; \
484 asm volatile ( \
485 "1: " _op "\n" \
486 "2: \n" \
487 ".section .fixup,\"ax\"\n" \
488 "3: mov $1,%0\n" \
489 " jmp 2b\n" \
490 ".previous\n" \
491 ".section __ex_table,\"a\"\n" \
492 " "__FIXUP_ALIGN"\n" \
493 " "__FIXUP_WORD" 1b,3b\n" \
494 ".previous" \
495 : "=r" (_exn) : "0" (0) ); \
496 generate_exception_if(_exn, EXC_MF, -1); \
497 } while (0)
498 #else
499 #define __emulate_fpu_insn(_op) \
500 do{ rc = X86EMUL_UNHANDLEABLE; \
501 goto done; \
502 } while (0)
503 #endif
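/*
 * In the __XEN__ variant above, a fault on the FPU op at label 1 is routed
 * via the exception-fixup table to label 3, which sets _exn and resumes at
 * label 2; the fault is then reflected to the guest as #MF instead of being
 * taken fatally inside the hypervisor.
 */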
506 /* Fetch next part of the instruction being emulated. */
507 #define insn_fetch_bytes(_size) \
508 ({ unsigned long _x, _eip = _regs.eip; \
509 if ( !mode_64bit() ) _eip = (uint32_t)_eip; /* ignore upper dword */ \
510 _regs.eip += (_size); /* real hardware doesn't truncate */ \
511 generate_exception_if((uint8_t)(_regs.eip - ctxt->regs->eip) > 15, \
512 EXC_GP, 0); \
513 rc = ops->insn_fetch(x86_seg_cs, _eip, &_x, (_size), ctxt); \
514 if ( rc ) goto done; \
515 _x; \
516 })
517 #define insn_fetch_type(_type) ((_type)insn_fetch_bytes(sizeof(_type)))
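/*
 * Illustrative use: insn_fetch_type(int8_t) reads one byte at the shadow
 * %eip, sign-extends it, and advances %eip; decoding past 15 bytes raises
 * #GP, matching the architectural instruction-length limit.
 */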
519 #define truncate_word(ea, byte_width) \
520 ({ unsigned long __ea = (ea); \
521 unsigned int _width = (byte_width); \
522 ((_width == sizeof(unsigned long)) ? __ea : \
523 (__ea & ((1UL << (_width << 3)) - 1))); \
524 })
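/*
 * Worked example: truncate_word(0x11112222, 2) == 0x2222, while
 * truncate_word(ea, sizeof(unsigned long)) returns ea unchanged.
 */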
525 #define truncate_ea(ea) truncate_word((ea), ad_bytes)
527 #define mode_64bit() (def_ad_bytes == 8)
529 #define fail_if(p) \
530 do { \
531 rc = (p) ? X86EMUL_UNHANDLEABLE : X86EMUL_OKAY; \
532 if ( rc ) goto done; \
533 } while (0)
535 #define generate_exception_if(p, e, ec) \
536 ({ if ( (p) ) { \
537 fail_if(ops->inject_hw_exception == NULL); \
538 rc = ops->inject_hw_exception(e, ec, ctxt) ? : X86EMUL_EXCEPTION; \
539 goto done; \
540 } \
541 })
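/*
 * Usage example: generate_exception_if(mode_64bit(), EXC_UD, -1) injects
 * #UD (-1 meaning "no error code") and bails out through 'done' when the
 * condition holds.
 */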
543 /*
544 * Given byte has even parity (even number of 1s)? SDM Vol. 1 Sec. 3.4.3.1,
545 * "Status Flags": EFLAGS.PF reflects parity of least-sig. byte of result only.
546 */
547 static int even_parity(uint8_t v)
548 {
549 asm ( "test %b0,%b0; setp %b0" : "=a" (v) : "0" (v) );
550 return v;
551 }
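/* Example: even_parity(0x03) == 1 (two set bits); even_parity(0x07) == 0. */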
553 /* Update address held in a register, based on addressing mode. */
554 #define _register_address_increment(reg, inc, byte_width) \
555 do { \
556 int _inc = (inc); /* signed type ensures sign extension to long */ \
557 unsigned int _width = (byte_width); \
558 if ( _width == sizeof(unsigned long) ) \
559 (reg) += _inc; \
560 else if ( mode_64bit() ) \
561 (reg) = ((reg) + _inc) & ((1UL << (_width << 3)) - 1); \
562 else \
563 (reg) = ((reg) & ~((1UL << (_width << 3)) - 1)) | \
564 (((reg) + _inc) & ((1UL << (_width << 3)) - 1)); \
565 } while (0)
566 #define register_address_increment(reg, inc) \
567 _register_address_increment((reg), (inc), ad_bytes)
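/*
 * Example: with ad_bytes == 2 and %esp == 0x1234ffff, an increment of 2
 * yields 0x12340001 -- only the low 16 bits wrap, and (outside 64-bit mode)
 * the upper bits are preserved.
 */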
569 #define sp_pre_dec(dec) ({ \
570 _register_address_increment(_regs.esp, -(dec), ctxt->sp_size/8); \
571 truncate_word(_regs.esp, ctxt->sp_size/8); \
572 })
573 #define sp_post_inc(inc) ({ \
574 unsigned long __esp = truncate_word(_regs.esp, ctxt->sp_size/8); \
575 _register_address_increment(_regs.esp, (inc), ctxt->sp_size/8); \
576 __esp; \
577 })
579 #define jmp_rel(rel) \
580 do { \
581 int _rel = (int)(rel); \
582 _regs.eip += _rel; \
583 if ( !mode_64bit() ) \
584 _regs.eip = ((op_bytes == 2) \
585 ? (uint16_t)_regs.eip : (uint32_t)_regs.eip); \
586 } while (0)
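/*
 * Example: with op_bytes == 2, jmp_rel(-2) from %eip == 0x0001 wraps to
 * 0xffff; in 64-bit mode the full %rip is kept untruncated.
 */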
588 static unsigned long __get_rep_prefix(
589 struct cpu_user_regs *int_regs,
590 struct cpu_user_regs *ext_regs,
591 int ad_bytes)
592 {
593 unsigned long ecx = ((ad_bytes == 2) ? (uint16_t)int_regs->ecx :
594 (ad_bytes == 4) ? (uint32_t)int_regs->ecx :
595 int_regs->ecx);
597 /* Skip the instruction if no repetitions are required. */
598 if ( ecx == 0 )
599 ext_regs->eip = int_regs->eip;
601 return ecx;
602 }
604 #define get_rep_prefix() ({ \
605 unsigned long max_reps = 1; \
606 if ( rep_prefix ) \
607 max_reps = __get_rep_prefix(&_regs, ctxt->regs, ad_bytes); \
608 if ( max_reps == 0 ) \
609 goto done; \
610 max_reps; \
611 })
613 static void __put_rep_prefix(
614 struct cpu_user_regs *int_regs,
615 struct cpu_user_regs *ext_regs,
616 int ad_bytes,
617 unsigned long reps_completed)
618 {
619 unsigned long ecx = ((ad_bytes == 2) ? (uint16_t)int_regs->ecx :
620 (ad_bytes == 4) ? (uint32_t)int_regs->ecx :
621 int_regs->ecx);
623 /* Reduce counter appropriately, and repeat instruction if non-zero. */
624 ecx -= reps_completed;
625 if ( ecx != 0 )
626 int_regs->eip = ext_regs->eip;
628 if ( ad_bytes == 2 )
629 *(uint16_t *)&int_regs->ecx = ecx;
630 else if ( ad_bytes == 4 )
631 int_regs->ecx = (uint32_t)ecx;
632 else
633 int_regs->ecx = ecx;
634 }
636 #define put_rep_prefix(reps_completed) ({ \
637 if ( rep_prefix ) \
638 __put_rep_prefix(&_regs, ctxt->regs, ad_bytes, reps_completed); \
639 })
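/*
 * Example: a rep movsb with %ecx == 3 that completes one iteration before
 * returning calls put_rep_prefix(1): %ecx becomes 2 and %eip is wound back
 * so the instruction re-executes on the next emulation pass.
 */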
641 /*
642 * Unsigned multiplication with double-word result.
643 * IN: Multiplicand=m[0], Multiplier=m[1]
644 * OUT: Return CF/OF (overflow status); Result=m[1]:m[0]
645 */
646 static int mul_dbl(unsigned long m[2])
647 {
648 int rc;
649 asm ( "mul %4; seto %b2"
650 : "=a" (m[0]), "=d" (m[1]), "=q" (rc)
651 : "0" (m[0]), "1" (m[1]), "2" (0) );
652 return rc;
653 }
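/*
 * Example (32-bit longs): m = { 0x80000000, 2 } produces m = { 0, 1 } (the
 * 64-bit product 0x100000000) and returns 1, flagging CF/OF overflow.
 */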
655 /*
656 * Signed multiplication with double-word result.
657 * IN: Multiplicand=m[0], Multiplier=m[1]
658 * OUT: Return CF/OF (overflow status); Result=m[1]:m[0]
659 */
660 static int imul_dbl(unsigned long m[2])
661 {
662 int rc;
663 asm ( "imul %4; seto %b2"
664 : "=a" (m[0]), "=d" (m[1]), "=q" (rc)
665 : "0" (m[0]), "1" (m[1]), "2" (0) );
666 return rc;
667 }
669 /*
670 * Unsigned division of double-word dividend.
671 * IN: Dividend=u[1]:u[0], Divisor=v
672 * OUT: Return 1: #DE
673 * Return 0: Quotient=u[0], Remainder=u[1]
674 */
675 static int div_dbl(unsigned long u[2], unsigned long v)
676 {
677 if ( (v == 0) || (u[1] >= v) )
678 return 1;
679 asm ( "div %4"
680 : "=a" (u[0]), "=d" (u[1])
681 : "0" (u[0]), "1" (u[1]), "r" (v) );
682 return 0;
683 }
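/*
 * Example: u = { 7, 0 }, v = 2 gives quotient u[0] == 3 and remainder
 * u[1] == 1; v == 0 or u[1] >= v (quotient too wide) returns 1 for #DE.
 */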
685 /*
686 * Signed division of double-word dividend.
687 * IN: Dividend=u[1]:u[0], Divisor=v
688 * OUT: Return 1: #DE
689 * Return 0: Quotient=u[0], Remainder=u[1]
690 * NB. We don't use idiv directly as it's moderately hard to work out
691 * ahead of time whether it will #DE, which we cannot allow to happen.
692 */
693 static int idiv_dbl(unsigned long u[2], unsigned long v)
694 {
695 int negu = (long)u[1] < 0, negv = (long)v < 0;
697 /* u = abs(u) */
698 if ( negu )
699 {
700 u[1] = ~u[1];
701 if ( (u[0] = -u[0]) == 0 )
702 u[1]++;
703 }
705 /* abs(u) / abs(v) */
706 if ( div_dbl(u, negv ? -v : v) )
707 return 1;
709 /* Remainder has same sign as dividend. It cannot overflow. */
710 if ( negu )
711 u[1] = -u[1];
713 /* Quotient is overflowed if sign bit is set. */
714 if ( negu ^ negv )
715 {
716 if ( (long)u[0] >= 0 )
717 u[0] = -u[0];
718 else if ( (u[0] << 1) != 0 ) /* == 0x80...0 is okay */
719 return 1;
720 }
721 else if ( (long)u[0] < 0 )
722 return 1;
724 return 0;
725 }
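/*
 * Example (32-bit longs): dividing -2^31 (u = { 0x80000000, 0xffffffff })
 * by v == -1 would yield +2^31, unrepresentable in a signed long, so
 * idiv_dbl() returns 1 and the caller raises #DE.
 */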
727 static int
728 test_cc(
729 unsigned int condition, unsigned int flags)
730 {
731 int rc = 0;
733 switch ( (condition & 15) >> 1 )
734 {
735 case 0: /* o */
736 rc |= (flags & EFLG_OF);
737 break;
738 case 1: /* b/c/nae */
739 rc |= (flags & EFLG_CF);
740 break;
741 case 2: /* z/e */
742 rc |= (flags & EFLG_ZF);
743 break;
744 case 3: /* be/na */
745 rc |= (flags & (EFLG_CF|EFLG_ZF));
746 break;
747 case 4: /* s */
748 rc |= (flags & EFLG_SF);
749 break;
750 case 5: /* p/pe */
751 rc |= (flags & EFLG_PF);
752 break;
753 case 7: /* le/ng */
754 rc |= (flags & EFLG_ZF);
755 /* fall through */
756 case 6: /* l/nge */
757 rc |= (!(flags & EFLG_SF) != !(flags & EFLG_OF));
758 break;
759 }
761 /* Odd condition identifiers (lsb == 1) have inverted sense. */
762 return (!!rc ^ (condition & 1));
763 }
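/*
 * Example: condition 0x5 is jne/jnz -- case 2 accumulates EFLG_ZF and the
 * odd lsb inverts the sense, so test_cc(0x5, flags) is true iff ZF == 0.
 */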
765 static int
766 get_cpl(
767 struct x86_emulate_ctxt *ctxt,
768 struct x86_emulate_ops *ops)
769 {
770 struct segment_register reg;
772 if ( ctxt->regs->eflags & EFLG_VM )
773 return 3;
775 if ( (ops->read_segment == NULL) ||
776 ops->read_segment(x86_seg_ss, &reg, ctxt) )
777 return -1;
779 return reg.attr.fields.dpl;
780 }
782 static int
783 _mode_iopl(
784 struct x86_emulate_ctxt *ctxt,
785 struct x86_emulate_ops *ops)
786 {
787 int cpl = get_cpl(ctxt, ops);
788 if ( cpl == -1 )
789 return -1;
790 return ((cpl >= 0) && (cpl <= ((ctxt->regs->eflags >> 12) & 3)));
791 }
793 #define mode_ring0() ({ \
794 int _cpl = get_cpl(ctxt, ops); \
795 fail_if(_cpl < 0); \
796 (_cpl == 0); \
797 })
798 #define mode_iopl() ({ \
799 int _iopl = _mode_iopl(ctxt, ops); \
800 fail_if(_iopl < 0); \
801 _iopl; \
802 })
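/*
 * Example: with EFLAGS.IOPL == 3, mode_iopl() is true at any CPL, so an
 * I/O instruction is architecturally permitted without consulting the TSS
 * permission bitmap.
 */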
804 static int
805 in_realmode(
806 struct x86_emulate_ctxt *ctxt,
807 struct x86_emulate_ops *ops)
808 {
809 unsigned long cr0;
810 int rc;
812 if ( ops->read_cr == NULL )
813 return 0;
815 rc = ops->read_cr(0, &cr0, ctxt);
816 return (!rc && !(cr0 & CR0_PE));
817 }
819 static int
820 realmode_load_seg(
821 enum x86_segment seg,
822 uint16_t sel,
823 struct x86_emulate_ctxt *ctxt,
824 struct x86_emulate_ops *ops)
825 {
826 struct segment_register reg;
827 int rc;
829 if ( (rc = ops->read_segment(seg, &reg, ctxt)) != 0 )
830 return rc;
832 reg.sel = sel;
833 reg.base = (uint32_t)sel << 4;
835 return ops->write_segment(seg, &reg, ctxt);
836 }
838 static int
839 protmode_load_seg(
840 enum x86_segment seg,
841 uint16_t sel,
842 struct x86_emulate_ctxt *ctxt,
843 struct x86_emulate_ops *ops)
844 {
845 struct segment_register desctab, cs, segr;
846 struct { uint32_t a, b; } desc;
847 unsigned long val;
848 uint8_t dpl, rpl, cpl;
849 int rc, fault_type = EXC_TS;
851 /* NULL selector? */
852 if ( (sel & 0xfffc) == 0 )
853 {
854 if ( (seg == x86_seg_cs) || (seg == x86_seg_ss) )
855 goto raise_exn;
856 memset(&segr, 0, sizeof(segr));
857 return ops->write_segment(seg, &segr, ctxt);
858 }
860 /* LDT descriptor must be in the GDT. */
861 if ( (seg == x86_seg_ldtr) && (sel & 4) )
862 goto raise_exn;
864 if ( (rc = ops->read_segment(x86_seg_cs, &cs, ctxt)) ||
865 (rc = ops->read_segment((sel & 4) ? x86_seg_ldtr : x86_seg_gdtr,
866 &desctab, ctxt)) )
867 return rc;
869 /* Check against descriptor table limit. */
870 if ( ((sel & 0xfff8) + 7) > desctab.limit )
871 goto raise_exn;
873 do {
874 if ( (rc = ops->read(x86_seg_none, desctab.base + (sel & 0xfff8),
875 &val, 4, ctxt)) )
876 return rc;
877 desc.a = val;
878 if ( (rc = ops->read(x86_seg_none, desctab.base + (sel & 0xfff8) + 4,
879 &val, 4, ctxt)) )
880 return rc;
881 desc.b = val;
883 /* Segment present in memory? */
884 if ( !(desc.b & (1u<<15)) )
885 {
886 fault_type = EXC_NP;
887 goto raise_exn;
888 }
890 /* LDT descriptor is a system segment. All others are code/data. */
891 if ( (desc.b & (1u<<12)) == ((seg == x86_seg_ldtr) << 12) )
892 goto raise_exn;
894 dpl = (desc.b >> 13) & 3;
895 rpl = sel & 3;
896 cpl = cs.sel & 3;
898 switch ( seg )
899 {
900 case x86_seg_cs:
901 /* Code segment? */
902 if ( !(desc.b & (1u<<11)) )
903 goto raise_exn;
904 /* Non-conforming segment: check DPL against RPL. */
905 if ( ((desc.b & (6u<<9)) != 6) && (dpl != rpl) )
906 goto raise_exn;
907 break;
908 case x86_seg_ss:
909 /* Writable data segment? */
910 if ( (desc.b & (5u<<9)) != (1u<<9) )
911 goto raise_exn;
912 if ( (dpl != cpl) || (dpl != rpl) )
913 goto raise_exn;
914 break;
915 case x86_seg_ldtr:
916 /* LDT system segment? */
917 if ( (desc.b & (15u<<8)) != (2u<<8) )
918 goto raise_exn;
919 goto skip_accessed_flag;
920 default:
921 /* Readable code or data segment? */
922 if ( (desc.b & (5u<<9)) == (4u<<9) )
923 goto raise_exn;
924 /* Non-conforming segment: check DPL against RPL and CPL. */
925 if ( ((desc.b & (6u<<9)) != 6) && ((dpl < cpl) || (dpl < rpl)) )
926 goto raise_exn;
927 break;
928 }
930 /* Ensure Accessed flag is set. */
931 rc = ((desc.b & 0x100) ? X86EMUL_OKAY :
932 ops->cmpxchg(
933 x86_seg_none, desctab.base + (sel & 0xfff8) + 4, desc.b,
934 desc.b | 0x100, 4, ctxt));
935 } while ( rc == X86EMUL_CMPXCHG_FAILED );
937 if ( rc )
938 return rc;
940 /* Force the Accessed flag in our local copy. */
941 desc.b |= 0x100;
943 skip_accessed_flag:
944 segr.base = (((desc.b << 0) & 0xff000000u) |
945 ((desc.b << 16) & 0x00ff0000u) |
946 ((desc.a >> 16) & 0x0000ffffu));
947 segr.attr.bytes = (((desc.b >> 8) & 0x00ffu) |
948 ((desc.b >> 12) & 0x0f00u));
949 segr.limit = (desc.b & 0x000f0000u) | (desc.a & 0x0000ffffu);
950 if ( segr.attr.fields.g )
951 segr.limit = (segr.limit << 12) | 0xfffu;
952 segr.sel = sel;
953 return ops->write_segment(seg, &segr, ctxt);
955 raise_exn:
956 if ( ops->inject_hw_exception == NULL )
957 return X86EMUL_UNHANDLEABLE;
958 if ( (rc = ops->inject_hw_exception(fault_type, sel & 0xfffc, ctxt)) )
959 return rc;
960 return X86EMUL_EXCEPTION;
961 }
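/*
 * Example: a flat 32-bit code descriptor desc = { 0x0000ffff, 0x00cf9a00 }
 * decodes to base 0 and limit 0xfffff with g == 1 (so segr.limit becomes
 * 0xffffffff), DPL 0, present, type 0xa (execute/read).
 */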
963 static int
964 load_seg(
965 enum x86_segment seg,
966 uint16_t sel,
967 struct x86_emulate_ctxt *ctxt,
968 struct x86_emulate_ops *ops)
969 {
970 if ( (ops->read_segment == NULL) ||
971 (ops->write_segment == NULL) )
972 return X86EMUL_UNHANDLEABLE;
974 if ( in_realmode(ctxt, ops) )
975 return realmode_load_seg(seg, sel, ctxt, ops);
977 return protmode_load_seg(seg, sel, ctxt, ops);
978 }
980 void *
981 decode_register(
982 uint8_t modrm_reg, struct cpu_user_regs *regs, int highbyte_regs)
983 {
984 void *p;
986 switch ( modrm_reg )
987 {
988 case 0: p = &regs->eax; break;
989 case 1: p = &regs->ecx; break;
990 case 2: p = &regs->edx; break;
991 case 3: p = &regs->ebx; break;
992 case 4: p = (highbyte_regs ?
993 ((unsigned char *)&regs->eax + 1) :
994 (unsigned char *)&regs->esp); break;
995 case 5: p = (highbyte_regs ?
996 ((unsigned char *)&regs->ecx + 1) :
997 (unsigned char *)&regs->ebp); break;
998 case 6: p = (highbyte_regs ?
999 ((unsigned char *)&regs->edx + 1) :
1000 (unsigned char *)&regs->esi); break;
1001 case 7: p = (highbyte_regs ?
1002 ((unsigned char *)&regs->ebx + 1) :
1003 (unsigned char *)&regs->edi); break;
1004 #if defined(__x86_64__)
1005 case 8: p = &regs->r8; break;
1006 case 9: p = &regs->r9; break;
1007 case 10: p = &regs->r10; break;
1008 case 11: p = &regs->r11; break;
1009 case 12: p = &regs->r12; break;
1010 case 13: p = &regs->r13; break;
1011 case 14: p = &regs->r14; break;
1012 case 15: p = &regs->r15; break;
1013 #endif
1014 default: p = NULL; break;
1015 }
1017 return p;
1018 }
1020 #define decode_segment_failed x86_seg_tr
1021 enum x86_segment
1022 decode_segment(
1023 uint8_t modrm_reg)
1024 {
1025 switch ( modrm_reg )
1026 {
1027 case 0: return x86_seg_es;
1028 case 1: return x86_seg_cs;
1029 case 2: return x86_seg_ss;
1030 case 3: return x86_seg_ds;
1031 case 4: return x86_seg_fs;
1032 case 5: return x86_seg_gs;
1033 default: break;
1034 }
1035 return decode_segment_failed;
1036 }
1038 int
1039 x86_emulate(
1040 struct x86_emulate_ctxt *ctxt,
1041 struct x86_emulate_ops *ops)
1042 {
1043 /* Shadow copy of register state. Committed on successful emulation. */
1044 struct cpu_user_regs _regs = *ctxt->regs;
1046 uint8_t b, d, sib, sib_index, sib_base, twobyte = 0, rex_prefix = 0;
1047 uint8_t modrm = 0, modrm_mod = 0, modrm_reg = 0, modrm_rm = 0;
1048 unsigned int op_bytes, def_op_bytes, ad_bytes, def_ad_bytes;
1049 #define REPE_PREFIX 1
1050 #define REPNE_PREFIX 2
1051 unsigned int lock_prefix = 0, rep_prefix = 0;
1052 int override_seg = -1, rc = X86EMUL_OKAY;
1053 struct operand src, dst;
1055 /* Data operand effective address (usually computed from ModRM). */
1056 struct operand ea;
1058 /* Default is a memory operand relative to segment DS. */
1059 ea.type = OP_MEM;
1060 ea.mem.seg = x86_seg_ds;
1061 ea.mem.off = 0;
1063 ctxt->retire.byte = 0;
1065 op_bytes = def_op_bytes = ad_bytes = def_ad_bytes = ctxt->addr_size/8;
1066 if ( op_bytes == 8 )
1067 {
1068 op_bytes = def_op_bytes = 4;
1069 #ifndef __x86_64__
1070 return X86EMUL_UNHANDLEABLE;
1071 #endif
1072 }
1074 /* Prefix bytes. */
1075 for ( ; ; )
1076 {
1077 switch ( b = insn_fetch_type(uint8_t) )
1078 {
1079 case 0x66: /* operand-size override */
1080 op_bytes = def_op_bytes ^ 6;
1081 break;
1082 case 0x67: /* address-size override */
1083 ad_bytes = def_ad_bytes ^ (mode_64bit() ? 12 : 6);
1084 break;
1085 case 0x2e: /* CS override */
1086 override_seg = x86_seg_cs;
1087 break;
1088 case 0x3e: /* DS override */
1089 override_seg = x86_seg_ds;
1090 break;
1091 case 0x26: /* ES override */
1092 override_seg = x86_seg_es;
1093 break;
1094 case 0x64: /* FS override */
1095 override_seg = x86_seg_fs;
1096 break;
1097 case 0x65: /* GS override */
1098 override_seg = x86_seg_gs;
1099 break;
1100 case 0x36: /* SS override */
1101 override_seg = x86_seg_ss;
1102 break;
1103 case 0xf0: /* LOCK */
1104 lock_prefix = 1;
1105 break;
1106 case 0xf2: /* REPNE/REPNZ */
1107 rep_prefix = REPNE_PREFIX;
1108 break;
1109 case 0xf3: /* REP/REPE/REPZ */
1110 rep_prefix = REPE_PREFIX;
1111 break;
1112 case 0x40 ... 0x4f: /* REX */
1113 if ( !mode_64bit() )
1114 goto done_prefixes;
1115 rex_prefix = b;
1116 continue;
1117 default:
1118 goto done_prefixes;
1119 }
1121 /* Any legacy prefix after a REX prefix nullifies its effect. */
1122 rex_prefix = 0;
1123 }
1124 done_prefixes:
1126 if ( rex_prefix & 8 ) /* REX.W */
1127 op_bytes = 8;
1129 /* Opcode byte(s). */
1130 d = opcode_table[b];
1131 if ( d == 0 )
1132 {
1133 /* Two-byte opcode? */
1134 if ( b == 0x0f )
1135 {
1136 twobyte = 1;
1137 b = insn_fetch_type(uint8_t);
1138 d = twobyte_table[b];
1139 }
1141 /* Unrecognised? */
1142 if ( d == 0 )
1143 goto cannot_emulate;
1144 }
1146 /* Lock prefix is allowed only on RMW instructions. */
1147 generate_exception_if((d & Mov) && lock_prefix, EXC_GP, 0);
1149 /* ModRM and SIB bytes. */
1150 if ( d & ModRM )
1152 modrm = insn_fetch_type(uint8_t);
1153 modrm_mod = (modrm & 0xc0) >> 6;
1154 modrm_reg = ((rex_prefix & 4) << 1) | ((modrm & 0x38) >> 3);
1155 modrm_rm = modrm & 0x07;
1157 if ( modrm_mod == 3 )
1159 modrm_rm |= (rex_prefix & 1) << 3;
1160 ea.type = OP_REG;
1161 ea.reg = decode_register(
1162 modrm_rm, &_regs, (d & ByteOp) && (rex_prefix == 0));
1164 else if ( ad_bytes == 2 )
1166 /* 16-bit ModR/M decode. */
1167 switch ( modrm_rm )
1169 case 0:
1170 ea.mem.off = _regs.ebx + _regs.esi;
1171 break;
1172 case 1:
1173 ea.mem.off = _regs.ebx + _regs.edi;
1174 break;
1175 case 2:
1176 ea.mem.seg = x86_seg_ss;
1177 ea.mem.off = _regs.ebp + _regs.esi;
1178 break;
1179 case 3:
1180 ea.mem.seg = x86_seg_ss;
1181 ea.mem.off = _regs.ebp + _regs.edi;
1182 break;
1183 case 4:
1184 ea.mem.off = _regs.esi;
1185 break;
1186 case 5:
1187 ea.mem.off = _regs.edi;
1188 break;
1189 case 6:
1190 if ( modrm_mod == 0 )
1191 break;
1192 ea.mem.seg = x86_seg_ss;
1193 ea.mem.off = _regs.ebp;
1194 break;
1195 case 7:
1196 ea.mem.off = _regs.ebx;
1197 break;
1199 switch ( modrm_mod )
1201 case 0:
1202 if ( modrm_rm == 6 )
1203 ea.mem.off = insn_fetch_type(int16_t);
1204 break;
1205 case 1:
1206 ea.mem.off += insn_fetch_type(int8_t);
1207 break;
1208 case 2:
1209 ea.mem.off += insn_fetch_type(int16_t);
1210 break;
1212 ea.mem.off = truncate_ea(ea.mem.off);
1214 else
1216 /* 32/64-bit ModR/M decode. */
1217 if ( modrm_rm == 4 )
1219 sib = insn_fetch_type(uint8_t);
1220 sib_index = ((sib >> 3) & 7) | ((rex_prefix << 2) & 8);
1221 sib_base = (sib & 7) | ((rex_prefix << 3) & 8);
1222 if ( sib_index != 4 )
1223 ea.mem.off = *(long*)decode_register(sib_index, &_regs, 0);
1224 ea.mem.off <<= (sib >> 6) & 3;
1225 if ( (modrm_mod == 0) && ((sib_base & 7) == 5) )
1226 ea.mem.off += insn_fetch_type(int32_t);
1227 else if ( sib_base == 4 )
1229 ea.mem.seg = x86_seg_ss;
1230 ea.mem.off += _regs.esp;
1231 if ( !twobyte && (b == 0x8f) )
1232 /* POP <rm> computes its EA post increment. */
1233 ea.mem.off += ((mode_64bit() && (op_bytes == 4))
1234 ? 8 : op_bytes);
1236 else if ( sib_base == 5 )
1238 ea.mem.seg = x86_seg_ss;
1239 ea.mem.off += _regs.ebp;
1241 else
1242 ea.mem.off += *(long*)decode_register(sib_base, &_regs, 0);
1244 else
1246 modrm_rm |= (rex_prefix & 1) << 3;
1247 ea.mem.off = *(long *)decode_register(modrm_rm, &_regs, 0);
1248 if ( (modrm_rm == 5) && (modrm_mod != 0) )
1249 ea.mem.seg = x86_seg_ss;
1251 switch ( modrm_mod )
1253 case 0:
1254 if ( (modrm_rm & 7) != 5 )
1255 break;
1256 ea.mem.off = insn_fetch_type(int32_t);
1257 if ( !mode_64bit() )
1258 break;
1259 /* Relative to RIP of next instruction. Argh! */
1260 ea.mem.off += _regs.eip;
1261 if ( (d & SrcMask) == SrcImm )
1262 ea.mem.off += (d & ByteOp) ? 1 :
1263 ((op_bytes == 8) ? 4 : op_bytes);
1264 else if ( (d & SrcMask) == SrcImmByte )
1265 ea.mem.off += 1;
1266 else if ( !twobyte && ((b & 0xfe) == 0xf6) &&
1267 ((modrm_reg & 7) <= 1) )
1268 /* Special case in Grp3: test has immediate operand. */
1269 ea.mem.off += (d & ByteOp) ? 1
1270 : ((op_bytes == 8) ? 4 : op_bytes);
1271 else if ( twobyte && ((b & 0xf7) == 0xa4) )
1272 /* SHLD/SHRD with immediate byte third operand. */
1273 ea.mem.off++;
1274 break;
1275 case 1:
1276 ea.mem.off += insn_fetch_type(int8_t);
1277 break;
1278 case 2:
1279 ea.mem.off += insn_fetch_type(int32_t);
1280 break;
1282 ea.mem.off = truncate_ea(ea.mem.off);
1286 if ( override_seg != -1 )
1287 ea.mem.seg = override_seg;
1289 /* Special instructions do their own operand decoding. */
1290 if ( (d & DstMask) == ImplicitOps )
1291 goto special_insn;
1293 /* Decode and fetch the source operand: register, memory or immediate. */
1294 switch ( d & SrcMask )
1296 case SrcNone:
1297 break;
1298 case SrcReg:
1299 src.type = OP_REG;
1300 if ( d & ByteOp )
1302 src.reg = decode_register(modrm_reg, &_regs, (rex_prefix == 0));
1303 src.val = *(uint8_t *)src.reg;
1304 src.bytes = 1;
1306 else
1308 src.reg = decode_register(modrm_reg, &_regs, 0);
1309 switch ( (src.bytes = op_bytes) )
1311 case 2: src.val = *(uint16_t *)src.reg; break;
1312 case 4: src.val = *(uint32_t *)src.reg; break;
1313 case 8: src.val = *(uint64_t *)src.reg; break;
1316 break;
1317 case SrcMem16:
1318 ea.bytes = 2;
1319 goto srcmem_common;
1320 case SrcMem:
1321 ea.bytes = (d & ByteOp) ? 1 : op_bytes;
1322 srcmem_common:
1323 src = ea;
1324 if ( src.type == OP_REG )
1326 switch ( src.bytes )
1328 case 1: src.val = *(uint8_t *)src.reg; break;
1329 case 2: src.val = *(uint16_t *)src.reg; break;
1330 case 4: src.val = *(uint32_t *)src.reg; break;
1331 case 8: src.val = *(uint64_t *)src.reg; break;
1334 else if ( (rc = ops->read(src.mem.seg, src.mem.off,
1335 &src.val, src.bytes, ctxt)) )
1336 goto done;
1337 break;
1338 case SrcImm:
1339 src.type = OP_IMM;
1340 src.bytes = (d & ByteOp) ? 1 : op_bytes;
1341 if ( src.bytes == 8 ) src.bytes = 4;
1342 /* NB. Immediates are sign-extended as necessary. */
1343 switch ( src.bytes )
1345 case 1: src.val = insn_fetch_type(int8_t); break;
1346 case 2: src.val = insn_fetch_type(int16_t); break;
1347 case 4: src.val = insn_fetch_type(int32_t); break;
1349 break;
1350 case SrcImmByte:
1351 src.type = OP_IMM;
1352 src.bytes = 1;
1353 src.val = insn_fetch_type(int8_t);
1354 break;
1357 /* Decode and fetch the destination operand: register or memory. */
1358 switch ( d & DstMask )
1360 case DstReg:
1361 dst.type = OP_REG;
1362 if ( d & ByteOp )
1364 dst.reg = decode_register(modrm_reg, &_regs, (rex_prefix == 0));
1365 dst.val = *(uint8_t *)dst.reg;
1366 dst.bytes = 1;
1368 else
1370 dst.reg = decode_register(modrm_reg, &_regs, 0);
1371 switch ( (dst.bytes = op_bytes) )
1373 case 2: dst.val = *(uint16_t *)dst.reg; break;
1374 case 4: dst.val = *(uint32_t *)dst.reg; break;
1375 case 8: dst.val = *(uint64_t *)dst.reg; break;
1378 break;
1379 case DstBitBase:
1380 if ( ((d & SrcMask) == SrcImmByte) || (ea.type == OP_REG) )
1382 src.val &= (op_bytes << 3) - 1;
1384 else
1386 /*
1387 * EA += BitOffset DIV op_bytes*8
1388 * BitOffset = BitOffset MOD op_bytes*8
1389 * DIV truncates towards negative infinity.
1390 * MOD always produces a positive result.
1391 */
1392 if ( op_bytes == 2 )
1393 src.val = (int16_t)src.val;
1394 else if ( op_bytes == 4 )
1395 src.val = (int32_t)src.val;
1396 if ( (long)src.val < 0 )
1398 unsigned long byte_offset;
1399 byte_offset = op_bytes + (((-src.val-1) >> 3) & ~(op_bytes-1));
1400 ea.mem.off -= byte_offset;
1401 src.val = (byte_offset << 3) + src.val;
1403 else
1405 ea.mem.off += (src.val >> 3) & ~(op_bytes - 1);
1406 src.val &= (op_bytes << 3) - 1;
1409 /* Becomes a normal DstMem operation from here on. */
1410 d = (d & ~DstMask) | DstMem;
1411 case DstMem:
1412 ea.bytes = (d & ByteOp) ? 1 : op_bytes;
1413 dst = ea;
1414 if ( dst.type == OP_REG )
1416 switch ( dst.bytes )
1418 case 1: dst.val = *(uint8_t *)dst.reg; break;
1419 case 2: dst.val = *(uint16_t *)dst.reg; break;
1420 case 4: dst.val = *(uint32_t *)dst.reg; break;
1421 case 8: dst.val = *(uint64_t *)dst.reg; break;
1424 else if ( !(d & Mov) ) /* optimisation - avoid slow emulated read */
1426 if ( (rc = ops->read(dst.mem.seg, dst.mem.off,
1427 &dst.val, dst.bytes, ctxt)) )
1428 goto done;
1429 dst.orig_val = dst.val;
1431 break;
1434 /* LOCK prefix allowed only on instructions with memory destination. */
1435 generate_exception_if(lock_prefix && (dst.type != OP_MEM), EXC_GP, 0);
1437 if ( twobyte )
1438 goto twobyte_insn;
1440 switch ( b )
1442 case 0x04 ... 0x05: /* add imm,%%eax */
1443 dst.reg = (unsigned long *)&_regs.eax;
1444 dst.val = _regs.eax;
1445 case 0x00 ... 0x03: add: /* add */
1446 emulate_2op_SrcV("add", src, dst, _regs.eflags);
1447 break;
1449 case 0x0c ... 0x0d: /* or imm,%%eax */
1450 dst.reg = (unsigned long *)&_regs.eax;
1451 dst.val = _regs.eax;
1452 case 0x08 ... 0x0b: or: /* or */
1453 emulate_2op_SrcV("or", src, dst, _regs.eflags);
1454 break;
1456 case 0x14 ... 0x15: /* adc imm,%%eax */
1457 dst.reg = (unsigned long *)&_regs.eax;
1458 dst.val = _regs.eax;
1459 case 0x10 ... 0x13: adc: /* adc */
1460 emulate_2op_SrcV("adc", src, dst, _regs.eflags);
1461 break;
1463 case 0x1c ... 0x1d: /* sbb imm,%%eax */
1464 dst.reg = (unsigned long *)&_regs.eax;
1465 dst.val = _regs.eax;
1466 case 0x18 ... 0x1b: sbb: /* sbb */
1467 emulate_2op_SrcV("sbb", src, dst, _regs.eflags);
1468 break;
1470 case 0x24 ... 0x25: /* and imm,%%eax */
1471 dst.reg = (unsigned long *)&_regs.eax;
1472 dst.val = _regs.eax;
1473 case 0x20 ... 0x23: and: /* and */
1474 emulate_2op_SrcV("and", src, dst, _regs.eflags);
1475 break;
1477 case 0x2c ... 0x2d: /* sub imm,%%eax */
1478 dst.reg = (unsigned long *)&_regs.eax;
1479 dst.val = _regs.eax;
1480 case 0x28 ... 0x2b: sub: /* sub */
1481 emulate_2op_SrcV("sub", src, dst, _regs.eflags);
1482 break;
1484 case 0x34 ... 0x35: /* xor imm,%%eax */
1485 dst.reg = (unsigned long *)&_regs.eax;
1486 dst.val = _regs.eax;
1487 case 0x30 ... 0x33: xor: /* xor */
1488 emulate_2op_SrcV("xor", src, dst, _regs.eflags);
1489 break;
1491 case 0x3c ... 0x3d: /* cmp imm,%%eax */
1492 dst.reg = (unsigned long *)&_regs.eax;
1493 dst.val = _regs.eax;
1494 case 0x38 ... 0x3b: cmp: /* cmp */
1495 emulate_2op_SrcV("cmp", src, dst, _regs.eflags);
1496 break;
1498 case 0x62: /* bound */ {
1499 unsigned long src_val2;
1500 int lb, ub, idx;
1501 generate_exception_if(mode_64bit() || (src.type != OP_MEM),
1502 EXC_UD, -1);
1503 if ( (rc = ops->read(src.mem.seg, src.mem.off + op_bytes,
1504 &src_val2, op_bytes, ctxt)) )
1505 goto done;
1506 ub = (op_bytes == 2) ? (int16_t)src_val2 : (int32_t)src_val2;
1507 lb = (op_bytes == 2) ? (int16_t)src.val : (int32_t)src.val;
1508 idx = (op_bytes == 2) ? (int16_t)dst.val : (int32_t)dst.val;
1509 generate_exception_if((idx < lb) || (idx > ub), EXC_BR, -1);
1510 dst.type = OP_NONE;
1511 break;
1514 case 0x63: /* movsxd (x86/64) / arpl (x86/32) */
1515 if ( mode_64bit() )
1517 /* movsxd */
1518 if ( src.type == OP_REG )
1519 src.val = *(int32_t *)src.reg;
1520 else if ( (rc = ops->read(src.mem.seg, src.mem.off,
1521 &src.val, 4, ctxt)) )
1522 goto done;
1523 dst.val = (int32_t)src.val;
1525 else
1527 /* arpl */
1528 uint16_t src_val = dst.val;
1529 dst = src;
1530 _regs.eflags &= ~EFLG_ZF;
1531 _regs.eflags |= ((src_val & 3) > (dst.val & 3)) ? EFLG_ZF : 0;
1532 if ( _regs.eflags & EFLG_ZF )
1533 dst.val = (dst.val & ~3) | (src_val & 3);
1534 else
1535 dst.type = OP_NONE;
1536 generate_exception_if(in_realmode(ctxt, ops), EXC_UD, -1);
1538 break;
1540 case 0x69: /* imul imm16/32 */
1541 case 0x6b: /* imul imm8 */ {
1542 unsigned long src1; /* ModR/M source operand */
1543 if ( ea.type == OP_REG )
1544 src1 = *ea.reg;
1545 else if ( (rc = ops->read(ea.mem.seg, ea.mem.off,
1546 &src1, op_bytes, ctxt)) )
1547 goto done;
1548 _regs.eflags &= ~(EFLG_OF|EFLG_CF);
1549 switch ( dst.bytes )
1551 case 2:
1552 dst.val = ((uint32_t)(int16_t)src.val *
1553 (uint32_t)(int16_t)src1);
1554 if ( (int16_t)dst.val != (uint32_t)dst.val )
1555 _regs.eflags |= EFLG_OF|EFLG_CF;
1556 break;
1557 #ifdef __x86_64__
1558 case 4:
1559 dst.val = ((uint64_t)(int32_t)src.val *
1560 (uint64_t)(int32_t)src1);
1561 if ( (int32_t)dst.val != dst.val )
1562 _regs.eflags |= EFLG_OF|EFLG_CF;
1563 break;
1564 #endif
1565 default: {
1566 unsigned long m[2] = { src.val, src1 };
1567 if ( imul_dbl(m) )
1568 _regs.eflags |= EFLG_OF|EFLG_CF;
1569 dst.val = m[0];
1570 break;
1573 break;
1576 case 0x82: /* Grp1 (x86/32 only) */
1577 generate_exception_if(mode_64bit(), EXC_UD, -1);
1578 case 0x80: case 0x81: case 0x83: /* Grp1 */
1579 switch ( modrm_reg & 7 )
1581 case 0: goto add;
1582 case 1: goto or;
1583 case 2: goto adc;
1584 case 3: goto sbb;
1585 case 4: goto and;
1586 case 5: goto sub;
1587 case 6: goto xor;
1588 case 7: goto cmp;
1590 break;
1592 case 0xa8 ... 0xa9: /* test imm,%%eax */
1593 dst.reg = (unsigned long *)&_regs.eax;
1594 dst.val = _regs.eax;
1595 case 0x84 ... 0x85: test: /* test */
1596 emulate_2op_SrcV("test", src, dst, _regs.eflags);
1597 break;
1599 case 0x86 ... 0x87: xchg: /* xchg */
1600 /* Write back the register source. */
1601 switch ( dst.bytes )
1603 case 1: *(uint8_t *)src.reg = (uint8_t)dst.val; break;
1604 case 2: *(uint16_t *)src.reg = (uint16_t)dst.val; break;
1605 case 4: *src.reg = (uint32_t)dst.val; break; /* 64b reg: zero-extend */
1606 case 8: *src.reg = dst.val; break;
1608 /* Write back the memory destination with implicit LOCK prefix. */
1609 dst.val = src.val;
1610 lock_prefix = 1;
1611 break;
1613 case 0xc6 ... 0xc7: /* mov (sole member of Grp11) */
1614 generate_exception_if((modrm_reg & 7) != 0, EXC_UD, -1);
1615 case 0x88 ... 0x8b: /* mov */
1616 dst.val = src.val;
1617 break;
1619 case 0x8c: /* mov Sreg,r/m */ {
1620 struct segment_register reg;
1621 enum x86_segment seg = decode_segment(modrm_reg);
1622 generate_exception_if(seg == decode_segment_failed, EXC_UD, -1);
1623 fail_if(ops->read_segment == NULL);
1624 if ( (rc = ops->read_segment(seg, &reg, ctxt)) != 0 )
1625 goto done;
1626 dst.val = reg.sel;
1627 if ( dst.type == OP_MEM )
1628 dst.bytes = 2;
1629 break;
1632 case 0x8e: /* mov r/m,Sreg */ {
1633 enum x86_segment seg = decode_segment(modrm_reg);
1634 generate_exception_if(seg == decode_segment_failed, EXC_UD, -1);
1635 if ( (rc = load_seg(seg, (uint16_t)src.val, ctxt, ops)) != 0 )
1636 goto done;
1637 if ( seg == x86_seg_ss )
1638 ctxt->retire.flags.mov_ss = 1;
1639 dst.type = OP_NONE;
1640 break;
1643 case 0x8d: /* lea */
1644 dst.val = ea.mem.off;
1645 break;
1647 case 0x8f: /* pop (sole member of Grp1a) */
1648 generate_exception_if((modrm_reg & 7) != 0, EXC_UD, -1);
1649 /* 64-bit mode: POP defaults to a 64-bit operand. */
1650 if ( mode_64bit() && (dst.bytes == 4) )
1651 dst.bytes = 8;
1652 if ( (rc = ops->read(x86_seg_ss, sp_post_inc(dst.bytes),
1653 &dst.val, dst.bytes, ctxt)) != 0 )
1654 goto done;
1655 break;
1657 case 0xb0 ... 0xb7: /* mov imm8,r8 */
1658 dst.reg = decode_register(
1659 (b & 7) | ((rex_prefix & 1) << 3), &_regs, (rex_prefix == 0));
1660 dst.val = src.val;
1661 break;
1663 case 0xb8 ... 0xbf: /* mov imm{16,32,64},r{16,32,64} */
1664 if ( dst.bytes == 8 ) /* Fetch more bytes to obtain imm64 */
1665 src.val = ((uint32_t)src.val |
1666 ((uint64_t)insn_fetch_type(uint32_t) << 32));
1667 dst.reg = decode_register(
1668 (b & 7) | ((rex_prefix & 1) << 3), &_regs, 0);
1669 dst.val = src.val;
1670 break;
1672 case 0xc0 ... 0xc1: grp2: /* Grp2 */
1673 switch ( modrm_reg & 7 )
1675 case 0: /* rol */
1676 emulate_2op_SrcB("rol", src, dst, _regs.eflags);
1677 break;
1678 case 1: /* ror */
1679 emulate_2op_SrcB("ror", src, dst, _regs.eflags);
1680 break;
1681 case 2: /* rcl */
1682 emulate_2op_SrcB("rcl", src, dst, _regs.eflags);
1683 break;
1684 case 3: /* rcr */
1685 emulate_2op_SrcB("rcr", src, dst, _regs.eflags);
1686 break;
1687 case 4: /* sal/shl */
1688 case 6: /* sal/shl */
1689 emulate_2op_SrcB("sal", src, dst, _regs.eflags);
1690 break;
1691 case 5: /* shr */
1692 emulate_2op_SrcB("shr", src, dst, _regs.eflags);
1693 break;
1694 case 7: /* sar */
1695 emulate_2op_SrcB("sar", src, dst, _regs.eflags);
1696 break;
1698 break;
1700 case 0xc4: /* les */ {
1701 unsigned long sel;
1702 dst.val = x86_seg_es;
1703 les: /* dst.val identifies the segment */
1704 generate_exception_if(src.type != OP_MEM, EXC_UD, -1);
1705 if ( (rc = ops->read(src.mem.seg, src.mem.off + src.bytes,
1706 &sel, 2, ctxt)) != 0 )
1707 goto done;
1708 if ( (rc = load_seg(dst.val, (uint16_t)sel, ctxt, ops)) != 0 )
1709 goto done;
1710 dst.val = src.val;
1711 break;
1714 case 0xc5: /* lds */
1715 dst.val = x86_seg_ds;
1716 goto les;
1718 case 0xd0 ... 0xd1: /* Grp2 */
1719 src.val = 1;
1720 goto grp2;
1722 case 0xd2 ... 0xd3: /* Grp2 */
1723 src.val = _regs.ecx;
1724 goto grp2;
1726 case 0xf6 ... 0xf7: /* Grp3 */
1727 switch ( modrm_reg & 7 )
1729 case 0 ... 1: /* test */
1730 /* Special case in Grp3: test has an immediate source operand. */
1731 src.type = OP_IMM;
1732 src.bytes = (d & ByteOp) ? 1 : op_bytes;
1733 if ( src.bytes == 8 ) src.bytes = 4;
1734 switch ( src.bytes )
1736 case 1: src.val = insn_fetch_type(int8_t); break;
1737 case 2: src.val = insn_fetch_type(int16_t); break;
1738 case 4: src.val = insn_fetch_type(int32_t); break;
1740 goto test;
1741 case 2: /* not */
1742 dst.val = ~dst.val;
1743 break;
1744 case 3: /* neg */
1745 emulate_1op("neg", dst, _regs.eflags);
1746 break;
1747 case 4: /* mul */
1748 src = dst;
1749 dst.type = OP_REG;
1750 dst.reg = (unsigned long *)&_regs.eax;
1751 dst.val = *dst.reg;
1752 _regs.eflags &= ~(EFLG_OF|EFLG_CF);
1753 switch ( src.bytes )
1755 case 1:
1756 dst.val = (uint8_t)dst.val;
1757 dst.val *= src.val;
1758 if ( (uint8_t)dst.val != (uint16_t)dst.val )
1759 _regs.eflags |= EFLG_OF|EFLG_CF;
1760 dst.bytes = 2;
1761 break;
1762 case 2:
1763 dst.val = (uint16_t)dst.val;
1764 dst.val *= src.val;
1765 if ( (uint16_t)dst.val != (uint32_t)dst.val )
1766 _regs.eflags |= EFLG_OF|EFLG_CF;
1767 *(uint16_t *)&_regs.edx = dst.val >> 16;
1768 break;
1769 #ifdef __x86_64__
1770 case 4:
1771 dst.val = (uint32_t)dst.val;
1772 dst.val *= src.val;
1773 if ( (uint32_t)dst.val != dst.val )
1774 _regs.eflags |= EFLG_OF|EFLG_CF;
1775 _regs.edx = (uint32_t)(dst.val >> 32);
1776 break;
1777 #endif
1778 default: {
1779 unsigned long m[2] = { src.val, dst.val };
1780 if ( mul_dbl(m) )
1781 _regs.eflags |= EFLG_OF|EFLG_CF;
1782 _regs.edx = m[1];
1783 dst.val = m[0];
1784 break;
1787 break;
1788 case 5: /* imul */
1789 src = dst;
1790 dst.type = OP_REG;
1791 dst.reg = (unsigned long *)&_regs.eax;
1792 dst.val = *dst.reg;
1793 _regs.eflags &= ~(EFLG_OF|EFLG_CF);
1794 switch ( src.bytes )
1796 case 1:
1797 dst.val = ((uint16_t)(int8_t)src.val *
1798 (uint16_t)(int8_t)dst.val);
1799 if ( (int8_t)dst.val != (uint16_t)dst.val )
1800 _regs.eflags |= EFLG_OF|EFLG_CF;
1801 dst.bytes = 2;
1802 break;
1803 case 2:
1804 dst.val = ((uint32_t)(int16_t)src.val *
1805 (uint32_t)(int16_t)dst.val);
1806 if ( (int16_t)dst.val != (uint32_t)dst.val )
1807 _regs.eflags |= EFLG_OF|EFLG_CF;
1808 *(uint16_t *)&_regs.edx = dst.val >> 16;
1809 break;
1810 #ifdef __x86_64__
1811 case 4:
1812 dst.val = ((uint64_t)(int32_t)src.val *
1813 (uint64_t)(int32_t)dst.val);
1814 if ( (int32_t)dst.val != dst.val )
1815 _regs.eflags |= EFLG_OF|EFLG_CF;
1816 _regs.edx = (uint32_t)(dst.val >> 32);
1817 break;
1818 #endif
1819 default: {
1820 unsigned long m[2] = { src.val, dst.val };
1821 if ( imul_dbl(m) )
1822 _regs.eflags |= EFLG_OF|EFLG_CF;
1823 _regs.edx = m[1];
1824 dst.val = m[0];
1825 break;
1828 break;
1829 case 6: /* div */ {
1830 unsigned long u[2], v;
1831 src = dst;
1832 dst.type = OP_REG;
1833 dst.reg = (unsigned long *)&_regs.eax;
1834 switch ( src.bytes )
1836 case 1:
1837 u[0] = (uint16_t)_regs.eax;
1838 u[1] = 0;
1839 v = (uint8_t)src.val;
1840 generate_exception_if(
1841 div_dbl(u, v) || ((uint8_t)u[0] != (uint16_t)u[0]),
1842 EXC_DE, -1);
1843 dst.val = (uint8_t)u[0];
1844 ((uint8_t *)&_regs.eax)[1] = u[1];
1845 break;
1846 case 2:
1847 u[0] = ((uint32_t)_regs.edx << 16) | (uint16_t)_regs.eax;
1848 u[1] = 0;
1849 v = (uint16_t)src.val;
1850 generate_exception_if(
1851 div_dbl(u, v) || ((uint16_t)u[0] != (uint32_t)u[0]),
1852 EXC_DE, -1);
1853 dst.val = (uint16_t)u[0];
1854 *(uint16_t *)&_regs.edx = u[1];
1855 break;
1856 #ifdef __x86_64__
1857 case 4:
1858 u[0] = (_regs.edx << 32) | (uint32_t)_regs.eax;
1859 u[1] = 0;
1860 v = (uint32_t)src.val;
1861 generate_exception_if(
1862 div_dbl(u, v) || ((uint32_t)u[0] != u[0]),
1863 EXC_DE, -1);
1864 dst.val = (uint32_t)u[0];
1865 _regs.edx = (uint32_t)u[1];
1866 break;
1867 #endif
1868 default:
1869 u[0] = _regs.eax;
1870 u[1] = _regs.edx;
1871 v = src.val;
1872 generate_exception_if(div_dbl(u, v), EXC_DE, -1);
1873 dst.val = u[0];
1874 _regs.edx = u[1];
1875 break;
1877 break;
1879 case 7: /* idiv */ {
1880 unsigned long u[2], v;
1881 src = dst;
1882 dst.type = OP_REG;
1883 dst.reg = (unsigned long *)&_regs.eax;
1884 switch ( src.bytes )
1886 case 1:
1887 u[0] = (int16_t)_regs.eax;
1888 u[1] = ((long)u[0] < 0) ? ~0UL : 0UL;
1889 v = (int8_t)src.val;
1890 generate_exception_if(
1891 idiv_dbl(u, v) || ((int8_t)u[0] != (int16_t)u[0]),
1892 EXC_DE, -1);
1893 dst.val = (int8_t)u[0];
1894 ((int8_t *)&_regs.eax)[1] = u[1];
1895 break;
1896 case 2:
1897 u[0] = (int32_t)((_regs.edx << 16) | (uint16_t)_regs.eax);
1898 u[1] = ((long)u[0] < 0) ? ~0UL : 0UL;
1899 v = (int16_t)src.val;
1900 generate_exception_if(
1901 idiv_dbl(u, v) || ((int16_t)u[0] != (int32_t)u[0]),
1902 EXC_DE, -1);
1903 dst.val = (int16_t)u[0];
1904 *(int16_t *)&_regs.edx = u[1];
1905 break;
1906 #ifdef __x86_64__
1907 case 4:
1908 u[0] = (_regs.edx << 32) | (uint32_t)_regs.eax;
1909 u[1] = ((long)u[0] < 0) ? ~0UL : 0UL;
1910 v = (int32_t)src.val;
1911 generate_exception_if(
1912 idiv_dbl(u, v) || ((int32_t)u[0] != u[0]),
1913 EXC_DE, -1);
1914 dst.val = (int32_t)u[0];
1915 _regs.edx = (uint32_t)u[1];
1916 break;
1917 #endif
1918 default:
1919 u[0] = _regs.eax;
1920 u[1] = _regs.edx;
1921 v = src.val;
1922 generate_exception_if(idiv_dbl(u, v), EXC_DE, -1);
1923 dst.val = u[0];
1924 _regs.edx = u[1];
1925 break;
1926 }
1927 break;
1928 }
1929 default:
1930 goto cannot_emulate;
1931 }
1932 break;
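/*
 * Grp4 (0xfe) defines only inc/dec (modrm_reg 0 and 1); anything else
 * is #UD. After that check it deliberately falls through to the Grp5
 * handler below, which carries the shared inc/dec emulation.
 */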
1934 case 0xfe: /* Grp4 */
1935 generate_exception_if((modrm_reg & 7) >= 2, EXC_UD, -1);
1936 case 0xff: /* Grp5 */
1937 switch ( modrm_reg & 7 )
1938 {
1939 case 0: /* inc */
1940 emulate_1op("inc", dst, _regs.eflags);
1941 break;
1942 case 1: /* dec */
1943 emulate_1op("dec", dst, _regs.eflags);
1944 break;
1945 case 2: /* call (near) */
1946 case 4: /* jmp (near) */
1947 if ( (dst.bytes != 8) && mode_64bit() )
1948 {
1949 dst.bytes = op_bytes = 8;
1950 if ( dst.type == OP_REG )
1951 dst.val = *dst.reg;
1952 else if ( (rc = ops->read(dst.mem.seg, dst.mem.off,
1953 &dst.val, 8, ctxt)) != 0 )
1954 goto done;
1955 }
1956 src.val = _regs.eip;
1957 _regs.eip = dst.val;
1958 if ( (modrm_reg & 7) == 2 )
1959 goto push; /* call */
1960 dst.type = OP_NONE;
1961 break;
1962 case 3: /* call (far, absolute indirect) */
1963 case 5: /* jmp (far, absolute indirect) */ {
1964 unsigned long sel;
1966 generate_exception_if(dst.type != OP_MEM, EXC_UD, -1);
1968 if ( (rc = ops->read(dst.mem.seg, dst.mem.off+dst.bytes,
1969 &sel, 2, ctxt)) )
1970 goto done;
1972 if ( (modrm_reg & 7) == 3 ) /* call */
1973 {
1974 struct segment_register reg;
1975 fail_if(ops->read_segment == NULL);
1976 if ( (rc = ops->read_segment(x86_seg_cs, &reg, ctxt)) ||
1977 (rc = ops->write(x86_seg_ss, sp_pre_dec(op_bytes),
1978 reg.sel, op_bytes, ctxt)) ||
1979 (rc = ops->write(x86_seg_ss, sp_pre_dec(op_bytes),
1980 _regs.eip, op_bytes, ctxt)) )
1981 goto done;
1982 }
1984 if ( (rc = load_seg(x86_seg_cs, sel, ctxt, ops)) != 0 )
1985 goto done;
1986 _regs.eip = dst.val;
1988 dst.type = OP_NONE;
1989 break;
1990 }
1991 case 6: /* push */
1992 /* 64-bit mode: PUSH defaults to a 64-bit operand. */
1993 if ( mode_64bit() && (dst.bytes == 4) )
1994 {
1995 dst.bytes = 8;
1996 if ( dst.type == OP_REG )
1997 dst.val = *dst.reg;
1998 else if ( (rc = ops->read(dst.mem.seg, dst.mem.off,
1999 &dst.val, 8, ctxt)) != 0 )
2000 goto done;
2001 }
2002 if ( (rc = ops->write(x86_seg_ss, sp_pre_dec(dst.bytes),
2003 dst.val, dst.bytes, ctxt)) != 0 )
2004 goto done;
2005 dst.type = OP_NONE;
2006 break;
2007 case 7:
2008 generate_exception_if(1, EXC_UD, -1);
2009 default:
2010 goto cannot_emulate;
2011 }
2012 break;
2013 }
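/*
 * Common operand write-back. Register destinations honour the operand
 * size; the 4-byte store deliberately writes the whole register so
 * that, in long mode, the upper 32 bits are zeroed as on real
 * hardware. Memory destinations are skipped when the value is
 * unchanged (unless write-back is forced), and LOCKed instructions go
 * through ops->cmpxchg() so the update is atomic from the guest's
 * point of view.
 */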
2015 writeback:
2016 switch ( dst.type )
2017 {
2018 case OP_REG:
2019 /* The 4-byte case *is* correct: in 64-bit mode we zero-extend. */
2020 switch ( dst.bytes )
2021 {
2022 case 1: *(uint8_t *)dst.reg = (uint8_t)dst.val; break;
2023 case 2: *(uint16_t *)dst.reg = (uint16_t)dst.val; break;
2024 case 4: *dst.reg = (uint32_t)dst.val; break; /* 64b: zero-ext */
2025 case 8: *dst.reg = dst.val; break;
2026 }
2027 break;
2028 case OP_MEM:
2029 if ( !(d & Mov) && (dst.orig_val == dst.val) &&
2030 !ctxt->force_writeback )
2031 /* nothing to do */;
2032 else if ( lock_prefix )
2033 rc = ops->cmpxchg(
2034 dst.mem.seg, dst.mem.off, dst.orig_val,
2035 dst.val, dst.bytes, ctxt);
2036 else
2037 rc = ops->write(
2038 dst.mem.seg, dst.mem.off, dst.val, dst.bytes, ctxt);
2039 if ( rc != 0 )
2040 goto done;
2041 default:
2042 break;
2043 }
2045 /* Commit shadow register state. */
2046 _regs.eflags &= ~EFLG_RF;
2047 *ctxt->regs = _regs;
2048 if ( (_regs.eflags & EFLG_TF) && (rc == X86EMUL_OKAY) &&
2049 (ops->inject_hw_exception != NULL) )
2050 rc = ops->inject_hw_exception(EXC_DB, -1, ctxt) ? : X86EMUL_EXCEPTION;
2052 done:
2053 return rc;
2055 special_insn:
2056 dst.type = OP_NONE;
2058 /*
2059 * The only implicit-operands instructions that may carry a LOCK prefix are
2060 * CMPXCHG{8,16}B, MOV CRn, MOV DRn.
2061 */
2062 generate_exception_if(lock_prefix &&
2063 ((b < 0x20) || (b > 0x23)) && /* MOV CRn/DRn */
2064 (b != 0xc7), /* CMPXCHG{8,16}B */
2065 EXC_GP, 0);
2067 if ( twobyte )
2068 goto twobyte_special_insn;
2070 switch ( b )
2071 {
2072 case 0x06: /* push %%es */ {
2073 struct segment_register reg;
2074 src.val = x86_seg_es;
2075 push_seg:
2076 fail_if(ops->read_segment == NULL);
2077 if ( (rc = ops->read_segment(src.val, &reg, ctxt)) != 0 )
2078 return rc;
2079 /* 64-bit mode: PUSH defaults to a 64-bit operand. */
2080 if ( mode_64bit() && (op_bytes == 4) )
2081 op_bytes = 8;
2082 if ( (rc = ops->write(x86_seg_ss, sp_pre_dec(op_bytes),
2083 reg.sel, op_bytes, ctxt)) != 0 )
2084 goto done;
2085 break;
2086 }
2088 case 0x07: /* pop %%es */
2089 src.val = x86_seg_es;
2090 pop_seg:
2091 fail_if(ops->write_segment == NULL);
2092 /* 64-bit mode: POP defaults to a 64-bit operand. */
2093 if ( mode_64bit() && (op_bytes == 4) )
2094 op_bytes = 8;
2095 if ( (rc = ops->read(x86_seg_ss, sp_post_inc(op_bytes),
2096 &dst.val, op_bytes, ctxt)) != 0 )
2097 goto done;
2098 if ( (rc = load_seg(src.val, (uint16_t)dst.val, ctxt, ops)) != 0 )
2099 return rc;
2100 break;
2102 case 0x0e: /* push %%cs */
2103 src.val = x86_seg_cs;
2104 goto push_seg;
2106 case 0x16: /* push %%ss */
2107 src.val = x86_seg_ss;
2108 goto push_seg;
2110 case 0x17: /* pop %%ss */
2111 src.val = x86_seg_ss;
2112 ctxt->retire.flags.mov_ss = 1;
2113 goto pop_seg;
2115 case 0x1e: /* push %%ds */
2116 src.val = x86_seg_ds;
2117 goto push_seg;
2119 case 0x1f: /* pop %%ds */
2120 src.val = x86_seg_ds;
2121 goto pop_seg;
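/*
 * DAA/DAS adjust AL after packed-BCD addition/subtraction: if the low
 * nibble exceeds 9 (or AF was already set), 6 is added to or
 * subtracted from AL and AF is set; if AL exceeded 0x99 (or CF was
 * set), 0x60 is likewise applied and CF is set. Example for DAA:
 * 0x38 + 0x45 leaves AL=0x7d; the low nibble 0xd > 9, so 6 is added,
 * giving the correct BCD sum 0x83.
 */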
2123 case 0x27: /* daa */ {
2124 uint8_t al = _regs.eax;
2125 unsigned long eflags = _regs.eflags;
2126 generate_exception_if(mode_64bit(), EXC_UD, -1);
2127 _regs.eflags &= ~(EFLG_CF|EFLG_AF);
2128 if ( ((al & 0x0f) > 9) || (eflags & EFLG_AF) )
2129 {
2130 *(uint8_t *)&_regs.eax += 6;
2131 _regs.eflags |= EFLG_AF;
2132 }
2133 if ( (al > 0x99) || (eflags & EFLG_CF) )
2134 {
2135 *(uint8_t *)&_regs.eax += 0x60;
2136 _regs.eflags |= EFLG_CF;
2137 }
2138 _regs.eflags &= ~(EFLG_SF|EFLG_ZF|EFLG_PF);
2139 _regs.eflags |= ((uint8_t)_regs.eax == 0) ? EFLG_ZF : 0;
2140 _regs.eflags |= (( int8_t)_regs.eax < 0) ? EFLG_SF : 0;
2141 _regs.eflags |= even_parity(_regs.eax) ? EFLG_PF : 0;
2142 break;
2143 }
2145 case 0x2f: /* das */ {
2146 uint8_t al = _regs.eax;
2147 unsigned long eflags = _regs.eflags;
2148 generate_exception_if(mode_64bit(), EXC_UD, -1);
2149 _regs.eflags &= ~(EFLG_CF|EFLG_AF);
2150 if ( ((al & 0x0f) > 9) || (eflags & EFLG_AF) )
2151 {
2152 _regs.eflags |= EFLG_AF;
2153 if ( (al < 6) || (eflags & EFLG_CF) )
2154 _regs.eflags |= EFLG_CF;
2155 *(uint8_t *)&_regs.eax -= 6;
2156 }
2157 if ( (al > 0x99) || (eflags & EFLG_CF) )
2158 {
2159 *(uint8_t *)&_regs.eax -= 0x60;
2160 _regs.eflags |= EFLG_CF;
2161 }
2162 _regs.eflags &= ~(EFLG_SF|EFLG_ZF|EFLG_PF);
2163 _regs.eflags |= ((uint8_t)_regs.eax == 0) ? EFLG_ZF : 0;
2164 _regs.eflags |= (( int8_t)_regs.eax < 0) ? EFLG_SF : 0;
2165 _regs.eflags |= even_parity(_regs.eax) ? EFLG_PF : 0;
2166 break;
2167 }
2169 case 0x37: /* aaa */
2170 case 0x3f: /* aas */
2171 generate_exception_if(mode_64bit(), EXC_UD, -1);
2172 _regs.eflags &= ~EFLG_CF;
2173 if ( ((uint8_t)_regs.eax > 9) || (_regs.eflags & EFLG_AF) )
2174 {
2175 ((uint8_t *)&_regs.eax)[0] += (b == 0x37) ? 6 : -6;
2176 ((uint8_t *)&_regs.eax)[1] += (b == 0x37) ? 1 : -1;
2177 _regs.eflags |= EFLG_CF | EFLG_AF;
2178 }
2179 ((uint8_t *)&_regs.eax)[0] &= 0x0f;
2180 break;
2182 case 0x40 ... 0x4f: /* inc/dec reg */
2183 dst.type = OP_REG;
2184 dst.reg = decode_register(b & 7, &_regs, 0);
2185 dst.bytes = op_bytes;
2186 dst.val = *dst.reg;
2187 if ( b & 8 )
2188 emulate_1op("dec", dst, _regs.eflags);
2189 else
2190 emulate_1op("inc", dst, _regs.eflags);
2191 break;
2193 case 0x50 ... 0x57: /* push reg */
2194 src.val = *(unsigned long *)decode_register(
2195 (b & 7) | ((rex_prefix & 1) << 3), &_regs, 0);
2196 goto push;
2198 case 0x58 ... 0x5f: /* pop reg */
2199 dst.type = OP_REG;
2200 dst.reg = decode_register(
2201 (b & 7) | ((rex_prefix & 1) << 3), &_regs, 0);
2202 dst.bytes = op_bytes;
2203 if ( mode_64bit() && (dst.bytes == 4) )
2204 dst.bytes = 8;
2205 if ( (rc = ops->read(x86_seg_ss, sp_post_inc(dst.bytes),
2206 &dst.val, dst.bytes, ctxt)) != 0 )
2207 goto done;
2208 break;
2210 case 0x60: /* pusha */ {
2211 int i;
2212 unsigned long regs[] = {
2213 _regs.eax, _regs.ecx, _regs.edx, _regs.ebx,
2214 _regs.esp, _regs.ebp, _regs.esi, _regs.edi };
2215 generate_exception_if(mode_64bit(), EXC_UD, -1);
2216 for ( i = 0; i < 8; i++ )
2217 if ( (rc = ops->write(x86_seg_ss, sp_pre_dec(op_bytes),
2218 regs[i], op_bytes, ctxt)) != 0 )
2219 goto done;
2220 break;
2221 }
2223 case 0x61: /* popa */ {
2224 int i;
2225 unsigned long dummy_esp, *regs[] = {
2226 (unsigned long *)&_regs.edi, (unsigned long *)&_regs.esi,
2227 (unsigned long *)&_regs.ebp, (unsigned long *)&dummy_esp,
2228 (unsigned long *)&_regs.ebx, (unsigned long *)&_regs.edx,
2229 (unsigned long *)&_regs.ecx, (unsigned long *)&_regs.eax };
2230 generate_exception_if(mode_64bit(), EXC_UD, -1);
2231 for ( i = 0; i < 8; i++ )
2232 {
2233 if ( (rc = ops->read(x86_seg_ss, sp_post_inc(op_bytes),
2234 &dst.val, op_bytes, ctxt)) != 0 )
2235 goto done;
2236 switch ( op_bytes )
2237 {
2238 case 1: *(uint8_t *)regs[i] = (uint8_t)dst.val; break;
2239 case 2: *(uint16_t *)regs[i] = (uint16_t)dst.val; break;
2240 case 4: *regs[i] = (uint32_t)dst.val; break; /* 64b: zero-ext */
2241 case 8: *regs[i] = dst.val; break;
2242 }
2243 }
2244 break;
2245 }
2247 case 0x68: /* push imm{16,32,64} */
2248 src.val = ((op_bytes == 2)
2249 ? (int32_t)insn_fetch_type(int16_t)
2250 : insn_fetch_type(int32_t));
2251 goto push;
2253 case 0x6a: /* push imm8 */
2254 src.val = insn_fetch_type(int8_t);
2255 push:
2256 d |= Mov; /* force writeback */
2257 dst.type = OP_MEM;
2258 dst.bytes = op_bytes;
2259 if ( mode_64bit() && (dst.bytes == 4) )
2260 dst.bytes = 8;
2261 dst.val = src.val;
2262 dst.mem.seg = x86_seg_ss;
2263 dst.mem.off = sp_pre_dec(dst.bytes);
2264 break;
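/*
 * The string I/O instructions first try the batched rep_ins/rep_outs
 * hooks so that a whole "rep" run can be handled in one call-out. If
 * the hook is absent or answers X86EMUL_UNHANDLEABLE, a single
 * iteration is emulated instead (one port access plus one memory
 * access) and the rep machinery re-executes the instruction for the
 * remaining count. eDI/eSI move by +/-dst.bytes per completed
 * iteration according to EFLG_DF.
 */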
2266 case 0x6c ... 0x6d: /* ins %dx,%es:%edi */ {
2267 unsigned long nr_reps = get_rep_prefix();
2268 dst.bytes = !(b & 1) ? 1 : (op_bytes == 8) ? 4 : op_bytes;
2269 dst.mem.seg = x86_seg_es;
2270 dst.mem.off = truncate_ea(_regs.edi);
2271 if ( (nr_reps > 1) && (ops->rep_ins != NULL) &&
2272 ((rc = ops->rep_ins((uint16_t)_regs.edx, dst.mem.seg,
2273 dst.mem.off, dst.bytes,
2274 &nr_reps, ctxt)) != X86EMUL_UNHANDLEABLE) )
2275 {
2276 if ( rc != 0 )
2277 goto done;
2278 }
2279 else
2280 {
2281 fail_if(ops->read_io == NULL);
2282 if ( (rc = ops->read_io((uint16_t)_regs.edx, dst.bytes,
2283 &dst.val, ctxt)) != 0 )
2284 goto done;
2285 dst.type = OP_MEM;
2286 nr_reps = 1;
2287 }
2288 register_address_increment(
2289 _regs.edi,
2290 nr_reps * ((_regs.eflags & EFLG_DF) ? -dst.bytes : dst.bytes));
2291 put_rep_prefix(nr_reps);
2292 break;
2293 }
2295 case 0x6e ... 0x6f: /* outs %esi,%dx */ {
2296 unsigned long nr_reps = get_rep_prefix();
2297 dst.bytes = !(b & 1) ? 1 : (op_bytes == 8) ? 4 : op_bytes;
2298 if ( (nr_reps > 1) && (ops->rep_outs != NULL) &&
2299 ((rc = ops->rep_outs(ea.mem.seg, truncate_ea(_regs.esi),
2300 (uint16_t)_regs.edx, dst.bytes,
2301 &nr_reps, ctxt)) != X86EMUL_UNHANDLEABLE) )
2302 {
2303 if ( rc != 0 )
2304 goto done;
2305 }
2306 else
2307 {
2308 if ( (rc = ops->read(ea.mem.seg, truncate_ea(_regs.esi),
2309 &dst.val, dst.bytes, ctxt)) != 0 )
2310 goto done;
2311 fail_if(ops->write_io == NULL);
2312 if ( (rc = ops->write_io((uint16_t)_regs.edx, dst.bytes,
2313 dst.val, ctxt)) != 0 )
2314 goto done;
2315 nr_reps = 1;
2316 }
2317 register_address_increment(
2318 _regs.esi,
2319 nr_reps * ((_regs.eflags & EFLG_DF) ? -dst.bytes : dst.bytes));
2320 put_rep_prefix(nr_reps);
2321 break;
2322 }
2324 case 0x70 ... 0x7f: /* jcc (short) */ {
2325 int rel = insn_fetch_type(int8_t);
2326 if ( test_cc(b, _regs.eflags) )
2327 jmp_rel(rel);
2328 break;
2329 }
2331 case 0x90: /* nop / xchg %%r8,%%rax */
2332 if ( !(rex_prefix & 1) )
2333 break; /* nop */
2335 case 0x91 ... 0x97: /* xchg reg,%%rax */
2336 src.type = dst.type = OP_REG;
2337 src.bytes = dst.bytes = op_bytes;
2338 src.reg = (unsigned long *)&_regs.eax;
2339 src.val = *src.reg;
2340 dst.reg = decode_register(
2341 (b & 7) | ((rex_prefix & 1) << 3), &_regs, 0);
2342 dst.val = *dst.reg;
2343 goto xchg;
2345 case 0x98: /* cbw/cwde/cdqe */
2346 switch ( op_bytes )
2347 {
2348 case 2: *(int16_t *)&_regs.eax = (int8_t)_regs.eax; break; /* cbw */
2349 case 4: _regs.eax = (uint32_t)(int16_t)_regs.eax; break; /* cwde */
2350 case 8: _regs.eax = (int32_t)_regs.eax; break; /* cdqe */
2351 }
2352 break;
2354 case 0x99: /* cwd/cdq/cqo */
2355 switch ( op_bytes )
2356 {
2357 case 2:
2358 *(int16_t *)&_regs.edx = ((int16_t)_regs.eax < 0) ? -1 : 0;
2359 break;
2360 case 4:
2361 _regs.edx = (uint32_t)(((int32_t)_regs.eax < 0) ? -1 : 0);
2362 break;
2363 case 8:
2364 _regs.edx = ((int64_t)_regs.eax < 0) ? -1 : 0;
2365 break;
2366 }
2367 break;
2369 case 0x9a: /* call (far, absolute) */ {
2370 struct segment_register reg;
2371 uint16_t sel;
2372 uint32_t eip;
2374 fail_if(ops->read_segment == NULL);
2375 generate_exception_if(mode_64bit(), EXC_UD, -1);
2377 eip = insn_fetch_bytes(op_bytes);
2378 sel = insn_fetch_type(uint16_t);
2380 if ( (rc = ops->read_segment(x86_seg_cs, &reg, ctxt)) ||
2381 (rc = ops->write(x86_seg_ss, sp_pre_dec(op_bytes),
2382 reg.sel, op_bytes, ctxt)) ||
2383 (rc = ops->write(x86_seg_ss, sp_pre_dec(op_bytes),
2384 _regs.eip, op_bytes, ctxt)) )
2385 goto done;
2387 if ( (rc = load_seg(x86_seg_cs, sel, ctxt, ops)) != 0 )
2388 goto done;
2389 _regs.eip = eip;
2390 break;
2391 }
2393 case 0x9b: /* wait/fwait */
2394 fail_if(ops->load_fpu_ctxt == NULL);
2395 ops->load_fpu_ctxt(ctxt);
2396 __emulate_fpu_insn("fwait");
2397 break;
2399 case 0x9c: /* pushf */
2400 src.val = _regs.eflags;
2401 goto push;
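/*
 * POPF must not change every flag: bits named in 'mask' keep their
 * current values. IOPL is writable only at CPL0, IF only with I/O
 * privilege, and VIP/VIF/VM are never taken from the popped word.
 * 0x257fd5 is the set of modifiable EFLAGS bits; bit 1 is hard-wired
 * to 1.
 */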
2403 case 0x9d: /* popf */ {
2404 uint32_t mask = EFLG_VIP | EFLG_VIF | EFLG_VM;
2405 if ( !mode_ring0() )
2406 mask |= EFLG_IOPL;
2407 if ( !mode_iopl() )
2408 mask |= EFLG_IF;
2409 /* 64-bit mode: POP defaults to a 64-bit operand. */
2410 if ( mode_64bit() && (op_bytes == 4) )
2411 op_bytes = 8;
2412 if ( (rc = ops->read(x86_seg_ss, sp_post_inc(op_bytes),
2413 &dst.val, op_bytes, ctxt)) != 0 )
2414 goto done;
2415 if ( op_bytes == 2 )
2416 dst.val = (uint16_t)dst.val | (_regs.eflags & 0xffff0000u);
2417 dst.val &= 0x257fd5;
2418 _regs.eflags &= mask;
2419 _regs.eflags |= (uint32_t)(dst.val & ~mask) | 0x02;
2420 break;
2421 }
2423 case 0x9e: /* sahf */
2424 *(uint8_t *)&_regs.eflags = (((uint8_t *)&_regs.eax)[1] & 0xd7) | 0x02;
2425 break;
2427 case 0x9f: /* lahf */
2428 ((uint8_t *)&_regs.eax)[1] = (_regs.eflags & 0xd7) | 0x02;
2429 break;
2431 case 0xa0 ... 0xa1: /* mov mem.offs,{%al,%ax,%eax,%rax} */
2432 /* Source EA is not encoded via ModRM. */
2433 dst.type = OP_REG;
2434 dst.reg = (unsigned long *)&_regs.eax;
2435 dst.bytes = (d & ByteOp) ? 1 : op_bytes;
2436 if ( (rc = ops->read(ea.mem.seg, insn_fetch_bytes(ad_bytes),
2437 &dst.val, dst.bytes, ctxt)) != 0 )
2438 goto done;
2439 break;
2441 case 0xa2 ... 0xa3: /* mov {%al,%ax,%eax,%rax},mem.offs */
2442 /* Destination EA is not encoded via ModRM. */
2443 dst.type = OP_MEM;
2444 dst.mem.seg = ea.mem.seg;
2445 dst.mem.off = insn_fetch_bytes(ad_bytes);
2446 dst.bytes = (d & ByteOp) ? 1 : op_bytes;
2447 dst.val = (unsigned long)_regs.eax;
2448 break;
2450 case 0xa4 ... 0xa5: /* movs */ {
2451 unsigned long nr_reps = get_rep_prefix();
2452 dst.bytes = (d & ByteOp) ? 1 : op_bytes;
2453 dst.mem.seg = x86_seg_es;
2454 dst.mem.off = truncate_ea(_regs.edi);
2455 if ( (nr_reps > 1) && (ops->rep_movs != NULL) &&
2456 ((rc = ops->rep_movs(ea.mem.seg, truncate_ea(_regs.esi),
2457 dst.mem.seg, dst.mem.off, dst.bytes,
2458 &nr_reps, ctxt)) != X86EMUL_UNHANDLEABLE) )
2459 {
2460 if ( rc != 0 )
2461 goto done;
2462 }
2463 else
2464 {
2465 if ( (rc = ops->read(ea.mem.seg, truncate_ea(_regs.esi),
2466 &dst.val, dst.bytes, ctxt)) != 0 )
2467 goto done;
2468 dst.type = OP_MEM;
2469 nr_reps = 1;
2470 }
2471 register_address_increment(
2472 _regs.esi,
2473 nr_reps * ((_regs.eflags & EFLG_DF) ? -dst.bytes : dst.bytes));
2474 register_address_increment(
2475 _regs.edi,
2476 nr_reps * ((_regs.eflags & EFLG_DF) ? -dst.bytes : dst.bytes));
2477 put_rep_prefix(nr_reps);
2478 break;
2479 }
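/*
 * CMPS/SCAS execute one iteration per pass through the emulator:
 * put_rep_prefix() decrements eCX and arranges for the instruction to
 * restart while the count is non-zero, and the repe/repne termination
 * test below overrides that by restoring the saved next_eip as soon
 * as ZF disagrees with the repeat condition.
 */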
2481 case 0xa6 ... 0xa7: /* cmps */ {
2482 unsigned long next_eip = _regs.eip;
2483 get_rep_prefix();
2484 src.bytes = dst.bytes = (d & ByteOp) ? 1 : op_bytes;
2485 if ( (rc = ops->read(ea.mem.seg, truncate_ea(_regs.esi),
2486 &dst.val, dst.bytes, ctxt)) ||
2487 (rc = ops->read(x86_seg_es, truncate_ea(_regs.edi),
2488 &src.val, src.bytes, ctxt)) )
2489 goto done;
2490 register_address_increment(
2491 _regs.esi, (_regs.eflags & EFLG_DF) ? -dst.bytes : dst.bytes);
2492 register_address_increment(
2493 _regs.edi, (_regs.eflags & EFLG_DF) ? -src.bytes : src.bytes);
2494 put_rep_prefix(1);
2495 /* cmp: dst - src ==> src=*%%edi,dst=*%%esi ==> *%%esi - *%%edi */
2496 emulate_2op_SrcV("cmp", src, dst, _regs.eflags);
2497 if ( ((rep_prefix == REPE_PREFIX) && !(_regs.eflags & EFLG_ZF)) ||
2498 ((rep_prefix == REPNE_PREFIX) && (_regs.eflags & EFLG_ZF)) )
2499 _regs.eip = next_eip;
2500 break;
2501 }
2503 case 0xaa ... 0xab: /* stos */ {
2504 /* unsigned long max_reps = */get_rep_prefix();
2505 dst.type = OP_MEM;
2506 dst.bytes = (d & ByteOp) ? 1 : op_bytes;
2507 dst.mem.seg = x86_seg_es;
2508 dst.mem.off = truncate_ea(_regs.edi);
2509 dst.val = _regs.eax;
2510 register_address_increment(
2511 _regs.edi, (_regs.eflags & EFLG_DF) ? -dst.bytes : dst.bytes);
2512 put_rep_prefix(1);
2513 break;
2514 }
2516 case 0xac ... 0xad: /* lods */ {
2517 /* unsigned long max_reps = */get_rep_prefix();
2518 dst.type = OP_REG;
2519 dst.bytes = (d & ByteOp) ? 1 : op_bytes;
2520 dst.reg = (unsigned long *)&_regs.eax;
2521 if ( (rc = ops->read(ea.mem.seg, truncate_ea(_regs.esi),
2522 &dst.val, dst.bytes, ctxt)) != 0 )
2523 goto done;
2524 register_address_increment(
2525 _regs.esi, (_regs.eflags & EFLG_DF) ? -dst.bytes : dst.bytes);
2526 put_rep_prefix(1);
2527 break;
2528 }
2530 case 0xae ... 0xaf: /* scas */ {
2531 unsigned long next_eip = _regs.eip;
2532 get_rep_prefix();
2533 src.bytes = dst.bytes = (d & ByteOp) ? 1 : op_bytes;
2534 dst.val = _regs.eax;
2535 if ( (rc = ops->read(x86_seg_es, truncate_ea(_regs.edi),
2536 &src.val, src.bytes, ctxt)) != 0 )
2537 goto done;
2538 register_address_increment(
2539 _regs.edi, (_regs.eflags & EFLG_DF) ? -src.bytes : src.bytes);
2540 put_rep_prefix(1);
2541 /* cmp: dst - src ==> src=*%%edi,dst=%%eax ==> %%eax - *%%edi */
2542 emulate_2op_SrcV("cmp", src, dst, _regs.eflags);
2543 if ( ((rep_prefix == REPE_PREFIX) && !(_regs.eflags & EFLG_ZF)) ||
2544 ((rep_prefix == REPNE_PREFIX) && (_regs.eflags & EFLG_ZF)) )
2545 _regs.eip = next_eip;
2546 break;
2547 }
2549 case 0xc2: /* ret imm16 (near) */
2550 case 0xc3: /* ret (near) */ {
2551 int offset = (b == 0xc2) ? insn_fetch_type(uint16_t) : 0;
2552 op_bytes = mode_64bit() ? 8 : op_bytes;
2553 if ( (rc = ops->read(x86_seg_ss, sp_post_inc(op_bytes + offset),
2554 &dst.val, op_bytes, ctxt)) != 0 )
2555 goto done;
2556 _regs.eip = dst.val;
2557 break;
2558 }
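/*
 * ENTER size,depth: push the caller's frame pointer, remember the new
 * frame pointer (the just-decremented stack pointer), re-push
 * depth-1 enclosing frame pointers for nested procedures, push the
 * new frame pointer itself, and finally reserve 'size' bytes of
 * locals by dropping the stack pointer. The nesting depth is taken
 * modulo 32, as on real hardware.
 */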
2560 case 0xc8: /* enter imm16,imm8 */ {
2561 uint16_t size = insn_fetch_type(uint16_t);
2562 uint8_t depth = insn_fetch_type(uint8_t) & 31;
2563 int i;
2565 dst.type = OP_REG;
2566 dst.bytes = (mode_64bit() && (op_bytes == 4)) ? 8 : op_bytes;
2567 dst.reg = (unsigned long *)&_regs.ebp;
2568 if ( (rc = ops->write(x86_seg_ss, sp_pre_dec(dst.bytes),
2569 _regs.ebp, dst.bytes, ctxt)) )
2570 goto done;
2571 dst.val = _regs.esp;
2573 if ( depth > 0 )
2574 {
2575 for ( i = 1; i < depth; i++ )
2576 {
2577 unsigned long ebp, temp_data;
2578 ebp = truncate_word(_regs.ebp - i*dst.bytes, ctxt->sp_size/8);
2579 if ( (rc = ops->read(x86_seg_ss, ebp,
2580 &temp_data, dst.bytes, ctxt)) ||
2581 (rc = ops->write(x86_seg_ss, sp_pre_dec(dst.bytes),
2582 temp_data, dst.bytes, ctxt)) )
2583 goto done;
2584 }
2585 if ( (rc = ops->write(x86_seg_ss, sp_pre_dec(dst.bytes),
2586 dst.val, dst.bytes, ctxt)) )
2587 goto done;
2588 }
2590 sp_pre_dec(size);
2591 break;
2592 }
2594 case 0xc9: /* leave */
2595 /* First writeback, to %%esp. */
2596 dst.type = OP_REG;
2597 dst.bytes = (mode_64bit() && (op_bytes == 4)) ? 8 : op_bytes;
2598 dst.reg = (unsigned long *)&_regs.esp;
2599 dst.val = _regs.ebp;
2601 /* Flush first writeback, since there is a second. */
2602 switch ( dst.bytes )
2603 {
2604 case 1: *(uint8_t *)dst.reg = (uint8_t)dst.val; break;
2605 case 2: *(uint16_t *)dst.reg = (uint16_t)dst.val; break;
2606 case 4: *dst.reg = (uint32_t)dst.val; break; /* 64b: zero-ext */
2607 case 8: *dst.reg = dst.val; break;
2608 }
2610 /* Second writeback, to %%ebp. */
2611 dst.reg = (unsigned long *)&_regs.ebp;
2612 if ( (rc = ops->read(x86_seg_ss, sp_post_inc(dst.bytes),
2613 &dst.val, dst.bytes, ctxt)) )
2614 goto done;
2615 break;
2617 case 0xca: /* ret imm16 (far) */
2618 case 0xcb: /* ret (far) */ {
2619 int offset = (b == 0xca) ? insn_fetch_type(uint16_t) : 0;
2620 op_bytes = mode_64bit() ? 8 : op_bytes;
2621 if ( (rc = ops->read(x86_seg_ss, sp_post_inc(op_bytes),
2622 &dst.val, op_bytes, ctxt)) ||
2623 (rc = ops->read(x86_seg_ss, sp_post_inc(op_bytes + offset),
2624 &src.val, op_bytes, ctxt)) ||
2625 (rc = load_seg(x86_seg_cs, (uint16_t)src.val, ctxt, ops)) )
2626 goto done;
2627 _regs.eip = dst.val;
2628 break;
2629 }
2631 case 0xcc: /* int3 */
2632 src.val = EXC_BP;
2633 goto swint;
2635 case 0xcd: /* int imm8 */
2636 src.val = insn_fetch_type(uint8_t);
2637 swint:
2638 fail_if(ops->inject_sw_interrupt == NULL);
2639 rc = ops->inject_sw_interrupt(src.val, _regs.eip - ctxt->regs->eip,
2640 ctxt) ? : X86EMUL_EXCEPTION;
2641 goto done;
2643 case 0xce: /* into */
2644 generate_exception_if(mode_64bit(), EXC_UD, -1);
2645 if ( !(_regs.eflags & EFLG_OF) )
2646 break;
2647 src.val = EXC_OF;
2648 goto swint;
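/*
 * IRET is emulated only for real mode (hence the fail_if() below); a
 * protected-mode IRET would need the full privilege and task-switch
 * checks. It pops EIP, CS and EFLAGS in that order and applies the
 * same flag-write mask rules as POPF above.
 */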
2650 case 0xcf: /* iret */ {
2651 unsigned long cs, eip, eflags;
2652 uint32_t mask = EFLG_VIP | EFLG_VIF | EFLG_VM;
2653 if ( !mode_ring0() )
2654 mask |= EFLG_IOPL;
2655 if ( !mode_iopl() )
2656 mask |= EFLG_IF;
2657 fail_if(!in_realmode(ctxt, ops));
2658 if ( (rc = ops->read(x86_seg_ss, sp_post_inc(op_bytes),
2659 &eip, op_bytes, ctxt)) ||
2660 (rc = ops->read(x86_seg_ss, sp_post_inc(op_bytes),
2661 &cs, op_bytes, ctxt)) ||
2662 (rc = ops->read(x86_seg_ss, sp_post_inc(op_bytes),
2663 &eflags, op_bytes, ctxt)) )
2664 goto done;
2665 if ( op_bytes == 2 )
2666 eflags = (uint16_t)eflags | (_regs.eflags & 0xffff0000u);
2667 eflags &= 0x257fd5;
2668 _regs.eflags &= mask;
2669 _regs.eflags |= (uint32_t)(eflags & ~mask) | 0x02;
2670 _regs.eip = eip;
2671 if ( (rc = load_seg(x86_seg_cs, (uint16_t)cs, ctxt, ops)) != 0 )
2672 goto done;
2673 break;
2674 }
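/*
 * AAM/AAD take an immediate base (0x0a in the classic encodings).
 * AAM splits AL into AH = AL / base, AL = AL % base, faulting with
 * #DE on a zero base; AAD folds AH back in as AL = AL + AH * base,
 * AH = 0. E.g. after AAM with AL=123 and the default base, AH=12 and
 * AL=3.
 */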
2676 case 0xd4: /* aam */ {
2677 unsigned int base = insn_fetch_type(uint8_t);
2678 uint8_t al = _regs.eax;
2679 generate_exception_if(mode_64bit(), EXC_UD, -1);
2680 generate_exception_if(base == 0, EXC_DE, -1);
2681 *(uint16_t *)&_regs.eax = ((al / base) << 8) | (al % base);
2682 _regs.eflags &= ~(EFLG_SF|EFLG_ZF|EFLG_PF);
2683 _regs.eflags |= ((uint8_t)_regs.eax == 0) ? EFLG_ZF : 0;
2684 _regs.eflags |= (( int8_t)_regs.eax < 0) ? EFLG_SF : 0;
2685 _regs.eflags |= even_parity(_regs.eax) ? EFLG_PF : 0;
2686 break;
2687 }
2689 case 0xd5: /* aad */ {
2690 unsigned int base = insn_fetch_type(uint8_t);
2691 uint16_t ax = _regs.eax;
2692 generate_exception_if(mode_64bit(), EXC_UD, -1);
2693 *(uint16_t *)&_regs.eax = (uint8_t)(ax + ((ax >> 8) * base));
2694 _regs.eflags &= ~(EFLG_SF|EFLG_ZF|EFLG_PF);
2695 _regs.eflags |= ((uint8_t)_regs.eax == 0) ? EFLG_ZF : 0;
2696 _regs.eflags |= (( int8_t)_regs.eax < 0) ? EFLG_SF : 0;
2697 _regs.eflags |= even_parity(_regs.eax) ? EFLG_PF : 0;
2698 break;
2699 }
2701 case 0xd6: /* salc */
2702 generate_exception_if(mode_64bit(), EXC_UD, -1);
2703 *(uint8_t *)&_regs.eax = (_regs.eflags & EFLG_CF) ? 0xff : 0x00;
2704 break;
2706 case 0xd7: /* xlat */ {
2707 unsigned long al = (uint8_t)_regs.eax;
2708 if ( (rc = ops->read(ea.mem.seg, truncate_ea(_regs.ebx + al),
2709 &al, 1, ctxt)) != 0 )
2710 goto done;
2711 *(uint8_t *)&_regs.eax = al;
2712 break;
2713 }
2715 case 0xd9: /* FPU 0xd9 */
2716 fail_if(ops->load_fpu_ctxt == NULL);
2717 ops->load_fpu_ctxt(ctxt);
2718 switch ( modrm )
2719 {
2720 case 0xc0: __emulate_fpu_insn(".byte 0xd9,0xc0"); break;
2721 case 0xc1: __emulate_fpu_insn(".byte 0xd9,0xc1"); break;
2722 case 0xc2: __emulate_fpu_insn(".byte 0xd9,0xc2"); break;
2723 case 0xc3: __emulate_fpu_insn(".byte 0xd9,0xc3"); break;
2724 case 0xc4: __emulate_fpu_insn(".byte 0xd9,0xc4"); break;
2725 case 0xc5: __emulate_fpu_insn(".byte 0xd9,0xc5"); break;
2726 case 0xc6: __emulate_fpu_insn(".byte 0xd9,0xc6"); break;
2727 case 0xc7: __emulate_fpu_insn(".byte 0xd9,0xc7"); break;
2728 case 0xe0: __emulate_fpu_insn(".byte 0xd9,0xe0"); break;
2729 case 0xe8: __emulate_fpu_insn(".byte 0xd9,0xe8"); break;
2730 case 0xee: __emulate_fpu_insn(".byte 0xd9,0xee"); break;
2731 default:
2732 fail_if((modrm_reg & 7) != 7);
2733 fail_if(modrm >= 0xc0);
2734 /* fnstcw m2byte */
2735 ea.bytes = 2;
2736 dst = ea;
2737 asm volatile ( "fnstcw %0" : "=m" (dst.val) );
2738 }
2739 break;
2741 case 0xdb: /* FPU 0xdb */
2742 fail_if(ops->load_fpu_ctxt == NULL);
2743 ops->load_fpu_ctxt(ctxt);
2744 fail_if(modrm != 0xe3);
2745 /* fninit */
2746 asm volatile ( "fninit" );
2747 break;
2749 case 0xdd: /* FPU 0xdd */
2750 fail_if(ops->load_fpu_ctxt == NULL);
2751 ops->load_fpu_ctxt(ctxt);
2752 fail_if((modrm_reg & 7) != 7);
2753 fail_if(modrm >= 0xc0);
2754 /* fnstsw m2byte */
2755 ea.bytes = 2;
2756 dst = ea;
2757 asm volatile ( "fnstsw %0" : "=m" (dst.val) );
2758 break;
2760 case 0xde: /* FPU 0xde */
2761 fail_if(ops->load_fpu_ctxt == NULL);
2762 ops->load_fpu_ctxt(ctxt);
2763 switch ( modrm )
2764 {
2765 case 0xd9: __emulate_fpu_insn(".byte 0xde,0xd9"); break;
2766 case 0xf8: __emulate_fpu_insn(".byte 0xde,0xf8"); break;
2767 case 0xf9: __emulate_fpu_insn(".byte 0xde,0xf9"); break;
2768 case 0xfa: __emulate_fpu_insn(".byte 0xde,0xfa"); break;
2769 case 0xfb: __emulate_fpu_insn(".byte 0xde,0xfb"); break;
2770 case 0xfc: __emulate_fpu_insn(".byte 0xde,0xfc"); break;
2771 case 0xfd: __emulate_fpu_insn(".byte 0xde,0xfd"); break;
2772 case 0xfe: __emulate_fpu_insn(".byte 0xde,0xfe"); break;
2773 case 0xff: __emulate_fpu_insn(".byte 0xde,0xff"); break;
2774 default: goto cannot_emulate;
2775 }
2776 break;
2778 case 0xdf: /* FPU 0xdf */
2779 fail_if(ops->load_fpu_ctxt == NULL);
2780 ops->load_fpu_ctxt(ctxt);
2781 fail_if(modrm != 0xe0);
2782 /* fnstsw %ax */
2783 dst.bytes = 2;
2784 dst.type = OP_REG;
2785 dst.reg = (unsigned long *)&_regs.eax;
2786 asm volatile ( "fnstsw %0" : "=m" (dst.val) );
2787 break;
2789 case 0xe0 ... 0xe2: /* loop{,z,nz} */ {
2790 int rel = insn_fetch_type(int8_t);
2791 int do_jmp = !(_regs.eflags & EFLG_ZF); /* loopnz */
2792 if ( b == 0xe1 )
2793 do_jmp = !do_jmp; /* loopz */
2794 else if ( b == 0xe2 )
2795 do_jmp = 1; /* loop */
2796 switch ( ad_bytes )
2797 {
2798 case 2:
2799 do_jmp &= --(*(uint16_t *)&_regs.ecx) != 0;
2800 break;
2801 case 4:
2802 do_jmp &= --(*(uint32_t *)&_regs.ecx) != 0;
2803 _regs.ecx = (uint32_t)_regs.ecx; /* zero-extend in 64-bit mode */
2804 break;
2805 default: /* case 8: */
2806 do_jmp &= --_regs.ecx != 0;
2807 break;
2808 }
2809 if ( do_jmp )
2810 jmp_rel(rel);
2811 break;
2812 }
2814 case 0xe3: /* jcxz/jecxz (short) */ {
2815 int rel = insn_fetch_type(int8_t);
2816 if ( (ad_bytes == 2) ? !(uint16_t)_regs.ecx :
2817 (ad_bytes == 4) ? !(uint32_t)_regs.ecx : !_regs.ecx )
2818 jmp_rel(rel);
2819 break;
2820 }
2822 case 0xe4: /* in imm8,%al */
2823 case 0xe5: /* in imm8,%eax */
2824 case 0xe6: /* out %al,imm8 */
2825 case 0xe7: /* out %eax,imm8 */
2826 case 0xec: /* in %dx,%al */
2827 case 0xed: /* in %dx,%eax */
2828 case 0xee: /* out %al,%dx */
2829 case 0xef: /* out %eax,%dx */ {
2830 unsigned int port = ((b < 0xe8)
2831 ? insn_fetch_type(uint8_t)
2832 : (uint16_t)_regs.edx);
2833 op_bytes = !(b & 1) ? 1 : (op_bytes == 8) ? 4 : op_bytes;
2834 if ( b & 2 )
2835 {
2836 /* out */
2837 fail_if(ops->write_io == NULL);
2838 rc = ops->write_io(port, op_bytes, _regs.eax, ctxt);
2839 }
2841 else
2842 {
2843 /* in */
2844 dst.type = OP_REG;
2845 dst.bytes = op_bytes;
2846 dst.reg = (unsigned long *)&_regs.eax;
2847 fail_if(ops->read_io == NULL);
2848 rc = ops->read_io(port, dst.bytes, &dst.val, ctxt);
2849 }
2850 if ( rc != 0 )
2851 goto done;
2852 break;
2853 }
2855 case 0xe8: /* call (near) */ {
2856 int rel = (((op_bytes == 2) && !mode_64bit())
2857 ? (int32_t)insn_fetch_type(int16_t)
2858 : insn_fetch_type(int32_t));
2859 op_bytes = mode_64bit() ? 8 : op_bytes;
2860 src.val = _regs.eip;
2861 jmp_rel(rel);
2862 goto push;
2863 }
2865 case 0xe9: /* jmp (near) */ {
2866 int rel = (((op_bytes == 2) && !mode_64bit())
2867 ? (int32_t)insn_fetch_type(int16_t)
2868 : insn_fetch_type(int32_t));
2869 jmp_rel(rel);
2870 break;
2871 }
2873 case 0xea: /* jmp (far, absolute) */ {
2874 uint16_t sel;
2875 uint32_t eip;
2876 generate_exception_if(mode_64bit(), EXC_UD, -1);
2877 eip = insn_fetch_bytes(op_bytes);
2878 sel = insn_fetch_type(uint16_t);
2879 if ( (rc = load_seg(x86_seg_cs, sel, ctxt, ops)) != 0 )
2880 goto done;
2881 _regs.eip = eip;
2882 break;
2883 }
2885 case 0xeb: /* jmp (short) */ {
2886 int rel = insn_fetch_type(int8_t);
2887 jmp_rel(rel);
2888 break;
2889 }
2891 case 0xf1: /* int1 (icebp) */
2892 src.val = EXC_DB;
2893 goto swint;
2895 case 0xf4: /* hlt */
2896 ctxt->retire.flags.hlt = 1;
2897 break;
2899 case 0xf5: /* cmc */
2900 _regs.eflags ^= EFLG_CF;
2901 break;
2903 case 0xf8: /* clc */
2904 _regs.eflags &= ~EFLG_CF;
2905 break;
2907 case 0xf9: /* stc */
2908 _regs.eflags |= EFLG_CF;
2909 break;
2911 case 0xfa: /* cli */
2912 generate_exception_if(!mode_iopl(), EXC_GP, 0);
2913 _regs.eflags &= ~EFLG_IF;
2914 break;
2916 case 0xfb: /* sti */
2917 generate_exception_if(!mode_iopl(), EXC_GP, 0);
2918 if ( !(_regs.eflags & EFLG_IF) )
2919 {
2920 _regs.eflags |= EFLG_IF;
2921 ctxt->retire.flags.sti = 1;
2922 }
2923 break;
2925 case 0xfc: /* cld */
2926 _regs.eflags &= ~EFLG_DF;
2927 break;
2929 case 0xfd: /* std */
2930 _regs.eflags |= EFLG_DF;
2931 break;
2932 }
2933 goto writeback;
2935 twobyte_insn:
2936 switch ( b )
2937 {
2938 case 0x40 ... 0x4f: /* cmovcc */
2939 dst.val = src.val;
2940 if ( !test_cc(b, _regs.eflags) )
2941 dst.type = OP_NONE;
2942 break;
2944 case 0x90 ... 0x9f: /* setcc */
2945 dst.val = test_cc(b, _regs.eflags);
2946 break;
2948 case 0xb0 ... 0xb1: /* cmpxchg */
2949 /* Save real source value, then compare EAX against destination. */
2950 src.orig_val = src.val;
2951 src.val = _regs.eax;
2952 emulate_2op_SrcV("cmp", src, dst, _regs.eflags);
2953 if ( _regs.eflags & EFLG_ZF )
2954 {
2955 /* Success: write back to memory. */
2956 dst.val = src.orig_val;
2957 }
2958 else
2959 {
2960 /* Failure: write the value we saw to EAX. */
2961 dst.type = OP_REG;
2962 dst.reg = (unsigned long *)&_regs.eax;
2963 }
2964 break;
2966 case 0xa3: bt: /* bt */
2967 emulate_2op_SrcV_nobyte("bt", src, dst, _regs.eflags);
2968 break;
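/*
 * SHLD/SHRD shift the destination while feeding bits in from the
 * second (register) operand, i.e. they act on the concatenation of
 * the two operands. The count comes from CL or an immediate byte and
 * is masked to the operand width; a masked count of zero leaves both
 * operands and the flags untouched. Example: shld $4,%bx,%ax with
 * AX=0x1234 and BX=0xabcd yields AX=0x234a.
 */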
2970 case 0xa4: /* shld imm8,r,r/m */
2971 case 0xa5: /* shld %%cl,r,r/m */
2972 case 0xac: /* shrd imm8,r,r/m */
2973 case 0xad: /* shrd %%cl,r,r/m */ {
2974 uint8_t shift, width = dst.bytes << 3;
2975 shift = (b & 1) ? (uint8_t)_regs.ecx : insn_fetch_type(uint8_t);
2976 if ( (shift &= width - 1) == 0 )
2977 break;
2978 dst.orig_val = truncate_word(dst.val, dst.bytes);
2979 dst.val = ((shift == width) ? src.val :
2980 (b & 8) ?
2981 /* shrd */
2982 ((dst.orig_val >> shift) |
2983 truncate_word(src.val << (width - shift), dst.bytes)) :
2984 /* shld */
2985 ((dst.orig_val << shift) |
2986 ((src.val >> (width - shift)) & ((1ull << shift) - 1))));
2987 dst.val = truncate_word(dst.val, dst.bytes);
2988 _regs.eflags &= ~(EFLG_OF|EFLG_SF|EFLG_ZF|EFLG_PF|EFLG_CF);
2989 if ( (dst.val >> ((b & 8) ? (shift - 1) : (width - shift))) & 1 )
2990 _regs.eflags |= EFLG_CF;
2991 if ( ((dst.val ^ dst.orig_val) >> (width - 1)) & 1 )
2992 _regs.eflags |= EFLG_OF;
2993 _regs.eflags |= ((dst.val >> (width - 1)) & 1) ? EFLG_SF : 0;
2994 _regs.eflags |= (dst.val == 0) ? EFLG_ZF : 0;
2995 _regs.eflags |= even_parity(dst.val) ? EFLG_PF : 0;
2996 break;
2997 }
2999 case 0xb3: btr: /* btr */
3000 emulate_2op_SrcV_nobyte("btr", src, dst, _regs.eflags);
3001 break;
3003 case 0xab: bts: /* bts */
3004 emulate_2op_SrcV_nobyte("bts", src, dst, _regs.eflags);
3005 break;
3007 case 0xaf: /* imul */
3008 _regs.eflags &= ~(EFLG_OF|EFLG_CF);
3009 switch ( dst.bytes )
3010 {
3011 case 2:
3012 dst.val = ((uint32_t)(int16_t)src.val *
3013 (uint32_t)(int16_t)dst.val);
3014 if ( (int16_t)dst.val != (uint32_t)dst.val )
3015 _regs.eflags |= EFLG_OF|EFLG_CF;
3016 break;
3017 #ifdef __x86_64__
3018 case 4:
3019 dst.val = ((uint64_t)(int32_t)src.val *
3020 (uint64_t)(int32_t)dst.val);
3021 if ( (int32_t)dst.val != dst.val )
3022 _regs.eflags |= EFLG_OF|EFLG_CF;
3023 break;
3024 #endif
3025 default: {
3026 unsigned long m[2] = { src.val, dst.val };
3027 if ( imul_dbl(m) )
3028 _regs.eflags |= EFLG_OF|EFLG_CF;
3029 dst.val = m[0];
3030 break;
3031 }
3032 }
3033 break;
3035 case 0xb2: /* lss */
3036 dst.val = x86_seg_ss;
3037 goto les;
3039 case 0xb4: /* lfs */
3040 dst.val = x86_seg_fs;
3041 goto les;
3043 case 0xb5: /* lgs */
3044 dst.val = x86_seg_gs;
3045 goto les;
3047 case 0xb6: /* movzx rm8,r{16,32,64} */
3048 /* Recompute DstReg as we may have decoded AH/BH/CH/DH. */
3049 dst.reg = decode_register(modrm_reg, &_regs, 0);
3050 dst.bytes = op_bytes;
3051 dst.val = (uint8_t)src.val;
3052 break;
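/*
 * BSF/BSR are delegated to the host instruction of the same name, and
 * only ZF is propagated back into the guest's flags. The destination
 * is architecturally undefined when the source is zero, so whatever
 * the host instruction leaves in dst.val is good enough in that case.
 */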
3054 case 0xbc: /* bsf */ {
3055 int zf;
3056 asm ( "bsf %2,%0; setz %b1"
3057 : "=r" (dst.val), "=q" (zf)
3058 : "r" (src.val), "1" (0) );
3059 _regs.eflags &= ~EFLG_ZF;
3060 _regs.eflags |= zf ? EFLG_ZF : 0;
3061 break;
3062 }
3064 case 0xbd: /* bsr */ {
3065 int zf;
3066 asm ( "bsr %2,%0; setz %b1"
3067 : "=r" (dst.val), "=q" (zf)
3068 : "r" (src.val), "1" (0) );
3069 _regs.eflags &= ~EFLG_ZF;
3070 _regs.eflags |= zf ? EFLG_ZF : 0;
3071 break;
3072 }
3074 case 0xb7: /* movzx rm16,r{16,32,64} */
3075 dst.val = (uint16_t)src.val;
3076 break;
3078 case 0xbb: btc: /* btc */
3079 emulate_2op_SrcV_nobyte("btc", src, dst, _regs.eflags);
3080 break;
3082 case 0xba: /* Grp8 */
3083 switch ( modrm_reg & 7 )
3084 {
3085 case 4: goto bt;
3086 case 5: goto bts;
3087 case 6: goto btr;
3088 case 7: goto btc;
3089 default: generate_exception_if(1, EXC_UD, -1);
3090 }
3091 break;
3093 case 0xbe: /* movsx rm8,r{16,32,64} */
3094 /* Recompute DstReg as we may have decoded AH/BH/CH/DH. */
3095 dst.reg = decode_register(modrm_reg, &_regs, 0);
3096 dst.bytes = op_bytes;
3097 dst.val = (int8_t)src.val;
3098 break;
3100 case 0xbf: /* movsx rm16,r{16,32,64} */
3101 dst.val = (int16_t)src.val;
3102 break;
3104 case 0xc0 ... 0xc1: /* xadd */
3105 /* Write back the register source. */
3106 switch ( dst.bytes )
3107 {
3108 case 1: *(uint8_t *)src.reg = (uint8_t)dst.val; break;
3109 case 2: *(uint16_t *)src.reg = (uint16_t)dst.val; break;
3110 case 4: *src.reg = (uint32_t)dst.val; break; /* 64b reg: zero-extend */
3111 case 8: *src.reg = dst.val; break;
3112 }
3113 goto add;
3114 }
3115 goto writeback;
3117 twobyte_special_insn:
3118 switch ( b )
3119 {
3120 case 0x01: /* Grp7 */ {
3121 struct segment_register reg;
3122 unsigned long base, limit, cr0, cr0w;
3124 if ( modrm == 0xdf ) /* invlpga */
3125 {
3126 generate_exception_if(in_realmode(ctxt, ops), EXC_UD, -1);
3127 generate_exception_if(!mode_ring0(), EXC_GP, 0);
3128 fail_if(ops->invlpg == NULL);
3129 if ( (rc = ops->invlpg(x86_seg_none, truncate_ea(_regs.eax),
3130 ctxt)) )
3131 goto done;
3132 break;
3133 }
3135 switch ( modrm_reg & 7 )
3136 {
3137 case 0: /* sgdt */
3138 case 1: /* sidt */
3139 generate_exception_if(ea.type != OP_MEM, EXC_UD, -1);
3140 fail_if(ops->read_segment == NULL);
3141 if ( (rc = ops->read_segment((modrm_reg & 1) ?
3142 x86_seg_idtr : x86_seg_gdtr,
3143 &reg, ctxt)) )
3144 goto done;
3145 if ( op_bytes == 2 )
3146 reg.base &= 0xffffff;
3147 if ( (rc = ops->write(ea.mem.seg, ea.mem.off+0,
3148 reg.limit, 2, ctxt)) ||
3149 (rc = ops->write(ea.mem.seg, ea.mem.off+2,
3150 reg.base, mode_64bit() ? 8 : 4, ctxt)) )
3151 goto done;
3152 break;
3153 case 2: /* lgdt */
3154 case 3: /* lidt */
3155 generate_exception_if(ea.type != OP_MEM, EXC_UD, -1);
3156 fail_if(ops->write_segment == NULL);
3157 memset(&reg, 0, sizeof(reg));
3158 if ( (rc = ops->read(ea.mem.seg, ea.mem.off+0,
3159 &limit, 2, ctxt)) ||
3160 (rc = ops->read(ea.mem.seg, ea.mem.off+2,
3161 &base, mode_64bit() ? 8 : 4, ctxt)) )
3162 goto done;
3163 reg.base = base;
3164 reg.limit = limit;
3165 if ( op_bytes == 2 )
3166 reg.base &= 0xffffff;
3167 if ( (rc = ops->write_segment((modrm_reg & 1) ?
3168 x86_seg_idtr : x86_seg_gdtr,
3169 &reg, ctxt)) )
3170 goto done;
3171 break;
3172 case 4: /* smsw */
3173 ea.bytes = 2;
3174 dst = ea;
3175 fail_if(ops->read_cr == NULL);
3176 if ( (rc = ops->read_cr(0, &dst.val, ctxt)) )
3177 goto done;
3178 d |= Mov; /* force writeback */
3179 break;
3180 case 6: /* lmsw */
3181 fail_if(ops->read_cr == NULL);
3182 fail_if(ops->write_cr == NULL);
3183 if ( (rc = ops->read_cr(0, &cr0, ctxt)) )
3184 goto done;
3185 if ( ea.type == OP_REG )
3186 cr0w = *ea.reg;
3187 else if ( (rc = ops->read(ea.mem.seg, ea.mem.off,
3188 &cr0w, 2, ctxt)) )
3189 goto done;
3190 cr0 &= 0xffff0000;
3191 cr0 |= (uint16_t)cr0w;
3192 if ( (rc = ops->write_cr(0, cr0, ctxt)) )
3193 goto done;
3194 break;
3195 case 7: /* invlpg */
3196 generate_exception_if(!mode_ring0(), EXC_GP, 0);
3197 generate_exception_if(ea.type != OP_MEM, EXC_UD, -1);
3198 fail_if(ops->invlpg == NULL);
3199 if ( (rc = ops->invlpg(ea.mem.seg, ea.mem.off, ctxt)) )
3200 goto done;
3201 break;
3202 default:
3203 goto cannot_emulate;
3204 }
3205 break;
3206 }
3208 case 0x06: /* clts */
3209 generate_exception_if(!mode_ring0(), EXC_GP, 0);
3210 fail_if((ops->read_cr == NULL) || (ops->write_cr == NULL));
3211 if ( (rc = ops->read_cr(0, &dst.val, ctxt)) ||
3212 (rc = ops->write_cr(0, dst.val&~8, ctxt)) )
3213 goto done;
3214 break;
3216 case 0x08: /* invd */
3217 case 0x09: /* wbinvd */
3218 generate_exception_if(!mode_ring0(), EXC_GP, 0);
3219 fail_if(ops->wbinvd == NULL);
3220 if ( (rc = ops->wbinvd(ctxt)) != 0 )
3221 goto done;
3222 break;
3224 case 0x0d: /* GrpP (prefetch) */
3225 case 0x18: /* Grp16 (prefetch/nop) */
3226 case 0x19 ... 0x1f: /* nop (amd-defined) */
3227 break;
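/*
 * MOV to/from CRn/DRn. A LOCK prefix is folded in as bit 3 of the
 * register number: this implements AMD's alternative encoding of CR8
 * (LOCK MOV CR0), which is why opcodes 0x20-0x23 were exempted from
 * the LOCK-prefix check at special_insn above.
 */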
3229 case 0x20: /* mov cr,reg */
3230 case 0x21: /* mov dr,reg */
3231 case 0x22: /* mov reg,cr */
3232 case 0x23: /* mov reg,dr */
3233 generate_exception_if(ea.type != OP_REG, EXC_UD, -1);
3234 generate_exception_if(!mode_ring0(), EXC_GP, 0);
3235 modrm_reg |= lock_prefix << 3;
3236 if ( b & 2 )
3237 {
3238 /* Write to CR/DR. */
3239 src.val = *(unsigned long *)decode_register(modrm_rm, &_regs, 0);
3240 if ( !mode_64bit() )
3241 src.val = (uint32_t)src.val;
3242 rc = ((b & 1)
3243 ? (ops->write_dr
3244 ? ops->write_dr(modrm_reg, src.val, ctxt)
3245 : X86EMUL_UNHANDLEABLE)
3246 : (ops->write_cr
3247 ? ops->write_cr(modrm_reg, src.val, ctxt)
3248 : X86EMUL_UNHANDLEABLE));
3249 }
3250 else
3251 {
3252 /* Read from CR/DR. */
3253 dst.type = OP_REG;
3254 dst.bytes = mode_64bit() ? 8 : 4;
3255 dst.reg = decode_register(modrm_rm, &_regs, 0);
3256 rc = ((b & 1)
3257 ? (ops->read_dr
3258 ? ops->read_dr(modrm_reg, &dst.val, ctxt)
3259 : X86EMUL_UNHANDLEABLE)
3260 : (ops->read_cr
3261 ? ops->read_cr(modrm_reg, &dst.val, ctxt)
3262 : X86EMUL_UNHANDLEABLE));
3263 }
3264 if ( rc != 0 )
3265 goto done;
3266 break;
3268 case 0x30: /* wrmsr */ {
3269 uint64_t val = ((uint64_t)_regs.edx << 32) | (uint32_t)_regs.eax;
3270 generate_exception_if(!mode_ring0(), EXC_GP, 0);
3271 fail_if(ops->write_msr == NULL);
3272 if ( (rc = ops->write_msr((uint32_t)_regs.ecx, val, ctxt)) != 0 )
3273 goto done;
3274 break;
3275 }
3277 case 0x31: /* rdtsc */ {
3278 unsigned long cr4;
3279 uint64_t val;
3280 fail_if(ops->read_cr == NULL);
3281 if ( (rc = ops->read_cr(4, &cr4, ctxt)) )
3282 goto done;
3283 generate_exception_if((cr4 & CR4_TSD) && !mode_ring0(), EXC_GP, 0);
3284 fail_if(ops->read_msr == NULL);
3285 if ( (rc = ops->read_msr(MSR_TSC, &val, ctxt)) != 0 )
3286 goto done;
3287 _regs.edx = (uint32_t)(val >> 32);
3288 _regs.eax = (uint32_t)(val >> 0);
3289 break;
3290 }
3292 case 0x32: /* rdmsr */ {
3293 uint64_t val;
3294 generate_exception_if(!mode_ring0(), EXC_GP, 0);
3295 fail_if(ops->read_msr == NULL);
3296 if ( (rc = ops->read_msr((uint32_t)_regs.ecx, &val, ctxt)) != 0 )
3297 goto done;
3298 _regs.edx = (uint32_t)(val >> 32);
3299 _regs.eax = (uint32_t)(val >> 0);
3300 break;
3301 }
3303 case 0x80 ... 0x8f: /* jcc (near) */ {
3304 int rel = (((op_bytes == 2) && !mode_64bit())
3305 ? (int32_t)insn_fetch_type(int16_t)
3306 : insn_fetch_type(int32_t));
3307 if ( test_cc(b, _regs.eflags) )
3308 jmp_rel(rel);
3309 break;
3310 }
3312 case 0xa0: /* push %%fs */
3313 src.val = x86_seg_fs;
3314 goto push_seg;
3316 case 0xa1: /* pop %%fs */
3317 src.val = x86_seg_fs;
3318 goto pop_seg;
3320 case 0xa2: /* cpuid */ {
3321 unsigned int eax = _regs.eax, ebx = _regs.ebx;
3322 unsigned int ecx = _regs.ecx, edx = _regs.edx;
3323 fail_if(ops->cpuid == NULL);
3324 if ( (rc = ops->cpuid(&eax, &ebx, &ecx, &edx, ctxt)) != 0 )
3325 goto done;
3326 _regs.eax = eax; _regs.ebx = ebx;
3327 _regs.ecx = ecx; _regs.edx = edx;
3328 break;
3329 }
3331 case 0xa8: /* push %%gs */
3332 src.val = x86_seg_gs;
3333 goto push_seg;
3335 case 0xa9: /* pop %%gs */
3336 src.val = x86_seg_gs;
3337 goto pop_seg;
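/*
 * CMPXCHG8B: compare EDX:EAX with the 64-bit memory operand; on a
 * match, write ECX:EBX to memory and set ZF, otherwise load the old
 * value into EDX:EAX and clear ZF. A 32-bit build needs the dedicated
 * cmpxchg8b hook for the atomic update, while a 64-bit build can
 * reuse the ordinary 8-byte ops->cmpxchg.
 */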
3339 case 0xc7: /* Grp9 (cmpxchg8b) */
3340 #if defined(__i386__)
3341 {
3342 unsigned long old_lo, old_hi;
3343 generate_exception_if((modrm_reg & 7) != 1, EXC_UD, -1);
3344 generate_exception_if(ea.type != OP_MEM, EXC_UD, -1);
3345 if ( (rc = ops->read(ea.mem.seg, ea.mem.off+0, &old_lo, 4, ctxt)) ||
3346 (rc = ops->read(ea.mem.seg, ea.mem.off+4, &old_hi, 4, ctxt)) )
3347 goto done;
3348 if ( (old_lo != _regs.eax) || (old_hi != _regs.edx) )
3349 {
3350 _regs.eax = old_lo;
3351 _regs.edx = old_hi;
3352 _regs.eflags &= ~EFLG_ZF;
3353 }
3354 else if ( ops->cmpxchg8b == NULL )
3355 {
3356 rc = X86EMUL_UNHANDLEABLE;
3357 goto done;
3358 }
3359 else
3360 {
3361 if ( (rc = ops->cmpxchg8b(ea.mem.seg, ea.mem.off, old_lo, old_hi,
3362 _regs.ebx, _regs.ecx, ctxt)) != 0 )
3363 goto done;
3364 _regs.eflags |= EFLG_ZF;
3365 }
3366 break;
3367 }
3368 #elif defined(__x86_64__)
3369 {
3370 unsigned long old, new;
3371 generate_exception_if((modrm_reg & 7) != 1, EXC_UD, -1);
3372 generate_exception_if(ea.type != OP_MEM, EXC_UD, -1);
3373 if ( (rc = ops->read(ea.mem.seg, ea.mem.off, &old, 8, ctxt)) != 0 )
3374 goto done;
3375 if ( ((uint32_t)(old>>0) != (uint32_t)_regs.eax) ||
3376 ((uint32_t)(old>>32) != (uint32_t)_regs.edx) )
3377 {
3378 _regs.eax = (uint32_t)(old>>0);
3379 _regs.edx = (uint32_t)(old>>32);
3380 _regs.eflags &= ~EFLG_ZF;
3381 }
3382 else
3383 {
3384 new = (_regs.ecx<<32)|(uint32_t)_regs.ebx;
3385 if ( (rc = ops->cmpxchg(ea.mem.seg, ea.mem.off, old,
3386 new, 8, ctxt)) != 0 )
3387 goto done;
3388 _regs.eflags |= EFLG_ZF;
3389 }
3390 break;
3391 }
3392 #endif
3394 case 0xc8 ... 0xcf: /* bswap */
3395 dst.type = OP_REG;
3396 dst.reg = decode_register(
3397 (b & 7) | ((rex_prefix & 1) << 3), &_regs, 0);
3398 switch ( dst.bytes = op_bytes )
3399 {
3400 default: /* case 2: */
3401 /* Undefined behaviour. Writes zero on all tested CPUs. */
3402 dst.val = 0;
3403 break;
3404 case 4:
3405 #ifdef __x86_64__
3406 asm ( "bswap %k0" : "=r" (dst.val) : "0" (*dst.reg) );
3407 break;
3408 case 8:
3409 #endif
3410 asm ( "bswap %0" : "=r" (dst.val) : "0" (*dst.reg) );
3411 break;
3412 }
3413 break;
3414 }
3415 goto writeback;
3417 cannot_emulate:
3418 #if 0
3419 gdprintk(XENLOG_DEBUG, "Instr:");
3420 for ( ea.mem.off = ctxt->regs->eip; ea.mem.off < _regs.eip; ea.mem.off++ )
3421 {
3422 unsigned long x;
3423 ops->insn_fetch(x86_seg_cs, ea.mem.off, &x, 1, ctxt);
3424 printk(" %02x", (uint8_t)x);
3425 }
3426 printk("\n");
3427 #endif
3428 return X86EMUL_UNHANDLEABLE;