ia64/xen-unstable

view xen/arch/x86/x86_emulate.c @ 16989:92734271810a

vmx realmode: Emulate protected-mode transition while CS and SS have
bad selector values (bottom two bits non-zero).

Allows opensuse 10.3 install CD to boot. Unfortunately SUSE Linux 10.1
install CD still fails to work...

Signed-off-by: Keir Fraser <keir.fraser@citrix.com>
author Keir Fraser <keir.fraser@citrix.com>
date Tue Feb 05 15:45:10 2008 +0000 (2008-02-05)
parents 32e9c52fc6d9
children bf4a24c172d2
line source
1 /******************************************************************************
2 * x86_emulate.c
3 *
4 * Generic x86 (32-bit and 64-bit) instruction decoder and emulator.
5 *
6 * Copyright (c) 2005-2007 Keir Fraser
7 * Copyright (c) 2005-2007 XenSource Inc.
8 *
9 * This program is free software; you can redistribute it and/or modify
10 * it under the terms of the GNU General Public License as published by
11 * the Free Software Foundation; either version 2 of the License, or
12 * (at your option) any later version.
13 *
14 * This program is distributed in the hope that it will be useful,
15 * but WITHOUT ANY WARRANTY; without even the implied warranty of
16 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
17 * GNU General Public License for more details.
18 *
19 * You should have received a copy of the GNU General Public License
20 * along with this program; if not, write to the Free Software
21 * Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
22 */
24 #ifndef __XEN__
25 #include <stddef.h>
26 #include <stdint.h>
27 #include <public/xen.h>
28 #else
29 #include <xen/config.h>
30 #include <xen/types.h>
31 #include <xen/lib.h>
32 #include <asm/regs.h>
33 #undef cmpxchg
34 #endif
35 #include <asm-x86/x86_emulate.h>
/*
 * Operand-decode flags. Each opcode_table/twobyte_table entry below is an
 * OR of these bits, describing how the generic decoder should interpret
 * the instruction's operands.
 */
/* Operand sizes: 8-bit operands or specified/overridden size. */
#define ByteOp      (1<<0) /* 8-bit operands. */
/* Destination operand type. */
#define DstBitBase  (0<<1) /* Memory operand, bit string. */
#define ImplicitOps (1<<1) /* Implicit in opcode. No generic decode. */
#define DstReg      (2<<1) /* Register operand. */
#define DstMem      (3<<1) /* Memory operand. */
#define DstMask     (3<<1)
/* Source operand type. */
#define SrcNone     (0<<3) /* No source operand. */
#define SrcImplicit (0<<3) /* Source operand is implicit in the opcode. */
#define SrcReg      (1<<3) /* Register operand. */
#define SrcMem      (2<<3) /* Memory operand. */
#define SrcMem16    (3<<3) /* Memory operand (16-bit). */
#define SrcImm      (4<<3) /* Immediate operand. */
#define SrcImmByte  (5<<3) /* 8-bit sign-extended immediate operand. */
#define SrcMask     (7<<3)
/* Generic ModRM decode. */
#define ModRM       (1<<6)
/* Destination is only written; never read. */
#define Mov         (1<<7)
/*
 * Decode flags for each one-byte opcode (index == opcode byte).
 * A zero entry means the opcode is not handled by this emulator.
 */
static uint8_t opcode_table[256] = {
    /* 0x00 - 0x07 */
    ByteOp|DstMem|SrcReg|ModRM, DstMem|SrcReg|ModRM,
    ByteOp|DstReg|SrcMem|ModRM, DstReg|SrcMem|ModRM,
    ByteOp|DstReg|SrcImm, DstReg|SrcImm, ImplicitOps, ImplicitOps,
    /* 0x08 - 0x0F */
    ByteOp|DstMem|SrcReg|ModRM, DstMem|SrcReg|ModRM,
    ByteOp|DstReg|SrcMem|ModRM, DstReg|SrcMem|ModRM,
    ByteOp|DstReg|SrcImm, DstReg|SrcImm, ImplicitOps, 0,
    /* 0x10 - 0x17 */
    ByteOp|DstMem|SrcReg|ModRM, DstMem|SrcReg|ModRM,
    ByteOp|DstReg|SrcMem|ModRM, DstReg|SrcMem|ModRM,
    ByteOp|DstReg|SrcImm, DstReg|SrcImm, ImplicitOps, ImplicitOps,
    /* 0x18 - 0x1F */
    ByteOp|DstMem|SrcReg|ModRM, DstMem|SrcReg|ModRM,
    ByteOp|DstReg|SrcMem|ModRM, DstReg|SrcMem|ModRM,
    ByteOp|DstReg|SrcImm, DstReg|SrcImm, ImplicitOps, ImplicitOps,
    /* 0x20 - 0x27 */
    ByteOp|DstMem|SrcReg|ModRM, DstMem|SrcReg|ModRM,
    ByteOp|DstReg|SrcMem|ModRM, DstReg|SrcMem|ModRM,
    ByteOp|DstReg|SrcImm, DstReg|SrcImm, 0, ImplicitOps,
    /* 0x28 - 0x2F */
    ByteOp|DstMem|SrcReg|ModRM, DstMem|SrcReg|ModRM,
    ByteOp|DstReg|SrcMem|ModRM, DstReg|SrcMem|ModRM,
    ByteOp|DstReg|SrcImm, DstReg|SrcImm, 0, ImplicitOps,
    /* 0x30 - 0x37 */
    ByteOp|DstMem|SrcReg|ModRM, DstMem|SrcReg|ModRM,
    ByteOp|DstReg|SrcMem|ModRM, DstReg|SrcMem|ModRM,
    ByteOp|DstReg|SrcImm, DstReg|SrcImm, 0, ImplicitOps,
    /* 0x38 - 0x3F */
    ByteOp|DstMem|SrcReg|ModRM, DstMem|SrcReg|ModRM,
    ByteOp|DstReg|SrcMem|ModRM, DstReg|SrcMem|ModRM,
    ByteOp|DstReg|SrcImm, DstReg|SrcImm, 0, ImplicitOps,
    /* 0x40 - 0x4F */
    ImplicitOps, ImplicitOps, ImplicitOps, ImplicitOps,
    ImplicitOps, ImplicitOps, ImplicitOps, ImplicitOps,
    ImplicitOps, ImplicitOps, ImplicitOps, ImplicitOps,
    ImplicitOps, ImplicitOps, ImplicitOps, ImplicitOps,
    /* 0x50 - 0x5F */
    ImplicitOps|Mov, ImplicitOps|Mov, ImplicitOps|Mov, ImplicitOps|Mov,
    ImplicitOps|Mov, ImplicitOps|Mov, ImplicitOps|Mov, ImplicitOps|Mov,
    ImplicitOps|Mov, ImplicitOps|Mov, ImplicitOps|Mov, ImplicitOps|Mov,
    ImplicitOps|Mov, ImplicitOps|Mov, ImplicitOps|Mov, ImplicitOps|Mov,
    /* 0x60 - 0x67 */
    ImplicitOps, ImplicitOps, DstReg|SrcMem|ModRM, DstReg|SrcMem16|ModRM|Mov,
    0, 0, 0, 0,
    /* 0x68 - 0x6F */
    ImplicitOps|Mov, DstReg|SrcImm|ModRM|Mov,
    ImplicitOps|Mov, DstReg|SrcImmByte|ModRM|Mov,
    ImplicitOps, ImplicitOps, ImplicitOps, ImplicitOps,
    /* 0x70 - 0x77 */
    ImplicitOps, ImplicitOps, ImplicitOps, ImplicitOps,
    ImplicitOps, ImplicitOps, ImplicitOps, ImplicitOps,
    /* 0x78 - 0x7F */
    ImplicitOps, ImplicitOps, ImplicitOps, ImplicitOps,
    ImplicitOps, ImplicitOps, ImplicitOps, ImplicitOps,
    /* 0x80 - 0x87 */
    ByteOp|DstMem|SrcImm|ModRM, DstMem|SrcImm|ModRM,
    ByteOp|DstMem|SrcImm|ModRM, DstMem|SrcImmByte|ModRM,
    ByteOp|DstMem|SrcReg|ModRM, DstMem|SrcReg|ModRM,
    ByteOp|DstMem|SrcReg|ModRM, DstMem|SrcReg|ModRM,
    /* 0x88 - 0x8F */
    ByteOp|DstMem|SrcReg|ModRM|Mov, DstMem|SrcReg|ModRM|Mov,
    ByteOp|DstReg|SrcMem|ModRM|Mov, DstReg|SrcMem|ModRM|Mov,
    DstMem|SrcReg|ModRM|Mov, DstReg|SrcNone|ModRM,
    DstReg|SrcMem|ModRM|Mov, DstMem|SrcNone|ModRM|Mov,
    /* 0x90 - 0x97 */
    ImplicitOps, ImplicitOps, ImplicitOps, ImplicitOps,
    ImplicitOps, ImplicitOps, ImplicitOps, ImplicitOps,
    /* 0x98 - 0x9F */
    ImplicitOps, ImplicitOps, ImplicitOps, 0,
    ImplicitOps, ImplicitOps, ImplicitOps, ImplicitOps,
    /* 0xA0 - 0xA7 */
    ByteOp|ImplicitOps|Mov, ImplicitOps|Mov,
    ByteOp|ImplicitOps|Mov, ImplicitOps|Mov,
    ByteOp|ImplicitOps|Mov, ImplicitOps|Mov,
    ByteOp|ImplicitOps, ImplicitOps,
    /* 0xA8 - 0xAF */
    ByteOp|DstReg|SrcImm, DstReg|SrcImm,
    ByteOp|ImplicitOps|Mov, ImplicitOps|Mov,
    ByteOp|ImplicitOps|Mov, ImplicitOps|Mov,
    ByteOp|ImplicitOps, ImplicitOps,
    /* 0xB0 - 0xB7 */
    ByteOp|DstReg|SrcImm|Mov, ByteOp|DstReg|SrcImm|Mov,
    ByteOp|DstReg|SrcImm|Mov, ByteOp|DstReg|SrcImm|Mov,
    ByteOp|DstReg|SrcImm|Mov, ByteOp|DstReg|SrcImm|Mov,
    ByteOp|DstReg|SrcImm|Mov, ByteOp|DstReg|SrcImm|Mov,
    /* 0xB8 - 0xBF */
    DstReg|SrcImm|Mov, DstReg|SrcImm|Mov, DstReg|SrcImm|Mov, DstReg|SrcImm|Mov,
    DstReg|SrcImm|Mov, DstReg|SrcImm|Mov, DstReg|SrcImm|Mov, DstReg|SrcImm|Mov,
    /* 0xC0 - 0xC7 */
    ByteOp|DstMem|SrcImm|ModRM, DstMem|SrcImmByte|ModRM,
    ImplicitOps, ImplicitOps,
    DstReg|SrcMem|ModRM|Mov, DstReg|SrcMem|ModRM|Mov,
    ByteOp|DstMem|SrcImm|ModRM|Mov, DstMem|SrcImm|ModRM|Mov,
    /* 0xC8 - 0xCF */
    ImplicitOps, ImplicitOps, ImplicitOps, ImplicitOps,
    ImplicitOps, ImplicitOps, ImplicitOps, ImplicitOps,
    /* 0xD0 - 0xD7 */
    ByteOp|DstMem|SrcImplicit|ModRM, DstMem|SrcImplicit|ModRM,
    ByteOp|DstMem|SrcImplicit|ModRM, DstMem|SrcImplicit|ModRM,
    ImplicitOps, ImplicitOps, ImplicitOps, ImplicitOps,
    /* 0xD8 - 0xDF */
    0, ImplicitOps|ModRM, 0, ImplicitOps|ModRM, 0, ImplicitOps|ModRM, 0, 0,
    /* 0xE0 - 0xE7 */
    ImplicitOps, ImplicitOps, ImplicitOps, ImplicitOps,
    ImplicitOps, ImplicitOps, ImplicitOps, ImplicitOps,
    /* 0xE8 - 0xEF */
    ImplicitOps, ImplicitOps, ImplicitOps, ImplicitOps,
    ImplicitOps, ImplicitOps, ImplicitOps, ImplicitOps,
    /* 0xF0 - 0xF7 */
    0, ImplicitOps, 0, 0,
    ImplicitOps, ImplicitOps,
    ByteOp|DstMem|SrcNone|ModRM, DstMem|SrcNone|ModRM,
    /* 0xF8 - 0xFF */
    ImplicitOps, ImplicitOps, ImplicitOps, ImplicitOps,
    ImplicitOps, ImplicitOps, ByteOp|DstMem|SrcNone|ModRM, DstMem|SrcNone|ModRM
};
/*
 * Decode flags for each two-byte (0x0F-prefixed) opcode (index == second
 * opcode byte). A zero entry means the opcode is not handled.
 */
static uint8_t twobyte_table[256] = {
    /* 0x00 - 0x07 */
    0, ImplicitOps|ModRM, 0, 0, 0, 0, ImplicitOps, 0,
    /* 0x08 - 0x0F */
    ImplicitOps, ImplicitOps, 0, 0, 0, ImplicitOps|ModRM, 0, 0,
    /* 0x10 - 0x17 */
    0, 0, 0, 0, 0, 0, 0, 0,
    /* 0x18 - 0x1F */
    ImplicitOps|ModRM, ImplicitOps|ModRM, ImplicitOps|ModRM, ImplicitOps|ModRM,
    ImplicitOps|ModRM, ImplicitOps|ModRM, ImplicitOps|ModRM, ImplicitOps|ModRM,
    /* 0x20 - 0x27 */
    ImplicitOps|ModRM, ImplicitOps|ModRM, ImplicitOps|ModRM, ImplicitOps|ModRM,
    0, 0, 0, 0,
    /* 0x28 - 0x2F */
    0, 0, 0, 0, 0, 0, 0, 0,
    /* 0x30 - 0x37 */
    ImplicitOps, ImplicitOps, ImplicitOps, 0, 0, 0, 0, 0,
    /* 0x38 - 0x3F */
    0, 0, 0, 0, 0, 0, 0, 0,
    /* 0x40 - 0x47 */
    DstReg|SrcMem|ModRM|Mov, DstReg|SrcMem|ModRM|Mov,
    DstReg|SrcMem|ModRM|Mov, DstReg|SrcMem|ModRM|Mov,
    DstReg|SrcMem|ModRM|Mov, DstReg|SrcMem|ModRM|Mov,
    DstReg|SrcMem|ModRM|Mov, DstReg|SrcMem|ModRM|Mov,
    /* 0x48 - 0x4F */
    DstReg|SrcMem|ModRM|Mov, DstReg|SrcMem|ModRM|Mov,
    DstReg|SrcMem|ModRM|Mov, DstReg|SrcMem|ModRM|Mov,
    DstReg|SrcMem|ModRM|Mov, DstReg|SrcMem|ModRM|Mov,
    DstReg|SrcMem|ModRM|Mov, DstReg|SrcMem|ModRM|Mov,
    /* 0x50 - 0x5F */
    0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
    /* 0x60 - 0x6F */
    0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
    /* 0x70 - 0x7F */
    0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
    /* 0x80 - 0x87 */
    ImplicitOps, ImplicitOps, ImplicitOps, ImplicitOps,
    ImplicitOps, ImplicitOps, ImplicitOps, ImplicitOps,
    /* 0x88 - 0x8F */
    ImplicitOps, ImplicitOps, ImplicitOps, ImplicitOps,
    ImplicitOps, ImplicitOps, ImplicitOps, ImplicitOps,
    /* 0x90 - 0x97 */
    ByteOp|DstMem|SrcNone|ModRM|Mov, ByteOp|DstMem|SrcNone|ModRM|Mov,
    ByteOp|DstMem|SrcNone|ModRM|Mov, ByteOp|DstMem|SrcNone|ModRM|Mov,
    ByteOp|DstMem|SrcNone|ModRM|Mov, ByteOp|DstMem|SrcNone|ModRM|Mov,
    ByteOp|DstMem|SrcNone|ModRM|Mov, ByteOp|DstMem|SrcNone|ModRM|Mov,
    /* 0x98 - 0x9F */
    ByteOp|DstMem|SrcNone|ModRM|Mov, ByteOp|DstMem|SrcNone|ModRM|Mov,
    ByteOp|DstMem|SrcNone|ModRM|Mov, ByteOp|DstMem|SrcNone|ModRM|Mov,
    ByteOp|DstMem|SrcNone|ModRM|Mov, ByteOp|DstMem|SrcNone|ModRM|Mov,
    ByteOp|DstMem|SrcNone|ModRM|Mov, ByteOp|DstMem|SrcNone|ModRM|Mov,
    /* 0xA0 - 0xA7 */
    ImplicitOps, ImplicitOps, ImplicitOps, DstBitBase|SrcReg|ModRM,
    DstMem|SrcReg|ModRM, DstMem|SrcReg|ModRM, 0, 0,
    /* 0xA8 - 0xAF */
    ImplicitOps, ImplicitOps, 0, DstBitBase|SrcReg|ModRM,
    DstMem|SrcReg|ModRM, DstMem|SrcReg|ModRM, 0, DstReg|SrcMem|ModRM,
    /* 0xB0 - 0xB7 */
    ByteOp|DstMem|SrcReg|ModRM, DstMem|SrcReg|ModRM,
    DstReg|SrcMem|ModRM|Mov, DstBitBase|SrcReg|ModRM,
    DstReg|SrcMem|ModRM|Mov, DstReg|SrcMem|ModRM|Mov,
    ByteOp|DstReg|SrcMem|ModRM|Mov, DstReg|SrcMem16|ModRM|Mov,
    /* 0xB8 - 0xBF */
    0, 0, DstBitBase|SrcImmByte|ModRM, DstBitBase|SrcReg|ModRM,
    DstReg|SrcMem|ModRM, DstReg|SrcMem|ModRM,
    ByteOp|DstReg|SrcMem|ModRM|Mov, DstReg|SrcMem16|ModRM|Mov,
    /* 0xC0 - 0xC7 */
    ByteOp|DstMem|SrcReg|ModRM, DstMem|SrcReg|ModRM, 0, 0,
    0, 0, 0, ImplicitOps|ModRM,
    /* 0xC8 - 0xCF */
    ImplicitOps, ImplicitOps, ImplicitOps, ImplicitOps,
    ImplicitOps, ImplicitOps, ImplicitOps, ImplicitOps,
    /* 0xD0 - 0xDF */
    0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
    /* 0xE0 - 0xEF */
    0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
    /* 0xF0 - 0xFF */
    0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0
};
/* Type, address-of, and value of an instruction's operand. */
struct operand {
    enum { OP_REG, OP_MEM, OP_IMM, OP_NONE } type;
    /* Operand width in bytes. */
    unsigned int  bytes;
    /* Operand value, and (where the emulator needs it) its original value. */
    unsigned long val, orig_val;
    union {
        /* OP_REG: Pointer to register field. */
        unsigned long *reg;
        /* OP_MEM: Segment and offset. */
        struct {
            enum x86_segment seg;
            unsigned long    off;
        } mem;
    };
};
/* MSRs. */
#define MSR_TSC   0x10

/* Control register flags. */
#define CR0_PE    (1<<0)  /* Protection Enable (CR0 bit 0). */
#define CR4_TSD   (1<<2)  /* Time Stamp Disable (CR4 bit 2). */

/* EFLAGS bit definitions. */
#define EFLG_VIP  (1<<20)
#define EFLG_VIF  (1<<19)
#define EFLG_AC   (1<<18)
#define EFLG_VM   (1<<17)
#define EFLG_RF   (1<<16)
#define EFLG_NT   (1<<14)
#define EFLG_IOPL (3<<12)
#define EFLG_OF   (1<<11)
#define EFLG_DF   (1<<10)
#define EFLG_IF   (1<<9)
#define EFLG_TF   (1<<8)
#define EFLG_SF   (1<<7)
#define EFLG_ZF   (1<<6)
#define EFLG_AF   (1<<4)
#define EFLG_PF   (1<<2)
#define EFLG_CF   (1<<0)

/* Exception vector definitions. */
#define EXC_DE  0
#define EXC_DB  1
#define EXC_BP  3
#define EXC_OF  4
#define EXC_BR  5
#define EXC_UD  6
#define EXC_TS 10
#define EXC_NP 11
#define EXC_SS 12
#define EXC_GP 13
#define EXC_PF 14
/*
 * Instruction emulation:
 * Most instructions are emulated directly via a fragment of inline assembly
 * code. This allows us to save/restore EFLAGS and thus very easily pick up
 * any modified flags.
 */

#if defined(__x86_64__)
#define _LO32 "k"          /* force 32-bit operand */
#define _STK  "%%rsp"      /* stack pointer */
#define _BYTES_PER_LONG "8"
#elif defined(__i386__)
#define _LO32 ""           /* force 32-bit operand */
#define _STK  "%%esp"      /* stack pointer */
#define _BYTES_PER_LONG "4"
#endif

/*
 * These EFLAGS bits are restored from saved value during emulation, and
 * any changes are written back to the saved value after emulation.
 */
#define EFLAGS_MASK (EFLG_OF|EFLG_SF|EFLG_ZF|EFLG_AF|EFLG_PF|EFLG_CF)

/*
 * Before executing instruction: restore necessary bits in EFLAGS.
 * Expands to an asm string; _sav/_msk/_tmp are operand-number strings.
 */
#define _PRE_EFLAGS(_sav, _msk, _tmp)                           \
/* EFLAGS = (_sav & _msk) | (EFLAGS & ~_msk); _sav &= ~_msk; */ \
"movl %"_sav",%"_LO32 _tmp"; "                                  \
"push %"_tmp"; "                                                \
"push %"_tmp"; "                                                \
"movl %"_msk",%"_LO32 _tmp"; "                                  \
"andl %"_LO32 _tmp",("_STK"); "                                 \
"pushf; "                                                       \
"notl %"_LO32 _tmp"; "                                          \
"andl %"_LO32 _tmp",("_STK"); "                                 \
"andl %"_LO32 _tmp",2*"_BYTES_PER_LONG"("_STK"); "              \
"pop %"_tmp"; "                                                 \
"orl %"_LO32 _tmp",("_STK"); "                                  \
"popf; "                                                        \
"pop %"_sav"; "

/* After executing instruction: write-back necessary bits in EFLAGS. */
#define _POST_EFLAGS(_sav, _msk, _tmp) \
/* _sav |= EFLAGS & _msk; */           \
"pushf; "                              \
"pop %"_tmp"; "                        \
"andl %"_msk",%"_LO32 _tmp"; "         \
"orl %"_LO32 _tmp",%"_sav"; "
/* Raw emulation: instruction has two explicit operands. */
#define __emulate_2op_nobyte(_op,_src,_dst,_eflags,_wx,_wy,_lx,_ly,_qx,_qy)\
do{ unsigned long _tmp;                                                    \
    /* Dispatch on destination width; byte case handled by caller. */      \
    switch ( (_dst).bytes )                                                \
    {                                                                      \
    case 2:                                                                \
        asm volatile (                                                     \
            _PRE_EFLAGS("0","4","2")                                       \
            _op"w %"_wx"3,%1; "                                            \
            _POST_EFLAGS("0","4","2")                                      \
            : "=m" (_eflags), "=m" ((_dst).val), "=&r" (_tmp)              \
            : _wy ((_src).val), "i" (EFLAGS_MASK),                         \
              "m" (_eflags), "m" ((_dst).val) );                           \
        break;                                                             \
    case 4:                                                                \
        asm volatile (                                                     \
            _PRE_EFLAGS("0","4","2")                                       \
            _op"l %"_lx"3,%1; "                                            \
            _POST_EFLAGS("0","4","2")                                      \
            : "=m" (_eflags), "=m" ((_dst).val), "=&r" (_tmp)              \
            : _ly ((_src).val), "i" (EFLAGS_MASK),                         \
              "m" (_eflags), "m" ((_dst).val) );                           \
        break;                                                             \
    case 8:                                                                \
        /* No-op on 32-bit builds (see __emulate_2op_8byte below). */      \
        __emulate_2op_8byte(_op, _src, _dst, _eflags, _qx, _qy);           \
        break;                                                             \
    }                                                                      \
} while (0)
#define __emulate_2op(_op,_src,_dst,_eflags,_bx,_by,_wx,_wy,_lx,_ly,_qx,_qy)\
do{ unsigned long _tmp;                                                     \
    switch ( (_dst).bytes )                                                 \
    {                                                                       \
    case 1:                                                                 \
        asm volatile (                                                      \
            _PRE_EFLAGS("0","4","2")                                        \
            _op"b %"_bx"3,%1; "                                             \
            _POST_EFLAGS("0","4","2")                                       \
            : "=m" (_eflags), "=m" ((_dst).val), "=&r" (_tmp)               \
            : _by ((_src).val), "i" (EFLAGS_MASK),                          \
              "m" (_eflags), "m" ((_dst).val) );                            \
        break;                                                              \
    default:                                                                \
        __emulate_2op_nobyte(_op,_src,_dst,_eflags,_wx,_wy,_lx,_ly,_qx,_qy);\
        break;                                                              \
    }                                                                       \
} while (0)
/* Source operand is byte-sized and may be restricted to just %cl. */
#define emulate_2op_SrcB(_op, _src, _dst, _eflags)   \
    __emulate_2op(_op, _src, _dst, _eflags,          \
                  "b", "c", "b", "c", "b", "c", "b", "c")
/* Source operand is byte, word, long or quad sized. */
#define emulate_2op_SrcV(_op, _src, _dst, _eflags)   \
    __emulate_2op(_op, _src, _dst, _eflags,          \
                  "b", "q", "w", "r", _LO32, "r", "", "r")
/* Source operand is word, long or quad sized. */
#define emulate_2op_SrcV_nobyte(_op, _src, _dst, _eflags) \
    __emulate_2op_nobyte(_op, _src, _dst, _eflags,        \
                         "w", "r", _LO32, "r", "", "r")
/* Instruction has only one explicit operand (no source operand). */
#define emulate_1op(_op,_dst,_eflags)                                      \
do{ unsigned long _tmp;                                                    \
    switch ( (_dst).bytes )                                                \
    {                                                                      \
    case 1:                                                                \
        asm volatile (                                                     \
            _PRE_EFLAGS("0","3","2")                                       \
            _op"b %1; "                                                    \
            _POST_EFLAGS("0","3","2")                                      \
            : "=m" (_eflags), "=m" ((_dst).val), "=&r" (_tmp)              \
            : "i" (EFLAGS_MASK), "m" (_eflags), "m" ((_dst).val) );        \
        break;                                                             \
    case 2:                                                                \
        asm volatile (                                                     \
            _PRE_EFLAGS("0","3","2")                                       \
            _op"w %1; "                                                    \
            _POST_EFLAGS("0","3","2")                                      \
            : "=m" (_eflags), "=m" ((_dst).val), "=&r" (_tmp)              \
            : "i" (EFLAGS_MASK), "m" (_eflags), "m" ((_dst).val) );        \
        break;                                                             \
    case 4:                                                                \
        asm volatile (                                                     \
            _PRE_EFLAGS("0","3","2")                                       \
            _op"l %1; "                                                    \
            _POST_EFLAGS("0","3","2")                                      \
            : "=m" (_eflags), "=m" ((_dst).val), "=&r" (_tmp)              \
            : "i" (EFLAGS_MASK), "m" (_eflags), "m" ((_dst).val) );        \
        break;                                                             \
    case 8:                                                                \
        /* No-op on 32-bit builds (see __emulate_1op_8byte below). */      \
        __emulate_1op_8byte(_op, _dst, _eflags);                           \
        break;                                                             \
    }                                                                      \
} while (0)
/* Emulate an instruction with quadword operands (x86/64 only). */
#if defined(__x86_64__)
#define __emulate_2op_8byte(_op, _src, _dst, _eflags, _qx, _qy)     \
do{ asm volatile (                                                  \
        _PRE_EFLAGS("0","4","2")                                    \
        _op"q %"_qx"3,%1; "                                         \
        _POST_EFLAGS("0","4","2")                                   \
        : "=m" (_eflags), "=m" ((_dst).val), "=&r" (_tmp)           \
        : _qy ((_src).val), "i" (EFLAGS_MASK),                      \
          "m" (_eflags), "m" ((_dst).val) );                        \
} while (0)
#define __emulate_1op_8byte(_op, _dst, _eflags)                     \
do{ asm volatile (                                                  \
        _PRE_EFLAGS("0","3","2")                                    \
        _op"q %1; "                                                 \
        _POST_EFLAGS("0","3","2")                                   \
        : "=m" (_eflags), "=m" ((_dst).val), "=&r" (_tmp)           \
        : "i" (EFLAGS_MASK), "m" (_eflags), "m" ((_dst).val) );     \
} while (0)
#elif defined(__i386__)
/* 8-byte operands cannot occur on i386: expand to nothing. */
#define __emulate_2op_8byte(_op, _src, _dst, _eflags, _qx, _qy)
#define __emulate_1op_8byte(_op, _dst, _eflags)
#endif /* __i386__ */
/*
 * Fetch next part of the instruction being emulated. Advances the shadow
 * EIP and bails out (via 'done') on fetch failure or if the instruction
 * exceeds the architectural 15-byte length limit (#GP).
 */
#define insn_fetch_bytes(_size)                                         \
({ unsigned long _x, _eip = _regs.eip;                                  \
   if ( !mode_64bit() ) _eip = (uint32_t)_eip; /* ignore upper dword */ \
   _regs.eip += (_size); /* real hardware doesn't truncate */           \
   generate_exception_if((uint8_t)(_regs.eip - ctxt->regs->eip) > 15,   \
                         EXC_GP);                                       \
   rc = ops->insn_fetch(x86_seg_cs, _eip, &_x, (_size), ctxt);          \
   if ( rc ) goto done;                                                 \
   _x;                                                                  \
})
#define insn_fetch_type(_type) ((_type)insn_fetch_bytes(sizeof(_type)))

/* Truncate a value to the given width in bytes (no-op at full width). */
#define truncate_word(ea, byte_width)                                   \
({ unsigned long __ea = (ea);                                           \
   unsigned int _width = (byte_width);                                  \
   ((_width == sizeof(unsigned long)) ? __ea :                          \
    (__ea & ((1UL << (_width << 3)) - 1)));                             \
})
#define truncate_ea(ea) truncate_word((ea), ad_bytes)
/* 64-bit mode <=> default address size is 8 bytes. */
#define mode_64bit() (def_ad_bytes == 8)

/* Abort emulation (jump to 'done' with UNHANDLEABLE) if predicate holds. */
#define fail_if(p)                                      \
do {                                                    \
    rc = (p) ? X86EMUL_UNHANDLEABLE : X86EMUL_OKAY;     \
    if ( rc ) goto done;                                \
} while (0)

/* Inject hardware exception 'e' (error code 0) and bail if 'p' holds. */
#define generate_exception_if(p, e)                                      \
({  if ( (p) ) {                                                         \
        fail_if(ops->inject_hw_exception == NULL);                       \
        rc = ops->inject_hw_exception(e, 0, ctxt) ? : X86EMUL_EXCEPTION; \
        goto done;                                                       \
    }                                                                    \
})
/*
 * Given byte has even parity (even number of 1s)? SDM Vol. 1 Sec. 3.4.3.1,
 * "Status Flags": EFLAGS.PF reflects parity of least-sig. byte of result only.
 */
static int even_parity(uint8_t v)
{
    /* TEST sets PF from the byte's parity; SETP materializes it into v. */
    asm ( "test %b0,%b0; setp %b0" : "=a" (v) : "0" (v) );
    return v;
}
/*
 * Update address held in a register, based on addressing mode. At less than
 * full register width, only the low _width bytes are updated (the rest of
 * the register is zeroed in 64-bit mode, preserved otherwise).
 */
#define _register_address_increment(reg, inc, byte_width)               \
do {                                                                    \
    int _inc = (inc); /* signed type ensures sign extension to long */  \
    unsigned int _width = (byte_width);                                 \
    if ( _width == sizeof(unsigned long) )                              \
        (reg) += _inc;                                                  \
    else if ( mode_64bit() )                                            \
        (reg) = ((reg) + _inc) & ((1UL << (_width << 3)) - 1);          \
    else                                                                \
        (reg) = ((reg) & ~((1UL << (_width << 3)) - 1)) |               \
                (((reg) + _inc) & ((1UL << (_width << 3)) - 1));        \
} while (0)
#define register_address_increment(reg, inc) \
    _register_address_increment((reg), (inc), ad_bytes)

/* Pre-decrement ESP by 'dec'; evaluates to the new (truncated) ESP. */
#define sp_pre_dec(dec) ({                                              \
    _register_address_increment(_regs.esp, -(dec), ctxt->sp_size/8);    \
    truncate_word(_regs.esp, ctxt->sp_size/8);                          \
})
/* Post-increment ESP by 'inc'; evaluates to the old (truncated) ESP. */
#define sp_post_inc(inc) ({                                             \
    unsigned long __esp = truncate_word(_regs.esp, ctxt->sp_size/8);    \
    _register_address_increment(_regs.esp, (inc), ctxt->sp_size/8);     \
    __esp;                                                              \
})

/* Relative jump: adjust shadow EIP, truncating to op size outside 64-bit. */
#define jmp_rel(rel)                                                    \
do {                                                                    \
    _regs.eip += (int)(rel);                                            \
    if ( !mode_64bit() )                                                \
        _regs.eip = ((op_bytes == 2)                                    \
                     ? (uint16_t)_regs.eip : (uint32_t)_regs.eip);      \
} while (0)
559 static unsigned long __get_rep_prefix(
560 struct cpu_user_regs *int_regs,
561 struct cpu_user_regs *ext_regs,
562 int ad_bytes)
563 {
564 unsigned long ecx = ((ad_bytes == 2) ? (uint16_t)int_regs->ecx :
565 (ad_bytes == 4) ? (uint32_t)int_regs->ecx :
566 int_regs->ecx);
568 /* Skip the instruction if no repetitions are required. */
569 if ( ecx == 0 )
570 ext_regs->eip = int_regs->eip;
572 return ecx;
573 }
/*
 * Evaluates to the iteration count for the current instruction: rCX when a
 * rep prefix is present, else 1. Jumps to 'done' when the count is zero
 * (instruction already skipped by __get_rep_prefix()).
 */
#define get_rep_prefix() ({                                            \
    unsigned long max_reps = 1;                                        \
    if ( rep_prefix )                                                  \
        max_reps = __get_rep_prefix(&_regs, ctxt->regs, ad_bytes);     \
    if ( max_reps == 0 )                                               \
        goto done;                                                     \
    max_reps;                                                          \
})
584 static void __put_rep_prefix(
585 struct cpu_user_regs *int_regs,
586 struct cpu_user_regs *ext_regs,
587 int ad_bytes,
588 unsigned long reps_completed)
589 {
590 unsigned long ecx = ((ad_bytes == 2) ? (uint16_t)int_regs->ecx :
591 (ad_bytes == 4) ? (uint32_t)int_regs->ecx :
592 int_regs->ecx);
594 /* Reduce counter appropriately, and repeat instruction if non-zero. */
595 ecx -= reps_completed;
596 if ( ecx != 0 )
597 int_regs->eip = ext_regs->eip;
599 if ( ad_bytes == 2 )
600 *(uint16_t *)&int_regs->ecx = ecx;
601 else if ( ad_bytes == 4 )
602 int_regs->ecx = (uint32_t)ecx;
603 else
604 int_regs->ecx = ecx;
605 }
/* Write back the rep count after completing 'reps_completed' iterations. */
#define put_rep_prefix(reps_completed) ({                              \
    if ( rep_prefix )                                                  \
        __put_rep_prefix(&_regs, ctxt->regs, ad_bytes, reps_completed);\
})
/*
 * Unsigned multiplication with double-word result.
 * IN:  Multiplicand=m[0], Multiplier=m[1]
 * OUT: Return CF/OF (overflow status); Result=m[1]:m[0]
 */
static int mul_dbl(unsigned long m[2])
{
    int rc;
    /* MUL sets CF=OF when the high half is non-zero; SETO captures it. */
    asm ( "mul %4; seto %b2"
          : "=a" (m[0]), "=d" (m[1]), "=q" (rc)
          : "0" (m[0]), "1" (m[1]), "2" (0) );
    return rc;
}
/*
 * Signed multiplication with double-word result.
 * IN:  Multiplicand=m[0], Multiplier=m[1]
 * OUT: Return CF/OF (overflow status); Result=m[1]:m[0]
 */
static int imul_dbl(unsigned long m[2])
{
    int rc;
    /* One-operand IMUL: rDX:rAX = rAX * operand; SETO captures overflow. */
    asm ( "imul %4; seto %b2"
          : "=a" (m[0]), "=d" (m[1]), "=q" (rc)
          : "0" (m[0]), "1" (m[1]), "2" (0) );
    return rc;
}
/*
 * Unsigned division of double-word dividend.
 * IN:  Dividend=u[1]:u[0], Divisor=v
 * OUT: Return 1: #DE
 *      Return 0: Quotient=u[0], Remainder=u[1]
 */
static int div_dbl(unsigned long u[2], unsigned long v)
{
    /*
     * Pre-check the conditions under which DIV would fault #DE: divisor of
     * zero, or a quotient that cannot fit in one word (high half >= divisor).
     */
    if ( (v == 0) || (u[1] >= v) )
        return 1;
    asm ( "div %4"
          : "=a" (u[0]), "=d" (u[1])
          : "0" (u[0]), "1" (u[1]), "r" (v) );
    return 0;
}
/*
 * Signed division of double-word dividend.
 * IN:  Dividend=u[1]:u[0], Divisor=v
 * OUT: Return 1: #DE
 *      Return 0: Quotient=u[0], Remainder=u[1]
 * NB. We don't use idiv directly as it's moderately hard to work out
 *     ahead of time whether it will #DE, which we cannot allow to happen.
 */
static int idiv_dbl(unsigned long u[2], unsigned long v)
{
    int negu = (long)u[1] < 0, negv = (long)v < 0;

    /* u = abs(u): two's-complement negate the 2-word value in place. */
    if ( negu )
    {
        u[1] = ~u[1];
        if ( (u[0] = -u[0]) == 0 )
            u[1]++; /* carry into the high word */
    }

    /* abs(u) / abs(v) */
    if ( div_dbl(u, negv ? -v : v) )
        return 1;

    /* Remainder has same sign as dividend. It cannot overflow. */
    if ( negu )
        u[1] = -u[1];

    /* Quotient is overflowed if sign bit is set. */
    if ( negu ^ negv )
    {
        /* Negative quotient expected. */
        if ( (long)u[0] >= 0 )
            u[0] = -u[0];
        else if ( (u[0] << 1) != 0 ) /* == 0x80...0 is okay */
            return 1;
    }
    else if ( (long)u[0] < 0 )
        return 1; /* positive quotient expected but sign bit set */

    return 0;
}
698 static int
699 test_cc(
700 unsigned int condition, unsigned int flags)
701 {
702 int rc = 0;
704 switch ( (condition & 15) >> 1 )
705 {
706 case 0: /* o */
707 rc |= (flags & EFLG_OF);
708 break;
709 case 1: /* b/c/nae */
710 rc |= (flags & EFLG_CF);
711 break;
712 case 2: /* z/e */
713 rc |= (flags & EFLG_ZF);
714 break;
715 case 3: /* be/na */
716 rc |= (flags & (EFLG_CF|EFLG_ZF));
717 break;
718 case 4: /* s */
719 rc |= (flags & EFLG_SF);
720 break;
721 case 5: /* p/pe */
722 rc |= (flags & EFLG_PF);
723 break;
724 case 7: /* le/ng */
725 rc |= (flags & EFLG_ZF);
726 /* fall through */
727 case 6: /* l/nge */
728 rc |= (!(flags & EFLG_SF) != !(flags & EFLG_OF));
729 break;
730 }
732 /* Odd condition identifiers (lsb == 1) have inverted sense. */
733 return (!!rc ^ (condition & 1));
734 }
736 static int
737 get_cpl(
738 struct x86_emulate_ctxt *ctxt,
739 struct x86_emulate_ops *ops)
740 {
741 struct segment_register reg;
743 if ( ctxt->regs->eflags & EFLG_VM )
744 return 3;
746 if ( (ops->read_segment == NULL) ||
747 ops->read_segment(x86_seg_ss, &reg, ctxt) )
748 return -1;
750 return reg.attr.fields.dpl;
751 }
753 static int
754 _mode_iopl(
755 struct x86_emulate_ctxt *ctxt,
756 struct x86_emulate_ops *ops)
757 {
758 int cpl = get_cpl(ctxt, ops);
759 return ((cpl >= 0) && (cpl <= ((ctxt->regs->eflags >> 12) & 3)));
760 }
762 #define mode_ring0() (get_cpl(ctxt, ops) == 0)
763 #define mode_iopl() _mode_iopl(ctxt, ops)
765 static int
766 in_realmode(
767 struct x86_emulate_ctxt *ctxt,
768 struct x86_emulate_ops *ops)
769 {
770 unsigned long cr0;
771 int rc;
773 if ( ops->read_cr == NULL )
774 return 0;
776 rc = ops->read_cr(0, &cr0, ctxt);
777 return (!rc && !(cr0 & CR0_PE));
778 }
780 static int
781 realmode_load_seg(
782 enum x86_segment seg,
783 uint16_t sel,
784 struct x86_emulate_ctxt *ctxt,
785 struct x86_emulate_ops *ops)
786 {
787 struct segment_register reg;
788 int rc;
790 if ( (rc = ops->read_segment(seg, &reg, ctxt)) != 0 )
791 return rc;
793 reg.sel = sel;
794 reg.base = (uint32_t)sel << 4;
796 return ops->write_segment(seg, &reg, ctxt);
797 }
799 static int
800 protmode_load_seg(
801 enum x86_segment seg,
802 uint16_t sel,
803 struct x86_emulate_ctxt *ctxt,
804 struct x86_emulate_ops *ops)
805 {
806 struct segment_register desctab, cs, segr;
807 struct { uint32_t a, b; } desc;
808 unsigned long val;
809 uint8_t dpl, rpl, cpl;
810 int rc, fault_type = EXC_TS;
812 /* NULL selector? */
813 if ( (sel & 0xfffc) == 0 )
814 {
815 if ( (seg == x86_seg_cs) || (seg == x86_seg_ss) )
816 goto raise_exn;
817 memset(&segr, 0, sizeof(segr));
818 return ops->write_segment(seg, &segr, ctxt);
819 }
821 /* LDT descriptor must be in the GDT. */
822 if ( (seg == x86_seg_ldtr) && (sel & 4) )
823 goto raise_exn;
825 if ( (rc = ops->read_segment(x86_seg_cs, &cs, ctxt)) ||
826 (rc = ops->read_segment((sel & 4) ? x86_seg_ldtr : x86_seg_gdtr,
827 &desctab, ctxt)) )
828 return rc;
830 /* Check against descriptor table limit. */
831 if ( ((sel & 0xfff8) + 7) > desctab.limit )
832 goto raise_exn;
834 do {
835 if ( (rc = ops->read(x86_seg_none, desctab.base + (sel & 0xfff8),
836 &val, 4, ctxt)) )
837 return rc;
838 desc.a = val;
839 if ( (rc = ops->read(x86_seg_none, desctab.base + (sel & 0xfff8) + 4,
840 &val, 4, ctxt)) )
841 return rc;
842 desc.b = val;
844 /* Segment present in memory? */
845 if ( !(desc.b & (1u<<15)) )
846 {
847 fault_type = EXC_NP;
848 goto raise_exn;
849 }
851 /* LDT descriptor is a system segment. All others are code/data. */
852 if ( (desc.b & (1u<<12)) == ((seg == x86_seg_ldtr) << 12) )
853 goto raise_exn;
855 dpl = (desc.b >> 13) & 3;
856 rpl = sel & 3;
857 cpl = cs.sel & 3;
859 switch ( seg )
860 {
861 case x86_seg_cs:
862 /* Code segment? */
863 if ( !(desc.b & (1u<<11)) )
864 goto raise_exn;
865 /* Non-conforming segment: check DPL against RPL. */
866 if ( ((desc.b & (6u<<9)) != 6) && (dpl != rpl) )
867 goto raise_exn;
868 break;
869 case x86_seg_ss:
870 /* Writable data segment? */
871 if ( (desc.b & (5u<<9)) != (1u<<9) )
872 goto raise_exn;
873 if ( (dpl != cpl) || (dpl != rpl) )
874 goto raise_exn;
875 break;
876 case x86_seg_ldtr:
877 /* LDT system segment? */
878 if ( (desc.b & (15u<<8)) != (2u<<8) )
879 goto raise_exn;
880 goto skip_accessed_flag;
881 default:
882 /* Readable code or data segment? */
883 if ( (desc.b & (5u<<9)) == (4u<<9) )
884 goto raise_exn;
885 /* Non-conforming segment: check DPL against RPL and CPL. */
886 if ( ((desc.b & (6u<<9)) != 6) && ((dpl < cpl) || (dpl < rpl)) )
887 goto raise_exn;
888 break;
889 }
891 /* Ensure Accessed flag is set. */
892 rc = ((desc.b & 0x100) ? X86EMUL_OKAY :
893 ops->cmpxchg(
894 x86_seg_none, desctab.base + (sel & 0xfff8) + 4, desc.b,
895 desc.b | 0x100, 4, ctxt));
896 } while ( rc == X86EMUL_CMPXCHG_FAILED );
898 if ( rc )
899 return rc;
901 /* Force the Accessed flag in our local copy. */
902 desc.b |= 0x100;
904 skip_accessed_flag:
905 segr.base = (((desc.b << 0) & 0xff000000u) |
906 ((desc.b << 16) & 0x00ff0000u) |
907 ((desc.a >> 16) & 0x0000ffffu));
908 segr.attr.bytes = (((desc.b >> 8) & 0x00ffu) |
909 ((desc.b >> 12) & 0x0f00u));
910 segr.limit = (desc.b & 0x000f0000u) | (desc.a & 0x0000ffffu);
911 if ( segr.attr.fields.g )
912 segr.limit = (segr.limit << 12) | 0xfffu;
913 segr.sel = sel;
914 return ops->write_segment(seg, &segr, ctxt);
916 raise_exn:
917 if ( ops->inject_hw_exception == NULL )
918 return X86EMUL_UNHANDLEABLE;
919 if ( (rc = ops->inject_hw_exception(fault_type, sel & 0xfffc, ctxt)) )
920 return rc;
921 return X86EMUL_EXCEPTION;
922 }
924 static int
925 load_seg(
926 enum x86_segment seg,
927 uint16_t sel,
928 struct x86_emulate_ctxt *ctxt,
929 struct x86_emulate_ops *ops)
930 {
931 if ( (ops->read_segment == NULL) ||
932 (ops->write_segment == NULL) )
933 return X86EMUL_UNHANDLEABLE;
935 if ( in_realmode(ctxt, ops) )
936 return realmode_load_seg(seg, sel, ctxt, ops);
938 return protmode_load_seg(seg, sel, ctxt, ops);
939 }
941 void *
942 decode_register(
943 uint8_t modrm_reg, struct cpu_user_regs *regs, int highbyte_regs)
944 {
945 void *p;
947 switch ( modrm_reg )
948 {
949 case 0: p = &regs->eax; break;
950 case 1: p = &regs->ecx; break;
951 case 2: p = &regs->edx; break;
952 case 3: p = &regs->ebx; break;
953 case 4: p = (highbyte_regs ?
954 ((unsigned char *)&regs->eax + 1) :
955 (unsigned char *)&regs->esp); break;
956 case 5: p = (highbyte_regs ?
957 ((unsigned char *)&regs->ecx + 1) :
958 (unsigned char *)&regs->ebp); break;
959 case 6: p = (highbyte_regs ?
960 ((unsigned char *)&regs->edx + 1) :
961 (unsigned char *)&regs->esi); break;
962 case 7: p = (highbyte_regs ?
963 ((unsigned char *)&regs->ebx + 1) :
964 (unsigned char *)&regs->edi); break;
965 #if defined(__x86_64__)
966 case 8: p = &regs->r8; break;
967 case 9: p = &regs->r9; break;
968 case 10: p = &regs->r10; break;
969 case 11: p = &regs->r11; break;
970 case 12: p = &regs->r12; break;
971 case 13: p = &regs->r13; break;
972 case 14: p = &regs->r14; break;
973 case 15: p = &regs->r15; break;
974 #endif
975 default: p = NULL; break;
976 }
978 return p;
979 }
981 #define decode_segment_failed x86_seg_tr
982 enum x86_segment
983 decode_segment(
984 uint8_t modrm_reg)
985 {
986 switch ( modrm_reg )
987 {
988 case 0: return x86_seg_es;
989 case 1: return x86_seg_cs;
990 case 2: return x86_seg_ss;
991 case 3: return x86_seg_ds;
992 case 4: return x86_seg_fs;
993 case 5: return x86_seg_gs;
994 default: break;
995 }
996 return decode_segment_failed;
997 }
999 int
1000 x86_emulate(
1001 struct x86_emulate_ctxt *ctxt,
1002 struct x86_emulate_ops *ops)
1004 /* Shadow copy of register state. Committed on successful emulation. */
1005 struct cpu_user_regs _regs = *ctxt->regs;
1007 uint8_t b, d, sib, sib_index, sib_base, twobyte = 0, rex_prefix = 0;
1008 uint8_t modrm = 0, modrm_mod = 0, modrm_reg = 0, modrm_rm = 0;
1009 unsigned int op_bytes, def_op_bytes, ad_bytes, def_ad_bytes;
1010 #define REPE_PREFIX 1
1011 #define REPNE_PREFIX 2
1012 unsigned int lock_prefix = 0, rep_prefix = 0;
1013 int override_seg = -1, rc = X86EMUL_OKAY;
1014 struct operand src, dst;
1016 /* Data operand effective address (usually computed from ModRM). */
1017 struct operand ea;
1019 /* Default is a memory operand relative to segment DS. */
1020 ea.type = OP_MEM;
1021 ea.mem.seg = x86_seg_ds;
1022 ea.mem.off = 0;
1024 op_bytes = def_op_bytes = ad_bytes = def_ad_bytes = ctxt->addr_size/8;
1025 if ( op_bytes == 8 )
1027 op_bytes = def_op_bytes = 4;
1028 #ifndef __x86_64__
1029 return X86EMUL_UNHANDLEABLE;
1030 #endif
1033 /* Prefix bytes. */
1034 for ( ; ; )
1036 switch ( b = insn_fetch_type(uint8_t) )
1038 case 0x66: /* operand-size override */
1039 op_bytes = def_op_bytes ^ 6;
1040 break;
1041 case 0x67: /* address-size override */
1042 ad_bytes = def_ad_bytes ^ (mode_64bit() ? 12 : 6);
1043 break;
1044 case 0x2e: /* CS override */
1045 override_seg = x86_seg_cs;
1046 break;
1047 case 0x3e: /* DS override */
1048 override_seg = x86_seg_ds;
1049 break;
1050 case 0x26: /* ES override */
1051 override_seg = x86_seg_es;
1052 break;
1053 case 0x64: /* FS override */
1054 override_seg = x86_seg_fs;
1055 break;
1056 case 0x65: /* GS override */
1057 override_seg = x86_seg_gs;
1058 break;
1059 case 0x36: /* SS override */
1060 override_seg = x86_seg_ss;
1061 break;
1062 case 0xf0: /* LOCK */
1063 lock_prefix = 1;
1064 break;
1065 case 0xf2: /* REPNE/REPNZ */
1066 rep_prefix = REPNE_PREFIX;
1067 break;
1068 case 0xf3: /* REP/REPE/REPZ */
1069 rep_prefix = REPE_PREFIX;
1070 break;
1071 case 0x40 ... 0x4f: /* REX */
1072 if ( !mode_64bit() )
1073 goto done_prefixes;
1074 rex_prefix = b;
1075 continue;
1076 default:
1077 goto done_prefixes;
1080 /* Any legacy prefix after a REX prefix nullifies its effect. */
1081 rex_prefix = 0;
1083 done_prefixes:
1085 if ( rex_prefix & 8 ) /* REX.W */
1086 op_bytes = 8;
1088 /* Opcode byte(s). */
1089 d = opcode_table[b];
1090 if ( d == 0 )
1092 /* Two-byte opcode? */
1093 if ( b == 0x0f )
1095 twobyte = 1;
1096 b = insn_fetch_type(uint8_t);
1097 d = twobyte_table[b];
1100 /* Unrecognised? */
1101 if ( d == 0 )
1102 goto cannot_emulate;
1105 /* Lock prefix is allowed only on RMW instructions. */
1106 generate_exception_if((d & Mov) && lock_prefix, EXC_GP);
1108 /* ModRM and SIB bytes. */
1109 if ( d & ModRM )
1111 modrm = insn_fetch_type(uint8_t);
1112 modrm_mod = (modrm & 0xc0) >> 6;
1113 modrm_reg = ((rex_prefix & 4) << 1) | ((modrm & 0x38) >> 3);
1114 modrm_rm = modrm & 0x07;
1116 if ( modrm_mod == 3 )
1118 modrm_rm |= (rex_prefix & 1) << 3;
1119 ea.type = OP_REG;
1120 ea.reg = decode_register(
1121 modrm_rm, &_regs, (d & ByteOp) && (rex_prefix == 0));
1123 else if ( ad_bytes == 2 )
1125 /* 16-bit ModR/M decode. */
1126 switch ( modrm_rm )
1128 case 0:
1129 ea.mem.off = _regs.ebx + _regs.esi;
1130 break;
1131 case 1:
1132 ea.mem.off = _regs.ebx + _regs.edi;
1133 break;
1134 case 2:
1135 ea.mem.seg = x86_seg_ss;
1136 ea.mem.off = _regs.ebp + _regs.esi;
1137 break;
1138 case 3:
1139 ea.mem.seg = x86_seg_ss;
1140 ea.mem.off = _regs.ebp + _regs.edi;
1141 break;
1142 case 4:
1143 ea.mem.off = _regs.esi;
1144 break;
1145 case 5:
1146 ea.mem.off = _regs.edi;
1147 break;
1148 case 6:
1149 if ( modrm_mod == 0 )
1150 break;
1151 ea.mem.seg = x86_seg_ss;
1152 ea.mem.off = _regs.ebp;
1153 break;
1154 case 7:
1155 ea.mem.off = _regs.ebx;
1156 break;
1158 switch ( modrm_mod )
1160 case 0:
1161 if ( modrm_rm == 6 )
1162 ea.mem.off = insn_fetch_type(int16_t);
1163 break;
1164 case 1:
1165 ea.mem.off += insn_fetch_type(int8_t);
1166 break;
1167 case 2:
1168 ea.mem.off += insn_fetch_type(int16_t);
1169 break;
1171 ea.mem.off = truncate_ea(ea.mem.off);
1173 else
1175 /* 32/64-bit ModR/M decode. */
1176 if ( modrm_rm == 4 )
1178 sib = insn_fetch_type(uint8_t);
1179 sib_index = ((sib >> 3) & 7) | ((rex_prefix << 2) & 8);
1180 sib_base = (sib & 7) | ((rex_prefix << 3) & 8);
1181 if ( sib_index != 4 )
1182 ea.mem.off = *(long*)decode_register(sib_index, &_regs, 0);
1183 ea.mem.off <<= (sib >> 6) & 3;
1184 if ( (modrm_mod == 0) && ((sib_base & 7) == 5) )
1185 ea.mem.off += insn_fetch_type(int32_t);
1186 else if ( sib_base == 4 )
1188 ea.mem.seg = x86_seg_ss;
1189 ea.mem.off += _regs.esp;
1190 if ( !twobyte && (b == 0x8f) )
1191 /* POP <rm> computes its EA post increment. */
1192 ea.mem.off += ((mode_64bit() && (op_bytes == 4))
1193 ? 8 : op_bytes);
1195 else if ( sib_base == 5 )
1197 ea.mem.seg = x86_seg_ss;
1198 ea.mem.off += _regs.ebp;
1200 else
1201 ea.mem.off += *(long*)decode_register(sib_base, &_regs, 0);
1203 else
1205 modrm_rm |= (rex_prefix & 1) << 3;
1206 ea.mem.off = *(long *)decode_register(modrm_rm, &_regs, 0);
1207 if ( (modrm_rm == 5) && (modrm_mod != 0) )
1208 ea.mem.seg = x86_seg_ss;
1210 switch ( modrm_mod )
1212 case 0:
1213 if ( (modrm_rm & 7) != 5 )
1214 break;
1215 ea.mem.off = insn_fetch_type(int32_t);
1216 if ( !mode_64bit() )
1217 break;
1218 /* Relative to RIP of next instruction. Argh! */
1219 ea.mem.off += _regs.eip;
1220 if ( (d & SrcMask) == SrcImm )
1221 ea.mem.off += (d & ByteOp) ? 1 :
1222 ((op_bytes == 8) ? 4 : op_bytes);
1223 else if ( (d & SrcMask) == SrcImmByte )
1224 ea.mem.off += 1;
1225 else if ( !twobyte && ((b & 0xfe) == 0xf6) &&
1226 ((modrm_reg & 7) <= 1) )
1227 /* Special case in Grp3: test has immediate operand. */
1228 ea.mem.off += (d & ByteOp) ? 1
1229 : ((op_bytes == 8) ? 4 : op_bytes);
1230 else if ( twobyte && ((b & 0xf7) == 0xa4) )
1231 /* SHLD/SHRD with immediate byte third operand. */
1232 ea.mem.off++;
1233 break;
1234 case 1:
1235 ea.mem.off += insn_fetch_type(int8_t);
1236 break;
1237 case 2:
1238 ea.mem.off += insn_fetch_type(int32_t);
1239 break;
1241 ea.mem.off = truncate_ea(ea.mem.off);
1245 if ( override_seg != -1 )
1246 ea.mem.seg = override_seg;
1248 /* Special instructions do their own operand decoding. */
1249 if ( (d & DstMask) == ImplicitOps )
1250 goto special_insn;
1252 /* Decode and fetch the source operand: register, memory or immediate. */
1253 switch ( d & SrcMask )
1255 case SrcNone:
1256 break;
1257 case SrcReg:
1258 src.type = OP_REG;
1259 if ( d & ByteOp )
1261 src.reg = decode_register(modrm_reg, &_regs, (rex_prefix == 0));
1262 src.val = *(uint8_t *)src.reg;
1263 src.bytes = 1;
1265 else
1267 src.reg = decode_register(modrm_reg, &_regs, 0);
1268 switch ( (src.bytes = op_bytes) )
1270 case 2: src.val = *(uint16_t *)src.reg; break;
1271 case 4: src.val = *(uint32_t *)src.reg; break;
1272 case 8: src.val = *(uint64_t *)src.reg; break;
1275 break;
1276 case SrcMem16:
1277 ea.bytes = 2;
1278 goto srcmem_common;
1279 case SrcMem:
1280 ea.bytes = (d & ByteOp) ? 1 : op_bytes;
1281 srcmem_common:
1282 src = ea;
1283 if ( src.type == OP_REG )
1285 switch ( src.bytes )
1287 case 1: src.val = *(uint8_t *)src.reg; break;
1288 case 2: src.val = *(uint16_t *)src.reg; break;
1289 case 4: src.val = *(uint32_t *)src.reg; break;
1290 case 8: src.val = *(uint64_t *)src.reg; break;
1293 else if ( (rc = ops->read(src.mem.seg, src.mem.off,
1294 &src.val, src.bytes, ctxt)) )
1295 goto done;
1296 break;
1297 case SrcImm:
1298 src.type = OP_IMM;
1299 src.bytes = (d & ByteOp) ? 1 : op_bytes;
1300 if ( src.bytes == 8 ) src.bytes = 4;
1301 /* NB. Immediates are sign-extended as necessary. */
1302 switch ( src.bytes )
1304 case 1: src.val = insn_fetch_type(int8_t); break;
1305 case 2: src.val = insn_fetch_type(int16_t); break;
1306 case 4: src.val = insn_fetch_type(int32_t); break;
1308 break;
1309 case SrcImmByte:
1310 src.type = OP_IMM;
1311 src.bytes = 1;
1312 src.val = insn_fetch_type(int8_t);
1313 break;
1316 /* Decode and fetch the destination operand: register or memory. */
1317 switch ( d & DstMask )
1319 case DstReg:
1320 dst.type = OP_REG;
1321 if ( d & ByteOp )
1323 dst.reg = decode_register(modrm_reg, &_regs, (rex_prefix == 0));
1324 dst.val = *(uint8_t *)dst.reg;
1325 dst.bytes = 1;
1327 else
1329 dst.reg = decode_register(modrm_reg, &_regs, 0);
1330 switch ( (dst.bytes = op_bytes) )
1332 case 2: dst.val = *(uint16_t *)dst.reg; break;
1333 case 4: dst.val = *(uint32_t *)dst.reg; break;
1334 case 8: dst.val = *(uint64_t *)dst.reg; break;
1337 break;
1338 case DstBitBase:
1339 if ( ((d & SrcMask) == SrcImmByte) || (ea.type == OP_REG) )
1341 src.val &= (op_bytes << 3) - 1;
1343 else
1345 /*
1346 * EA += BitOffset DIV op_bytes*8
1347 * BitOffset = BitOffset MOD op_bytes*8
1348 * DIV truncates towards negative infinity.
1349 * MOD always produces a positive result.
1350 */
1351 if ( op_bytes == 2 )
1352 src.val = (int16_t)src.val;
1353 else if ( op_bytes == 4 )
1354 src.val = (int32_t)src.val;
1355 if ( (long)src.val < 0 )
1357 unsigned long byte_offset;
1358 byte_offset = op_bytes + (((-src.val-1) >> 3) & ~(op_bytes-1));
1359 ea.mem.off -= byte_offset;
1360 src.val = (byte_offset << 3) + src.val;
1362 else
1364 ea.mem.off += (src.val >> 3) & ~(op_bytes - 1);
1365 src.val &= (op_bytes << 3) - 1;
1368 /* Becomes a normal DstMem operation from here on. */
1369 d = (d & ~DstMask) | DstMem;
1370 case DstMem:
1371 ea.bytes = (d & ByteOp) ? 1 : op_bytes;
1372 dst = ea;
1373 if ( dst.type == OP_REG )
1375 switch ( dst.bytes )
1377 case 1: dst.val = *(uint8_t *)dst.reg; break;
1378 case 2: dst.val = *(uint16_t *)dst.reg; break;
1379 case 4: dst.val = *(uint32_t *)dst.reg; break;
1380 case 8: dst.val = *(uint64_t *)dst.reg; break;
1383 else if ( !(d & Mov) ) /* optimisation - avoid slow emulated read */
1385 if ( (rc = ops->read(dst.mem.seg, dst.mem.off,
1386 &dst.val, dst.bytes, ctxt)) )
1387 goto done;
1388 dst.orig_val = dst.val;
1390 break;
1393 /* LOCK prefix allowed only on instructions with memory destination. */
1394 generate_exception_if(lock_prefix && (dst.type != OP_MEM), EXC_GP);
1396 if ( twobyte )
1397 goto twobyte_insn;
1399 switch ( b )
1401 case 0x04 ... 0x05: /* add imm,%%eax */
1402 dst.reg = (unsigned long *)&_regs.eax;
1403 dst.val = _regs.eax;
1404 case 0x00 ... 0x03: add: /* add */
1405 emulate_2op_SrcV("add", src, dst, _regs.eflags);
1406 break;
1408 case 0x0c ... 0x0d: /* or imm,%%eax */
1409 dst.reg = (unsigned long *)&_regs.eax;
1410 dst.val = _regs.eax;
1411 case 0x08 ... 0x0b: or: /* or */
1412 emulate_2op_SrcV("or", src, dst, _regs.eflags);
1413 break;
1415 case 0x14 ... 0x15: /* adc imm,%%eax */
1416 dst.reg = (unsigned long *)&_regs.eax;
1417 dst.val = _regs.eax;
1418 case 0x10 ... 0x13: adc: /* adc */
1419 emulate_2op_SrcV("adc", src, dst, _regs.eflags);
1420 break;
1422 case 0x1c ... 0x1d: /* sbb imm,%%eax */
1423 dst.reg = (unsigned long *)&_regs.eax;
1424 dst.val = _regs.eax;
1425 case 0x18 ... 0x1b: sbb: /* sbb */
1426 emulate_2op_SrcV("sbb", src, dst, _regs.eflags);
1427 break;
1429 case 0x24 ... 0x25: /* and imm,%%eax */
1430 dst.reg = (unsigned long *)&_regs.eax;
1431 dst.val = _regs.eax;
1432 case 0x20 ... 0x23: and: /* and */
1433 emulate_2op_SrcV("and", src, dst, _regs.eflags);
1434 break;
1436 case 0x2c ... 0x2d: /* sub imm,%%eax */
1437 dst.reg = (unsigned long *)&_regs.eax;
1438 dst.val = _regs.eax;
1439 case 0x28 ... 0x2b: sub: /* sub */
1440 emulate_2op_SrcV("sub", src, dst, _regs.eflags);
1441 break;
1443 case 0x34 ... 0x35: /* xor imm,%%eax */
1444 dst.reg = (unsigned long *)&_regs.eax;
1445 dst.val = _regs.eax;
1446 case 0x30 ... 0x33: xor: /* xor */
1447 emulate_2op_SrcV("xor", src, dst, _regs.eflags);
1448 break;
1450 case 0x3c ... 0x3d: /* cmp imm,%%eax */
1451 dst.reg = (unsigned long *)&_regs.eax;
1452 dst.val = _regs.eax;
1453 case 0x38 ... 0x3b: cmp: /* cmp */
1454 emulate_2op_SrcV("cmp", src, dst, _regs.eflags);
1455 break;
1457 case 0x62: /* bound */ {
1458 unsigned long src_val2;
1459 int lb, ub, idx;
1460 generate_exception_if(mode_64bit() || (src.type != OP_MEM), EXC_UD);
1461 if ( (rc = ops->read(src.mem.seg, src.mem.off + op_bytes,
1462 &src_val2, op_bytes, ctxt)) )
1463 goto done;
1464 ub = (op_bytes == 2) ? (int16_t)src_val2 : (int32_t)src_val2;
1465 lb = (op_bytes == 2) ? (int16_t)src.val : (int32_t)src.val;
1466 idx = (op_bytes == 2) ? (int16_t)dst.val : (int32_t)dst.val;
1467 generate_exception_if((idx < lb) || (idx > ub), EXC_BR);
1468 dst.type = OP_NONE;
1469 break;
1472 case 0x63: /* movsxd (x86/64) / arpl (x86/32) */
1473 if ( mode_64bit() )
1475 /* movsxd */
1476 if ( src.type == OP_REG )
1477 src.val = *(int32_t *)src.reg;
1478 else if ( (rc = ops->read(src.mem.seg, src.mem.off,
1479 &src.val, 4, ctxt)) )
1480 goto done;
1481 dst.val = (int32_t)src.val;
1483 else
1485 /* arpl */
1486 uint16_t src_val = dst.val;
1487 dst = src;
1488 _regs.eflags &= ~EFLG_ZF;
1489 _regs.eflags |= ((src_val & 3) > (dst.val & 3)) ? EFLG_ZF : 0;
1490 if ( _regs.eflags & EFLG_ZF )
1491 dst.val = (dst.val & ~3) | (src_val & 3);
1492 else
1493 dst.type = OP_NONE;
1494 generate_exception_if(in_realmode(ctxt, ops), EXC_UD);
1496 break;
1498 case 0x69: /* imul imm16/32 */
1499 case 0x6b: /* imul imm8 */ {
1500 unsigned long src1; /* ModR/M source operand */
1501 if ( ea.type == OP_REG )
1502 src1 = *ea.reg;
1503 else if ( (rc = ops->read(ea.mem.seg, ea.mem.off,
1504 &src1, op_bytes, ctxt)) )
1505 goto done;
1506 _regs.eflags &= ~(EFLG_OF|EFLG_CF);
1507 switch ( dst.bytes )
1509 case 2:
1510 dst.val = ((uint32_t)(int16_t)src.val *
1511 (uint32_t)(int16_t)src1);
1512 if ( (int16_t)dst.val != (uint32_t)dst.val )
1513 _regs.eflags |= EFLG_OF|EFLG_CF;
1514 break;
1515 #ifdef __x86_64__
1516 case 4:
1517 dst.val = ((uint64_t)(int32_t)src.val *
1518 (uint64_t)(int32_t)src1);
1519 if ( (int32_t)dst.val != dst.val )
1520 _regs.eflags |= EFLG_OF|EFLG_CF;
1521 break;
1522 #endif
1523 default: {
1524 unsigned long m[2] = { src.val, src1 };
1525 if ( imul_dbl(m) )
1526 _regs.eflags |= EFLG_OF|EFLG_CF;
1527 dst.val = m[0];
1528 break;
1531 break;
1534 case 0x82: /* Grp1 (x86/32 only) */
1535 generate_exception_if(mode_64bit(), EXC_UD);
1536 case 0x80: case 0x81: case 0x83: /* Grp1 */
1537 switch ( modrm_reg & 7 )
1539 case 0: goto add;
1540 case 1: goto or;
1541 case 2: goto adc;
1542 case 3: goto sbb;
1543 case 4: goto and;
1544 case 5: goto sub;
1545 case 6: goto xor;
1546 case 7: goto cmp;
1548 break;
1550 case 0xa8 ... 0xa9: /* test imm,%%eax */
1551 dst.reg = (unsigned long *)&_regs.eax;
1552 dst.val = _regs.eax;
1553 case 0x84 ... 0x85: test: /* test */
1554 emulate_2op_SrcV("test", src, dst, _regs.eflags);
1555 break;
1557 case 0x86 ... 0x87: xchg: /* xchg */
1558 /* Write back the register source. */
1559 switch ( dst.bytes )
1561 case 1: *(uint8_t *)src.reg = (uint8_t)dst.val; break;
1562 case 2: *(uint16_t *)src.reg = (uint16_t)dst.val; break;
1563 case 4: *src.reg = (uint32_t)dst.val; break; /* 64b reg: zero-extend */
1564 case 8: *src.reg = dst.val; break;
1566 /* Write back the memory destination with implicit LOCK prefix. */
1567 dst.val = src.val;
1568 lock_prefix = 1;
1569 break;
1571 case 0xc6 ... 0xc7: /* mov (sole member of Grp11) */
1572 generate_exception_if((modrm_reg & 7) != 0, EXC_UD);
1573 case 0x88 ... 0x8b: /* mov */
1574 dst.val = src.val;
1575 break;
1577 case 0x8c: /* mov Sreg,r/m */ {
1578 struct segment_register reg;
1579 enum x86_segment seg = decode_segment(modrm_reg);
1580 generate_exception_if(seg == decode_segment_failed, EXC_UD);
1581 fail_if(ops->read_segment == NULL);
1582 if ( (rc = ops->read_segment(seg, &reg, ctxt)) != 0 )
1583 goto done;
1584 dst.val = reg.sel;
1585 if ( dst.type == OP_MEM )
1586 dst.bytes = 2;
1587 break;
1590 case 0x8e: /* mov r/m,Sreg */ {
1591 enum x86_segment seg = decode_segment(modrm_reg);
1592 generate_exception_if(seg == decode_segment_failed, EXC_UD);
1593 if ( (rc = load_seg(seg, (uint16_t)src.val, ctxt, ops)) != 0 )
1594 goto done;
1595 dst.type = OP_NONE;
1596 break;
1599 case 0x8d: /* lea */
1600 dst.val = ea.mem.off;
1601 break;
1603 case 0x8f: /* pop (sole member of Grp1a) */
1604 generate_exception_if((modrm_reg & 7) != 0, EXC_UD);
1605 /* 64-bit mode: POP defaults to a 64-bit operand. */
1606 if ( mode_64bit() && (dst.bytes == 4) )
1607 dst.bytes = 8;
1608 if ( (rc = ops->read(x86_seg_ss, sp_post_inc(dst.bytes),
1609 &dst.val, dst.bytes, ctxt)) != 0 )
1610 goto done;
1611 break;
1613 case 0xb0 ... 0xb7: /* mov imm8,r8 */
1614 dst.reg = decode_register(
1615 (b & 7) | ((rex_prefix & 1) << 3), &_regs, (rex_prefix == 0));
1616 dst.val = src.val;
1617 break;
1619 case 0xb8 ... 0xbf: /* mov imm{16,32,64},r{16,32,64} */
1620 if ( dst.bytes == 8 ) /* Fetch more bytes to obtain imm64 */
1621 src.val = ((uint32_t)src.val |
1622 ((uint64_t)insn_fetch_type(uint32_t) << 32));
1623 dst.reg = decode_register(
1624 (b & 7) | ((rex_prefix & 1) << 3), &_regs, 0);
1625 dst.val = src.val;
1626 break;
1628 case 0xc0 ... 0xc1: grp2: /* Grp2 */
1629 switch ( modrm_reg & 7 )
1631 case 0: /* rol */
1632 emulate_2op_SrcB("rol", src, dst, _regs.eflags);
1633 break;
1634 case 1: /* ror */
1635 emulate_2op_SrcB("ror", src, dst, _regs.eflags);
1636 break;
1637 case 2: /* rcl */
1638 emulate_2op_SrcB("rcl", src, dst, _regs.eflags);
1639 break;
1640 case 3: /* rcr */
1641 emulate_2op_SrcB("rcr", src, dst, _regs.eflags);
1642 break;
1643 case 4: /* sal/shl */
1644 case 6: /* sal/shl */
1645 emulate_2op_SrcB("sal", src, dst, _regs.eflags);
1646 break;
1647 case 5: /* shr */
1648 emulate_2op_SrcB("shr", src, dst, _regs.eflags);
1649 break;
1650 case 7: /* sar */
1651 emulate_2op_SrcB("sar", src, dst, _regs.eflags);
1652 break;
1654 break;
1656 case 0xc4: /* les */ {
1657 unsigned long sel;
1658 dst.val = x86_seg_es;
1659 les: /* dst.val identifies the segment */
1660 generate_exception_if(src.type != OP_MEM, EXC_UD);
1661 if ( (rc = ops->read(src.mem.seg, src.mem.off + src.bytes,
1662 &sel, 2, ctxt)) != 0 )
1663 goto done;
1664 if ( (rc = load_seg(dst.val, (uint16_t)sel, ctxt, ops)) != 0 )
1665 goto done;
1666 dst.val = src.val;
1667 break;
1670 case 0xc5: /* lds */
1671 dst.val = x86_seg_ds;
1672 goto les;
1674 case 0xd0 ... 0xd1: /* Grp2 */
1675 src.val = 1;
1676 goto grp2;
1678 case 0xd2 ... 0xd3: /* Grp2 */
1679 src.val = _regs.ecx;
1680 goto grp2;
1682 case 0xf6 ... 0xf7: /* Grp3 */
1683 switch ( modrm_reg & 7 )
1685 case 0 ... 1: /* test */
1686 /* Special case in Grp3: test has an immediate source operand. */
1687 src.type = OP_IMM;
1688 src.bytes = (d & ByteOp) ? 1 : op_bytes;
1689 if ( src.bytes == 8 ) src.bytes = 4;
1690 switch ( src.bytes )
1692 case 1: src.val = insn_fetch_type(int8_t); break;
1693 case 2: src.val = insn_fetch_type(int16_t); break;
1694 case 4: src.val = insn_fetch_type(int32_t); break;
1696 goto test;
1697 case 2: /* not */
1698 dst.val = ~dst.val;
1699 break;
1700 case 3: /* neg */
1701 emulate_1op("neg", dst, _regs.eflags);
1702 break;
1703 case 4: /* mul */
1704 src = dst;
1705 dst.type = OP_REG;
1706 dst.reg = (unsigned long *)&_regs.eax;
1707 dst.val = *dst.reg;
1708 _regs.eflags &= ~(EFLG_OF|EFLG_CF);
1709 switch ( src.bytes )
1711 case 1:
1712 dst.val *= src.val;
1713 if ( (uint8_t)dst.val != (uint16_t)dst.val )
1714 _regs.eflags |= EFLG_OF|EFLG_CF;
1715 break;
1716 case 2:
1717 dst.val *= src.val;
1718 if ( (uint16_t)dst.val != (uint32_t)dst.val )
1719 _regs.eflags |= EFLG_OF|EFLG_CF;
1720 *(uint16_t *)&_regs.edx = dst.val >> 16;
1721 break;
1722 #ifdef __x86_64__
1723 case 4:
1724 dst.val *= src.val;
1725 if ( (uint32_t)dst.val != dst.val )
1726 _regs.eflags |= EFLG_OF|EFLG_CF;
1727 _regs.edx = (uint32_t)(dst.val >> 32);
1728 break;
1729 #endif
1730 default: {
1731 unsigned long m[2] = { src.val, dst.val };
1732 if ( mul_dbl(m) )
1733 _regs.eflags |= EFLG_OF|EFLG_CF;
1734 _regs.edx = m[1];
1735 dst.val = m[0];
1736 break;
1739 break;
1740 case 5: /* imul */
1741 src = dst;
1742 dst.type = OP_REG;
1743 dst.reg = (unsigned long *)&_regs.eax;
1744 dst.val = *dst.reg;
1745 _regs.eflags &= ~(EFLG_OF|EFLG_CF);
1746 switch ( src.bytes )
1748 case 1:
1749 dst.val = ((uint16_t)(int8_t)src.val *
1750 (uint16_t)(int8_t)dst.val);
1751 if ( (int8_t)dst.val != (uint16_t)dst.val )
1752 _regs.eflags |= EFLG_OF|EFLG_CF;
1753 break;
1754 case 2:
1755 dst.val = ((uint32_t)(int16_t)src.val *
1756 (uint32_t)(int16_t)dst.val);
1757 if ( (int16_t)dst.val != (uint32_t)dst.val )
1758 _regs.eflags |= EFLG_OF|EFLG_CF;
1759 *(uint16_t *)&_regs.edx = dst.val >> 16;
1760 break;
1761 #ifdef __x86_64__
1762 case 4:
1763 dst.val = ((uint64_t)(int32_t)src.val *
1764 (uint64_t)(int32_t)dst.val);
1765 if ( (int32_t)dst.val != dst.val )
1766 _regs.eflags |= EFLG_OF|EFLG_CF;
1767 _regs.edx = (uint32_t)(dst.val >> 32);
1768 break;
1769 #endif
1770 default: {
1771 unsigned long m[2] = { src.val, dst.val };
1772 if ( imul_dbl(m) )
1773 _regs.eflags |= EFLG_OF|EFLG_CF;
1774 _regs.edx = m[1];
1775 dst.val = m[0];
1776 break;
1779 break;
1780 case 6: /* div */ {
1781 unsigned long u[2], v;
1782 src = dst;
1783 dst.type = OP_REG;
1784 dst.reg = (unsigned long *)&_regs.eax;
1785 switch ( src.bytes )
1787 case 1:
1788 u[0] = (uint16_t)_regs.eax;
1789 u[1] = 0;
1790 v = (uint8_t)src.val;
1791 generate_exception_if(
1792 div_dbl(u, v) || ((uint8_t)u[0] != (uint16_t)u[0]),
1793 EXC_DE);
1794 dst.val = (uint8_t)u[0];
1795 ((uint8_t *)&_regs.eax)[1] = u[1];
1796 break;
1797 case 2:
1798 u[0] = ((uint32_t)_regs.edx << 16) | (uint16_t)_regs.eax;
1799 u[1] = 0;
1800 v = (uint16_t)src.val;
1801 generate_exception_if(
1802 div_dbl(u, v) || ((uint16_t)u[0] != (uint32_t)u[0]),
1803 EXC_DE);
1804 dst.val = (uint16_t)u[0];
1805 *(uint16_t *)&_regs.edx = u[1];
1806 break;
1807 #ifdef __x86_64__
1808 case 4:
1809 u[0] = (_regs.edx << 32) | (uint32_t)_regs.eax;
1810 u[1] = 0;
1811 v = (uint32_t)src.val;
1812 generate_exception_if(
1813 div_dbl(u, v) || ((uint32_t)u[0] != u[0]),
1814 EXC_DE);
1815 dst.val = (uint32_t)u[0];
1816 _regs.edx = (uint32_t)u[1];
1817 break;
1818 #endif
1819 default:
1820 u[0] = _regs.eax;
1821 u[1] = _regs.edx;
1822 v = src.val;
1823 generate_exception_if(div_dbl(u, v), EXC_DE);
1824 dst.val = u[0];
1825 _regs.edx = u[1];
1826 break;
1828 break;
1830 case 7: /* idiv */ {
1831 unsigned long u[2], v;
1832 src = dst;
1833 dst.type = OP_REG;
1834 dst.reg = (unsigned long *)&_regs.eax;
1835 switch ( src.bytes )
1837 case 1:
1838 u[0] = (int16_t)_regs.eax;
1839 u[1] = ((long)u[0] < 0) ? ~0UL : 0UL;
1840 v = (int8_t)src.val;
1841 generate_exception_if(
1842 idiv_dbl(u, v) || ((int8_t)u[0] != (int16_t)u[0]),
1843 EXC_DE);
1844 dst.val = (int8_t)u[0];
1845 ((int8_t *)&_regs.eax)[1] = u[1];
1846 break;
1847 case 2:
1848 u[0] = (int32_t)((_regs.edx << 16) | (uint16_t)_regs.eax);
1849 u[1] = ((long)u[0] < 0) ? ~0UL : 0UL;
1850 v = (int16_t)src.val;
1851 generate_exception_if(
1852 idiv_dbl(u, v) || ((int16_t)u[0] != (int32_t)u[0]),
1853 EXC_DE);
1854 dst.val = (int16_t)u[0];
1855 *(int16_t *)&_regs.edx = u[1];
1856 break;
1857 #ifdef __x86_64__
1858 case 4:
1859 u[0] = (_regs.edx << 32) | (uint32_t)_regs.eax;
1860 u[1] = ((long)u[0] < 0) ? ~0UL : 0UL;
1861 v = (int32_t)src.val;
1862 generate_exception_if(
1863 idiv_dbl(u, v) || ((int32_t)u[0] != u[0]),
1864 EXC_DE);
1865 dst.val = (int32_t)u[0];
1866 _regs.edx = (uint32_t)u[1];
1867 break;
1868 #endif
1869 default:
1870 u[0] = _regs.eax;
1871 u[1] = _regs.edx;
1872 v = src.val;
1873 generate_exception_if(idiv_dbl(u, v), EXC_DE);
1874 dst.val = u[0];
1875 _regs.edx = u[1];
1876 break;
1878 break;
1880 default:
1881 goto cannot_emulate;
1883 break;
1885 case 0xfe: /* Grp4 */
1886 generate_exception_if((modrm_reg & 7) >= 2, EXC_UD);
1887 case 0xff: /* Grp5 */
1888 switch ( modrm_reg & 7 )
1890 case 0: /* inc */
1891 emulate_1op("inc", dst, _regs.eflags);
1892 break;
1893 case 1: /* dec */
1894 emulate_1op("dec", dst, _regs.eflags);
1895 break;
1896 case 2: /* call (near) */
1897 case 4: /* jmp (near) */
1898 if ( (dst.bytes != 8) && mode_64bit() )
1900 dst.bytes = op_bytes = 8;
1901 if ( dst.type == OP_REG )
1902 dst.val = *dst.reg;
1903 else if ( (rc = ops->read(dst.mem.seg, dst.mem.off,
1904 &dst.val, 8, ctxt)) != 0 )
1905 goto done;
1907 src.val = _regs.eip;
1908 _regs.eip = dst.val;
1909 if ( (modrm_reg & 7) == 2 )
1910 goto push; /* call */
1911 dst.type = OP_NONE;
1912 break;
1913 case 3: /* call (far, absolute indirect) */
1914 case 5: /* jmp (far, absolute indirect) */ {
1915 unsigned long sel;
1917 generate_exception_if(dst.type != OP_MEM, EXC_UD);
1919 if ( (rc = ops->read(dst.mem.seg, dst.mem.off+dst.bytes,
1920 &sel, 2, ctxt)) )
1921 goto done;
1923 if ( (modrm_reg & 7) == 3 ) /* call */
1925 struct segment_register reg;
1926 fail_if(ops->read_segment == NULL);
1927 if ( (rc = ops->read_segment(x86_seg_cs, &reg, ctxt)) ||
1928 (rc = ops->write(x86_seg_ss, sp_pre_dec(op_bytes),
1929 reg.sel, op_bytes, ctxt)) ||
1930 (rc = ops->write(x86_seg_ss, sp_pre_dec(op_bytes),
1931 _regs.eip, op_bytes, ctxt)) )
1932 goto done;
1935 if ( (rc = load_seg(x86_seg_cs, sel, ctxt, ops)) != 0 )
1936 goto done;
1937 _regs.eip = dst.val;
1939 dst.type = OP_NONE;
1940 break;
1942 case 6: /* push */
1943 /* 64-bit mode: PUSH defaults to a 64-bit operand. */
1944 if ( mode_64bit() && (dst.bytes == 4) )
1946 dst.bytes = 8;
1947 if ( dst.type == OP_REG )
1948 dst.val = *dst.reg;
1949 else if ( (rc = ops->read(dst.mem.seg, dst.mem.off,
1950 &dst.val, 8, ctxt)) != 0 )
1951 goto done;
1953 if ( (rc = ops->write(x86_seg_ss, sp_pre_dec(dst.bytes),
1954 dst.val, dst.bytes, ctxt)) != 0 )
1955 goto done;
1956 dst.type = OP_NONE;
1957 break;
1958 case 7:
1959 generate_exception_if(1, EXC_UD);
1960 default:
1961 goto cannot_emulate;
1963 break;
1966 writeback:
1967 switch ( dst.type )
1969 case OP_REG:
1970 /* The 4-byte case *is* correct: in 64-bit mode we zero-extend. */
1971 switch ( dst.bytes )
1973 case 1: *(uint8_t *)dst.reg = (uint8_t)dst.val; break;
1974 case 2: *(uint16_t *)dst.reg = (uint16_t)dst.val; break;
1975 case 4: *dst.reg = (uint32_t)dst.val; break; /* 64b: zero-ext */
1976 case 8: *dst.reg = dst.val; break;
1978 break;
1979 case OP_MEM:
1980 if ( !(d & Mov) && (dst.orig_val == dst.val) )
1981 /* nothing to do */;
1982 else if ( lock_prefix )
1983 rc = ops->cmpxchg(
1984 dst.mem.seg, dst.mem.off, dst.orig_val,
1985 dst.val, dst.bytes, ctxt);
1986 else
1987 rc = ops->write(
1988 dst.mem.seg, dst.mem.off, dst.val, dst.bytes, ctxt);
1989 if ( rc != 0 )
1990 goto done;
1991 default:
1992 break;
1995 /* Commit shadow register state. */
1996 _regs.eflags &= ~EFLG_RF;
1997 *ctxt->regs = _regs;
1999 if ( (_regs.eflags & EFLG_TF) &&
2000 (rc == X86EMUL_OKAY) &&
2001 (ops->inject_hw_exception != NULL) )
2002 rc = ops->inject_hw_exception(EXC_DB, 0, ctxt) ? : X86EMUL_EXCEPTION;
2004 done:
2005 return rc;
2007 special_insn:
2008 dst.type = OP_NONE;
2010 /*
2011 * The only implicit-operands instructions allowed a LOCK prefix are
2012 * CMPXCHG{8,16}B, MOV CRn, MOV DRn.
2013 */
2014 generate_exception_if(lock_prefix &&
2015 ((b < 0x20) || (b > 0x23)) && /* MOV CRn/DRn */
2016 (b != 0xc7), /* CMPXCHG{8,16}B */
2017 EXC_GP);
2019 if ( twobyte )
2020 goto twobyte_special_insn;
2022 switch ( b )
2024 case 0x06: /* push %%es */ {
2025 struct segment_register reg;
2026 src.val = x86_seg_es;
2027 push_seg:
2028 fail_if(ops->read_segment == NULL);
2029 if ( (rc = ops->read_segment(src.val, &reg, ctxt)) != 0 )
2030 return rc;
2031 /* 64-bit mode: PUSH defaults to a 64-bit operand. */
2032 if ( mode_64bit() && (op_bytes == 4) )
2033 op_bytes = 8;
2034 if ( (rc = ops->write(x86_seg_ss, sp_pre_dec(op_bytes),
2035 reg.sel, op_bytes, ctxt)) != 0 )
2036 goto done;
2037 break;
2040 case 0x07: /* pop %%es */
2041 src.val = x86_seg_es;
2042 pop_seg:
2043 fail_if(ops->write_segment == NULL);
2044 /* 64-bit mode: POP defaults to a 64-bit operand. */
2045 if ( mode_64bit() && (op_bytes == 4) )
2046 op_bytes = 8;
2047 if ( (rc = ops->read(x86_seg_ss, sp_post_inc(op_bytes),
2048 &dst.val, op_bytes, ctxt)) != 0 )
2049 goto done;
2050 if ( (rc = load_seg(src.val, (uint16_t)dst.val, ctxt, ops)) != 0 )
2051 return rc;
2052 break;
2054 case 0x0e: /* push %%cs */
2055 src.val = x86_seg_cs;
2056 goto push_seg;
2058 case 0x16: /* push %%ss */
2059 src.val = x86_seg_ss;
2060 goto push_seg;
2062 case 0x17: /* pop %%ss */
2063 src.val = x86_seg_ss;
2064 goto pop_seg;
2066 case 0x1e: /* push %%ds */
2067 src.val = x86_seg_ds;
2068 goto push_seg;
2070 case 0x1f: /* pop %%ds */
2071 src.val = x86_seg_ds;
2072 goto pop_seg;
2074 case 0x27: /* daa */ {
2075 uint8_t al = _regs.eax;
2076 unsigned long eflags = _regs.eflags;
2077 generate_exception_if(mode_64bit(), EXC_UD);
2078 _regs.eflags &= ~(EFLG_CF|EFLG_AF);
2079 if ( ((al & 0x0f) > 9) || (eflags & EFLG_AF) )
2081 *(uint8_t *)&_regs.eax += 6;
2082 _regs.eflags |= EFLG_AF;
2084 if ( (al > 0x99) || (eflags & EFLG_CF) )
2086 *(uint8_t *)&_regs.eax += 0x60;
2087 _regs.eflags |= EFLG_CF;
2089 _regs.eflags &= ~(EFLG_SF|EFLG_ZF|EFLG_PF);
2090 _regs.eflags |= ((uint8_t)_regs.eax == 0) ? EFLG_ZF : 0;
2091 _regs.eflags |= (( int8_t)_regs.eax < 0) ? EFLG_SF : 0;
2092 _regs.eflags |= even_parity(_regs.eax) ? EFLG_PF : 0;
2093 break;
2096 case 0x2f: /* das */ {
2097 uint8_t al = _regs.eax;
2098 unsigned long eflags = _regs.eflags;
2099 generate_exception_if(mode_64bit(), EXC_UD);
2100 _regs.eflags &= ~(EFLG_CF|EFLG_AF);
2101 if ( ((al & 0x0f) > 9) || (eflags & EFLG_AF) )
2103 _regs.eflags |= EFLG_AF;
2104 if ( (al < 6) || (eflags & EFLG_CF) )
2105 _regs.eflags |= EFLG_CF;
2106 *(uint8_t *)&_regs.eax -= 6;
2108 if ( (al > 0x99) || (eflags & EFLG_CF) )
2110 *(uint8_t *)&_regs.eax -= 0x60;
2111 _regs.eflags |= EFLG_CF;
2113 _regs.eflags &= ~(EFLG_SF|EFLG_ZF|EFLG_PF);
2114 _regs.eflags |= ((uint8_t)_regs.eax == 0) ? EFLG_ZF : 0;
2115 _regs.eflags |= (( int8_t)_regs.eax < 0) ? EFLG_SF : 0;
2116 _regs.eflags |= even_parity(_regs.eax) ? EFLG_PF : 0;
2117 break;
2120 case 0x37: /* aaa */
2121 case 0x3f: /* aas */
2122 generate_exception_if(mode_64bit(), EXC_UD);
2123 _regs.eflags &= ~EFLG_CF;
2124 if ( ((uint8_t)_regs.eax > 9) || (_regs.eflags & EFLG_AF) )
2126 ((uint8_t *)&_regs.eax)[0] += (b == 0x37) ? 6 : -6;
2127 ((uint8_t *)&_regs.eax)[1] += (b == 0x37) ? 1 : -1;
2128 _regs.eflags |= EFLG_CF | EFLG_AF;
2130 ((uint8_t *)&_regs.eax)[0] &= 0x0f;
2131 break;
2133 case 0x40 ... 0x4f: /* inc/dec reg */
2134 dst.type = OP_REG;
2135 dst.reg = decode_register(b & 7, &_regs, 0);
2136 dst.bytes = op_bytes;
2137 dst.val = *dst.reg;
2138 if ( b & 8 )
2139 emulate_1op("dec", dst, _regs.eflags);
2140 else
2141 emulate_1op("inc", dst, _regs.eflags);
2142 break;
2144 case 0x50 ... 0x57: /* push reg */
2145 src.val = *(unsigned long *)decode_register(
2146 (b & 7) | ((rex_prefix & 1) << 3), &_regs, 0);
2147 goto push;
2149 case 0x58 ... 0x5f: /* pop reg */
2150 dst.type = OP_REG;
2151 dst.reg = decode_register(
2152 (b & 7) | ((rex_prefix & 1) << 3), &_regs, 0);
2153 dst.bytes = op_bytes;
2154 if ( mode_64bit() && (dst.bytes == 4) )
2155 dst.bytes = 8;
2156 if ( (rc = ops->read(x86_seg_ss, sp_post_inc(dst.bytes),
2157 &dst.val, dst.bytes, ctxt)) != 0 )
2158 goto done;
2159 break;
2161 case 0x60: /* pusha */ {
2162 int i;
2163 unsigned long regs[] = {
2164 _regs.eax, _regs.ecx, _regs.edx, _regs.ebx,
2165 _regs.esp, _regs.ebp, _regs.esi, _regs.edi };
2166 generate_exception_if(mode_64bit(), EXC_UD);
2167 for ( i = 0; i < 8; i++ )
2168 if ( (rc = ops->write(x86_seg_ss, sp_pre_dec(op_bytes),
2169 regs[i], op_bytes, ctxt)) != 0 )
2170 goto done;
2171 break;
2174 case 0x61: /* popa */ {
2175 int i;
2176 unsigned long dummy_esp, *regs[] = {
2177 (unsigned long *)&_regs.edi, (unsigned long *)&_regs.esi,
2178 (unsigned long *)&_regs.ebp, (unsigned long *)&dummy_esp,
2179 (unsigned long *)&_regs.ebx, (unsigned long *)&_regs.edx,
2180 (unsigned long *)&_regs.ecx, (unsigned long *)&_regs.eax };
2181 generate_exception_if(mode_64bit(), EXC_UD);
2182 for ( i = 0; i < 8; i++ )
2184 if ( (rc = ops->read(x86_seg_ss, sp_post_inc(op_bytes),
2185 &dst.val, op_bytes, ctxt)) != 0 )
2186 goto done;
2187 switch ( op_bytes )
2189 case 1: *(uint8_t *)regs[i] = (uint8_t)dst.val; break;
2190 case 2: *(uint16_t *)regs[i] = (uint16_t)dst.val; break;
2191 case 4: *regs[i] = (uint32_t)dst.val; break; /* 64b: zero-ext */
2192 case 8: *regs[i] = dst.val; break;
2195 break;
2198 case 0x68: /* push imm{16,32,64} */
2199 src.val = ((op_bytes == 2)
2200 ? (int32_t)insn_fetch_type(int16_t)
2201 : insn_fetch_type(int32_t));
2202 goto push;
2204 case 0x6a: /* push imm8 */
2205 src.val = insn_fetch_type(int8_t);
2206 push:
2207 d |= Mov; /* force writeback */
2208 dst.type = OP_MEM;
2209 dst.bytes = op_bytes;
2210 if ( mode_64bit() && (dst.bytes == 4) )
2211 dst.bytes = 8;
2212 dst.val = src.val;
2213 dst.mem.seg = x86_seg_ss;
2214 dst.mem.off = sp_pre_dec(dst.bytes);
2215 break;
2217 case 0x6c ... 0x6d: /* ins %dx,%es:%edi */ {
2218 unsigned long nr_reps = get_rep_prefix();
2219 generate_exception_if(!mode_iopl(), EXC_GP);
2220 dst.bytes = !(b & 1) ? 1 : (op_bytes == 8) ? 4 : op_bytes;
2221 dst.mem.seg = x86_seg_es;
2222 dst.mem.off = truncate_ea(_regs.edi);
2223 if ( (nr_reps > 1) && (ops->rep_ins != NULL) )
2225 if ( (rc = ops->rep_ins((uint16_t)_regs.edx, dst.mem.seg,
2226 dst.mem.off, dst.bytes,
2227 &nr_reps, ctxt)) != 0 )
2228 goto done;
2230 else
2232 fail_if(ops->read_io == NULL);
2233 if ( (rc = ops->read_io((uint16_t)_regs.edx, dst.bytes,
2234 &dst.val, ctxt)) != 0 )
2235 goto done;
2236 dst.type = OP_MEM;
2237 nr_reps = 1;
2239 register_address_increment(
2240 _regs.edi,
2241 nr_reps * ((_regs.eflags & EFLG_DF) ? -dst.bytes : dst.bytes));
2242 put_rep_prefix(nr_reps);
2243 break;
2246 case 0x6e ... 0x6f: /* outs %esi,%dx */ {
2247 unsigned long nr_reps = get_rep_prefix();
2248 generate_exception_if(!mode_iopl(), EXC_GP);
2249 dst.bytes = !(b & 1) ? 1 : (op_bytes == 8) ? 4 : op_bytes;
2250 if ( (nr_reps > 1) && (ops->rep_outs != NULL) )
2252 if ( (rc = ops->rep_outs(ea.mem.seg, truncate_ea(_regs.esi),
2253 (uint16_t)_regs.edx, dst.bytes,
2254 &nr_reps, ctxt)) != 0 )
2255 goto done;
2257 else
2259 if ( (rc = ops->read(ea.mem.seg, truncate_ea(_regs.esi),
2260 &dst.val, dst.bytes, ctxt)) != 0 )
2261 goto done;
2262 fail_if(ops->write_io == NULL);
2263 if ( (rc = ops->write_io((uint16_t)_regs.edx, dst.bytes,
2264 dst.val, ctxt)) != 0 )
2265 goto done;
2266 nr_reps = 1;
2268 register_address_increment(
2269 _regs.esi,
2270 nr_reps * ((_regs.eflags & EFLG_DF) ? -dst.bytes : dst.bytes));
2271 put_rep_prefix(nr_reps);
2272 break;
2275 case 0x70 ... 0x7f: /* jcc (short) */ {
2276 int rel = insn_fetch_type(int8_t);
2277 if ( test_cc(b, _regs.eflags) )
2278 jmp_rel(rel);
2279 break;
2282 case 0x90: /* nop / xchg %%r8,%%rax */
2283 if ( !(rex_prefix & 1) )
2284 break; /* nop */
2286 case 0x91 ... 0x97: /* xchg reg,%%rax */
2287 src.type = dst.type = OP_REG;
2288 src.bytes = dst.bytes = op_bytes;
2289 src.reg = (unsigned long *)&_regs.eax;
2290 src.val = *src.reg;
2291 dst.reg = decode_register(
2292 (b & 7) | ((rex_prefix & 1) << 3), &_regs, 0);
2293 dst.val = *dst.reg;
2294 goto xchg;
2296 case 0x98: /* cbw/cwde/cdqe */
2297 switch ( op_bytes )
2299 case 2: *(int16_t *)&_regs.eax = (int8_t)_regs.eax; break; /* cbw */
2300 case 4: _regs.eax = (uint32_t)(int16_t)_regs.eax; break; /* cwde */
2301 case 8: _regs.eax = (int32_t)_regs.eax; break; /* cdqe */
2303 break;
2305 case 0x99: /* cwd/cdq/cqo */
2306 switch ( op_bytes )
2308 case 2:
2309 *(int16_t *)&_regs.edx = ((int16_t)_regs.eax < 0) ? -1 : 0;
2310 break;
2311 case 4:
2312 _regs.edx = (uint32_t)(((int32_t)_regs.eax < 0) ? -1 : 0);
2313 break;
2314 case 8:
2315 _regs.edx = (_regs.eax < 0) ? -1 : 0;
2316 break;
2318 break;
2320 case 0x9a: /* call (far, absolute) */ {
2321 struct segment_register reg;
2322 uint16_t sel;
2323 uint32_t eip;
2325 fail_if(ops->read_segment == NULL);
2326 generate_exception_if(mode_64bit(), EXC_UD);
2328 eip = insn_fetch_bytes(op_bytes);
2329 sel = insn_fetch_type(uint16_t);
2331 if ( (rc = ops->read_segment(x86_seg_cs, &reg, ctxt)) ||
2332 (rc = ops->write(x86_seg_ss, sp_pre_dec(op_bytes),
2333 reg.sel, op_bytes, ctxt)) ||
2334 (rc = ops->write(x86_seg_ss, sp_pre_dec(op_bytes),
2335 _regs.eip, op_bytes, ctxt)) )
2336 goto done;
2338 if ( (rc = load_seg(x86_seg_cs, sel, ctxt, ops)) != 0 )
2339 goto done;
2340 _regs.eip = eip;
2341 break;
2344 case 0x9c: /* pushf */
2345 src.val = _regs.eflags;
2346 goto push;
2348 case 0x9d: /* popf */ {
2349 uint32_t mask = EFLG_VIP | EFLG_VIF | EFLG_VM;
2350 if ( !mode_iopl() )
2351 mask |= EFLG_IOPL;
2352 fail_if(ops->write_rflags == NULL);
2353 /* 64-bit mode: POP defaults to a 64-bit operand. */
2354 if ( mode_64bit() && (op_bytes == 4) )
2355 op_bytes = 8;
2356 if ( (rc = ops->read(x86_seg_ss, sp_post_inc(op_bytes),
2357 &dst.val, op_bytes, ctxt)) != 0 )
2358 goto done;
2359 if ( op_bytes == 2 )
2360 dst.val = (uint16_t)dst.val | (_regs.eflags & 0xffff0000u);
2361 dst.val &= 0x257fd5;
2362 _regs.eflags &= mask;
2363 _regs.eflags |= (uint32_t)(dst.val & ~mask) | 0x02;
2364 if ( (rc = ops->write_rflags(_regs.eflags, ctxt)) != 0 )
2365 goto done;
2366 break;
2369 case 0x9e: /* sahf */
2370 *(uint8_t *)&_regs.eflags = (((uint8_t *)&_regs.eax)[1] & 0xd7) | 0x02;
2371 break;
2373 case 0x9f: /* lahf */
2374 ((uint8_t *)&_regs.eax)[1] = (_regs.eflags & 0xd7) | 0x02;
2375 break;
2377 case 0xa0 ... 0xa1: /* mov mem.offs,{%al,%ax,%eax,%rax} */
2378 /* Source EA is not encoded via ModRM. */
2379 dst.type = OP_REG;
2380 dst.reg = (unsigned long *)&_regs.eax;
2381 dst.bytes = (d & ByteOp) ? 1 : op_bytes;
2382 if ( (rc = ops->read(ea.mem.seg, insn_fetch_bytes(ad_bytes),
2383 &dst.val, dst.bytes, ctxt)) != 0 )
2384 goto done;
2385 break;
2387 case 0xa2 ... 0xa3: /* mov {%al,%ax,%eax,%rax},mem.offs */
2388 /* Destination EA is not encoded via ModRM. */
2389 dst.type = OP_MEM;
2390 dst.mem.seg = ea.mem.seg;
2391 dst.mem.off = insn_fetch_bytes(ad_bytes);
2392 dst.bytes = (d & ByteOp) ? 1 : op_bytes;
2393 dst.val = (unsigned long)_regs.eax;
2394 break;
2396 case 0xa4 ... 0xa5: /* movs */ {
2397 unsigned long nr_reps = get_rep_prefix();
2398 dst.bytes = (d & ByteOp) ? 1 : op_bytes;
2399 dst.mem.seg = x86_seg_es;
2400 dst.mem.off = truncate_ea(_regs.edi);
2401 if ( (nr_reps > 1) && (ops->rep_movs != NULL) )
2403 if ( (rc = ops->rep_movs(ea.mem.seg, truncate_ea(_regs.esi),
2404 dst.mem.seg, dst.mem.off, dst.bytes,
2405 &nr_reps, ctxt)) != 0 )
2406 goto done;
2408 else
2410 if ( (rc = ops->read(ea.mem.seg, truncate_ea(_regs.esi),
2411 &dst.val, dst.bytes, ctxt)) != 0 )
2412 goto done;
2413 dst.type = OP_MEM;
2414 nr_reps = 1;
2416 register_address_increment(
2417 _regs.esi,
2418 nr_reps * ((_regs.eflags & EFLG_DF) ? -dst.bytes : dst.bytes));
2419 register_address_increment(
2420 _regs.edi,
2421 nr_reps * ((_regs.eflags & EFLG_DF) ? -dst.bytes : dst.bytes));
2422 put_rep_prefix(nr_reps);
2423 break;
2426 case 0xa6 ... 0xa7: /* cmps */ {
2427 unsigned long next_eip = _regs.eip;
2428 get_rep_prefix();
2429 src.bytes = dst.bytes = (d & ByteOp) ? 1 : op_bytes;
2430 if ( (rc = ops->read(ea.mem.seg, truncate_ea(_regs.esi),
2431 &dst.val, dst.bytes, ctxt)) ||
2432 (rc = ops->read(x86_seg_es, truncate_ea(_regs.edi),
2433 &src.val, src.bytes, ctxt)) )
2434 goto done;
2435 register_address_increment(
2436 _regs.esi, (_regs.eflags & EFLG_DF) ? -dst.bytes : dst.bytes);
2437 register_address_increment(
2438 _regs.edi, (_regs.eflags & EFLG_DF) ? -src.bytes : src.bytes);
2439 put_rep_prefix(1);
2440 /* cmp: dst - src ==> src=*%%edi,dst=*%%esi ==> *%%esi - *%%edi */
2441 emulate_2op_SrcV("cmp", src, dst, _regs.eflags);
2442 if ( ((rep_prefix == REPE_PREFIX) && !(_regs.eflags & EFLG_ZF)) ||
2443 ((rep_prefix == REPNE_PREFIX) && (_regs.eflags & EFLG_ZF)) )
2444 _regs.eip = next_eip;
2445 break;
2448 case 0xaa ... 0xab: /* stos */ {
2449 /* unsigned long max_reps = */get_rep_prefix();
2450 dst.type = OP_MEM;
2451 dst.bytes = (d & ByteOp) ? 1 : op_bytes;
2452 dst.mem.seg = x86_seg_es;
2453 dst.mem.off = truncate_ea(_regs.edi);
2454 dst.val = _regs.eax;
2455 register_address_increment(
2456 _regs.edi, (_regs.eflags & EFLG_DF) ? -dst.bytes : dst.bytes);
2457 put_rep_prefix(1);
2458 break;
2461 case 0xac ... 0xad: /* lods */ {
2462 /* unsigned long max_reps = */get_rep_prefix();
2463 dst.type = OP_REG;
2464 dst.bytes = (d & ByteOp) ? 1 : op_bytes;
2465 dst.reg = (unsigned long *)&_regs.eax;
2466 if ( (rc = ops->read(ea.mem.seg, truncate_ea(_regs.esi),
2467 &dst.val, dst.bytes, ctxt)) != 0 )
2468 goto done;
2469 register_address_increment(
2470 _regs.esi, (_regs.eflags & EFLG_DF) ? -dst.bytes : dst.bytes);
2471 put_rep_prefix(1);
2472 break;
2475 case 0xae ... 0xaf: /* scas */ {
2476 unsigned long next_eip = _regs.eip;
2477 get_rep_prefix();
2478 src.bytes = dst.bytes = (d & ByteOp) ? 1 : op_bytes;
2479 dst.val = _regs.eax;
2480 if ( (rc = ops->read(x86_seg_es, truncate_ea(_regs.edi),
2481 &src.val, src.bytes, ctxt)) != 0 )
2482 goto done;
2483 register_address_increment(
2484 _regs.edi, (_regs.eflags & EFLG_DF) ? -src.bytes : src.bytes);
2485 put_rep_prefix(1);
2486 /* cmp: dst - src ==> src=*%%edi,dst=%%eax ==> %%eax - *%%edi */
2487 emulate_2op_SrcV("cmp", src, dst, _regs.eflags);
2488 if ( ((rep_prefix == REPE_PREFIX) && !(_regs.eflags & EFLG_ZF)) ||
2489 ((rep_prefix == REPNE_PREFIX) && (_regs.eflags & EFLG_ZF)) )
2490 _regs.eip = next_eip;
2491 break;
2494 case 0xc2: /* ret imm16 (near) */
2495 case 0xc3: /* ret (near) */ {
2496 int offset = (b == 0xc2) ? insn_fetch_type(uint16_t) : 0;
2497 op_bytes = mode_64bit() ? 8 : op_bytes;
2498 if ( (rc = ops->read(x86_seg_ss, sp_post_inc(op_bytes + offset),
2499 &dst.val, op_bytes, ctxt)) != 0 )
2500 goto done;
2501 _regs.eip = dst.val;
2502 break;
2505 case 0xc8: /* enter imm16,imm8 */ {
2506 uint16_t size = insn_fetch_type(uint16_t);
2507 uint8_t depth = insn_fetch_type(uint8_t) & 31;
2508 int i;
2510 dst.type = OP_REG;
2511 dst.bytes = (mode_64bit() && (op_bytes == 4)) ? 8 : op_bytes;
2512 dst.reg = (unsigned long *)&_regs.ebp;
2513 if ( (rc = ops->write(x86_seg_ss, sp_pre_dec(dst.bytes),
2514 _regs.ebp, dst.bytes, ctxt)) )
2515 goto done;
2516 dst.val = _regs.esp;
2518 if ( depth > 0 )
2520 for ( i = 1; i < depth; i++ )
2522 unsigned long ebp, temp_data;
2523 ebp = truncate_word(_regs.ebp - i*dst.bytes, ctxt->sp_size/8);
2524 if ( (rc = ops->read(x86_seg_ss, ebp,
2525 &temp_data, dst.bytes, ctxt)) ||
2526 (rc = ops->write(x86_seg_ss, sp_pre_dec(dst.bytes),
2527 temp_data, dst.bytes, ctxt)) )
2528 goto done;
2530 if ( (rc = ops->write(x86_seg_ss, sp_pre_dec(dst.bytes),
2531 dst.val, dst.bytes, ctxt)) )
2532 goto done;
2535 sp_pre_dec(size);
2536 break;
2539 case 0xc9: /* leave */
2540 /* First writeback, to %%esp. */
2541 dst.type = OP_REG;
2542 dst.bytes = (mode_64bit() && (op_bytes == 4)) ? 8 : op_bytes;
2543 dst.reg = (unsigned long *)&_regs.esp;
2544 dst.val = _regs.ebp;
2546 /* Flush first writeback, since there is a second. */
2547 switch ( dst.bytes )
2549 case 1: *(uint8_t *)dst.reg = (uint8_t)dst.val; break;
2550 case 2: *(uint16_t *)dst.reg = (uint16_t)dst.val; break;
2551 case 4: *dst.reg = (uint32_t)dst.val; break; /* 64b: zero-ext */
2552 case 8: *dst.reg = dst.val; break;
2555 /* Second writeback, to %%ebp. */
2556 dst.reg = (unsigned long *)&_regs.ebp;
2557 if ( (rc = ops->read(x86_seg_ss, sp_post_inc(dst.bytes),
2558 &dst.val, dst.bytes, ctxt)) )
2559 goto done;
2560 break;
2562 case 0xca: /* ret imm16 (far) */
2563 case 0xcb: /* ret (far) */ {
2564 int offset = (b == 0xca) ? insn_fetch_type(uint16_t) : 0;
2565 op_bytes = mode_64bit() ? 8 : op_bytes;
2566 if ( (rc = ops->read(x86_seg_ss, sp_post_inc(op_bytes),
2567 &dst.val, op_bytes, ctxt)) ||
2568 (rc = ops->read(x86_seg_ss, sp_post_inc(op_bytes + offset),
2569 &src.val, op_bytes, ctxt)) ||
2570 (rc = load_seg(x86_seg_cs, (uint16_t)src.val, ctxt, ops)) )
2571 goto done;
2572 _regs.eip = dst.val;
2573 break;
2576 case 0xcc: /* int3 */
2577 src.val = EXC_BP;
2578 goto swint;
2580 case 0xcd: /* int imm8 */
2581 src.val = insn_fetch_type(uint8_t);
2582 swint:
2583 fail_if(ops->inject_sw_interrupt == NULL);
2584 rc = ops->inject_sw_interrupt(src.val, _regs.eip - ctxt->regs->eip,
2585 ctxt) ? : X86EMUL_EXCEPTION;
2586 goto done;
2588 case 0xce: /* into */
2589 generate_exception_if(mode_64bit(), EXC_UD);
2590 if ( !(_regs.eflags & EFLG_OF) )
2591 break;
2592 src.val = EXC_OF;
2593 goto swint;
2595 case 0xcf: /* iret */ {
2596 unsigned long cs, eip, eflags;
2597 uint32_t mask = EFLG_VIP | EFLG_VIF | EFLG_VM;
2598 if ( !mode_iopl() )
2599 mask |= EFLG_IOPL;
2600 fail_if(!in_realmode(ctxt, ops));
2601 fail_if(ops->write_rflags == NULL);
2602 if ( (rc = ops->read(x86_seg_ss, sp_post_inc(op_bytes),
2603 &eip, op_bytes, ctxt)) ||
2604 (rc = ops->read(x86_seg_ss, sp_post_inc(op_bytes),
2605 &cs, op_bytes, ctxt)) ||
2606 (rc = ops->read(x86_seg_ss, sp_post_inc(op_bytes),
2607 &eflags, op_bytes, ctxt)) )
2608 goto done;
2609 if ( op_bytes == 2 )
2610 eflags = (uint16_t)eflags | (_regs.eflags & 0xffff0000u);
2611 eflags &= 0x257fd5;
2612 _regs.eflags &= mask;
2613 _regs.eflags |= (uint32_t)(eflags & ~mask) | 0x02;
2614 if ( (rc = ops->write_rflags(_regs.eflags, ctxt)) != 0 )
2615 goto done;
2616 _regs.eip = eip;
2617 if ( (rc = load_seg(x86_seg_cs, (uint16_t)cs, ctxt, ops)) != 0 )
2618 goto done;
2619 break;
2622 case 0xd4: /* aam */ {
2623 unsigned int base = insn_fetch_type(uint8_t);
2624 uint8_t al = _regs.eax;
2625 generate_exception_if(mode_64bit(), EXC_UD);
2626 generate_exception_if(base == 0, EXC_DE);
2627 *(uint16_t *)&_regs.eax = ((al / base) << 8) | (al % base);
2628 _regs.eflags &= ~(EFLG_SF|EFLG_ZF|EFLG_PF);
2629 _regs.eflags |= ((uint8_t)_regs.eax == 0) ? EFLG_ZF : 0;
2630 _regs.eflags |= (( int8_t)_regs.eax < 0) ? EFLG_SF : 0;
2631 _regs.eflags |= even_parity(_regs.eax) ? EFLG_PF : 0;
2632 break;
2635 case 0xd5: /* aad */ {
2636 unsigned int base = insn_fetch_type(uint8_t);
2637 uint16_t ax = _regs.eax;
2638 generate_exception_if(mode_64bit(), EXC_UD);
2639 *(uint16_t *)&_regs.eax = (uint8_t)(ax + ((ax >> 8) * base));
2640 _regs.eflags &= ~(EFLG_SF|EFLG_ZF|EFLG_PF);
2641 _regs.eflags |= ((uint8_t)_regs.eax == 0) ? EFLG_ZF : 0;
2642 _regs.eflags |= (( int8_t)_regs.eax < 0) ? EFLG_SF : 0;
2643 _regs.eflags |= even_parity(_regs.eax) ? EFLG_PF : 0;
2644 break;
2647 case 0xd6: /* salc */
2648 generate_exception_if(mode_64bit(), EXC_UD);
2649 *(uint8_t *)&_regs.eax = (_regs.eflags & EFLG_CF) ? 0xff : 0x00;
2650 break;
2652 case 0xd7: /* xlat */ {
2653 unsigned long al = (uint8_t)_regs.eax;
2654 if ( (rc = ops->read(ea.mem.seg, truncate_ea(_regs.ebx + al),
2655 &al, 1, ctxt)) != 0 )
2656 goto done;
2657 *(uint8_t *)&_regs.eax = al;
2658 break;
2661 case 0xd9: /* FPU 0xd9 */
2662 fail_if(ops->load_fpu_ctxt == NULL);
2663 ops->load_fpu_ctxt(ctxt);
2664 fail_if((modrm_reg & 7) != 7);
2665 fail_if(modrm_reg >= 0xc0);
2666 /* fnstcw m2byte */
2667 ea.bytes = 2;
2668 dst = ea;
2669 asm volatile ( "fnstcw %0" : "=m" (dst.val) );
2670 break;
2672 case 0xdb: /* FPU 0xdb */
2673 fail_if(ops->load_fpu_ctxt == NULL);
2674 ops->load_fpu_ctxt(ctxt);
2675 fail_if(modrm != 0xe3);
2676 /* fninit */
2677 asm volatile ( "fninit" );
2678 break;
2680 case 0xdd: /* FPU 0xdd */
2681 fail_if(ops->load_fpu_ctxt == NULL);
2682 ops->load_fpu_ctxt(ctxt);
2683 fail_if((modrm_reg & 7) != 7);
2684 fail_if(modrm_reg >= 0xc0);
2685 /* fnstsw m2byte */
2686 ea.bytes = 2;
2687 dst = ea;
2688 asm volatile ( "fnstsw %0" : "=m" (dst.val) );
2689 break;
2691 case 0xe0 ... 0xe2: /* loop{,z,nz} */ {
2692 int rel = insn_fetch_type(int8_t);
2693 int do_jmp = !(_regs.eflags & EFLG_ZF); /* loopnz */
2694 if ( b == 0xe1 )
2695 do_jmp = !do_jmp; /* loopz */
2696 else if ( b == 0xe2 )
2697 do_jmp = 1; /* loop */
2698 switch ( ad_bytes )
2700 case 2:
2701 do_jmp &= --(*(uint16_t *)&_regs.ecx) != 0;
2702 break;
2703 case 4:
2704 do_jmp &= --(*(uint32_t *)&_regs.ecx) != 0;
2705 _regs.ecx = (uint32_t)_regs.ecx; /* zero extend in x86/64 mode */
2706 break;
2707 default: /* case 8: */
2708 do_jmp &= --_regs.ecx != 0;
2709 break;
2711 if ( do_jmp )
2712 jmp_rel(rel);
2713 break;
2716 case 0xe3: /* jcxz/jecxz (short) */ {
2717 int rel = insn_fetch_type(int8_t);
2718 if ( (ad_bytes == 2) ? !(uint16_t)_regs.ecx :
2719 (ad_bytes == 4) ? !(uint32_t)_regs.ecx : !_regs.ecx )
2720 jmp_rel(rel);
2721 break;
2724 case 0xe4: /* in imm8,%al */
2725 case 0xe5: /* in imm8,%eax */
2726 case 0xe6: /* out %al,imm8 */
2727 case 0xe7: /* out %eax,imm8 */
2728 case 0xec: /* in %dx,%al */
2729 case 0xed: /* in %dx,%eax */
2730 case 0xee: /* out %al,%dx */
2731 case 0xef: /* out %eax,%dx */ {
2732 unsigned int port = ((b < 0xe8)
2733 ? insn_fetch_type(uint8_t)
2734 : (uint16_t)_regs.edx);
2735 generate_exception_if(!mode_iopl(), EXC_GP);
2736 op_bytes = !(b & 1) ? 1 : (op_bytes == 8) ? 4 : op_bytes;
2737 if ( b & 2 )
2739 /* out */
2740 fail_if(ops->write_io == NULL);
2741 rc = ops->write_io(port, op_bytes, _regs.eax, ctxt);
2744 else
2746 /* in */
2747 dst.type = OP_REG;
2748 dst.bytes = op_bytes;
2749 dst.reg = (unsigned long *)&_regs.eax;
2750 fail_if(ops->read_io == NULL);
2751 rc = ops->read_io(port, dst.bytes, &dst.val, ctxt);
2753 if ( rc != 0 )
2754 goto done;
2755 break;
2758 case 0xe8: /* call (near) */ {
2759 int rel = (((op_bytes == 2) && !mode_64bit())
2760 ? (int32_t)insn_fetch_type(int16_t)
2761 : insn_fetch_type(int32_t));
2762 op_bytes = mode_64bit() ? 8 : op_bytes;
2763 src.val = _regs.eip;
2764 jmp_rel(rel);
2765 goto push;
2768 case 0xe9: /* jmp (near) */ {
2769 int rel = (((op_bytes == 2) && !mode_64bit())
2770 ? (int32_t)insn_fetch_type(int16_t)
2771 : insn_fetch_type(int32_t));
2772 jmp_rel(rel);
2773 break;
2776 case 0xea: /* jmp (far, absolute) */ {
2777 uint16_t sel;
2778 uint32_t eip;
2779 generate_exception_if(mode_64bit(), EXC_UD);
2780 eip = insn_fetch_bytes(op_bytes);
2781 sel = insn_fetch_type(uint16_t);
2782 if ( (rc = load_seg(x86_seg_cs, sel, ctxt, ops)) != 0 )
2783 goto done;
2784 _regs.eip = eip;
2785 break;
2788 case 0xeb: /* jmp (short) */
2789 jmp_rel(insn_fetch_type(int8_t));
2790 break;
2792 case 0xf1: /* int1 (icebp) */
2793 src.val = EXC_DB;
2794 goto swint;
2796 case 0xf4: /* hlt */
2797 fail_if(ops->hlt == NULL);
2798 if ( (rc = ops->hlt(ctxt)) != 0 )
2799 goto done;
2800 break;
2802 case 0xf5: /* cmc */
2803 _regs.eflags ^= EFLG_CF;
2804 break;
2806 case 0xf8: /* clc */
2807 _regs.eflags &= ~EFLG_CF;
2808 break;
2810 case 0xf9: /* stc */
2811 _regs.eflags |= EFLG_CF;
2812 break;
2814 case 0xfa: /* cli */
2815 case 0xfb: /* sti */
2816 generate_exception_if(!mode_iopl(), EXC_GP);
2817 fail_if(ops->write_rflags == NULL);
2818 _regs.eflags &= ~EFLG_IF;
2819 if ( b == 0xfb ) /* sti */
2820 _regs.eflags |= EFLG_IF;
2821 if ( (rc = ops->write_rflags(_regs.eflags, ctxt)) != 0 )
2822 goto done;
2823 break;
2825 case 0xfc: /* cld */
2826 _regs.eflags &= ~EFLG_DF;
2827 break;
2829 case 0xfd: /* std */
2830 _regs.eflags |= EFLG_DF;
2831 break;
2833 goto writeback;
2835 twobyte_insn:
2836 switch ( b )
2838 case 0x40 ... 0x4f: /* cmovcc */
2839 dst.val = src.val;
2840 if ( !test_cc(b, _regs.eflags) )
2841 dst.type = OP_NONE;
2842 break;
2844 case 0x90 ... 0x9f: /* setcc */
2845 dst.val = test_cc(b, _regs.eflags);
2846 break;
2848 case 0xb0 ... 0xb1: /* cmpxchg */
2849 /* Save real source value, then compare EAX against destination. */
2850 src.orig_val = src.val;
2851 src.val = _regs.eax;
2852 emulate_2op_SrcV("cmp", src, dst, _regs.eflags);
2853 /* Always write back. The question is: where to? */
2854 d |= Mov;
2855 if ( _regs.eflags & EFLG_ZF )
2857 /* Success: write back to memory. */
2858 dst.val = src.orig_val;
2860 else
2862 /* Failure: write the value we saw to EAX. */
2863 dst.type = OP_REG;
2864 dst.reg = (unsigned long *)&_regs.eax;
2866 break;
2868 case 0xa3: bt: /* bt */
2869 emulate_2op_SrcV_nobyte("bt", src, dst, _regs.eflags);
2870 break;
2872 case 0xa4: /* shld imm8,r,r/m */
2873 case 0xa5: /* shld %%cl,r,r/m */
2874 case 0xac: /* shrd imm8,r,r/m */
2875 case 0xad: /* shrd %%cl,r,r/m */ {
2876 uint8_t shift, width = dst.bytes << 3;
2877 shift = (b & 1) ? (uint8_t)_regs.ecx : insn_fetch_type(uint8_t);
2878 if ( (shift &= width - 1) == 0 )
2879 break;
2880 dst.orig_val = truncate_word(dst.val, dst.bytes);
2881 dst.val = ((shift == width) ? src.val :
2882 (b & 8) ?
2883 /* shrd */
2884 ((dst.orig_val >> shift) |
2885 truncate_word(src.val << (width - shift), dst.bytes)) :
2886 /* shld */
2887 ((dst.orig_val << shift) |
2888 ((src.val >> (width - shift)) & ((1ull << shift) - 1))));
2889 dst.val = truncate_word(dst.val, dst.bytes);
2890 _regs.eflags &= ~(EFLG_OF|EFLG_SF|EFLG_ZF|EFLG_PF|EFLG_CF);
2891 if ( (dst.val >> ((b & 8) ? (shift - 1) : (width - shift))) & 1 )
2892 _regs.eflags |= EFLG_CF;
2893 if ( ((dst.val ^ dst.orig_val) >> (width - 1)) & 1 )
2894 _regs.eflags |= EFLG_OF;
2895 _regs.eflags |= ((dst.val >> (width - 1)) & 1) ? EFLG_SF : 0;
2896 _regs.eflags |= (dst.val == 0) ? EFLG_ZF : 0;
2897 _regs.eflags |= even_parity(dst.val) ? EFLG_PF : 0;
2898 break;
2901 case 0xb3: btr: /* btr */
2902 emulate_2op_SrcV_nobyte("btr", src, dst, _regs.eflags);
2903 break;
2905 case 0xab: bts: /* bts */
2906 emulate_2op_SrcV_nobyte("bts", src, dst, _regs.eflags);
2907 break;
2909 case 0xaf: /* imul */
2910 _regs.eflags &= ~(EFLG_OF|EFLG_CF);
2911 switch ( dst.bytes )
2913 case 2:
2914 dst.val = ((uint32_t)(int16_t)src.val *
2915 (uint32_t)(int16_t)dst.val);
2916 if ( (int16_t)dst.val != (uint32_t)dst.val )
2917 _regs.eflags |= EFLG_OF|EFLG_CF;
2918 break;
2919 #ifdef __x86_64__
2920 case 4:
2921 dst.val = ((uint64_t)(int32_t)src.val *
2922 (uint64_t)(int32_t)dst.val);
2923 if ( (int32_t)dst.val != dst.val )
2924 _regs.eflags |= EFLG_OF|EFLG_CF;
2925 break;
2926 #endif
2927 default: {
2928 unsigned long m[2] = { src.val, dst.val };
2929 if ( imul_dbl(m) )
2930 _regs.eflags |= EFLG_OF|EFLG_CF;
2931 dst.val = m[0];
2932 break;
2935 break;
2937 case 0xb2: /* lss */
2938 dst.val = x86_seg_ss;
2939 goto les;
2941 case 0xb4: /* lfs */
2942 dst.val = x86_seg_fs;
2943 goto les;
2945 case 0xb5: /* lgs */
2946 dst.val = x86_seg_gs;
2947 goto les;
2949 case 0xb6: /* movzx rm8,r{16,32,64} */
2950 /* Recompute DstReg as we may have decoded AH/BH/CH/DH. */
2951 dst.reg = decode_register(modrm_reg, &_regs, 0);
2952 dst.bytes = op_bytes;
2953 dst.val = (uint8_t)src.val;
2954 break;
2956 case 0xbc: /* bsf */ {
2957 int zf;
2958 asm ( "bsf %2,%0; setz %b1"
2959 : "=r" (dst.val), "=q" (zf)
2960 : "r" (src.val), "1" (0) );
2961 _regs.eflags &= ~EFLG_ZF;
2962 _regs.eflags |= zf ? EFLG_ZF : 0;
2963 break;
2966 case 0xbd: /* bsr */ {
2967 int zf;
2968 asm ( "bsr %2,%0; setz %b1"
2969 : "=r" (dst.val), "=q" (zf)
2970 : "r" (src.val), "1" (0) );
2971 _regs.eflags &= ~EFLG_ZF;
2972 _regs.eflags |= zf ? EFLG_ZF : 0;
2973 break;
2976 case 0xb7: /* movzx rm16,r{16,32,64} */
2977 dst.val = (uint16_t)src.val;
2978 break;
2980 case 0xbb: btc: /* btc */
2981 emulate_2op_SrcV_nobyte("btc", src, dst, _regs.eflags);
2982 break;
2984 case 0xba: /* Grp8 */
2985 switch ( modrm_reg & 7 )
2987 case 4: goto bt;
2988 case 5: goto bts;
2989 case 6: goto btr;
2990 case 7: goto btc;
2991 default: generate_exception_if(1, EXC_UD);
2993 break;
2995 case 0xbe: /* movsx rm8,r{16,32,64} */
2996 /* Recompute DstReg as we may have decoded AH/BH/CH/DH. */
2997 dst.reg = decode_register(modrm_reg, &_regs, 0);
2998 dst.bytes = op_bytes;
2999 dst.val = (int8_t)src.val;
3000 break;
3002 case 0xbf: /* movsx rm16,r{16,32,64} */
3003 dst.val = (int16_t)src.val;
3004 break;
3006 case 0xc0 ... 0xc1: /* xadd */
3007 /* Write back the register source. */
3008 switch ( dst.bytes )
3010 case 1: *(uint8_t *)src.reg = (uint8_t)dst.val; break;
3011 case 2: *(uint16_t *)src.reg = (uint16_t)dst.val; break;
3012 case 4: *src.reg = (uint32_t)dst.val; break; /* 64b reg: zero-extend */
3013 case 8: *src.reg = dst.val; break;
3015 goto add;
3017 goto writeback;
3019 twobyte_special_insn:
3020 switch ( b )
3022 case 0x01: /* Grp7 */ {
3023 struct segment_register reg;
3024 unsigned long base, limit, cr0, cr0w;
3026 switch ( modrm_reg & 7 )
3028 case 0: /* sgdt */
3029 case 1: /* sidt */
3030 generate_exception_if(ea.type != OP_MEM, EXC_UD);
3031 fail_if(ops->read_segment == NULL);
3032 if ( (rc = ops->read_segment((modrm_reg & 1) ?
3033 x86_seg_idtr : x86_seg_gdtr,
3034 &reg, ctxt)) )
3035 goto done;
3036 if ( op_bytes == 2 )
3037 reg.base &= 0xffffff;
3038 if ( (rc = ops->write(ea.mem.seg, ea.mem.off+0,
3039 reg.limit, 2, ctxt)) ||
3040 (rc = ops->write(ea.mem.seg, ea.mem.off+2,
3041 reg.base, mode_64bit() ? 8 : 4, ctxt)) )
3042 goto done;
3043 break;
3044 case 2: /* lgdt */
3045 case 3: /* lidt */
3046 generate_exception_if(ea.type != OP_MEM, EXC_UD);
3047 fail_if(ops->write_segment == NULL);
3048 memset(&reg, 0, sizeof(reg));
3049 if ( (rc = ops->read(ea.mem.seg, ea.mem.off+0,
3050 &limit, 2, ctxt)) ||
3051 (rc = ops->read(ea.mem.seg, ea.mem.off+2,
3052 &base, mode_64bit() ? 8 : 4, ctxt)) )
3053 goto done;
3054 reg.base = base;
3055 reg.limit = limit;
3056 if ( op_bytes == 2 )
3057 reg.base &= 0xffffff;
3058 if ( (rc = ops->write_segment((modrm_reg & 1) ?
3059 x86_seg_idtr : x86_seg_gdtr,
3060 &reg, ctxt)) )
3061 goto done;
3062 break;
3063 case 4: /* smsw */
3064 ea.bytes = 2;
3065 dst = ea;
3066 fail_if(ops->read_cr == NULL);
3067 if ( (rc = ops->read_cr(0, &dst.val, ctxt)) )
3068 goto done;
3069 d |= Mov; /* force writeback */
3070 break;
3071 case 6: /* lmsw */
3072 fail_if(ops->read_cr == NULL);
3073 fail_if(ops->write_cr == NULL);
3074 if ( (rc = ops->read_cr(0, &cr0, ctxt)) )
3075 goto done;
3076 if ( ea.type == OP_REG )
3077 cr0w = *ea.reg;
3078 else if ( (rc = ops->read(ea.mem.seg, ea.mem.off,
3079 &cr0w, 2, ctxt)) )
3080 goto done;
3081 cr0 &= 0xffff0000;
3082 cr0 |= (uint16_t)cr0w;
3083 if ( (rc = ops->write_cr(0, cr0, ctxt)) )
3084 goto done;
3085 break;
3086 default:
3087 goto cannot_emulate;
3089 break;
3092 case 0x06: /* clts */
3093 generate_exception_if(!mode_ring0(), EXC_GP);
3094 fail_if((ops->read_cr == NULL) || (ops->write_cr == NULL));
3095 if ( (rc = ops->read_cr(0, &dst.val, ctxt)) ||
3096 (rc = ops->write_cr(0, dst.val&~8, ctxt)) )
3097 goto done;
3098 break;
3100 case 0x08: /* invd */
3101 case 0x09: /* wbinvd */
3102 generate_exception_if(!mode_ring0(), EXC_GP);
3103 fail_if(ops->wbinvd == NULL);
3104 if ( (rc = ops->wbinvd(ctxt)) != 0 )
3105 goto done;
3106 break;
3108 case 0x0d: /* GrpP (prefetch) */
3109 case 0x18: /* Grp16 (prefetch/nop) */
3110 case 0x19 ... 0x1f: /* nop (amd-defined) */
3111 break;
3113 case 0x20: /* mov cr,reg */
3114 case 0x21: /* mov dr,reg */
3115 case 0x22: /* mov reg,cr */
3116 case 0x23: /* mov reg,dr */
3117 generate_exception_if(!mode_ring0(), EXC_GP);
3118 modrm_rm |= (rex_prefix & 1) << 3;
3119 modrm_reg |= lock_prefix << 3;
3120 if ( b & 2 )
3122 /* Write to CR/DR. */
3123 src.val = *(unsigned long *)decode_register(modrm_rm, &_regs, 0);
3124 if ( !mode_64bit() )
3125 src.val = (uint32_t)src.val;
3126 rc = ((b & 1)
3127 ? (ops->write_dr
3128 ? ops->write_dr(modrm_reg, src.val, ctxt)
3129 : X86EMUL_UNHANDLEABLE)
3130 : (ops->write_cr
3131 ? ops->write_cr(modrm_reg, src.val, ctxt)
3132 : X86EMUL_UNHANDLEABLE));
3134 else
3136 /* Read from CR/DR. */
3137 dst.type = OP_REG;
3138 dst.bytes = mode_64bit() ? 8 : 4;
3139 dst.reg = decode_register(modrm_rm, &_regs, 0);
3140 rc = ((b & 1)
3141 ? (ops->read_dr
3142 ? ops->read_dr(modrm_reg, &dst.val, ctxt)
3143 : X86EMUL_UNHANDLEABLE)
3144 : (ops->read_cr
3145 ? ops->read_cr(modrm_reg, &dst.val, ctxt)
3146 : X86EMUL_UNHANDLEABLE));
3148 if ( rc != 0 )
3149 goto done;
3150 break;
3152 case 0x30: /* wrmsr */ {
3153 uint64_t val = ((uint64_t)_regs.edx << 32) | (uint32_t)_regs.eax;
3154 generate_exception_if(!mode_ring0(), EXC_GP);
3155 fail_if(ops->write_msr == NULL);
3156 if ( (rc = ops->write_msr((uint32_t)_regs.ecx, val, ctxt)) != 0 )
3157 goto done;
3158 break;
3161 case 0x31: /* rdtsc */ {
3162 unsigned long cr4;
3163 uint64_t val;
3164 fail_if(ops->read_cr == NULL);
3165 if ( (rc = ops->read_cr(4, &cr4, ctxt)) )
3166 goto done;
3167 generate_exception_if((cr4 & CR4_TSD) && !mode_ring0(), EXC_GP);
3168 fail_if(ops->read_msr == NULL);
3169 if ( (rc = ops->read_msr(MSR_TSC, &val, ctxt)) != 0 )
3170 goto done;
3171 _regs.edx = (uint32_t)(val >> 32);
3172 _regs.eax = (uint32_t)(val >> 0);
3173 break;
3176 case 0x32: /* rdmsr */ {
3177 uint64_t val;
3178 generate_exception_if(!mode_ring0(), EXC_GP);
3179 fail_if(ops->read_msr == NULL);
3180 if ( (rc = ops->read_msr((uint32_t)_regs.ecx, &val, ctxt)) != 0 )
3181 goto done;
3182 _regs.edx = (uint32_t)(val >> 32);
3183 _regs.eax = (uint32_t)(val >> 0);
3184 break;
3187 case 0x80 ... 0x8f: /* jcc (near) */ {
3188 int rel = (((op_bytes == 2) && !mode_64bit())
3189 ? (int32_t)insn_fetch_type(int16_t)
3190 : insn_fetch_type(int32_t));
3191 if ( test_cc(b, _regs.eflags) )
3192 jmp_rel(rel);
3193 break;
3196 case 0xa0: /* push %%fs */
3197 src.val = x86_seg_fs;
3198 goto push_seg;
3200 case 0xa1: /* pop %%fs */
3201 src.val = x86_seg_fs;
3202 goto pop_seg;
3204 case 0xa2: /* cpuid */ {
3205 unsigned int eax = _regs.eax, ebx = _regs.ebx;
3206 unsigned int ecx = _regs.ecx, edx = _regs.edx;
3207 fail_if(ops->cpuid == NULL);
3208 if ( (rc = ops->cpuid(&eax, &ebx, &ecx, &edx, ctxt)) != 0 )
3209 goto done;
3210 _regs.eax = eax; _regs.ebx = ebx;
3211 _regs.ecx = ecx; _regs.edx = edx;
3212 break;
3215 case 0xa8: /* push %%gs */
3216 src.val = x86_seg_gs;
3217 goto push_seg;
3219 case 0xa9: /* pop %%gs */
3220 src.val = x86_seg_gs;
3221 goto pop_seg;
3223 case 0xc7: /* Grp9 (cmpxchg8b) */
3224 #if defined(__i386__)
3226 unsigned long old_lo, old_hi;
3227 generate_exception_if((modrm_reg & 7) != 1, EXC_UD);
3228 generate_exception_if(ea.type != OP_MEM, EXC_UD);
3229 if ( (rc = ops->read(ea.mem.seg, ea.mem.off+0, &old_lo, 4, ctxt)) ||
3230 (rc = ops->read(ea.mem.seg, ea.mem.off+4, &old_hi, 4, ctxt)) )
3231 goto done;
3232 if ( (old_lo != _regs.eax) || (old_hi != _regs.edx) )
3234 _regs.eax = old_lo;
3235 _regs.edx = old_hi;
3236 _regs.eflags &= ~EFLG_ZF;
3238 else if ( ops->cmpxchg8b == NULL )
3240 rc = X86EMUL_UNHANDLEABLE;
3241 goto done;
3243 else
3245 if ( (rc = ops->cmpxchg8b(ea.mem.seg, ea.mem.off, old_lo, old_hi,
3246 _regs.ebx, _regs.ecx, ctxt)) != 0 )
3247 goto done;
3248 _regs.eflags |= EFLG_ZF;
3250 break;
3252 #elif defined(__x86_64__)
3254 unsigned long old, new;
3255 generate_exception_if((modrm_reg & 7) != 1, EXC_UD);
3256 generate_exception_if(ea.type != OP_MEM, EXC_UD);
3257 if ( (rc = ops->read(ea.mem.seg, ea.mem.off, &old, 8, ctxt)) != 0 )
3258 goto done;
3259 if ( ((uint32_t)(old>>0) != (uint32_t)_regs.eax) ||
3260 ((uint32_t)(old>>32) != (uint32_t)_regs.edx) )
3262 _regs.eax = (uint32_t)(old>>0);
3263 _regs.edx = (uint32_t)(old>>32);
3264 _regs.eflags &= ~EFLG_ZF;
3266 else
3268 new = (_regs.ecx<<32)|(uint32_t)_regs.ebx;
3269 if ( (rc = ops->cmpxchg(ea.mem.seg, ea.mem.off, old,
3270 new, 8, ctxt)) != 0 )
3271 goto done;
3272 _regs.eflags |= EFLG_ZF;
3274 break;
3276 #endif
3278 case 0xc8 ... 0xcf: /* bswap */
3279 dst.type = OP_REG;
3280 dst.reg = decode_register(
3281 (b & 7) | ((rex_prefix & 1) << 3), &_regs, 0);
3282 switch ( dst.bytes = op_bytes )
3284 default: /* case 2: */
3285 /* Undefined behaviour. Writes zero on all tested CPUs. */
3286 dst.val = 0;
3287 break;
3288 case 4:
3289 #ifdef __x86_64__
3290 asm ( "bswap %k0" : "=r" (dst.val) : "0" (*dst.reg) );
3291 break;
3292 case 8:
3293 #endif
3294 asm ( "bswap %0" : "=r" (dst.val) : "0" (*dst.reg) );
3295 break;
3297 break;
3299 goto writeback;
3301 cannot_emulate:
3302 #if 0
3303 gdprintk(XENLOG_DEBUG, "Instr:");
3304 for ( ea.mem.off = ctxt->regs->eip; ea.mem.off < _regs.eip; ea.mem.off++ )
3306 unsigned long x;
3307 ops->insn_fetch(x86_seg_cs, ea.mem.off, &x, 1, ctxt);
3308 printk(" %02x", (uint8_t)x);
3310 printk("\n");
3311 #endif
3312 return X86EMUL_UNHANDLEABLE;