ia64/xen-unstable

view xen/arch/x86/x86_emulate.c @ 16492:43b7d24acf9c

x86_emulate: Emulate RDTSC instruction.
Signed-off-by: Keir Fraser <keir.fraser@citrix.com>
author Keir Fraser <keir.fraser@citrix.com>
date Wed Nov 28 12:44:46 2007 +0000 (2007-11-28)
parents cca2f2fb857d
children 0b9048f7f257
line source
1 /******************************************************************************
2 * x86_emulate.c
3 *
4 * Generic x86 (32-bit and 64-bit) instruction decoder and emulator.
5 *
6 * Copyright (c) 2005-2007 Keir Fraser
7 * Copyright (c) 2005-2007 XenSource Inc.
8 *
9 * This program is free software; you can redistribute it and/or modify
10 * it under the terms of the GNU General Public License as published by
11 * the Free Software Foundation; either version 2 of the License, or
12 * (at your option) any later version.
13 *
14 * This program is distributed in the hope that it will be useful,
15 * but WITHOUT ANY WARRANTY; without even the implied warranty of
16 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
17 * GNU General Public License for more details.
18 *
19 * You should have received a copy of the GNU General Public License
20 * along with this program; if not, write to the Free Software
21 * Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
22 */
24 #ifndef __XEN__
25 #include <stddef.h>
26 #include <stdint.h>
27 #include <public/xen.h>
28 #else
29 #include <xen/config.h>
30 #include <xen/types.h>
31 #include <xen/lib.h>
32 #include <asm/regs.h>
33 #undef cmpxchg
34 #endif
35 #include <asm-x86/x86_emulate.h>
/*
 * Per-opcode decode attributes, OR'ed together in opcode_table[] and
 * twobyte_table[] below to drive the generic operand decoder.
 */
/* Operand sizes: 8-bit operands or specified/overridden size. */
#define ByteOp      (1<<0) /* 8-bit operands. */
/* Destination operand type. */
#define DstBitBase  (0<<1) /* Memory operand, bit string. */
#define ImplicitOps (1<<1) /* Implicit in opcode. No generic decode. */
#define DstReg      (2<<1) /* Register operand. */
#define DstMem      (3<<1) /* Memory operand. */
#define DstMask     (3<<1)
/* Source operand type. */
#define SrcNone     (0<<3) /* No source operand. */
#define SrcImplicit (0<<3) /* Source operand is implicit in the opcode. */
#define SrcReg      (1<<3) /* Register operand. */
#define SrcMem      (2<<3) /* Memory operand. */
#define SrcMem16    (3<<3) /* Memory operand (16-bit). */
#define SrcImm      (4<<3) /* Immediate operand. */
#define SrcImmByte  (5<<3) /* 8-bit sign-extended immediate operand. */
#define SrcMask     (7<<3)
/* Generic ModRM decode. */
#define ModRM       (1<<6)
/* Destination is only written; never read. */
#define Mov         (1<<7)
/*
 * Decode attributes for each one-byte opcode (indexed by opcode value).
 * A zero entry means the opcode is not emulated here (0x0F escapes to
 * twobyte_table[] instead).
 */
static uint8_t opcode_table[256] = {
    /* 0x00 - 0x07 */
    ByteOp|DstMem|SrcReg|ModRM, DstMem|SrcReg|ModRM,
    ByteOp|DstReg|SrcMem|ModRM, DstReg|SrcMem|ModRM,
    ByteOp|DstReg|SrcImm, DstReg|SrcImm, ImplicitOps, ImplicitOps,
    /* 0x08 - 0x0F */
    ByteOp|DstMem|SrcReg|ModRM, DstMem|SrcReg|ModRM,
    ByteOp|DstReg|SrcMem|ModRM, DstReg|SrcMem|ModRM,
    ByteOp|DstReg|SrcImm, DstReg|SrcImm, ImplicitOps, 0,
    /* 0x10 - 0x17 */
    ByteOp|DstMem|SrcReg|ModRM, DstMem|SrcReg|ModRM,
    ByteOp|DstReg|SrcMem|ModRM, DstReg|SrcMem|ModRM,
    ByteOp|DstReg|SrcImm, DstReg|SrcImm, ImplicitOps, ImplicitOps,
    /* 0x18 - 0x1F */
    ByteOp|DstMem|SrcReg|ModRM, DstMem|SrcReg|ModRM,
    ByteOp|DstReg|SrcMem|ModRM, DstReg|SrcMem|ModRM,
    ByteOp|DstReg|SrcImm, DstReg|SrcImm, ImplicitOps, ImplicitOps,
    /* 0x20 - 0x27 */
    ByteOp|DstMem|SrcReg|ModRM, DstMem|SrcReg|ModRM,
    ByteOp|DstReg|SrcMem|ModRM, DstReg|SrcMem|ModRM,
    ByteOp|DstReg|SrcImm, DstReg|SrcImm, 0, ImplicitOps,
    /* 0x28 - 0x2F */
    ByteOp|DstMem|SrcReg|ModRM, DstMem|SrcReg|ModRM,
    ByteOp|DstReg|SrcMem|ModRM, DstReg|SrcMem|ModRM,
    ByteOp|DstReg|SrcImm, DstReg|SrcImm, 0, ImplicitOps,
    /* 0x30 - 0x37 */
    ByteOp|DstMem|SrcReg|ModRM, DstMem|SrcReg|ModRM,
    ByteOp|DstReg|SrcMem|ModRM, DstReg|SrcMem|ModRM,
    ByteOp|DstReg|SrcImm, DstReg|SrcImm, 0, ImplicitOps,
    /* 0x38 - 0x3F */
    ByteOp|DstMem|SrcReg|ModRM, DstMem|SrcReg|ModRM,
    ByteOp|DstReg|SrcMem|ModRM, DstReg|SrcMem|ModRM,
    ByteOp|DstReg|SrcImm, DstReg|SrcImm, 0, ImplicitOps,
    /* 0x40 - 0x4F */
    ImplicitOps, ImplicitOps, ImplicitOps, ImplicitOps,
    ImplicitOps, ImplicitOps, ImplicitOps, ImplicitOps,
    ImplicitOps, ImplicitOps, ImplicitOps, ImplicitOps,
    ImplicitOps, ImplicitOps, ImplicitOps, ImplicitOps,
    /* 0x50 - 0x5F */
    ImplicitOps|Mov, ImplicitOps|Mov, ImplicitOps|Mov, ImplicitOps|Mov,
    ImplicitOps|Mov, ImplicitOps|Mov, ImplicitOps|Mov, ImplicitOps|Mov,
    ImplicitOps|Mov, ImplicitOps|Mov, ImplicitOps|Mov, ImplicitOps|Mov,
    ImplicitOps|Mov, ImplicitOps|Mov, ImplicitOps|Mov, ImplicitOps|Mov,
    /* 0x60 - 0x67 */
    ImplicitOps, ImplicitOps, DstReg|SrcMem|ModRM, DstReg|SrcMem16|ModRM|Mov,
    0, 0, 0, 0,
    /* 0x68 - 0x6F */
    ImplicitOps|Mov, DstMem|SrcImm|ModRM|Mov,
    ImplicitOps|Mov, DstMem|SrcImmByte|ModRM|Mov,
    ImplicitOps, ImplicitOps, ImplicitOps, ImplicitOps,
    /* 0x70 - 0x77 */
    ImplicitOps, ImplicitOps, ImplicitOps, ImplicitOps,
    ImplicitOps, ImplicitOps, ImplicitOps, ImplicitOps,
    /* 0x78 - 0x7F */
    ImplicitOps, ImplicitOps, ImplicitOps, ImplicitOps,
    ImplicitOps, ImplicitOps, ImplicitOps, ImplicitOps,
    /* 0x80 - 0x87 */
    ByteOp|DstMem|SrcImm|ModRM, DstMem|SrcImm|ModRM,
    ByteOp|DstMem|SrcImm|ModRM, DstMem|SrcImmByte|ModRM,
    ByteOp|DstMem|SrcReg|ModRM, DstMem|SrcReg|ModRM,
    ByteOp|DstMem|SrcReg|ModRM, DstMem|SrcReg|ModRM,
    /* 0x88 - 0x8F */
    ByteOp|DstMem|SrcReg|ModRM|Mov, DstMem|SrcReg|ModRM|Mov,
    ByteOp|DstReg|SrcMem|ModRM|Mov, DstReg|SrcMem|ModRM|Mov,
    DstMem|SrcReg|ModRM|Mov, DstReg|SrcNone|ModRM,
    DstReg|SrcMem|ModRM|Mov, DstMem|SrcNone|ModRM|Mov,
    /* 0x90 - 0x97 */
    ImplicitOps, ImplicitOps, ImplicitOps, ImplicitOps,
    ImplicitOps, ImplicitOps, ImplicitOps, ImplicitOps,
    /* 0x98 - 0x9F */
    ImplicitOps, ImplicitOps, ImplicitOps, 0,
    ImplicitOps, ImplicitOps, ImplicitOps, ImplicitOps,
    /* 0xA0 - 0xA7 */
    ByteOp|ImplicitOps|Mov, ImplicitOps|Mov,
    ByteOp|ImplicitOps|Mov, ImplicitOps|Mov,
    ByteOp|ImplicitOps|Mov, ImplicitOps|Mov,
    ByteOp|ImplicitOps, ImplicitOps,
    /* 0xA8 - 0xAF */
    ByteOp|DstReg|SrcImm, DstReg|SrcImm,
    ByteOp|ImplicitOps|Mov, ImplicitOps|Mov,
    ByteOp|ImplicitOps|Mov, ImplicitOps|Mov,
    ByteOp|ImplicitOps, ImplicitOps,
    /* 0xB0 - 0xB7 */
    ByteOp|DstReg|SrcImm|Mov, ByteOp|DstReg|SrcImm|Mov,
    ByteOp|DstReg|SrcImm|Mov, ByteOp|DstReg|SrcImm|Mov,
    ByteOp|DstReg|SrcImm|Mov, ByteOp|DstReg|SrcImm|Mov,
    ByteOp|DstReg|SrcImm|Mov, ByteOp|DstReg|SrcImm|Mov,
    /* 0xB8 - 0xBF */
    DstReg|SrcImm|Mov, DstReg|SrcImm|Mov, DstReg|SrcImm|Mov, DstReg|SrcImm|Mov,
    DstReg|SrcImm|Mov, DstReg|SrcImm|Mov, DstReg|SrcImm|Mov, DstReg|SrcImm|Mov,
    /* 0xC0 - 0xC7 */
    ByteOp|DstMem|SrcImm|ModRM, DstMem|SrcImmByte|ModRM,
    ImplicitOps, ImplicitOps,
    DstReg|SrcMem|ModRM|Mov, DstReg|SrcMem|ModRM|Mov,
    ByteOp|DstMem|SrcImm|ModRM|Mov, DstMem|SrcImm|ModRM|Mov,
    /* 0xC8 - 0xCF */
    ImplicitOps, ImplicitOps, ImplicitOps, ImplicitOps,
    ImplicitOps, ImplicitOps, ImplicitOps, ImplicitOps,
    /* 0xD0 - 0xD7 */
    ByteOp|DstMem|SrcImplicit|ModRM, DstMem|SrcImplicit|ModRM,
    ByteOp|DstMem|SrcImplicit|ModRM, DstMem|SrcImplicit|ModRM,
    ImplicitOps, ImplicitOps, ImplicitOps, ImplicitOps,
    /* 0xD8 - 0xDF */
    0, 0, 0, 0, 0, 0, 0, 0,
    /* 0xE0 - 0xE7 */
    ImplicitOps, ImplicitOps, ImplicitOps, ImplicitOps,
    ImplicitOps, ImplicitOps, ImplicitOps, ImplicitOps,
    /* 0xE8 - 0xEF */
    ImplicitOps, ImplicitOps, ImplicitOps, ImplicitOps,
    ImplicitOps, ImplicitOps, ImplicitOps, ImplicitOps,
    /* 0xF0 - 0xF7 */
    0, ImplicitOps, 0, 0,
    ImplicitOps, ImplicitOps,
    ByteOp|DstMem|SrcNone|ModRM, DstMem|SrcNone|ModRM,
    /* 0xF8 - 0xFF */
    ImplicitOps, ImplicitOps, ImplicitOps, ImplicitOps,
    ImplicitOps, ImplicitOps, ByteOp|DstMem|SrcNone|ModRM, DstMem|SrcNone|ModRM
};
/*
 * Decode attributes for each two-byte (0x0F-prefixed) opcode, indexed by
 * the second opcode byte. A zero entry means the opcode is not emulated.
 */
static uint8_t twobyte_table[256] = {
    /* 0x00 - 0x07 */
    0, ImplicitOps|ModRM, 0, 0, 0, ImplicitOps, 0, 0,
    /* 0x08 - 0x0F */
    ImplicitOps, ImplicitOps, 0, 0, 0, ImplicitOps|ModRM, 0, 0,
    /* 0x10 - 0x17 */
    0, 0, 0, 0, 0, 0, 0, 0,
    /* 0x18 - 0x1F */
    ImplicitOps|ModRM, ImplicitOps|ModRM, ImplicitOps|ModRM, ImplicitOps|ModRM,
    ImplicitOps|ModRM, ImplicitOps|ModRM, ImplicitOps|ModRM, ImplicitOps|ModRM,
    /* 0x20 - 0x27 */
    ImplicitOps|ModRM, ImplicitOps|ModRM, ImplicitOps|ModRM, ImplicitOps|ModRM,
    0, 0, 0, 0,
    /* 0x28 - 0x2F */
    0, 0, 0, 0, 0, 0, 0, 0,
    /* 0x30 - 0x37 */
    ImplicitOps, ImplicitOps, ImplicitOps, 0, 0, 0, 0, 0,
    /* 0x38 - 0x3F */
    0, 0, 0, 0, 0, 0, 0, 0,
    /* 0x40 - 0x47 */
    DstReg|SrcMem|ModRM|Mov, DstReg|SrcMem|ModRM|Mov,
    DstReg|SrcMem|ModRM|Mov, DstReg|SrcMem|ModRM|Mov,
    DstReg|SrcMem|ModRM|Mov, DstReg|SrcMem|ModRM|Mov,
    DstReg|SrcMem|ModRM|Mov, DstReg|SrcMem|ModRM|Mov,
    /* 0x48 - 0x4F */
    DstReg|SrcMem|ModRM|Mov, DstReg|SrcMem|ModRM|Mov,
    DstReg|SrcMem|ModRM|Mov, DstReg|SrcMem|ModRM|Mov,
    DstReg|SrcMem|ModRM|Mov, DstReg|SrcMem|ModRM|Mov,
    DstReg|SrcMem|ModRM|Mov, DstReg|SrcMem|ModRM|Mov,
    /* 0x50 - 0x5F */
    0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
    /* 0x60 - 0x6F */
    0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
    /* 0x70 - 0x7F */
    0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
    /* 0x80 - 0x87 */
    ImplicitOps, ImplicitOps, ImplicitOps, ImplicitOps,
    ImplicitOps, ImplicitOps, ImplicitOps, ImplicitOps,
    /* 0x88 - 0x8F */
    ImplicitOps, ImplicitOps, ImplicitOps, ImplicitOps,
    ImplicitOps, ImplicitOps, ImplicitOps, ImplicitOps,
    /* 0x90 - 0x97 */
    ByteOp|DstMem|SrcNone|ModRM|Mov, ByteOp|DstMem|SrcNone|ModRM|Mov,
    ByteOp|DstMem|SrcNone|ModRM|Mov, ByteOp|DstMem|SrcNone|ModRM|Mov,
    ByteOp|DstMem|SrcNone|ModRM|Mov, ByteOp|DstMem|SrcNone|ModRM|Mov,
    ByteOp|DstMem|SrcNone|ModRM|Mov, ByteOp|DstMem|SrcNone|ModRM|Mov,
    /* 0x98 - 0x9F */
    ByteOp|DstMem|SrcNone|ModRM|Mov, ByteOp|DstMem|SrcNone|ModRM|Mov,
    ByteOp|DstMem|SrcNone|ModRM|Mov, ByteOp|DstMem|SrcNone|ModRM|Mov,
    ByteOp|DstMem|SrcNone|ModRM|Mov, ByteOp|DstMem|SrcNone|ModRM|Mov,
    ByteOp|DstMem|SrcNone|ModRM|Mov, ByteOp|DstMem|SrcNone|ModRM|Mov,
    /* 0xA0 - 0xA7 */
    ImplicitOps, ImplicitOps, ImplicitOps, DstBitBase|SrcReg|ModRM,
    0, 0, 0, 0,
    /* 0xA8 - 0xAF */
    ImplicitOps, ImplicitOps, 0, DstBitBase|SrcReg|ModRM,
    0, 0, 0, DstReg|SrcMem|ModRM,
    /* 0xB0 - 0xB7 */
    ByteOp|DstMem|SrcReg|ModRM, DstMem|SrcReg|ModRM,
    DstReg|SrcMem|ModRM|Mov, DstBitBase|SrcReg|ModRM,
    DstReg|SrcMem|ModRM|Mov, DstReg|SrcMem|ModRM|Mov,
    ByteOp|DstReg|SrcMem|ModRM|Mov, DstReg|SrcMem16|ModRM|Mov,
    /* 0xB8 - 0xBF */
    0, 0, DstBitBase|SrcImmByte|ModRM, DstBitBase|SrcReg|ModRM,
    DstReg|SrcMem|ModRM, DstReg|SrcMem|ModRM,
    ByteOp|DstReg|SrcMem|ModRM|Mov, DstReg|SrcMem16|ModRM|Mov,
    /* 0xC0 - 0xC7 */
    ByteOp|DstMem|SrcReg|ModRM, DstMem|SrcReg|ModRM, 0, 0,
    0, 0, 0, ImplicitOps|ModRM,
    /* 0xC8 - 0xCF */
    ImplicitOps, ImplicitOps, ImplicitOps, ImplicitOps,
    ImplicitOps, ImplicitOps, ImplicitOps, ImplicitOps,
    /* 0xD0 - 0xDF */
    0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
    /* 0xE0 - 0xEF */
    0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
    /* 0xF0 - 0xFF */
    0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0
};
/* Type, address-of, and value of an instruction's operand. */
struct operand {
    enum { OP_REG, OP_MEM, OP_IMM, OP_NONE } type;
    /* Operand width in bytes. */
    unsigned int  bytes;
    /* Operand value; NOTE(review): orig_val appears to snapshot the value
     * before modification — confirm against the writeback code later in
     * this file. */
    unsigned long val, orig_val;
    union {
        /* OP_REG: Pointer to register field. */
        unsigned long *reg;
        /* OP_MEM: Segment and offset. */
        struct {
            enum x86_segment seg;
            unsigned long off;
        } mem;
    };
};
/* MSRs. */
#define MSR_TSC   0x10 /* time-stamp counter MSR index */

/* Control register flags. */
#define CR0_PE    (1<<0) /* protected-mode enable */
#define CR4_TSD   (1<<2) /* RDTSC restricted to ring 0 when set */

/* EFLAGS bit definitions. */
#define EFLG_VIP  (1<<20)
#define EFLG_VIF  (1<<19)
#define EFLG_AC   (1<<18)
#define EFLG_VM   (1<<17)
#define EFLG_RF   (1<<16)
#define EFLG_NT   (1<<14)
#define EFLG_IOPL (3<<12)
#define EFLG_OF   (1<<11)
#define EFLG_DF   (1<<10)
#define EFLG_IF   (1<<9)
#define EFLG_TF   (1<<8)
#define EFLG_SF   (1<<7)
#define EFLG_ZF   (1<<6)
#define EFLG_AF   (1<<4)
#define EFLG_PF   (1<<2)
#define EFLG_CF   (1<<0)

/* Exception definitions (x86 vector numbers). */
#define EXC_DE  0
#define EXC_DB  1
#define EXC_BP  3
#define EXC_OF  4
#define EXC_BR  5
#define EXC_UD  6
#define EXC_GP 13
308 /*
309 * Instruction emulation:
310 * Most instructions are emulated directly via a fragment of inline assembly
311 * code. This allows us to save/restore EFLAGS and thus very easily pick up
312 * any modified flags.
313 */
/* Width-dependent strings spliced into the inline-asm templates below. */
#if defined(__x86_64__)
#define _LO32 "k"          /* force 32-bit operand */
#define _STK  "%%rsp"      /* stack pointer */
#define _BYTES_PER_LONG "8"
#elif defined(__i386__)
#define _LO32 ""           /* force 32-bit operand */
#define _STK  "%%esp"      /* stack pointer */
#define _BYTES_PER_LONG "4"
#endif
/*
 * These EFLAGS bits are restored from saved value during emulation, and
 * any changes are written back to the saved value after emulation.
 */
#define EFLAGS_MASK (EFLG_OF|EFLG_SF|EFLG_ZF|EFLG_AF|EFLG_PF|EFLG_CF)

/*
 * Before executing instruction: restore necessary bits in EFLAGS.
 * Splices the masked bits of the saved guest flags (_sav) into the host
 * EFLAGS via the stack, so the emulating asm fragment starts from the
 * guest's arithmetic-flag state. _tmp names a scratch register operand.
 */
#define _PRE_EFLAGS(_sav, _msk, _tmp)                           \
/* EFLAGS = (_sav & _msk) | (EFLAGS & ~_msk); _sav &= ~_msk; */ \
"movl %"_sav",%"_LO32 _tmp"; "                                  \
"push %"_tmp"; "                                                \
"push %"_tmp"; "                                                \
"movl %"_msk",%"_LO32 _tmp"; "                                  \
"andl %"_LO32 _tmp",("_STK"); "                                 \
"pushf; "                                                       \
"notl %"_LO32 _tmp"; "                                          \
"andl %"_LO32 _tmp",("_STK"); "                                 \
"andl %"_LO32 _tmp",2*"_BYTES_PER_LONG"("_STK"); "              \
"pop %"_tmp"; "                                                 \
"orl %"_LO32 _tmp",("_STK"); "                                  \
"popf; "                                                        \
"pop %"_sav"; "

/* After executing instruction: write-back necessary bits in EFLAGS. */
#define _POST_EFLAGS(_sav, _msk, _tmp)          \
/* _sav |= EFLAGS & _msk; */                    \
"pushf; "                                       \
"pop %"_tmp"; "                                 \
"andl %"_msk",%"_LO32 _tmp"; "                  \
"orl %"_LO32 _tmp",%"_sav"; "
/*
 * Raw emulation: instruction has two explicit operands. The asm operand
 * positions are fixed: %0=_eflags, %1=(_dst).val, %2=scratch, %3=(_src).val,
 * %4=EFLAGS_MASK — _PRE/_POST_EFLAGS reference them by number.
 */
#define __emulate_2op_nobyte(_op,_src,_dst,_eflags,_wx,_wy,_lx,_ly,_qx,_qy)\
do{ unsigned long _tmp;                                                    \
    switch ( (_dst).bytes )                                                \
    {                                                                      \
    case 2:                                                                \
        asm volatile (                                                     \
            _PRE_EFLAGS("0","4","2")                                       \
            _op"w %"_wx"3,%1; "                                            \
            _POST_EFLAGS("0","4","2")                                      \
            : "=m" (_eflags), "=m" ((_dst).val), "=&r" (_tmp)              \
            : _wy ((_src).val), "i" (EFLAGS_MASK),                         \
              "m" (_eflags), "m" ((_dst).val) );                           \
        break;                                                             \
    case 4:                                                                \
        asm volatile (                                                     \
            _PRE_EFLAGS("0","4","2")                                       \
            _op"l %"_lx"3,%1; "                                            \
            _POST_EFLAGS("0","4","2")                                      \
            : "=m" (_eflags), "=m" ((_dst).val), "=&r" (_tmp)              \
            : _ly ((_src).val), "i" (EFLAGS_MASK),                         \
              "m" (_eflags), "m" ((_dst).val) );                           \
        break;                                                             \
    case 8:                                                                \
        /* Quadword case; no-op on i386 (see __emulate_2op_8byte). */      \
        __emulate_2op_8byte(_op, _src, _dst, _eflags, _qx, _qy);           \
        break;                                                             \
    }                                                                      \
} while (0)
#define __emulate_2op(_op,_src,_dst,_eflags,_bx,_by,_wx,_wy,_lx,_ly,_qx,_qy)\
do{ unsigned long _tmp;                                                    \
    switch ( (_dst).bytes )                                                \
    {                                                                      \
    case 1:                                                                \
        asm volatile (                                                     \
            _PRE_EFLAGS("0","4","2")                                       \
            _op"b %"_bx"3,%1; "                                            \
            _POST_EFLAGS("0","4","2")                                      \
            : "=m" (_eflags), "=m" ((_dst).val), "=&r" (_tmp)              \
            : _by ((_src).val), "i" (EFLAGS_MASK),                         \
              "m" (_eflags), "m" ((_dst).val) );                           \
        break;                                                             \
    default:                                                               \
        __emulate_2op_nobyte(_op,_src,_dst,_eflags,_wx,_wy,_lx,_ly,_qx,_qy);\
        break;                                                             \
    }                                                                      \
} while (0)
/* Source operand is byte-sized and may be restricted to just %cl. */
#define emulate_2op_SrcB(_op, _src, _dst, _eflags)      \
    __emulate_2op(_op, _src, _dst, _eflags,             \
                  "b", "c", "b", "c", "b", "c", "b", "c")
/* Source operand is byte, word, long or quad sized. */
#define emulate_2op_SrcV(_op, _src, _dst, _eflags)      \
    __emulate_2op(_op, _src, _dst, _eflags,             \
                  "b", "q", "w", "r", _LO32, "r", "", "r")
/* Source operand is word, long or quad sized. */
#define emulate_2op_SrcV_nobyte(_op, _src, _dst, _eflags)       \
    __emulate_2op_nobyte(_op, _src, _dst, _eflags,              \
                         "w", "r", _LO32, "r", "", "r")
/*
 * Instruction has only one explicit operand (no source operand).
 * Operand positions: %0=_eflags, %1=(_dst).val, %2=scratch, %3=EFLAGS_MASK.
 */
#define emulate_1op(_op,_dst,_eflags)                                      \
do{ unsigned long _tmp;                                                    \
    switch ( (_dst).bytes )                                                \
    {                                                                      \
    case 1:                                                                \
        asm volatile (                                                     \
            _PRE_EFLAGS("0","3","2")                                       \
            _op"b %1; "                                                    \
            _POST_EFLAGS("0","3","2")                                      \
            : "=m" (_eflags), "=m" ((_dst).val), "=&r" (_tmp)              \
            : "i" (EFLAGS_MASK), "m" (_eflags), "m" ((_dst).val) );        \
        break;                                                             \
    case 2:                                                                \
        asm volatile (                                                     \
            _PRE_EFLAGS("0","3","2")                                       \
            _op"w %1; "                                                    \
            _POST_EFLAGS("0","3","2")                                      \
            : "=m" (_eflags), "=m" ((_dst).val), "=&r" (_tmp)              \
            : "i" (EFLAGS_MASK), "m" (_eflags), "m" ((_dst).val) );        \
        break;                                                             \
    case 4:                                                                \
        asm volatile (                                                     \
            _PRE_EFLAGS("0","3","2")                                       \
            _op"l %1; "                                                    \
            _POST_EFLAGS("0","3","2")                                      \
            : "=m" (_eflags), "=m" ((_dst).val), "=&r" (_tmp)              \
            : "i" (EFLAGS_MASK), "m" (_eflags), "m" ((_dst).val) );        \
        break;                                                             \
    case 8:                                                                \
        /* Quadword case; no-op on i386 (see __emulate_1op_8byte). */      \
        __emulate_1op_8byte(_op, _dst, _eflags);                           \
        break;                                                             \
    }                                                                      \
} while (0)
/* Emulate an instruction with quadword operands (x86/64 only). */
#if defined(__x86_64__)
#define __emulate_2op_8byte(_op, _src, _dst, _eflags, _qx, _qy)         \
do{ asm volatile (                                                      \
        _PRE_EFLAGS("0","4","2")                                        \
        _op"q %"_qx"3,%1; "                                             \
        _POST_EFLAGS("0","4","2")                                       \
        : "=m" (_eflags), "=m" ((_dst).val), "=&r" (_tmp)               \
        : _qy ((_src).val), "i" (EFLAGS_MASK),                          \
          "m" (_eflags), "m" ((_dst).val) );                            \
} while (0)
#define __emulate_1op_8byte(_op, _dst, _eflags)                         \
do{ asm volatile (                                                      \
        _PRE_EFLAGS("0","3","2")                                        \
        _op"q %1; "                                                     \
        _POST_EFLAGS("0","3","2")                                       \
        : "=m" (_eflags), "=m" ((_dst).val), "=&r" (_tmp)               \
        : "i" (EFLAGS_MASK), "m" (_eflags), "m" ((_dst).val) );         \
} while (0)
#elif defined(__i386__)
/* 8-byte operands cannot occur on i386: expand to nothing. */
#define __emulate_2op_8byte(_op, _src, _dst, _eflags, _qx, _qy)
#define __emulate_1op_8byte(_op, _dst, _eflags)
#endif /* __i386__ */
/*
 * Fetch next part of the instruction being emulated. Evaluates to the
 * fetched value; on failure sets 'rc' and jumps to the enclosing
 * function's 'done' label. Raises #GP if the instruction would exceed
 * the architectural 15-byte limit.
 */
#define insn_fetch_bytes(_size)                                         \
({ unsigned long _x, _eip = _regs.eip;                                  \
   if ( !mode_64bit() ) _eip = (uint32_t)_eip; /* ignore upper dword */ \
   _regs.eip += (_size); /* real hardware doesn't truncate */           \
   generate_exception_if((uint8_t)(_regs.eip - ctxt->regs->eip) > 15,   \
                         EXC_GP);                                       \
   rc = ops->insn_fetch(x86_seg_cs, _eip, &_x, (_size), ctxt);          \
   if ( rc ) goto done;                                                 \
   _x;                                                                  \
})
#define insn_fetch_type(_type) ((_type)insn_fetch_bytes(sizeof(_type)))

/* Mask an effective address down to the given address width. */
#define _truncate_ea(ea, byte_width)                                    \
({ unsigned long __ea = (ea);                                           \
   unsigned int _width = (byte_width);                                  \
   ((_width == sizeof(unsigned long)) ? __ea :                          \
    (__ea & ((1UL << (_width << 3)) - 1)));                             \
})
#define truncate_ea(ea) _truncate_ea((ea), ad_bytes)
/* 64-bit mode iff the default address size is 8 bytes. */
#define mode_64bit() (def_ad_bytes == 8)

/* Abort emulation as unhandleable if @p holds; uses local 'rc'/'done'. */
#define fail_if(p)                                      \
do {                                                    \
    rc = (p) ? X86EMUL_UNHANDLEABLE : X86EMUL_OKAY;     \
    if ( rc ) goto done;                                \
} while (0)

/* Inject hardware exception vector @e if @p holds, then exit emulation. */
#define generate_exception_if(p, e)                                      \
({  if ( (p) ) {                                                         \
        fail_if(ops->inject_hw_exception == NULL);                       \
        rc = ops->inject_hw_exception(e, ctxt) ? : X86EMUL_EXCEPTION;    \
        goto done;                                                       \
    }                                                                    \
})
/* Given byte has even parity (even number of 1s)? */
static int even_parity(uint8_t v)
{
    /* Fold the parity of all eight bits down into bit 0. */
    v ^= v >> 4;
    v ^= v >> 2;
    v ^= v >> 1;
    /* Bit 0 now holds 1 for an odd population count: invert for parity. */
    return !(v & 1);
}
/*
 * Update address held in a register, based on addressing mode: the low
 * 'byte_width' bytes wrap, and (outside 64-bit mode) the high bytes are
 * preserved.
 */
#define _register_address_increment(reg, inc, byte_width)               \
do {                                                                    \
    int _inc = (inc); /* signed type ensures sign extension to long */  \
    unsigned int _width = (byte_width);                                 \
    if ( _width == sizeof(unsigned long) )                              \
        (reg) += _inc;                                                  \
    else if ( mode_64bit() )                                            \
        (reg) = ((reg) + _inc) & ((1UL << (_width << 3)) - 1);          \
    else                                                                \
        (reg) = ((reg) & ~((1UL << (_width << 3)) - 1)) |               \
                (((reg) + _inc) & ((1UL << (_width << 3)) - 1));        \
} while (0)
#define register_address_increment(reg, inc) \
    _register_address_increment((reg), (inc), ad_bytes)

/* Push: decrement ESP by @dec, then yield the new (truncated) ESP. */
#define sp_pre_dec(dec) ({                                              \
    _register_address_increment(_regs.esp, -(dec), ctxt->sp_size/8);    \
    _truncate_ea(_regs.esp, ctxt->sp_size/8);                           \
})
/* Pop: yield the current (truncated) ESP, then increment it by @inc. */
#define sp_post_inc(inc) ({                                             \
    unsigned long __esp = _truncate_ea(_regs.esp, ctxt->sp_size/8);     \
    _register_address_increment(_regs.esp, (inc), ctxt->sp_size/8);     \
    __esp;                                                              \
})

/* Relative jump: add @rel to EIP, truncating to op_bytes outside 64-bit. */
#define jmp_rel(rel)                                                    \
do {                                                                    \
    _regs.eip += (int)(rel);                                            \
    if ( !mode_64bit() )                                                \
        _regs.eip = ((op_bytes == 2)                                    \
                     ? (uint16_t)_regs.eip : (uint32_t)_regs.eip);      \
} while (0)
553 static int __handle_rep_prefix(
554 struct cpu_user_regs *int_regs,
555 struct cpu_user_regs *ext_regs,
556 int ad_bytes)
557 {
558 unsigned long ecx = ((ad_bytes == 2) ? (uint16_t)int_regs->ecx :
559 (ad_bytes == 4) ? (uint32_t)int_regs->ecx :
560 int_regs->ecx);
562 if ( ecx-- == 0 )
563 {
564 ext_regs->eip = int_regs->eip;
565 return 1;
566 }
568 if ( ad_bytes == 2 )
569 *(uint16_t *)&int_regs->ecx = ecx;
570 else if ( ad_bytes == 4 )
571 int_regs->ecx = (uint32_t)ecx;
572 else
573 int_regs->ecx = ecx;
574 int_regs->eip = ext_regs->eip;
575 return 0;
576 }
/*
 * Apply REP bookkeeping if a rep prefix was decoded; exits emulation
 * (via the enclosing 'done' label) once the count is exhausted.
 */
#define handle_rep_prefix()                                                \
do {                                                                       \
    if ( rep_prefix && __handle_rep_prefix(&_regs, ctxt->regs, ad_bytes) ) \
        goto done;                                                         \
} while (0)
/*
 * Unsigned multiplication with double-word result.
 * IN:  Multiplicand=m[0], Multiplier=m[1]
 * OUT: Return CF/OF (overflow status); Result=m[1]:m[0]
 * (Uses the one-operand MUL, which produces a full double-width product
 * in eDX:eAX; SETO captures the overflow flag.)
 */
static int mul_dbl(unsigned long m[2])
{
    int rc;
    asm ( "mul %4; seto %b2"
          : "=a" (m[0]), "=d" (m[1]), "=q" (rc)
          : "0" (m[0]), "1" (m[1]), "2" (0) );
    return rc;
}
/*
 * Signed multiplication with double-word result.
 * IN:  Multiplicand=m[0], Multiplier=m[1]
 * OUT: Return CF/OF (overflow status); Result=m[1]:m[0]
 * (One-operand IMUL: full double-width signed product in eDX:eAX.)
 */
static int imul_dbl(unsigned long m[2])
{
    int rc;
    asm ( "imul %4; seto %b2"
          : "=a" (m[0]), "=d" (m[1]), "=q" (rc)
          : "0" (m[0]), "1" (m[1]), "2" (0) );
    return rc;
}
/*
 * Unsigned division of double-word dividend.
 * IN:  Dividend=u[1]:u[0], Divisor=v
 * OUT: Return 1: #DE
 *      Return 0: Quotient=u[0], Remainder=u[1]
 */
static int div_dbl(unsigned long u[2], unsigned long v)
{
    /*
     * DIV faults (#DE) on divide-by-zero or when the quotient will not
     * fit in one word (i.e. high word >= divisor). Check up front so the
     * hypervisor never actually takes the fault.
     */
    if ( (v == 0) || (u[1] >= v) )
        return 1;
    asm ( "div %4"
          : "=a" (u[0]), "=d" (u[1])
          : "0" (u[0]), "1" (u[1]), "r" (v) );
    return 0;
}
/*
 * Signed division of double-word dividend.
 * IN:  Dividend=u[1]:u[0], Divisor=v
 * OUT: Return 1: #DE
 *      Return 0: Quotient=u[0], Remainder=u[1]
 * NB. We don't use idiv directly as it's moderately hard to work out
 * ahead of time whether it will #DE, which we cannot allow to happen.
 */
static int idiv_dbl(unsigned long u[2], unsigned long v)
{
    int negu = (long)u[1] < 0, negv = (long)v < 0;

    /* u = abs(u): two's-complement negation of the double-word pair. */
    if ( negu )
    {
        u[1] = ~u[1];
        /* Carry propagates into the high word only when the low word is 0. */
        if ( (u[0] = -u[0]) == 0 )
            u[1]++;
    }

    /* abs(u) / abs(v) */
    if ( div_dbl(u, negv ? -v : v) )
        return 1;

    /* Remainder has same sign as dividend. It cannot overflow. */
    if ( negu )
        u[1] = -u[1];

    /* Quotient is overflowed if sign bit is set. */
    if ( negu ^ negv )
    {
        /* Result should be negative: negate, unless already at minimum. */
        if ( (long)u[0] >= 0 )
            u[0] = -u[0];
        else if ( (u[0] << 1) != 0 ) /* == 0x80...0 is okay */
            return 1;
    }
    else if ( (long)u[0] < 0 )
        return 1;

    return 0;
}
670 static int
671 test_cc(
672 unsigned int condition, unsigned int flags)
673 {
674 int rc = 0;
676 switch ( (condition & 15) >> 1 )
677 {
678 case 0: /* o */
679 rc |= (flags & EFLG_OF);
680 break;
681 case 1: /* b/c/nae */
682 rc |= (flags & EFLG_CF);
683 break;
684 case 2: /* z/e */
685 rc |= (flags & EFLG_ZF);
686 break;
687 case 3: /* be/na */
688 rc |= (flags & (EFLG_CF|EFLG_ZF));
689 break;
690 case 4: /* s */
691 rc |= (flags & EFLG_SF);
692 break;
693 case 5: /* p/pe */
694 rc |= (flags & EFLG_PF);
695 break;
696 case 7: /* le/ng */
697 rc |= (flags & EFLG_ZF);
698 /* fall through */
699 case 6: /* l/nge */
700 rc |= (!(flags & EFLG_SF) != !(flags & EFLG_OF));
701 break;
702 }
704 /* Odd condition identifiers (lsb == 1) have inverted sense. */
705 return (!!rc ^ (condition & 1));
706 }
708 static int
709 get_cpl(
710 struct x86_emulate_ctxt *ctxt,
711 struct x86_emulate_ops *ops)
712 {
713 struct segment_register reg;
715 if ( ctxt->regs->eflags & EFLG_VM )
716 return 3;
718 if ( (ops->read_segment == NULL) ||
719 ops->read_segment(x86_seg_ss, &reg, ctxt) )
720 return -1;
722 return reg.attr.fields.dpl;
723 }
/*
 * Does the current privilege level permit I/O-sensitive operations?
 * True iff CPL could be determined and CPL <= IOPL (EFLAGS bits 12-13).
 */
static int
_mode_iopl(
    struct x86_emulate_ctxt *ctxt,
    struct x86_emulate_ops *ops)
{
    int cpl = get_cpl(ctxt, ops);
    return ((cpl >= 0) && (cpl <= ((ctxt->regs->eflags >> 12) & 3)));
}

/* Convenience wrappers for use inside x86_emulate() (capture ctxt/ops). */
#define mode_ring0() (get_cpl(ctxt, ops) == 0)
#define mode_iopl()  _mode_iopl(ctxt, ops)
737 static int
738 in_realmode(
739 struct x86_emulate_ctxt *ctxt,
740 struct x86_emulate_ops *ops)
741 {
742 unsigned long cr0;
743 int rc;
745 if ( ops->read_cr == NULL )
746 return 0;
748 rc = ops->read_cr(0, &cr0, ctxt);
749 return (!rc && !(cr0 & CR0_PE));
750 }
/*
 * Load segment register @seg with selector @sel. Only real-mode loads
 * are handled here (base is simply sel<<4); protected-mode descriptor
 * loads are left to the caller as unhandleable.
 * Returns an X86EMUL_* status code.
 */
static int
load_seg(
    enum x86_segment seg,
    uint16_t sel,
    struct x86_emulate_ctxt *ctxt,
    struct x86_emulate_ops *ops)
{
    struct segment_register reg;
    int rc;

    if ( !in_realmode(ctxt, ops) ||
         (ops->read_segment == NULL) ||
         (ops->write_segment == NULL) )
        return X86EMUL_UNHANDLEABLE;

    /* Start from current register state, updating only selector/base. */
    if ( (rc = ops->read_segment(seg, &reg, ctxt)) != 0 )
        return rc;

    reg.sel  = sel;
    reg.base = (uint32_t)sel << 4; /* real-mode segment base */

    return ops->write_segment(seg, &reg, ctxt);
}
/*
 * Map a ModRM reg/rm field number to a pointer into the register file.
 * When @highbyte_regs is set, fields 4-7 select the legacy high-byte
 * registers AH/CH/DH/BH (byte 1 of eAX/eCX/eDX/eBX) instead of
 * eSP/eBP/eSI/eDI. Fields 8-15 (REX-extended) exist only on x86-64.
 * Returns NULL for an out-of-range field.
 */
void *
decode_register(
    uint8_t modrm_reg, struct cpu_user_regs *regs, int highbyte_regs)
{
    void *p;

    switch ( modrm_reg )
    {
    case  0: p = &regs->eax; break;
    case  1: p = &regs->ecx; break;
    case  2: p = &regs->edx; break;
    case  3: p = &regs->ebx; break;
    case  4: p = (highbyte_regs ?
                  ((unsigned char *)&regs->eax + 1) : /* AH */
                  (unsigned char *)&regs->esp); break;
    case  5: p = (highbyte_regs ?
                  ((unsigned char *)&regs->ecx + 1) : /* CH */
                  (unsigned char *)&regs->ebp); break;
    case  6: p = (highbyte_regs ?
                  ((unsigned char *)&regs->edx + 1) : /* DH */
                  (unsigned char *)&regs->esi); break;
    case  7: p = (highbyte_regs ?
                  ((unsigned char *)&regs->ebx + 1) : /* BH */
                  (unsigned char *)&regs->edi); break;
#if defined(__x86_64__)
    case  8: p = &regs->r8;  break;
    case  9: p = &regs->r9;  break;
    case 10: p = &regs->r10; break;
    case 11: p = &regs->r11; break;
    case 12: p = &regs->r12; break;
    case 13: p = &regs->r13; break;
    case 14: p = &regs->r14; break;
    case 15: p = &regs->r15; break;
#endif
    default: p = NULL; break;
    }

    return p;
}
816 #define decode_segment_failed x86_seg_tr
817 enum x86_segment
818 decode_segment(
819 uint8_t modrm_reg)
820 {
821 switch ( modrm_reg )
822 {
823 case 0: return x86_seg_es;
824 case 1: return x86_seg_cs;
825 case 2: return x86_seg_ss;
826 case 3: return x86_seg_ds;
827 case 4: return x86_seg_fs;
828 case 5: return x86_seg_gs;
829 default: break;
830 }
831 return decode_segment_failed;
832 }
834 int
835 x86_emulate(
836 struct x86_emulate_ctxt *ctxt,
837 struct x86_emulate_ops *ops)
838 {
839 /* Shadow copy of register state. Committed on successful emulation. */
840 struct cpu_user_regs _regs = *ctxt->regs;
842 uint8_t b, d, sib, sib_index, sib_base, twobyte = 0, rex_prefix = 0;
843 uint8_t modrm, modrm_mod = 0, modrm_reg = 0, modrm_rm = 0;
844 unsigned int op_bytes, def_op_bytes, ad_bytes, def_ad_bytes;
845 #define REPE_PREFIX 1
846 #define REPNE_PREFIX 2
847 unsigned int lock_prefix = 0, rep_prefix = 0;
848 int override_seg = -1, rc = X86EMUL_OKAY;
849 struct operand src, dst;
851 /* Data operand effective address (usually computed from ModRM). */
852 struct operand ea;
854 /* Default is a memory operand relative to segment DS. */
855 ea.type = OP_MEM;
856 ea.mem.seg = x86_seg_ds;
857 ea.mem.off = 0;
859 op_bytes = def_op_bytes = ad_bytes = def_ad_bytes = ctxt->addr_size/8;
860 if ( op_bytes == 8 )
861 {
862 op_bytes = def_op_bytes = 4;
863 #ifndef __x86_64__
864 return X86EMUL_UNHANDLEABLE;
865 #endif
866 }
868 /* Prefix bytes. */
869 for ( ; ; )
870 {
871 switch ( b = insn_fetch_type(uint8_t) )
872 {
873 case 0x66: /* operand-size override */
874 op_bytes = def_op_bytes ^ 6;
875 break;
876 case 0x67: /* address-size override */
877 ad_bytes = def_ad_bytes ^ (mode_64bit() ? 12 : 6);
878 break;
879 case 0x2e: /* CS override */
880 override_seg = x86_seg_cs;
881 break;
882 case 0x3e: /* DS override */
883 override_seg = x86_seg_ds;
884 break;
885 case 0x26: /* ES override */
886 override_seg = x86_seg_es;
887 break;
888 case 0x64: /* FS override */
889 override_seg = x86_seg_fs;
890 break;
891 case 0x65: /* GS override */
892 override_seg = x86_seg_gs;
893 break;
894 case 0x36: /* SS override */
895 override_seg = x86_seg_ss;
896 break;
897 case 0xf0: /* LOCK */
898 lock_prefix = 1;
899 break;
900 case 0xf2: /* REPNE/REPNZ */
901 rep_prefix = REPNE_PREFIX;
902 break;
903 case 0xf3: /* REP/REPE/REPZ */
904 rep_prefix = REPE_PREFIX;
905 break;
906 case 0x40 ... 0x4f: /* REX */
907 if ( !mode_64bit() )
908 goto done_prefixes;
909 rex_prefix = b;
910 continue;
911 default:
912 goto done_prefixes;
913 }
915 /* Any legacy prefix after a REX prefix nullifies its effect. */
916 rex_prefix = 0;
917 }
918 done_prefixes:
920 if ( rex_prefix & 8 ) /* REX.W */
921 op_bytes = 8;
923 /* Opcode byte(s). */
924 d = opcode_table[b];
925 if ( d == 0 )
926 {
927 /* Two-byte opcode? */
928 if ( b == 0x0f )
929 {
930 twobyte = 1;
931 b = insn_fetch_type(uint8_t);
932 d = twobyte_table[b];
933 }
935 /* Unrecognised? */
936 if ( d == 0 )
937 goto cannot_emulate;
938 }
940 /* Lock prefix is allowed only on RMW instructions. */
941 generate_exception_if((d & Mov) && lock_prefix, EXC_GP);
943 /* ModRM and SIB bytes. */
944 if ( d & ModRM )
945 {
946 modrm = insn_fetch_type(uint8_t);
947 modrm_mod = (modrm & 0xc0) >> 6;
948 modrm_reg = ((rex_prefix & 4) << 1) | ((modrm & 0x38) >> 3);
949 modrm_rm = modrm & 0x07;
951 if ( modrm_mod == 3 )
952 {
953 modrm_rm |= (rex_prefix & 1) << 3;
954 ea.type = OP_REG;
955 ea.reg = decode_register(
956 modrm_rm, &_regs, (d & ByteOp) && (rex_prefix == 0));
957 }
958 else if ( ad_bytes == 2 )
959 {
960 /* 16-bit ModR/M decode. */
961 switch ( modrm_rm )
962 {
963 case 0:
964 ea.mem.off = _regs.ebx + _regs.esi;
965 break;
966 case 1:
967 ea.mem.off = _regs.ebx + _regs.edi;
968 break;
969 case 2:
970 ea.mem.seg = x86_seg_ss;
971 ea.mem.off = _regs.ebp + _regs.esi;
972 break;
973 case 3:
974 ea.mem.seg = x86_seg_ss;
975 ea.mem.off = _regs.ebp + _regs.edi;
976 break;
977 case 4:
978 ea.mem.off = _regs.esi;
979 break;
980 case 5:
981 ea.mem.off = _regs.edi;
982 break;
983 case 6:
984 if ( modrm_mod == 0 )
985 break;
986 ea.mem.seg = x86_seg_ss;
987 ea.mem.off = _regs.ebp;
988 break;
989 case 7:
990 ea.mem.off = _regs.ebx;
991 break;
992 }
993 switch ( modrm_mod )
994 {
995 case 0:
996 if ( modrm_rm == 6 )
997 ea.mem.off = insn_fetch_type(int16_t);
998 break;
999 case 1:
1000 ea.mem.off += insn_fetch_type(int8_t);
1001 break;
1002 case 2:
1003 ea.mem.off += insn_fetch_type(int16_t);
1004 break;
1006 ea.mem.off = truncate_ea(ea.mem.off);
1008 else
1010 /* 32/64-bit ModR/M decode. */
1011 if ( modrm_rm == 4 )
1013 sib = insn_fetch_type(uint8_t);
1014 sib_index = ((sib >> 3) & 7) | ((rex_prefix << 2) & 8);
1015 sib_base = (sib & 7) | ((rex_prefix << 3) & 8);
1016 if ( sib_index != 4 )
1017 ea.mem.off = *(long*)decode_register(sib_index, &_regs, 0);
1018 ea.mem.off <<= (sib >> 6) & 3;
1019 if ( (modrm_mod == 0) && ((sib_base & 7) == 5) )
1020 ea.mem.off += insn_fetch_type(int32_t);
1021 else if ( sib_base == 4 )
1023 ea.mem.seg = x86_seg_ss;
1024 ea.mem.off += _regs.esp;
1025 if ( !twobyte && (b == 0x8f) )
1026 /* POP <rm> computes its EA post increment. */
1027 ea.mem.off += ((mode_64bit() && (op_bytes == 4))
1028 ? 8 : op_bytes);
1030 else if ( sib_base == 5 )
1032 ea.mem.seg = x86_seg_ss;
1033 ea.mem.off += _regs.ebp;
1035 else
1036 ea.mem.off += *(long*)decode_register(sib_base, &_regs, 0);
1038 else
1040 modrm_rm |= (rex_prefix & 1) << 3;
1041 ea.mem.off = *(long *)decode_register(modrm_rm, &_regs, 0);
1042 if ( (modrm_rm == 5) && (modrm_mod != 0) )
1043 ea.mem.seg = x86_seg_ss;
1045 switch ( modrm_mod )
1047 case 0:
1048 if ( (modrm_rm & 7) != 5 )
1049 break;
1050 ea.mem.off = insn_fetch_type(int32_t);
1051 if ( !mode_64bit() )
1052 break;
1053 /* Relative to RIP of next instruction. Argh! */
1054 ea.mem.off += _regs.eip;
1055 if ( (d & SrcMask) == SrcImm )
1056 ea.mem.off += (d & ByteOp) ? 1 :
1057 ((op_bytes == 8) ? 4 : op_bytes);
1058 else if ( (d & SrcMask) == SrcImmByte )
1059 ea.mem.off += 1;
1060 else if ( ((b == 0xf6) || (b == 0xf7)) &&
1061 ((modrm_reg & 7) <= 1) )
1062 /* Special case in Grp3: test has immediate operand. */
1063 ea.mem.off += (d & ByteOp) ? 1
1064 : ((op_bytes == 8) ? 4 : op_bytes);
1065 break;
1066 case 1:
1067 ea.mem.off += insn_fetch_type(int8_t);
1068 break;
1069 case 2:
1070 ea.mem.off += insn_fetch_type(int32_t);
1071 break;
1073 ea.mem.off = truncate_ea(ea.mem.off);
1077 if ( override_seg != -1 )
1078 ea.mem.seg = override_seg;
1080 /* Special instructions do their own operand decoding. */
1081 if ( (d & DstMask) == ImplicitOps )
1082 goto special_insn;
1084 /* Decode and fetch the source operand: register, memory or immediate. */
1085 switch ( d & SrcMask )
1087 case SrcNone:
1088 break;
1089 case SrcReg:
1090 src.type = OP_REG;
1091 if ( d & ByteOp )
1093 src.reg = decode_register(modrm_reg, &_regs, (rex_prefix == 0));
1094 src.val = *(uint8_t *)src.reg;
1095 src.bytes = 1;
1097 else
1099 src.reg = decode_register(modrm_reg, &_regs, 0);
1100 switch ( (src.bytes = op_bytes) )
1102 case 2: src.val = *(uint16_t *)src.reg; break;
1103 case 4: src.val = *(uint32_t *)src.reg; break;
1104 case 8: src.val = *(uint64_t *)src.reg; break;
1107 break;
1108 case SrcMem16:
1109 ea.bytes = 2;
1110 goto srcmem_common;
1111 case SrcMem:
1112 ea.bytes = (d & ByteOp) ? 1 : op_bytes;
1113 srcmem_common:
1114 src = ea;
1115 if ( src.type == OP_REG )
1117 switch ( src.bytes )
1119 case 1: src.val = *(uint8_t *)src.reg; break;
1120 case 2: src.val = *(uint16_t *)src.reg; break;
1121 case 4: src.val = *(uint32_t *)src.reg; break;
1122 case 8: src.val = *(uint64_t *)src.reg; break;
1125 else if ( (rc = ops->read(src.mem.seg, src.mem.off,
1126 &src.val, src.bytes, ctxt)) )
1127 goto done;
1128 break;
1129 case SrcImm:
1130 src.type = OP_IMM;
1131 src.bytes = (d & ByteOp) ? 1 : op_bytes;
1132 if ( src.bytes == 8 ) src.bytes = 4;
1133 /* NB. Immediates are sign-extended as necessary. */
1134 switch ( src.bytes )
1136 case 1: src.val = insn_fetch_type(int8_t); break;
1137 case 2: src.val = insn_fetch_type(int16_t); break;
1138 case 4: src.val = insn_fetch_type(int32_t); break;
1140 break;
1141 case SrcImmByte:
1142 src.type = OP_IMM;
1143 src.bytes = 1;
1144 src.val = insn_fetch_type(int8_t);
1145 break;
1148 /* Decode and fetch the destination operand: register or memory. */
1149 switch ( d & DstMask )
1151 case DstReg:
1152 dst.type = OP_REG;
1153 if ( d & ByteOp )
1155 dst.reg = decode_register(modrm_reg, &_regs, (rex_prefix == 0));
1156 dst.val = *(uint8_t *)dst.reg;
1157 dst.bytes = 1;
1159 else
1161 dst.reg = decode_register(modrm_reg, &_regs, 0);
1162 switch ( (dst.bytes = op_bytes) )
1164 case 2: dst.val = *(uint16_t *)dst.reg; break;
1165 case 4: dst.val = *(uint32_t *)dst.reg; break;
1166 case 8: dst.val = *(uint64_t *)dst.reg; break;
1169 break;
1170 case DstBitBase:
1171 if ( ((d & SrcMask) == SrcImmByte) || (ea.type == OP_REG) )
1173 src.val &= (op_bytes << 3) - 1;
1175 else
1177 /*
1178 * EA += BitOffset DIV op_bytes*8
1179 * BitOffset = BitOffset MOD op_bytes*8
1180 * DIV truncates towards negative infinity.
1181 * MOD always produces a positive result.
1182 */
1183 if ( op_bytes == 2 )
1184 src.val = (int16_t)src.val;
1185 else if ( op_bytes == 4 )
1186 src.val = (int32_t)src.val;
1187 if ( (long)src.val < 0 )
1189 unsigned long byte_offset;
1190 byte_offset = op_bytes + (((-src.val-1) >> 3) & ~(op_bytes-1));
1191 ea.mem.off -= byte_offset;
1192 src.val = (byte_offset << 3) + src.val;
1194 else
1196 ea.mem.off += (src.val >> 3) & ~(op_bytes - 1);
1197 src.val &= (op_bytes << 3) - 1;
1200 /* Becomes a normal DstMem operation from here on. */
1201 d = (d & ~DstMask) | DstMem;
1202 case DstMem:
1203 ea.bytes = (d & ByteOp) ? 1 : op_bytes;
1204 dst = ea;
1205 if ( dst.type == OP_REG )
1207 switch ( dst.bytes )
1209 case 1: dst.val = *(uint8_t *)dst.reg; break;
1210 case 2: dst.val = *(uint16_t *)dst.reg; break;
1211 case 4: dst.val = *(uint32_t *)dst.reg; break;
1212 case 8: dst.val = *(uint64_t *)dst.reg; break;
1215 else if ( !(d & Mov) ) /* optimisation - avoid slow emulated read */
1217 if ( (rc = ops->read(dst.mem.seg, dst.mem.off,
1218 &dst.val, dst.bytes, ctxt)) )
1219 goto done;
1220 dst.orig_val = dst.val;
1222 break;
1225 /* LOCK prefix allowed only on instructions with memory destination. */
1226 generate_exception_if(lock_prefix && (dst.type != OP_MEM), EXC_GP);
1228 if ( twobyte )
1229 goto twobyte_insn;
1231 switch ( b )
1233 case 0x04 ... 0x05: /* add imm,%%eax */
1234 dst.reg = (unsigned long *)&_regs.eax;
1235 dst.val = _regs.eax;
1236 case 0x00 ... 0x03: add: /* add */
1237 emulate_2op_SrcV("add", src, dst, _regs.eflags);
1238 break;
1240 case 0x0c ... 0x0d: /* or imm,%%eax */
1241 dst.reg = (unsigned long *)&_regs.eax;
1242 dst.val = _regs.eax;
1243 case 0x08 ... 0x0b: or: /* or */
1244 emulate_2op_SrcV("or", src, dst, _regs.eflags);
1245 break;
1247 case 0x14 ... 0x15: /* adc imm,%%eax */
1248 dst.reg = (unsigned long *)&_regs.eax;
1249 dst.val = _regs.eax;
1250 case 0x10 ... 0x13: adc: /* adc */
1251 emulate_2op_SrcV("adc", src, dst, _regs.eflags);
1252 break;
1254 case 0x1c ... 0x1d: /* sbb imm,%%eax */
1255 dst.reg = (unsigned long *)&_regs.eax;
1256 dst.val = _regs.eax;
1257 case 0x18 ... 0x1b: sbb: /* sbb */
1258 emulate_2op_SrcV("sbb", src, dst, _regs.eflags);
1259 break;
1261 case 0x24 ... 0x25: /* and imm,%%eax */
1262 dst.reg = (unsigned long *)&_regs.eax;
1263 dst.val = _regs.eax;
1264 case 0x20 ... 0x23: and: /* and */
1265 emulate_2op_SrcV("and", src, dst, _regs.eflags);
1266 break;
1268 case 0x2c ... 0x2d: /* sub imm,%%eax */
1269 dst.reg = (unsigned long *)&_regs.eax;
1270 dst.val = _regs.eax;
1271 case 0x28 ... 0x2b: sub: /* sub */
1272 emulate_2op_SrcV("sub", src, dst, _regs.eflags);
1273 break;
1275 case 0x34 ... 0x35: /* xor imm,%%eax */
1276 dst.reg = (unsigned long *)&_regs.eax;
1277 dst.val = _regs.eax;
1278 case 0x30 ... 0x33: xor: /* xor */
1279 emulate_2op_SrcV("xor", src, dst, _regs.eflags);
1280 break;
1282 case 0x3c ... 0x3d: /* cmp imm,%%eax */
1283 dst.reg = (unsigned long *)&_regs.eax;
1284 dst.val = _regs.eax;
1285 case 0x38 ... 0x3b: cmp: /* cmp */
1286 emulate_2op_SrcV("cmp", src, dst, _regs.eflags);
1287 break;
1289 case 0x62: /* bound */ {
1290 unsigned long src_val2;
1291 int lb, ub, idx;
1292 generate_exception_if(mode_64bit() || (src.type != OP_MEM), EXC_UD);
1293 if ( (rc = ops->read(src.mem.seg, src.mem.off + op_bytes,
1294 &src_val2, op_bytes, ctxt)) )
1295 goto done;
1296 ub = (op_bytes == 2) ? (int16_t)src_val2 : (int32_t)src_val2;
1297 lb = (op_bytes == 2) ? (int16_t)src.val : (int32_t)src.val;
1298 idx = (op_bytes == 2) ? (int16_t)dst.val : (int32_t)dst.val;
1299 generate_exception_if((idx < lb) || (idx > ub), EXC_BR);
1300 dst.type = OP_NONE;
1301 break;
1304 case 0x63: /* movsxd (x86/64) / arpl (x86/32) */
1305 if ( mode_64bit() )
1307 /* movsxd */
1308 if ( src.type == OP_REG )
1309 src.val = *(int32_t *)src.reg;
1310 else if ( (rc = ops->read(src.mem.seg, src.mem.off,
1311 &src.val, 4, ctxt)) )
1312 goto done;
1313 dst.val = (int32_t)src.val;
1315 else
1317 /* arpl */
1318 uint16_t src_val = dst.val;
1319 dst = src;
1320 _regs.eflags &= ~EFLG_ZF;
1321 _regs.eflags |= ((src_val & 3) > (dst.val & 3)) ? EFLG_ZF : 0;
1322 if ( _regs.eflags & EFLG_ZF )
1323 dst.val = (dst.val & ~3) | (src_val & 3);
1324 else
1325 dst.type = OP_NONE;
1326 generate_exception_if(in_realmode(ctxt, ops), EXC_UD);
1328 break;
1330 case 0x69: /* imul imm16/32 */
1331 case 0x6b: /* imul imm8 */ {
1332 unsigned long reg = *(long *)decode_register(modrm_reg, &_regs, 0);
1333 _regs.eflags &= ~(EFLG_OF|EFLG_CF);
1334 switch ( dst.bytes )
1336 case 2:
1337 dst.val = ((uint32_t)(int16_t)src.val *
1338 (uint32_t)(int16_t)reg);
1339 if ( (int16_t)dst.val != (uint32_t)dst.val )
1340 _regs.eflags |= EFLG_OF|EFLG_CF;
1341 break;
1342 #ifdef __x86_64__
1343 case 4:
1344 dst.val = ((uint64_t)(int32_t)src.val *
1345 (uint64_t)(int32_t)reg);
1346 if ( (int32_t)dst.val != dst.val )
1347 _regs.eflags |= EFLG_OF|EFLG_CF;
1348 break;
1349 #endif
1350 default: {
1351 unsigned long m[2] = { src.val, reg };
1352 if ( imul_dbl(m) )
1353 _regs.eflags |= EFLG_OF|EFLG_CF;
1354 dst.val = m[0];
1355 break;
1358 dst.type = OP_REG;
1359 dst.reg = decode_register(modrm_reg, &_regs, 0);
1360 break;
1363 case 0x82: /* Grp1 (x86/32 only) */
1364 generate_exception_if(mode_64bit(), EXC_UD);
1365 case 0x80: case 0x81: case 0x83: /* Grp1 */
1366 switch ( modrm_reg & 7 )
1368 case 0: goto add;
1369 case 1: goto or;
1370 case 2: goto adc;
1371 case 3: goto sbb;
1372 case 4: goto and;
1373 case 5: goto sub;
1374 case 6: goto xor;
1375 case 7: goto cmp;
1377 break;
1379 case 0xa8 ... 0xa9: /* test imm,%%eax */
1380 dst.reg = (unsigned long *)&_regs.eax;
1381 dst.val = _regs.eax;
1382 case 0x84 ... 0x85: test: /* test */
1383 emulate_2op_SrcV("test", src, dst, _regs.eflags);
1384 break;
1386 case 0x86 ... 0x87: xchg: /* xchg */
1387 /* Write back the register source. */
1388 switch ( dst.bytes )
1390 case 1: *(uint8_t *)src.reg = (uint8_t)dst.val; break;
1391 case 2: *(uint16_t *)src.reg = (uint16_t)dst.val; break;
1392 case 4: *src.reg = (uint32_t)dst.val; break; /* 64b reg: zero-extend */
1393 case 8: *src.reg = dst.val; break;
1395 /* Write back the memory destination with implicit LOCK prefix. */
1396 dst.val = src.val;
1397 lock_prefix = 1;
1398 break;
1400 case 0xc6 ... 0xc7: /* mov (sole member of Grp11) */
1401 generate_exception_if((modrm_reg & 7) != 0, EXC_UD);
1402 case 0x88 ... 0x8b: /* mov */
1403 dst.val = src.val;
1404 break;
1406 case 0x8c: /* mov Sreg,r/m */ {
1407 struct segment_register reg;
1408 enum x86_segment seg = decode_segment(modrm_reg);
1409 generate_exception_if(seg == decode_segment_failed, EXC_UD);
1410 fail_if(ops->read_segment == NULL);
1411 if ( (rc = ops->read_segment(seg, &reg, ctxt)) != 0 )
1412 goto done;
1413 dst.val = reg.sel;
1414 if ( dst.type == OP_MEM )
1415 dst.bytes = 2;
1416 break;
1419 case 0x8e: /* mov r/m,Sreg */ {
1420 enum x86_segment seg = decode_segment(modrm_reg);
1421 generate_exception_if(seg == decode_segment_failed, EXC_UD);
1422 if ( (rc = load_seg(seg, (uint16_t)src.val, ctxt, ops)) != 0 )
1423 goto done;
1424 dst.type = OP_NONE;
1425 break;
1428 case 0x8d: /* lea */
1429 dst.val = ea.mem.off;
1430 break;
1432 case 0x8f: /* pop (sole member of Grp1a) */
1433 generate_exception_if((modrm_reg & 7) != 0, EXC_UD);
1434 /* 64-bit mode: POP defaults to a 64-bit operand. */
1435 if ( mode_64bit() && (dst.bytes == 4) )
1436 dst.bytes = 8;
1437 if ( (rc = ops->read(x86_seg_ss, sp_post_inc(dst.bytes),
1438 &dst.val, dst.bytes, ctxt)) != 0 )
1439 goto done;
1440 break;
1442 case 0xb0 ... 0xb7: /* mov imm8,r8 */
1443 dst.reg = decode_register(
1444 (b & 7) | ((rex_prefix & 1) << 3), &_regs, (rex_prefix == 0));
1445 dst.val = src.val;
1446 break;
1448 case 0xb8 ... 0xbf: /* mov imm{16,32,64},r{16,32,64} */
1449 if ( dst.bytes == 8 ) /* Fetch more bytes to obtain imm64 */
1450 src.val = ((uint32_t)src.val |
1451 ((uint64_t)insn_fetch_type(uint32_t) << 32));
1452 dst.reg = decode_register(
1453 (b & 7) | ((rex_prefix & 1) << 3), &_regs, 0);
1454 dst.val = src.val;
1455 break;
1457 case 0xc0 ... 0xc1: grp2: /* Grp2 */
1458 switch ( modrm_reg & 7 )
1460 case 0: /* rol */
1461 emulate_2op_SrcB("rol", src, dst, _regs.eflags);
1462 break;
1463 case 1: /* ror */
1464 emulate_2op_SrcB("ror", src, dst, _regs.eflags);
1465 break;
1466 case 2: /* rcl */
1467 emulate_2op_SrcB("rcl", src, dst, _regs.eflags);
1468 break;
1469 case 3: /* rcr */
1470 emulate_2op_SrcB("rcr", src, dst, _regs.eflags);
1471 break;
1472 case 4: /* sal/shl */
1473 case 6: /* sal/shl */
1474 emulate_2op_SrcB("sal", src, dst, _regs.eflags);
1475 break;
1476 case 5: /* shr */
1477 emulate_2op_SrcB("shr", src, dst, _regs.eflags);
1478 break;
1479 case 7: /* sar */
1480 emulate_2op_SrcB("sar", src, dst, _regs.eflags);
1481 break;
1483 break;
1485 case 0xc4: /* les */ {
1486 unsigned long sel;
1487 dst.val = x86_seg_es;
1488 les:
1489 generate_exception_if(src.type != OP_MEM, EXC_UD);
1490 if ( (rc = ops->read(src.mem.seg, src.mem.off + src.bytes,
1491 &sel, 2, ctxt)) != 0 )
1492 goto done;
1493 if ( (rc = load_seg(dst.val, (uint16_t)sel, ctxt, ops)) != 0 )
1494 goto done;
1495 dst.val = src.val;
1496 break;
1499 case 0xc5: /* lds */
1500 dst.val = x86_seg_ds;
1501 goto les;
1503 case 0xd0 ... 0xd1: /* Grp2 */
1504 src.val = 1;
1505 goto grp2;
1507 case 0xd2 ... 0xd3: /* Grp2 */
1508 src.val = _regs.ecx;
1509 goto grp2;
1511 case 0xf6 ... 0xf7: /* Grp3 */
1512 switch ( modrm_reg & 7 )
1514 case 0 ... 1: /* test */
1515 /* Special case in Grp3: test has an immediate source operand. */
1516 src.type = OP_IMM;
1517 src.bytes = (d & ByteOp) ? 1 : op_bytes;
1518 if ( src.bytes == 8 ) src.bytes = 4;
1519 switch ( src.bytes )
1521 case 1: src.val = insn_fetch_type(int8_t); break;
1522 case 2: src.val = insn_fetch_type(int16_t); break;
1523 case 4: src.val = insn_fetch_type(int32_t); break;
1525 goto test;
1526 case 2: /* not */
1527 dst.val = ~dst.val;
1528 break;
1529 case 3: /* neg */
1530 emulate_1op("neg", dst, _regs.eflags);
1531 break;
1532 case 4: /* mul */
1533 src = dst;
1534 dst.type = OP_REG;
1535 dst.reg = (unsigned long *)&_regs.eax;
1536 dst.val = *dst.reg;
1537 _regs.eflags &= ~(EFLG_OF|EFLG_CF);
1538 switch ( src.bytes )
1540 case 1:
1541 dst.val *= src.val;
1542 if ( (uint8_t)dst.val != (uint16_t)dst.val )
1543 _regs.eflags |= EFLG_OF|EFLG_CF;
1544 break;
1545 case 2:
1546 dst.val *= src.val;
1547 if ( (uint16_t)dst.val != (uint32_t)dst.val )
1548 _regs.eflags |= EFLG_OF|EFLG_CF;
1549 *(uint16_t *)&_regs.edx = dst.val >> 16;
1550 break;
1551 #ifdef __x86_64__
1552 case 4:
1553 dst.val *= src.val;
1554 if ( (uint32_t)dst.val != dst.val )
1555 _regs.eflags |= EFLG_OF|EFLG_CF;
1556 _regs.edx = (uint32_t)(dst.val >> 32);
1557 break;
1558 #endif
1559 default: {
1560 unsigned long m[2] = { src.val, dst.val };
1561 if ( mul_dbl(m) )
1562 _regs.eflags |= EFLG_OF|EFLG_CF;
1563 _regs.edx = m[1];
1564 dst.val = m[0];
1565 break;
1568 break;
1569 case 5: /* imul */
1570 src = dst;
1571 dst.type = OP_REG;
1572 dst.reg = (unsigned long *)&_regs.eax;
1573 dst.val = *dst.reg;
1574 _regs.eflags &= ~(EFLG_OF|EFLG_CF);
1575 switch ( src.bytes )
1577 case 1:
1578 dst.val = ((uint16_t)(int8_t)src.val *
1579 (uint16_t)(int8_t)dst.val);
1580 if ( (int8_t)dst.val != (uint16_t)dst.val )
1581 _regs.eflags |= EFLG_OF|EFLG_CF;
1582 break;
1583 case 2:
1584 dst.val = ((uint32_t)(int16_t)src.val *
1585 (uint32_t)(int16_t)dst.val);
1586 if ( (int16_t)dst.val != (uint32_t)dst.val )
1587 _regs.eflags |= EFLG_OF|EFLG_CF;
1588 *(uint16_t *)&_regs.edx = dst.val >> 16;
1589 break;
1590 #ifdef __x86_64__
1591 case 4:
1592 dst.val = ((uint64_t)(int32_t)src.val *
1593 (uint64_t)(int32_t)dst.val);
1594 if ( (int32_t)dst.val != dst.val )
1595 _regs.eflags |= EFLG_OF|EFLG_CF;
1596 _regs.edx = (uint32_t)(dst.val >> 32);
1597 break;
1598 #endif
1599 default: {
1600 unsigned long m[2] = { src.val, dst.val };
1601 if ( imul_dbl(m) )
1602 _regs.eflags |= EFLG_OF|EFLG_CF;
1603 _regs.edx = m[1];
1604 dst.val = m[0];
1605 break;
1608 break;
1609 case 6: /* div */ {
1610 unsigned long u[2], v;
1611 src = dst;
1612 dst.type = OP_REG;
1613 dst.reg = (unsigned long *)&_regs.eax;
1614 switch ( src.bytes )
1616 case 1:
1617 u[0] = (uint16_t)_regs.eax;
1618 u[1] = 0;
1619 v = (uint8_t)src.val;
1620 generate_exception_if(
1621 div_dbl(u, v) || ((uint8_t)u[0] != (uint16_t)u[0]),
1622 EXC_DE);
1623 dst.val = (uint8_t)u[0];
1624 ((uint8_t *)&_regs.eax)[1] = u[1];
1625 break;
1626 case 2:
1627 u[0] = ((uint32_t)_regs.edx << 16) | (uint16_t)_regs.eax;
1628 u[1] = 0;
1629 v = (uint16_t)src.val;
1630 generate_exception_if(
1631 div_dbl(u, v) || ((uint16_t)u[0] != (uint32_t)u[0]),
1632 EXC_DE);
1633 dst.val = (uint16_t)u[0];
1634 *(uint16_t *)&_regs.edx = u[1];
1635 break;
1636 #ifdef __x86_64__
1637 case 4:
1638 u[0] = (_regs.edx << 32) | (uint32_t)_regs.eax;
1639 u[1] = 0;
1640 v = (uint32_t)src.val;
1641 generate_exception_if(
1642 div_dbl(u, v) || ((uint32_t)u[0] != u[0]),
1643 EXC_DE);
1644 dst.val = (uint32_t)u[0];
1645 _regs.edx = (uint32_t)u[1];
1646 break;
1647 #endif
1648 default:
1649 u[0] = _regs.eax;
1650 u[1] = _regs.edx;
1651 v = src.val;
1652 generate_exception_if(div_dbl(u, v), EXC_DE);
1653 dst.val = u[0];
1654 _regs.edx = u[1];
1655 break;
1657 break;
1659 case 7: /* idiv */ {
1660 unsigned long u[2], v;
1661 src = dst;
1662 dst.type = OP_REG;
1663 dst.reg = (unsigned long *)&_regs.eax;
1664 switch ( src.bytes )
1666 case 1:
1667 u[0] = (int16_t)_regs.eax;
1668 u[1] = ((long)u[0] < 0) ? ~0UL : 0UL;
1669 v = (int8_t)src.val;
1670 generate_exception_if(
1671 idiv_dbl(u, v) || ((int8_t)u[0] != (int16_t)u[0]),
1672 EXC_DE);
1673 dst.val = (int8_t)u[0];
1674 ((int8_t *)&_regs.eax)[1] = u[1];
1675 break;
1676 case 2:
1677 u[0] = (int32_t)((_regs.edx << 16) | (uint16_t)_regs.eax);
1678 u[1] = ((long)u[0] < 0) ? ~0UL : 0UL;
1679 v = (int16_t)src.val;
1680 generate_exception_if(
1681 idiv_dbl(u, v) || ((int16_t)u[0] != (int32_t)u[0]),
1682 EXC_DE);
1683 dst.val = (int16_t)u[0];
1684 *(int16_t *)&_regs.edx = u[1];
1685 break;
1686 #ifdef __x86_64__
1687 case 4:
1688 u[0] = (_regs.edx << 32) | (uint32_t)_regs.eax;
1689 u[1] = ((long)u[0] < 0) ? ~0UL : 0UL;
1690 v = (int32_t)src.val;
1691 generate_exception_if(
1692 idiv_dbl(u, v) || ((int32_t)u[0] != u[0]),
1693 EXC_DE);
1694 dst.val = (int32_t)u[0];
1695 _regs.edx = (uint32_t)u[1];
1696 break;
1697 #endif
1698 default:
1699 u[0] = _regs.eax;
1700 u[1] = _regs.edx;
1701 v = src.val;
1702 generate_exception_if(idiv_dbl(u, v), EXC_DE);
1703 dst.val = u[0];
1704 _regs.edx = u[1];
1705 break;
1707 break;
1709 default:
1710 goto cannot_emulate;
1712 break;
1714 case 0xfe: /* Grp4 */
1715 generate_exception_if((modrm_reg & 7) >= 2, EXC_UD);
1716 case 0xff: /* Grp5 */
1717 switch ( modrm_reg & 7 )
1719 case 0: /* inc */
1720 emulate_1op("inc", dst, _regs.eflags);
1721 break;
1722 case 1: /* dec */
1723 emulate_1op("dec", dst, _regs.eflags);
1724 break;
1725 case 2: /* call (near) */
1726 case 4: /* jmp (near) */
1727 if ( ((op_bytes = dst.bytes) != 8) && mode_64bit() )
1729 dst.bytes = op_bytes = 8;
1730 if ( dst.type == OP_REG )
1731 dst.val = *dst.reg;
1732 else if ( (rc = ops->read(dst.mem.seg, dst.mem.off,
1733 &dst.val, 8, ctxt)) != 0 )
1734 goto done;
1736 src.val = _regs.eip;
1737 _regs.eip = dst.val;
1738 if ( (modrm_reg & 7) == 2 )
1739 goto push; /* call */
1740 break;
1741 case 3: /* call (far, absolute indirect) */
1742 case 5: /* jmp (far, absolute indirect) */ {
1743 unsigned long sel, eip = dst.val;
1745 if ( (rc = ops->read(dst.mem.seg, dst.mem.off+dst.bytes,
1746 &sel, 2, ctxt)) )
1747 goto done;
1749 if ( (modrm_reg & 7) == 3 ) /* call */
1751 struct segment_register reg;
1752 fail_if(ops->read_segment == NULL);
1753 if ( (rc = ops->read_segment(x86_seg_cs, &reg, ctxt)) ||
1754 (rc = ops->write(x86_seg_ss, sp_pre_dec(op_bytes),
1755 reg.sel, op_bytes, ctxt)) ||
1756 (rc = ops->write(x86_seg_ss, sp_pre_dec(op_bytes),
1757 _regs.eip, op_bytes, ctxt)) )
1758 goto done;
1761 if ( (rc = load_seg(x86_seg_cs, sel, ctxt, ops)) != 0 )
1762 goto done;
1763 _regs.eip = eip;
1765 dst.type = OP_NONE;
1766 break;
1768 case 6: /* push */
1769 /* 64-bit mode: PUSH defaults to a 64-bit operand. */
1770 if ( mode_64bit() && (dst.bytes == 4) )
1772 dst.bytes = 8;
1773 if ( dst.type == OP_REG )
1774 dst.val = *dst.reg;
1775 else if ( (rc = ops->read(dst.mem.seg, dst.mem.off,
1776 &dst.val, 8, ctxt)) != 0 )
1777 goto done;
1779 if ( (rc = ops->write(x86_seg_ss, sp_pre_dec(dst.bytes),
1780 dst.val, dst.bytes, ctxt)) != 0 )
1781 goto done;
1782 dst.type = OP_NONE;
1783 break;
1784 case 7:
1785 generate_exception_if(1, EXC_UD);
1786 default:
1787 goto cannot_emulate;
1789 break;
1792 writeback:
1793 switch ( dst.type )
1795 case OP_REG:
1796 /* The 4-byte case *is* correct: in 64-bit mode we zero-extend. */
1797 switch ( dst.bytes )
1799 case 1: *(uint8_t *)dst.reg = (uint8_t)dst.val; break;
1800 case 2: *(uint16_t *)dst.reg = (uint16_t)dst.val; break;
1801 case 4: *dst.reg = (uint32_t)dst.val; break; /* 64b: zero-ext */
1802 case 8: *dst.reg = dst.val; break;
1804 break;
1805 case OP_MEM:
1806 if ( !(d & Mov) && (dst.orig_val == dst.val) )
1807 /* nothing to do */;
1808 else if ( lock_prefix )
1809 rc = ops->cmpxchg(
1810 dst.mem.seg, dst.mem.off, dst.orig_val,
1811 dst.val, dst.bytes, ctxt);
1812 else
1813 rc = ops->write(
1814 dst.mem.seg, dst.mem.off, dst.val, dst.bytes, ctxt);
1815 if ( rc != 0 )
1816 goto done;
1817 default:
1818 break;
1821 /* Commit shadow register state. */
1822 _regs.eflags &= ~EFLG_RF;
1823 *ctxt->regs = _regs;
1825 if ( (_regs.eflags & EFLG_TF) &&
1826 (rc == X86EMUL_OKAY) &&
1827 (ops->inject_hw_exception != NULL) )
1828 rc = ops->inject_hw_exception(EXC_DB, ctxt) ? : X86EMUL_EXCEPTION;
1830 done:
1831 return rc;
1833 special_insn:
1834 dst.type = OP_NONE;
1836 /*
1837 * The only implicit-operands instructions allowed a LOCK prefix are
1838 * CMPXCHG{8,16}B, MOV CRn, MOV DRn.
1839 */
1840 generate_exception_if(lock_prefix &&
1841 ((b < 0x20) || (b > 0x23)) && /* MOV CRn/DRn */
1842 (b != 0xc7), /* CMPXCHG{8,16}B */
1843 EXC_GP);
1845 if ( twobyte )
1846 goto twobyte_special_insn;
1848 switch ( b )
1850 case 0x06: /* push %%es */ {
1851 struct segment_register reg;
1852 src.val = x86_seg_es;
1853 push_seg:
1854 fail_if(ops->read_segment == NULL);
1855 if ( (rc = ops->read_segment(src.val, &reg, ctxt)) != 0 )
1856 return rc;
1857 /* 64-bit mode: PUSH defaults to a 64-bit operand. */
1858 if ( mode_64bit() && (op_bytes == 4) )
1859 op_bytes = 8;
1860 if ( (rc = ops->write(x86_seg_ss, sp_pre_dec(op_bytes),
1861 reg.sel, op_bytes, ctxt)) != 0 )
1862 goto done;
1863 break;
1866 case 0x07: /* pop %%es */
1867 src.val = x86_seg_es;
1868 pop_seg:
1869 fail_if(ops->write_segment == NULL);
1870 /* 64-bit mode: PUSH defaults to a 64-bit operand. */
1871 if ( mode_64bit() && (op_bytes == 4) )
1872 op_bytes = 8;
1873 if ( (rc = ops->read(x86_seg_ss, sp_post_inc(op_bytes),
1874 &dst.val, op_bytes, ctxt)) != 0 )
1875 goto done;
1876 if ( (rc = load_seg(src.val, (uint16_t)dst.val, ctxt, ops)) != 0 )
1877 return rc;
1878 break;
1880 case 0x0e: /* push %%cs */
1881 src.val = x86_seg_cs;
1882 goto push_seg;
1884 case 0x16: /* push %%ss */
1885 src.val = x86_seg_ss;
1886 goto push_seg;
1888 case 0x17: /* pop %%ss */
1889 src.val = x86_seg_ss;
1890 goto pop_seg;
1892 case 0x1e: /* push %%ds */
1893 src.val = x86_seg_ds;
1894 goto push_seg;
1896 case 0x1f: /* pop %%ds */
1897 src.val = x86_seg_ds;
1898 goto pop_seg;
1900 case 0x27: /* daa */ {
1901 uint8_t al = _regs.eax;
1902 unsigned long eflags = _regs.eflags;
1903 generate_exception_if(mode_64bit(), EXC_UD);
1904 _regs.eflags &= ~(EFLG_CF|EFLG_AF);
1905 if ( ((al & 0x0f) > 9) || (eflags & EFLG_AF) )
1907 *(uint8_t *)&_regs.eax += 6;
1908 _regs.eflags |= EFLG_AF;
1910 if ( (al > 0x99) || (eflags & EFLG_CF) )
1912 *(uint8_t *)&_regs.eax += 0x60;
1913 _regs.eflags |= EFLG_CF;
1915 _regs.eflags &= ~(EFLG_SF|EFLG_ZF|EFLG_PF);
1916 _regs.eflags |= ((uint8_t)_regs.eax == 0) ? EFLG_ZF : 0;
1917 _regs.eflags |= (( int8_t)_regs.eax < 0) ? EFLG_SF : 0;
1918 _regs.eflags |= even_parity(_regs.eax) ? EFLG_PF : 0;
1919 break;
1922 case 0x2f: /* das */ {
1923 uint8_t al = _regs.eax;
1924 unsigned long eflags = _regs.eflags;
1925 generate_exception_if(mode_64bit(), EXC_UD);
1926 _regs.eflags &= ~(EFLG_CF|EFLG_AF);
1927 if ( ((al & 0x0f) > 9) || (eflags & EFLG_AF) )
1929 _regs.eflags |= EFLG_AF;
1930 if ( (al < 6) || (eflags & EFLG_CF) )
1931 _regs.eflags |= EFLG_CF;
1932 *(uint8_t *)&_regs.eax -= 6;
1934 if ( (al > 0x99) || (eflags & EFLG_CF) )
1936 *(uint8_t *)&_regs.eax -= 0x60;
1937 _regs.eflags |= EFLG_CF;
1939 _regs.eflags &= ~(EFLG_SF|EFLG_ZF|EFLG_PF);
1940 _regs.eflags |= ((uint8_t)_regs.eax == 0) ? EFLG_ZF : 0;
1941 _regs.eflags |= (( int8_t)_regs.eax < 0) ? EFLG_SF : 0;
1942 _regs.eflags |= even_parity(_regs.eax) ? EFLG_PF : 0;
1943 break;
1946 case 0x37: /* aaa */
1947 case 0x3f: /* aas */
1948 generate_exception_if(mode_64bit(), EXC_UD);
1949 _regs.eflags &= ~EFLG_CF;
1950 if ( ((uint8_t)_regs.eax > 9) || (_regs.eflags & EFLG_AF) )
1952 ((uint8_t *)&_regs.eax)[0] += (b == 0x37) ? 6 : -6;
1953 ((uint8_t *)&_regs.eax)[1] += (b == 0x37) ? 1 : -1;
1954 _regs.eflags |= EFLG_CF | EFLG_AF;
1956 ((uint8_t *)&_regs.eax)[0] &= 0x0f;
1957 break;
1959 case 0x40 ... 0x4f: /* inc/dec reg */
1960 dst.type = OP_REG;
1961 dst.reg = decode_register(b & 7, &_regs, 0);
1962 dst.bytes = op_bytes;
1963 dst.val = *dst.reg;
1964 if ( b & 8 )
1965 emulate_1op("dec", dst, _regs.eflags);
1966 else
1967 emulate_1op("inc", dst, _regs.eflags);
1968 break;
1970 case 0x50 ... 0x57: /* push reg */
1971 src.val = *(unsigned long *)decode_register(
1972 (b & 7) | ((rex_prefix & 1) << 3), &_regs, 0);
1973 goto push;
1975 case 0x58 ... 0x5f: /* pop reg */
1976 dst.type = OP_REG;
1977 dst.reg = decode_register(
1978 (b & 7) | ((rex_prefix & 1) << 3), &_regs, 0);
1979 dst.bytes = op_bytes;
1980 if ( mode_64bit() && (dst.bytes == 4) )
1981 dst.bytes = 8;
1982 if ( (rc = ops->read(x86_seg_ss, sp_post_inc(dst.bytes),
1983 &dst.val, dst.bytes, ctxt)) != 0 )
1984 goto done;
1985 break;
1987 case 0x60: /* pusha */ {
1988 int i;
1989 unsigned long regs[] = {
1990 _regs.eax, _regs.ecx, _regs.edx, _regs.ebx,
1991 _regs.esp, _regs.ebp, _regs.esi, _regs.edi };
1992 generate_exception_if(mode_64bit(), EXC_UD);
1993 for ( i = 0; i < 8; i++ )
1994 if ( (rc = ops->write(x86_seg_ss, sp_pre_dec(op_bytes),
1995 regs[i], op_bytes, ctxt)) != 0 )
1996 goto done;
1997 break;
2000 case 0x61: /* popa */ {
2001 int i;
2002 unsigned long dummy_esp, *regs[] = {
2003 (unsigned long *)&_regs.edi, (unsigned long *)&_regs.esi,
2004 (unsigned long *)&_regs.ebp, (unsigned long *)&dummy_esp,
2005 (unsigned long *)&_regs.ebx, (unsigned long *)&_regs.edx,
2006 (unsigned long *)&_regs.ecx, (unsigned long *)&_regs.eax };
2007 generate_exception_if(mode_64bit(), EXC_UD);
2008 for ( i = 0; i < 8; i++ )
2009 if ( (rc = ops->read(x86_seg_ss, sp_post_inc(op_bytes),
2010 regs[i], op_bytes, ctxt)) != 0 )
2011 goto done;
2012 break;
2015 case 0x68: /* push imm{16,32,64} */
2016 src.val = ((op_bytes == 2)
2017 ? (int32_t)insn_fetch_type(int16_t)
2018 : insn_fetch_type(int32_t));
2019 goto push;
2021 case 0x6a: /* push imm8 */
2022 src.val = insn_fetch_type(int8_t);
2023 push:
2024 d |= Mov; /* force writeback */
2025 dst.type = OP_MEM;
2026 dst.bytes = op_bytes;
2027 if ( mode_64bit() && (dst.bytes == 4) )
2028 dst.bytes = 8;
2029 dst.val = src.val;
2030 dst.mem.seg = x86_seg_ss;
2031 dst.mem.off = sp_pre_dec(dst.bytes);
2032 break;
2034 case 0x6c ... 0x6d: /* ins %dx,%es:%edi */
2035 handle_rep_prefix();
2036 generate_exception_if(!mode_iopl(), EXC_GP);
2037 dst.type = OP_MEM;
2038 dst.bytes = !(b & 1) ? 1 : (op_bytes == 8) ? 4 : op_bytes;
2039 dst.mem.seg = x86_seg_es;
2040 dst.mem.off = truncate_ea(_regs.edi);
2041 fail_if(ops->read_io == NULL);
2042 if ( (rc = ops->read_io((uint16_t)_regs.edx, dst.bytes,
2043 &dst.val, ctxt)) != 0 )
2044 goto done;
2045 register_address_increment(
2046 _regs.edi, (_regs.eflags & EFLG_DF) ? -dst.bytes : dst.bytes);
2047 break;
2049 case 0x6e ... 0x6f: /* outs %esi,%dx */
2050 handle_rep_prefix();
2051 generate_exception_if(!mode_iopl(), EXC_GP);
2052 dst.bytes = !(b & 1) ? 1 : (op_bytes == 8) ? 4 : op_bytes;
2053 if ( (rc = ops->read(ea.mem.seg, truncate_ea(_regs.esi),
2054 &dst.val, dst.bytes, ctxt)) != 0 )
2055 goto done;
2056 fail_if(ops->write_io == NULL);
2057 if ( (rc = ops->write_io((uint16_t)_regs.edx, dst.bytes,
2058 dst.val, ctxt)) != 0 )
2059 goto done;
2060 register_address_increment(
2061 _regs.esi, (_regs.eflags & EFLG_DF) ? -dst.bytes : dst.bytes);
2062 break;
2064 case 0x70 ... 0x7f: /* jcc (short) */ {
2065 int rel = insn_fetch_type(int8_t);
2066 if ( test_cc(b, _regs.eflags) )
2067 jmp_rel(rel);
2068 break;
2071 case 0x90: /* nop / xchg %%r8,%%rax */
2072 if ( !(rex_prefix & 1) )
2073 break; /* nop */
2075 case 0x91 ... 0x97: /* xchg reg,%%rax */
2076 src.type = dst.type = OP_REG;
2077 src.bytes = dst.bytes = op_bytes;
2078 src.reg = (unsigned long *)&_regs.eax;
2079 src.val = *src.reg;
2080 dst.reg = decode_register(
2081 (b & 7) | ((rex_prefix & 1) << 3), &_regs, 0);
2082 dst.val = *dst.reg;
2083 goto xchg;
2085 case 0x98: /* cbw/cwde/cdqe */
2086 switch ( op_bytes )
2088 case 2: *(int16_t *)&_regs.eax = (int8_t)_regs.eax; break; /* cbw */
2089 case 4: _regs.eax = (uint32_t)(int16_t)_regs.eax; break; /* cwde */
2090 case 8: _regs.eax = (int32_t)_regs.eax; break; /* cdqe */
2092 break;
2094 case 0x99: /* cwd/cdq/cqo */
2095 switch ( op_bytes )
2097 case 2:
2098 *(int16_t *)&_regs.edx = ((int16_t)_regs.eax < 0) ? -1 : 0;
2099 break;
2100 case 4:
2101 _regs.edx = (uint32_t)(((int32_t)_regs.eax < 0) ? -1 : 0);
2102 break;
2103 case 8:
2104 _regs.edx = (_regs.eax < 0) ? -1 : 0;
2105 break;
2107 break;
2109 case 0x9a: /* call (far, absolute) */ {
2110 struct segment_register reg;
2111 uint16_t sel;
2112 uint32_t eip;
2114 fail_if(ops->read_segment == NULL);
2115 generate_exception_if(mode_64bit(), EXC_UD);
2117 eip = insn_fetch_bytes(op_bytes);
2118 sel = insn_fetch_type(uint16_t);
2120 if ( (rc = ops->read_segment(x86_seg_cs, &reg, ctxt)) ||
2121 (rc = ops->write(x86_seg_ss, sp_pre_dec(op_bytes),
2122 reg.sel, op_bytes, ctxt)) ||
2123 (rc = ops->write(x86_seg_ss, sp_pre_dec(op_bytes),
2124 _regs.eip, op_bytes, ctxt)) )
2125 goto done;
2127 if ( (rc = load_seg(x86_seg_cs, sel, ctxt, ops)) != 0 )
2128 goto done;
2129 _regs.eip = eip;
2130 break;
2133 case 0x9c: /* pushf */
2134 src.val = _regs.eflags;
2135 goto push;
2137 case 0x9d: /* popf */ {
2138 uint32_t mask = EFLG_VIP | EFLG_VIF | EFLG_VM;
2139 if ( !mode_iopl() )
2140 mask |= EFLG_IOPL;
2141 fail_if(ops->write_rflags == NULL);
2142 /* 64-bit mode: POP defaults to a 64-bit operand. */
2143 if ( mode_64bit() && (op_bytes == 4) )
2144 op_bytes = 8;
2145 if ( (rc = ops->read(x86_seg_ss, sp_post_inc(op_bytes),
2146 &dst.val, op_bytes, ctxt)) != 0 )
2147 goto done;
2148 if ( op_bytes == 2 )
2149 dst.val = (uint16_t)dst.val | (_regs.eflags & 0xffff0000u);
2150 dst.val &= 0x257fd5;
2151 _regs.eflags &= mask;
2152 _regs.eflags |= (uint32_t)(dst.val & ~mask) | 0x02;
2153 if ( (rc = ops->write_rflags(_regs.eflags, ctxt)) != 0 )
2154 goto done;
2155 break;
2158 case 0x9e: /* sahf */
2159 *(uint8_t *)_regs.eflags = (((uint8_t *)&_regs.eax)[1] & 0xd7) | 0x02;
2160 break;
2162 case 0x9f: /* lahf */
2163 ((uint8_t *)&_regs.eax)[1] = (_regs.eflags & 0xd7) | 0x02;
2164 break;
2166 case 0xa0 ... 0xa1: /* mov mem.offs,{%al,%ax,%eax,%rax} */
2167 /* Source EA is not encoded via ModRM. */
2168 dst.type = OP_REG;
2169 dst.reg = (unsigned long *)&_regs.eax;
2170 dst.bytes = (d & ByteOp) ? 1 : op_bytes;
2171 if ( (rc = ops->read(ea.mem.seg, insn_fetch_bytes(ad_bytes),
2172 &dst.val, dst.bytes, ctxt)) != 0 )
2173 goto done;
2174 break;
2176 case 0xa2 ... 0xa3: /* mov {%al,%ax,%eax,%rax},mem.offs */
2177 /* Destination EA is not encoded via ModRM. */
2178 dst.type = OP_MEM;
2179 dst.mem.seg = ea.mem.seg;
2180 dst.mem.off = insn_fetch_bytes(ad_bytes);
2181 dst.bytes = (d & ByteOp) ? 1 : op_bytes;
2182 dst.val = (unsigned long)_regs.eax;
2183 break;
2185 case 0xa4 ... 0xa5: /* movs */
2186 handle_rep_prefix();
2187 dst.type = OP_MEM;
2188 dst.bytes = (d & ByteOp) ? 1 : op_bytes;
2189 dst.mem.seg = x86_seg_es;
2190 dst.mem.off = truncate_ea(_regs.edi);
2191 if ( (rc = ops->read(ea.mem.seg, truncate_ea(_regs.esi),
2192 &dst.val, dst.bytes, ctxt)) != 0 )
2193 goto done;
2194 register_address_increment(
2195 _regs.esi, (_regs.eflags & EFLG_DF) ? -dst.bytes : dst.bytes);
2196 register_address_increment(
2197 _regs.edi, (_regs.eflags & EFLG_DF) ? -dst.bytes : dst.bytes);
2198 break;
2200 case 0xa6 ... 0xa7: /* cmps */ {
2201 unsigned long next_eip = _regs.eip;
2202 handle_rep_prefix();
2203 src.bytes = dst.bytes = (d & ByteOp) ? 1 : op_bytes;
2204 if ( (rc = ops->read(ea.mem.seg, truncate_ea(_regs.esi),
2205 &dst.val, dst.bytes, ctxt)) ||
2206 (rc = ops->read(x86_seg_es, truncate_ea(_regs.edi),
2207 &src.val, src.bytes, ctxt)) )
2208 goto done;
2209 register_address_increment(
2210 _regs.esi, (_regs.eflags & EFLG_DF) ? -dst.bytes : dst.bytes);
2211 register_address_increment(
2212 _regs.edi, (_regs.eflags & EFLG_DF) ? -src.bytes : src.bytes);
2213 /* cmp: dst - src ==> src=*%%edi,dst=*%%esi ==> *%%esi - *%%edi */
2214 emulate_2op_SrcV("cmp", src, dst, _regs.eflags);
2215 if ( ((rep_prefix == REPE_PREFIX) && !(_regs.eflags & EFLG_ZF)) ||
2216 ((rep_prefix == REPNE_PREFIX) && (_regs.eflags & EFLG_ZF)) )
2217 _regs.eip = next_eip;
2218 break;
2221 case 0xaa ... 0xab: /* stos */
2222 handle_rep_prefix();
2223 dst.type = OP_MEM;
2224 dst.bytes = (d & ByteOp) ? 1 : op_bytes;
2225 dst.mem.seg = x86_seg_es;
2226 dst.mem.off = truncate_ea(_regs.edi);
2227 dst.val = _regs.eax;
2228 register_address_increment(
2229 _regs.edi, (_regs.eflags & EFLG_DF) ? -dst.bytes : dst.bytes);
2230 break;
2232 case 0xac ... 0xad: /* lods */
2233 handle_rep_prefix();
2234 dst.type = OP_REG;
2235 dst.bytes = (d & ByteOp) ? 1 : op_bytes;
2236 dst.reg = (unsigned long *)&_regs.eax;
2237 if ( (rc = ops->read(ea.mem.seg, truncate_ea(_regs.esi),
2238 &dst.val, dst.bytes, ctxt)) != 0 )
2239 goto done;
2240 register_address_increment(
2241 _regs.esi, (_regs.eflags & EFLG_DF) ? -dst.bytes : dst.bytes);
2242 break;
2244 case 0xae ... 0xaf: /* scas */ {
2245 unsigned long next_eip = _regs.eip;
2246 handle_rep_prefix();
2247 src.bytes = dst.bytes = (d & ByteOp) ? 1 : op_bytes;
2248 dst.val = _regs.eax;
2249 if ( (rc = ops->read(x86_seg_es, truncate_ea(_regs.edi),
2250 &src.val, src.bytes, ctxt)) != 0 )
2251 goto done;
2252 register_address_increment(
2253 _regs.edi, (_regs.eflags & EFLG_DF) ? -src.bytes : src.bytes);
2254 /* cmp: dst - src ==> src=*%%edi,dst=%%eax ==> %%eax - *%%edi */
2255 emulate_2op_SrcV("cmp", src, dst, _regs.eflags);
2256 if ( ((rep_prefix == REPE_PREFIX) && !(_regs.eflags & EFLG_ZF)) ||
2257 ((rep_prefix == REPNE_PREFIX) && (_regs.eflags & EFLG_ZF)) )
2258 _regs.eip = next_eip;
2259 break;
2262 case 0xc2: /* ret imm16 (near) */
2263 case 0xc3: /* ret (near) */ {
2264 int offset = (b == 0xc2) ? insn_fetch_type(uint16_t) : 0;
2265 op_bytes = mode_64bit() ? 8 : op_bytes;
2266 if ( (rc = ops->read(x86_seg_ss, sp_post_inc(op_bytes + offset),
2267 &dst.val, op_bytes, ctxt)) != 0 )
2268 goto done;
2269 _regs.eip = dst.val;
2270 break;
2273 case 0xc8: /* enter imm16,imm8 */ {
2274 uint16_t size = insn_fetch_type(uint16_t);
2275 uint8_t depth = insn_fetch_type(uint8_t) & 31;
2276 int i;
2278 dst.type = OP_REG;
2279 dst.bytes = (mode_64bit() && (op_bytes == 4)) ? 8 : op_bytes;
2280 dst.reg = (unsigned long *)&_regs.ebp;
2281 if ( (rc = ops->write(x86_seg_ss, sp_pre_dec(dst.bytes),
2282 _regs.ebp, dst.bytes, ctxt)) )
2283 goto done;
2284 dst.val = _regs.esp;
2286 if ( depth > 0 )
2288 for ( i = 1; i < depth; i++ )
2290 unsigned long ebp, temp_data;
2291 ebp = _truncate_ea(_regs.ebp - i*dst.bytes, ctxt->sp_size/8);
2292 if ( (rc = ops->read(x86_seg_ss, ebp,
2293 &temp_data, dst.bytes, ctxt)) ||
2294 (rc = ops->write(x86_seg_ss, sp_pre_dec(dst.bytes),
2295 temp_data, dst.bytes, ctxt)) )
2296 goto done;
2298 if ( (rc = ops->write(x86_seg_ss, sp_pre_dec(dst.bytes),
2299 dst.val, dst.bytes, ctxt)) )
2300 goto done;
2303 sp_pre_dec(size);
2304 break;
2307 case 0xc9: /* leave */
2308 /* First writeback, to %%esp. */
2309 dst.type = OP_REG;
2310 dst.bytes = (mode_64bit() && (op_bytes == 4)) ? 8 : op_bytes;
2311 dst.reg = (unsigned long *)&_regs.esp;
2312 dst.val = _regs.ebp;
2314 /* Flush first writeback, since there is a second. */
2315 switch ( dst.bytes )
2317 case 1: *(uint8_t *)dst.reg = (uint8_t)dst.val; break;
2318 case 2: *(uint16_t *)dst.reg = (uint16_t)dst.val; break;
2319 case 4: *dst.reg = (uint32_t)dst.val; break; /* 64b: zero-ext */
2320 case 8: *dst.reg = dst.val; break;
2323 /* Second writeback, to %%ebp. */
2324 dst.reg = (unsigned long *)&_regs.ebp;
2325 if ( (rc = ops->read(x86_seg_ss, sp_post_inc(dst.bytes),
2326 &dst.val, dst.bytes, ctxt)) )
2327 goto done;
2328 break;
2330 case 0xca: /* ret imm16 (far) */
2331 case 0xcb: /* ret (far) */ {
2332 int offset = (b == 0xca) ? insn_fetch_type(uint16_t) : 0;
2333 op_bytes = mode_64bit() ? 8 : op_bytes;
2334 if ( (rc = ops->read(x86_seg_ss, sp_post_inc(op_bytes),
2335 &dst.val, op_bytes, ctxt)) ||
2336 (rc = ops->read(x86_seg_ss, sp_post_inc(op_bytes + offset),
2337 &src.val, op_bytes, ctxt)) ||
2338 (rc = load_seg(x86_seg_cs, (uint16_t)src.val, ctxt, ops)) )
2339 goto done;
2340 _regs.eip = dst.val;
2341 break;
2344 case 0xcc: /* int3 */
2345 src.val = EXC_BP;
2346 goto swint;
2348 case 0xcd: /* int imm8 */
2349 src.val = insn_fetch_type(uint8_t);
2350 swint:
2351 fail_if(ops->inject_sw_interrupt == NULL);
2352 rc = ops->inject_sw_interrupt(src.val, _regs.eip - ctxt->regs->eip,
2353 ctxt) ? : X86EMUL_EXCEPTION;
2354 goto done;
2356 case 0xce: /* into */
2357 generate_exception_if(mode_64bit(), EXC_UD);
2358 if ( !(_regs.eflags & EFLG_OF) )
2359 break;
2360 src.val = EXC_OF;
2361 goto swint;
2363 case 0xcf: /* iret */ {
2364 unsigned long cs, eip, eflags;
2365 uint32_t mask = EFLG_VIP | EFLG_VIF | EFLG_VM;
2366 if ( !mode_iopl() )
2367 mask |= EFLG_IOPL;
2368 fail_if(!in_realmode(ctxt, ops));
2369 fail_if(ops->write_rflags == NULL);
2370 if ( (rc = ops->read(x86_seg_ss, sp_post_inc(op_bytes),
2371 &eip, op_bytes, ctxt)) ||
2372 (rc = ops->read(x86_seg_ss, sp_post_inc(op_bytes),
2373 &cs, op_bytes, ctxt)) ||
2374 (rc = ops->read(x86_seg_ss, sp_post_inc(op_bytes),
2375 &eflags, op_bytes, ctxt)) )
2376 goto done;
2377 if ( op_bytes == 2 )
2378 eflags = (uint16_t)eflags | (_regs.eflags & 0xffff0000u);
2379 eflags &= 0x257fd5;
2380 _regs.eflags &= mask;
2381 _regs.eflags |= (uint32_t)(eflags & ~mask) | 0x02;
2382 if ( (rc = ops->write_rflags(_regs.eflags, ctxt)) != 0 )
2383 goto done;
2384 _regs.eip = eip;
2385 if ( (rc = load_seg(x86_seg_cs, (uint16_t)cs, ctxt, ops)) != 0 )
2386 goto done;
2387 break;
2390 case 0xd4: /* aam */ {
2391 unsigned int base = insn_fetch_type(uint8_t);
2392 uint8_t al = _regs.eax;
2393 generate_exception_if(mode_64bit(), EXC_UD);
2394 generate_exception_if(base == 0, EXC_DE);
2395 *(uint16_t *)&_regs.eax = ((al / base) << 8) | (al % base);
2396 _regs.eflags &= ~(EFLG_SF|EFLG_ZF|EFLG_PF);
2397 _regs.eflags |= ((uint8_t)_regs.eax == 0) ? EFLG_ZF : 0;
2398 _regs.eflags |= (( int8_t)_regs.eax < 0) ? EFLG_SF : 0;
2399 _regs.eflags |= even_parity(_regs.eax) ? EFLG_PF : 0;
2400 break;
2403 case 0xd5: /* aad */ {
2404 unsigned int base = insn_fetch_type(uint8_t);
2405 uint16_t ax = _regs.eax;
2406 generate_exception_if(mode_64bit(), EXC_UD);
2407 *(uint16_t *)&_regs.eax = (uint8_t)(ax + ((ax >> 8) * base));
2408 _regs.eflags &= ~(EFLG_SF|EFLG_ZF|EFLG_PF);
2409 _regs.eflags |= ((uint8_t)_regs.eax == 0) ? EFLG_ZF : 0;
2410 _regs.eflags |= (( int8_t)_regs.eax < 0) ? EFLG_SF : 0;
2411 _regs.eflags |= even_parity(_regs.eax) ? EFLG_PF : 0;
2412 break;
2415 case 0xd6: /* salc */
2416 generate_exception_if(mode_64bit(), EXC_UD);
2417 *(uint8_t *)&_regs.eax = (_regs.eflags & EFLG_CF) ? 0xff : 0x00;
2418 break;
2420 case 0xd7: /* xlat */ {
2421 unsigned long al = (uint8_t)_regs.eax;
2422 if ( (rc = ops->read(ea.mem.seg, truncate_ea(_regs.ebx + al),
2423 &al, 1, ctxt)) != 0 )
2424 goto done;
2425 *(uint8_t *)&_regs.eax = al;
2426 break;
2429 case 0xe0 ... 0xe2: /* loop{,z,nz} */ {
2430 int rel = insn_fetch_type(int8_t);
2431 int do_jmp = !(_regs.eflags & EFLG_ZF); /* loopnz */
2432 if ( b == 0xe1 )
2433 do_jmp = !do_jmp; /* loopz */
2434 else if ( b == 0xe2 )
2435 do_jmp = 1; /* loop */
2436 switch ( ad_bytes )
2438 case 2:
2439 do_jmp &= --(*(uint16_t *)&_regs.ecx) != 0;
2440 break;
2441 case 4:
2442 do_jmp &= --(*(uint32_t *)&_regs.ecx) != 0;
2443 _regs.ecx = (uint32_t)_regs.ecx; /* zero extend in x86/64 mode */
2444 break;
2445 default: /* case 8: */
2446 do_jmp &= --_regs.ecx != 0;
2447 break;
2449 if ( do_jmp )
2450 jmp_rel(rel);
2451 break;
2454 case 0xe3: /* jcxz/jecxz (short) */ {
2455 int rel = insn_fetch_type(int8_t);
2456 if ( (ad_bytes == 2) ? !(uint16_t)_regs.ecx :
2457 (ad_bytes == 4) ? !(uint32_t)_regs.ecx : !_regs.ecx )
2458 jmp_rel(rel);
2459 break;
2462 case 0xe4: /* in imm8,%al */
2463 case 0xe5: /* in imm8,%eax */
2464 case 0xe6: /* out %al,imm8 */
2465 case 0xe7: /* out %eax,imm8 */
2466 case 0xec: /* in %dx,%al */
2467 case 0xed: /* in %dx,%eax */
2468 case 0xee: /* out %al,%dx */
2469 case 0xef: /* out %eax,%dx */ {
2470 unsigned int port = ((b < 0xe8)
2471 ? insn_fetch_type(uint8_t)
2472 : (uint16_t)_regs.edx);
2473 generate_exception_if(!mode_iopl(), EXC_GP);
2474 op_bytes = !(b & 1) ? 1 : (op_bytes == 8) ? 4 : op_bytes;
2475 if ( b & 2 )
2477 /* out */
2478 fail_if(ops->write_io == NULL);
2479 rc = ops->write_io(port, op_bytes, _regs.eax, ctxt);
2482 else
2484 /* in */
2485 dst.type = OP_REG;
2486 dst.bytes = op_bytes;
2487 dst.reg = (unsigned long *)&_regs.eax;
2488 fail_if(ops->read_io == NULL);
2489 rc = ops->read_io(port, dst.bytes, &dst.val, ctxt);
2491 if ( rc != 0 )
2492 goto done;
2493 break;
2496 case 0xe8: /* call (near) */ {
2497 int rel = (((op_bytes == 2) && !mode_64bit())
2498 ? (int32_t)insn_fetch_type(int16_t)
2499 : insn_fetch_type(int32_t));
2500 op_bytes = mode_64bit() ? 8 : op_bytes;
2501 src.val = _regs.eip;
2502 jmp_rel(rel);
2503 goto push;
2506 case 0xe9: /* jmp (near) */ {
2507 int rel = (((op_bytes == 2) && !mode_64bit())
2508 ? (int32_t)insn_fetch_type(int16_t)
2509 : insn_fetch_type(int32_t));
2510 jmp_rel(rel);
2511 break;
2514 case 0xea: /* jmp (far, absolute) */ {
2515 uint16_t sel;
2516 uint32_t eip;
2517 generate_exception_if(mode_64bit(), EXC_UD);
2518 eip = insn_fetch_bytes(op_bytes);
2519 sel = insn_fetch_type(uint16_t);
2520 if ( (rc = load_seg(x86_seg_cs, sel, ctxt, ops)) != 0 )
2521 goto done;
2522 _regs.eip = eip;
2523 break;
2526 case 0xeb: /* jmp (short) */
2527 jmp_rel(insn_fetch_type(int8_t));
2528 break;
2530 case 0xf1: /* int1 (icebp) */
2531 src.val = EXC_DB;
2532 goto swint;
2534 case 0xf4: /* hlt */
2535 fail_if(ops->hlt == NULL);
2536 if ( (rc = ops->hlt(ctxt)) != 0 )
2537 goto done;
2538 break;
2540 case 0xf5: /* cmc */
2541 _regs.eflags ^= EFLG_CF;
2542 break;
2544 case 0xf8: /* clc */
2545 _regs.eflags &= ~EFLG_CF;
2546 break;
2548 case 0xf9: /* stc */
2549 _regs.eflags |= EFLG_CF;
2550 break;
2552 case 0xfa: /* cli */
2553 case 0xfb: /* sti */
2554 generate_exception_if(!mode_iopl(), EXC_GP);
2555 fail_if(ops->write_rflags == NULL);
2556 _regs.eflags &= ~EFLG_IF;
2557 if ( b == 0xfb ) /* sti */
2558 _regs.eflags |= EFLG_IF;
2559 if ( (rc = ops->write_rflags(_regs.eflags, ctxt)) != 0 )
2560 goto done;
2561 break;
2563 case 0xfc: /* cld */
2564 _regs.eflags &= ~EFLG_DF;
2565 break;
2567 case 0xfd: /* std */
2568 _regs.eflags |= EFLG_DF;
2569 break;
2571 goto writeback;
2573 twobyte_insn:
2574 switch ( b )
2576 case 0x40 ... 0x4f: /* cmovcc */
2577 dst.val = src.val;
2578 if ( !test_cc(b, _regs.eflags) )
2579 dst.type = OP_NONE;
2580 break;
2582 case 0x90 ... 0x9f: /* setcc */
2583 dst.val = test_cc(b, _regs.eflags);
2584 break;
2586 case 0xb0 ... 0xb1: /* cmpxchg */
2587 /* Save real source value, then compare EAX against destination. */
2588 src.orig_val = src.val;
2589 src.val = _regs.eax;
2590 emulate_2op_SrcV("cmp", src, dst, _regs.eflags);
2591 /* Always write back. The question is: where to? */
2592 d |= Mov;
2593 if ( _regs.eflags & EFLG_ZF )
2595 /* Success: write back to memory. */
2596 dst.val = src.orig_val;
2598 else
2600 /* Failure: write the value we saw to EAX. */
2601 dst.type = OP_REG;
2602 dst.reg = (unsigned long *)&_regs.eax;
2604 break;
2606 case 0xa3: bt: /* bt */
2607 emulate_2op_SrcV_nobyte("bt", src, dst, _regs.eflags);
2608 break;
2610 case 0xb3: btr: /* btr */
2611 emulate_2op_SrcV_nobyte("btr", src, dst, _regs.eflags);
2612 break;
2614 case 0xab: bts: /* bts */
2615 emulate_2op_SrcV_nobyte("bts", src, dst, _regs.eflags);
2616 break;
2618 case 0xaf: /* imul */
2619 _regs.eflags &= ~(EFLG_OF|EFLG_CF);
2620 switch ( dst.bytes )
2622 case 2:
2623 dst.val = ((uint32_t)(int16_t)src.val *
2624 (uint32_t)(int16_t)dst.val);
2625 if ( (int16_t)dst.val != (uint32_t)dst.val )
2626 _regs.eflags |= EFLG_OF|EFLG_CF;
2627 break;
2628 #ifdef __x86_64__
2629 case 4:
2630 dst.val = ((uint64_t)(int32_t)src.val *
2631 (uint64_t)(int32_t)dst.val);
2632 if ( (int32_t)dst.val != dst.val )
2633 _regs.eflags |= EFLG_OF|EFLG_CF;
2634 break;
2635 #endif
2636 default: {
2637 unsigned long m[2] = { src.val, dst.val };
2638 if ( imul_dbl(m) )
2639 _regs.eflags |= EFLG_OF|EFLG_CF;
2640 dst.val = m[0];
2641 break;
2644 break;
2646 case 0xb2: /* lss */
2647 dst.val = x86_seg_ss;
2648 goto les;
2650 case 0xb4: /* lfs */
2651 dst.val = x86_seg_fs;
2652 goto les;
2654 case 0xb5: /* lgs */
2655 dst.val = x86_seg_gs;
2656 goto les;
2658 case 0xb6: /* movzx rm8,r{16,32,64} */
2659 /* Recompute DstReg as we may have decoded AH/BH/CH/DH. */
2660 dst.reg = decode_register(modrm_reg, &_regs, 0);
2661 dst.bytes = op_bytes;
2662 dst.val = (uint8_t)src.val;
2663 break;
2665 case 0xbc: /* bsf */ {
2666 int zf;
2667 asm ( "bsf %2,%0; setz %b1"
2668 : "=r" (dst.val), "=q" (zf)
2669 : "r" (src.val), "1" (0) );
2670 _regs.eflags &= ~EFLG_ZF;
2671 _regs.eflags |= zf ? EFLG_ZF : 0;
2672 break;
2675 case 0xbd: /* bsr */ {
2676 int zf;
2677 asm ( "bsr %2,%0; setz %b1"
2678 : "=r" (dst.val), "=q" (zf)
2679 : "r" (src.val), "1" (0) );
2680 _regs.eflags &= ~EFLG_ZF;
2681 _regs.eflags |= zf ? EFLG_ZF : 0;
2682 break;
2685 case 0xb7: /* movzx rm16,r{16,32,64} */
2686 dst.val = (uint16_t)src.val;
2687 break;
2689 case 0xbb: btc: /* btc */
2690 emulate_2op_SrcV_nobyte("btc", src, dst, _regs.eflags);
2691 break;
2693 case 0xba: /* Grp8 */
2694 switch ( modrm_reg & 7 )
2696 case 4: goto bt;
2697 case 5: goto bts;
2698 case 6: goto btr;
2699 case 7: goto btc;
2700 default: generate_exception_if(1, EXC_UD);
2702 break;
2704 case 0xbe: /* movsx rm8,r{16,32,64} */
2705 /* Recompute DstReg as we may have decoded AH/BH/CH/DH. */
2706 dst.reg = decode_register(modrm_reg, &_regs, 0);
2707 dst.bytes = op_bytes;
2708 dst.val = (int8_t)src.val;
2709 break;
2711 case 0xbf: /* movsx rm16,r{16,32,64} */
2712 dst.val = (int16_t)src.val;
2713 break;
2715 case 0xc0 ... 0xc1: /* xadd */
2716 /* Write back the register source. */
2717 switch ( dst.bytes )
2719 case 1: *(uint8_t *)src.reg = (uint8_t)dst.val; break;
2720 case 2: *(uint16_t *)src.reg = (uint16_t)dst.val; break;
2721 case 4: *src.reg = (uint32_t)dst.val; break; /* 64b reg: zero-extend */
2722 case 8: *src.reg = dst.val; break;
2724 goto add;
2726 goto writeback;
2728 twobyte_special_insn:
2729 switch ( b )
2731 case 0x01: /* Grp7 */ {
2732 struct segment_register reg;
2733 unsigned long base, limit, cr0, cr0w;
2735 switch ( modrm_reg & 7 )
2737 case 0: /* sgdt */
2738 case 1: /* sidt */
2739 generate_exception_if(ea.type != OP_MEM, EXC_UD);
2740 fail_if(ops->read_segment == NULL);
2741 if ( (rc = ops->read_segment((modrm_reg & 1) ?
2742 x86_seg_idtr : x86_seg_gdtr,
2743 &reg, ctxt)) )
2744 goto done;
2745 if ( op_bytes == 2 )
2746 reg.base &= 0xffffff;
2747 if ( (rc = ops->write(ea.mem.seg, ea.mem.off+0,
2748 reg.limit, 2, ctxt)) ||
2749 (rc = ops->write(ea.mem.seg, ea.mem.off+2,
2750 reg.base, mode_64bit() ? 8 : 4, ctxt)) )
2751 goto done;
2752 break;
2753 case 2: /* lgdt */
2754 case 3: /* lidt */
2755 generate_exception_if(ea.type != OP_MEM, EXC_UD);
2756 fail_if(ops->write_segment == NULL);
2757 memset(&reg, 0, sizeof(reg));
2758 if ( (rc = ops->read(ea.mem.seg, ea.mem.off+0,
2759 &limit, 2, ctxt)) ||
2760 (rc = ops->read(ea.mem.seg, ea.mem.off+2,
2761 &base, mode_64bit() ? 8 : 4, ctxt)) )
2762 goto done;
2763 reg.base = base;
2764 reg.limit = limit;
2765 if ( op_bytes == 2 )
2766 reg.base &= 0xffffff;
2767 if ( (rc = ops->write_segment((modrm_reg & 1) ?
2768 x86_seg_idtr : x86_seg_gdtr,
2769 &reg, ctxt)) )
2770 goto done;
2771 break;
2772 case 4: /* smsw */
2773 ea.bytes = 2;
2774 dst = ea;
2775 fail_if(ops->read_cr == NULL);
2776 if ( (rc = ops->read_cr(0, &dst.val, ctxt)) )
2777 goto done;
2778 d |= Mov; /* force writeback */
2779 break;
2780 case 6: /* lmsw */
2781 fail_if(ops->read_cr == NULL);
2782 fail_if(ops->write_cr == NULL);
2783 if ( (rc = ops->read_cr(0, &cr0, ctxt)) )
2784 goto done;
2785 if ( ea.type == OP_REG )
2786 cr0w = *ea.reg;
2787 else if ( (rc = ops->read(ea.mem.seg, ea.mem.off,
2788 &cr0w, 2, ctxt)) )
2789 goto done;
2790 cr0 &= 0xffff0000;
2791 cr0 |= (uint16_t)cr0w;
2792 if ( (rc = ops->write_cr(0, cr0, ctxt)) )
2793 goto done;
2794 break;
2795 default:
2796 goto cannot_emulate;
2798 break;
2801 case 0x06: /* clts */
2802 generate_exception_if(!mode_ring0(), EXC_GP);
2803 fail_if((ops->read_cr == NULL) || (ops->write_cr == NULL));
2804 if ( (rc = ops->read_cr(0, &dst.val, ctxt)) ||
2805 (rc = ops->write_cr(0, dst.val&~8, ctxt)) )
2806 goto done;
2807 break;
2809 case 0x08: /* invd */
2810 case 0x09: /* wbinvd */
2811 generate_exception_if(!mode_ring0(), EXC_GP);
2812 fail_if(ops->wbinvd == NULL);
2813 if ( (rc = ops->wbinvd(ctxt)) != 0 )
2814 goto done;
2815 break;
2817 case 0x0d: /* GrpP (prefetch) */
2818 case 0x18: /* Grp16 (prefetch/nop) */
2819 case 0x19 ... 0x1f: /* nop (amd-defined) */
2820 break;
2822 case 0x20: /* mov cr,reg */
2823 case 0x21: /* mov dr,reg */
2824 case 0x22: /* mov reg,cr */
2825 case 0x23: /* mov reg,dr */
2826 generate_exception_if(!mode_ring0(), EXC_GP);
2827 modrm_rm |= (rex_prefix & 1) << 3;
2828 modrm_reg |= lock_prefix << 3;
2829 if ( b & 2 )
2831 /* Write to CR/DR. */
2832 src.val = *(unsigned long *)decode_register(modrm_rm, &_regs, 0);
2833 if ( !mode_64bit() )
2834 src.val = (uint32_t)src.val;
2835 rc = ((b & 1)
2836 ? (ops->write_dr
2837 ? ops->write_dr(modrm_reg, src.val, ctxt)
2838 : X86EMUL_UNHANDLEABLE)
2839 : (ops->write_cr
2840 ? ops->write_cr(modrm_reg, src.val, ctxt)
2841 : X86EMUL_UNHANDLEABLE));
2843 else
2845 /* Read from CR/DR. */
2846 dst.type = OP_REG;
2847 dst.bytes = mode_64bit() ? 8 : 4;
2848 dst.reg = decode_register(modrm_rm, &_regs, 0);
2849 rc = ((b & 1)
2850 ? (ops->read_dr
2851 ? ops->read_dr(modrm_reg, &dst.val, ctxt)
2852 : X86EMUL_UNHANDLEABLE)
2853 : (ops->read_cr
2854 ? ops->read_cr(modrm_reg, &dst.val, ctxt)
2855 : X86EMUL_UNHANDLEABLE));
2857 if ( rc != 0 )
2858 goto done;
2859 break;
2861 case 0x30: /* wrmsr */ {
2862 uint64_t val = ((uint64_t)_regs.edx << 32) | (uint32_t)_regs.eax;
2863 generate_exception_if(!mode_ring0(), EXC_GP);
2864 fail_if(ops->write_msr == NULL);
2865 if ( (rc = ops->write_msr((uint32_t)_regs.ecx, val, ctxt)) != 0 )
2866 goto done;
2867 break;
2870 case 0x31: /* rdtsc */ {
2871 unsigned long cr4;
2872 uint64_t val;
2873 fail_if(ops->read_cr == NULL);
2874 if ( (rc = ops->read_cr(4, &cr4, ctxt)) )
2875 goto done;
2876 generate_exception_if((cr4 & CR4_TSD) && !mode_ring0(), EXC_GP);
2877 fail_if(ops->read_msr == NULL);
2878 if ( (rc = ops->read_msr(MSR_TSC, &val, ctxt)) != 0 )
2879 goto done;
2880 _regs.edx = (uint32_t)(val >> 32);
2881 _regs.eax = (uint32_t)(val >> 0);
2882 break;
2885 case 0x32: /* rdmsr */ {
2886 uint64_t val;
2887 generate_exception_if(!mode_ring0(), EXC_GP);
2888 fail_if(ops->read_msr == NULL);
2889 if ( (rc = ops->read_msr((uint32_t)_regs.ecx, &val, ctxt)) != 0 )
2890 goto done;
2891 _regs.edx = (uint32_t)(val >> 32);
2892 _regs.eax = (uint32_t)(val >> 0);
2893 break;
2896 case 0x80 ... 0x8f: /* jcc (near) */ {
2897 int rel = (((op_bytes == 2) && !mode_64bit())
2898 ? (int32_t)insn_fetch_type(int16_t)
2899 : insn_fetch_type(int32_t));
2900 if ( test_cc(b, _regs.eflags) )
2901 jmp_rel(rel);
2902 break;
2905 case 0xa0: /* push %%fs */
2906 src.val = x86_seg_fs;
2907 goto push_seg;
2909 case 0xa1: /* pop %%fs */
2910 src.val = x86_seg_fs;
2911 goto pop_seg;
2913 case 0xa2: /* cpuid */ {
2914 unsigned int eax = _regs.eax, ebx = _regs.ebx;
2915 unsigned int ecx = _regs.ecx, edx = _regs.edx;
2916 fail_if(ops->cpuid == NULL);
2917 if ( (rc = ops->cpuid(&eax, &ebx, &ecx, &edx, ctxt)) != 0 )
2918 goto done;
2919 _regs.eax = eax; _regs.ebx = ebx;
2920 _regs.ecx = ecx; _regs.edx = edx;
2921 break;
2924 case 0xa8: /* push %%gs */
2925 src.val = x86_seg_gs;
2926 goto push_seg;
2928 case 0xa9: /* pop %%gs */
2929 src.val = x86_seg_gs;
2930 goto pop_seg;
2932 case 0xc7: /* Grp9 (cmpxchg8b) */
2933 #if defined(__i386__)
2935 unsigned long old_lo, old_hi;
2936 generate_exception_if((modrm_reg & 7) != 1, EXC_UD);
2937 if ( (rc = ops->read(ea.mem.seg, ea.mem.off+0, &old_lo, 4, ctxt)) ||
2938 (rc = ops->read(ea.mem.seg, ea.mem.off+4, &old_hi, 4, ctxt)) )
2939 goto done;
2940 if ( (old_lo != _regs.eax) || (old_hi != _regs.edx) )
2942 _regs.eax = old_lo;
2943 _regs.edx = old_hi;
2944 _regs.eflags &= ~EFLG_ZF;
2946 else if ( ops->cmpxchg8b == NULL )
2948 rc = X86EMUL_UNHANDLEABLE;
2949 goto done;
2951 else
2953 if ( (rc = ops->cmpxchg8b(ea.mem.seg, ea.mem.off, old_lo, old_hi,
2954 _regs.ebx, _regs.ecx, ctxt)) != 0 )
2955 goto done;
2956 _regs.eflags |= EFLG_ZF;
2958 break;
2960 #elif defined(__x86_64__)
2962 unsigned long old, new;
2963 generate_exception_if((modrm_reg & 7) != 1, EXC_UD);
2964 if ( (rc = ops->read(ea.mem.seg, ea.mem.off, &old, 8, ctxt)) != 0 )
2965 goto done;
2966 if ( ((uint32_t)(old>>0) != (uint32_t)_regs.eax) ||
2967 ((uint32_t)(old>>32) != (uint32_t)_regs.edx) )
2969 _regs.eax = (uint32_t)(old>>0);
2970 _regs.edx = (uint32_t)(old>>32);
2971 _regs.eflags &= ~EFLG_ZF;
2973 else
2975 new = (_regs.ecx<<32)|(uint32_t)_regs.ebx;
2976 if ( (rc = ops->cmpxchg(ea.mem.seg, ea.mem.off, old,
2977 new, 8, ctxt)) != 0 )
2978 goto done;
2979 _regs.eflags |= EFLG_ZF;
2981 break;
2983 #endif
2985 case 0xc8 ... 0xcf: /* bswap */
2986 dst.type = OP_REG;
2987 dst.reg = decode_register(
2988 (b & 7) | ((rex_prefix & 1) << 3), &_regs, 0);
2989 switch ( dst.bytes = op_bytes )
2991 default: /* case 2: */
2992 /* Undefined behaviour. Writes zero on all tested CPUs. */
2993 dst.val = 0;
2994 break;
2995 case 4:
2996 #ifdef __x86_64__
2997 asm ( "bswap %k0" : "=r" (dst.val) : "0" (*dst.reg) );
2998 break;
2999 case 8:
3000 #endif
3001 asm ( "bswap %0" : "=r" (dst.val) : "0" (*dst.reg) );
3002 break;
3004 break;
3006 goto writeback;
3008 cannot_emulate:
3009 #if 0
3010 gdprintk(XENLOG_DEBUG, "Instr:");
3011 for ( ea.mem.off = ctxt->regs->eip; ea.mem.off < _regs.eip; ea.mem.off++ )
3013 unsigned long x;
3014 ops->insn_fetch(x86_seg_cs, ea.mem.off, &x, 1, ctxt);
3015 printk(" %02x", (uint8_t)x);
3017 printk("\n");
3018 #endif
3019 return X86EMUL_UNHANDLEABLE;