/*
 * Source: ia64/xen-unstable, view of xen/arch/x86/x86_emulate.c
 * at changeset 16478:c5332fa8b68d.
 *
 * x86_emulate: Emulate RETF and RETF imm16.
 * Signed-off-by: Keir Fraser <keir.fraser@citrix.com>
 * author   Keir Fraser <keir.fraser@citrix.com>
 * date     Mon Nov 26 16:46:22 2007 +0000 (2007-11-26)
 * parents  9f61a0add5b6
 * children f676c0dacbb9
 */
1 /******************************************************************************
2 * x86_emulate.c
3 *
4 * Generic x86 (32-bit and 64-bit) instruction decoder and emulator.
5 *
6 * Copyright (c) 2005-2007 Keir Fraser
7 * Copyright (c) 2005-2007 XenSource Inc.
8 *
9 * This program is free software; you can redistribute it and/or modify
10 * it under the terms of the GNU General Public License as published by
11 * the Free Software Foundation; either version 2 of the License, or
12 * (at your option) any later version.
13 *
14 * This program is distributed in the hope that it will be useful,
15 * but WITHOUT ANY WARRANTY; without even the implied warranty of
16 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
17 * GNU General Public License for more details.
18 *
19 * You should have received a copy of the GNU General Public License
20 * along with this program; if not, write to the Free Software
21 * Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
22 */
24 #ifndef __XEN__
25 #include <stddef.h>
26 #include <stdint.h>
27 #include <public/xen.h>
28 #else
29 #include <xen/config.h>
30 #include <xen/types.h>
31 #include <xen/lib.h>
32 #include <asm/regs.h>
33 #undef cmpxchg
34 #endif
35 #include <asm-x86/x86_emulate.h>
/*
 * Decode descriptor bits. Each entry of opcode_table[]/twobyte_table[]
 * below is an OR of these flags, describing how the instruction's
 * operands are decoded before dispatch.
 */
/* Operand sizes: 8-bit operands or specified/overridden size. */
#define ByteOp      (1<<0) /* 8-bit operands. */
/* Destination operand type. */
#define DstBitBase  (0<<1) /* Memory operand, bit string. */
#define ImplicitOps (1<<1) /* Implicit in opcode. No generic decode. */
#define DstReg      (2<<1) /* Register operand. */
#define DstMem      (3<<1) /* Memory operand. */
#define DstMask     (3<<1)
/* Source operand type. */
#define SrcNone     (0<<3) /* No source operand. */
#define SrcImplicit (0<<3) /* Source operand is implicit in the opcode. */
#define SrcReg      (1<<3) /* Register operand. */
#define SrcMem      (2<<3) /* Memory operand. */
#define SrcMem16    (3<<3) /* Memory operand (16-bit). */
#define SrcImm      (4<<3) /* Immediate operand. */
#define SrcImmByte  (5<<3) /* 8-bit sign-extended immediate operand. */
#define SrcMask     (7<<3)
/* Generic ModRM decode. */
#define ModRM       (1<<6)
/* Destination is only written; never read. */
#define Mov         (1<<7)
/*
 * Decode descriptors for the one-byte opcode space (0x00-0xFF).
 * A zero entry means the opcode is not handled by this emulator.
 */
static uint8_t opcode_table[256] = {
    /* 0x00 - 0x07 */
    ByteOp|DstMem|SrcReg|ModRM, DstMem|SrcReg|ModRM,
    ByteOp|DstReg|SrcMem|ModRM, DstReg|SrcMem|ModRM,
    ByteOp|DstReg|SrcImm, DstReg|SrcImm, ImplicitOps, ImplicitOps,
    /* 0x08 - 0x0F */
    ByteOp|DstMem|SrcReg|ModRM, DstMem|SrcReg|ModRM,
    ByteOp|DstReg|SrcMem|ModRM, DstReg|SrcMem|ModRM,
    ByteOp|DstReg|SrcImm, DstReg|SrcImm, ImplicitOps, 0,
    /* 0x10 - 0x17 */
    ByteOp|DstMem|SrcReg|ModRM, DstMem|SrcReg|ModRM,
    ByteOp|DstReg|SrcMem|ModRM, DstReg|SrcMem|ModRM,
    ByteOp|DstReg|SrcImm, DstReg|SrcImm, ImplicitOps, ImplicitOps,
    /* 0x18 - 0x1F */
    ByteOp|DstMem|SrcReg|ModRM, DstMem|SrcReg|ModRM,
    ByteOp|DstReg|SrcMem|ModRM, DstReg|SrcMem|ModRM,
    ByteOp|DstReg|SrcImm, DstReg|SrcImm, ImplicitOps, ImplicitOps,
    /* 0x20 - 0x27 */
    ByteOp|DstMem|SrcReg|ModRM, DstMem|SrcReg|ModRM,
    ByteOp|DstReg|SrcMem|ModRM, DstReg|SrcMem|ModRM,
    ByteOp|DstReg|SrcImm, DstReg|SrcImm, 0, ImplicitOps,
    /* 0x28 - 0x2F */
    ByteOp|DstMem|SrcReg|ModRM, DstMem|SrcReg|ModRM,
    ByteOp|DstReg|SrcMem|ModRM, DstReg|SrcMem|ModRM,
    ByteOp|DstReg|SrcImm, DstReg|SrcImm, 0, ImplicitOps,
    /* 0x30 - 0x37 */
    ByteOp|DstMem|SrcReg|ModRM, DstMem|SrcReg|ModRM,
    ByteOp|DstReg|SrcMem|ModRM, DstReg|SrcMem|ModRM,
    ByteOp|DstReg|SrcImm, DstReg|SrcImm, 0, ImplicitOps,
    /* 0x38 - 0x3F */
    ByteOp|DstMem|SrcReg|ModRM, DstMem|SrcReg|ModRM,
    ByteOp|DstReg|SrcMem|ModRM, DstReg|SrcMem|ModRM,
    ByteOp|DstReg|SrcImm, DstReg|SrcImm, 0, ImplicitOps,
    /* 0x40 - 0x4F */
    ImplicitOps, ImplicitOps, ImplicitOps, ImplicitOps,
    ImplicitOps, ImplicitOps, ImplicitOps, ImplicitOps,
    ImplicitOps, ImplicitOps, ImplicitOps, ImplicitOps,
    ImplicitOps, ImplicitOps, ImplicitOps, ImplicitOps,
    /* 0x50 - 0x5F */
    ImplicitOps|Mov, ImplicitOps|Mov, ImplicitOps|Mov, ImplicitOps|Mov,
    ImplicitOps|Mov, ImplicitOps|Mov, ImplicitOps|Mov, ImplicitOps|Mov,
    ImplicitOps|Mov, ImplicitOps|Mov, ImplicitOps|Mov, ImplicitOps|Mov,
    ImplicitOps|Mov, ImplicitOps|Mov, ImplicitOps|Mov, ImplicitOps|Mov,
    /* 0x60 - 0x67 */
    ImplicitOps, ImplicitOps, DstReg|SrcMem|ModRM, DstReg|SrcMem16|ModRM|Mov,
    0, 0, 0, 0,
    /* 0x68 - 0x6F */
    ImplicitOps|Mov, DstMem|SrcImm|ModRM|Mov,
    ImplicitOps|Mov, DstMem|SrcImmByte|ModRM|Mov,
    ImplicitOps, ImplicitOps, ImplicitOps, ImplicitOps,
    /* 0x70 - 0x77 */
    ImplicitOps, ImplicitOps, ImplicitOps, ImplicitOps,
    ImplicitOps, ImplicitOps, ImplicitOps, ImplicitOps,
    /* 0x78 - 0x7F */
    ImplicitOps, ImplicitOps, ImplicitOps, ImplicitOps,
    ImplicitOps, ImplicitOps, ImplicitOps, ImplicitOps,
    /* 0x80 - 0x87 */
    ByteOp|DstMem|SrcImm|ModRM, DstMem|SrcImm|ModRM,
    ByteOp|DstMem|SrcImm|ModRM, DstMem|SrcImmByte|ModRM,
    ByteOp|DstMem|SrcReg|ModRM, DstMem|SrcReg|ModRM,
    ByteOp|DstMem|SrcReg|ModRM, DstMem|SrcReg|ModRM,
    /* 0x88 - 0x8F */
    ByteOp|DstMem|SrcReg|ModRM|Mov, DstMem|SrcReg|ModRM|Mov,
    ByteOp|DstReg|SrcMem|ModRM|Mov, DstReg|SrcMem|ModRM|Mov,
    DstMem|SrcReg|ModRM|Mov, DstReg|SrcNone|ModRM,
    DstReg|SrcMem|ModRM|Mov, DstMem|SrcNone|ModRM|Mov,
    /* 0x90 - 0x97 */
    ImplicitOps, ImplicitOps, ImplicitOps, ImplicitOps,
    ImplicitOps, ImplicitOps, ImplicitOps, ImplicitOps,
    /* 0x98 - 0x9F */
    ImplicitOps, ImplicitOps, ImplicitOps, 0,
    ImplicitOps, ImplicitOps, ImplicitOps, ImplicitOps,
    /* 0xA0 - 0xA7 */
    ByteOp|ImplicitOps|Mov, ImplicitOps|Mov,
    ByteOp|ImplicitOps|Mov, ImplicitOps|Mov,
    ByteOp|ImplicitOps|Mov, ImplicitOps|Mov,
    ByteOp|ImplicitOps, ImplicitOps,
    /* 0xA8 - 0xAF */
    ByteOp|DstReg|SrcImm, DstReg|SrcImm,
    ByteOp|ImplicitOps|Mov, ImplicitOps|Mov,
    ByteOp|ImplicitOps|Mov, ImplicitOps|Mov,
    ByteOp|ImplicitOps, ImplicitOps,
    /* 0xB0 - 0xB7 */
    ByteOp|DstReg|SrcImm|Mov, ByteOp|DstReg|SrcImm|Mov,
    ByteOp|DstReg|SrcImm|Mov, ByteOp|DstReg|SrcImm|Mov,
    ByteOp|DstReg|SrcImm|Mov, ByteOp|DstReg|SrcImm|Mov,
    ByteOp|DstReg|SrcImm|Mov, ByteOp|DstReg|SrcImm|Mov,
    /* 0xB8 - 0xBF */
    DstReg|SrcImm|Mov, DstReg|SrcImm|Mov, DstReg|SrcImm|Mov, DstReg|SrcImm|Mov,
    DstReg|SrcImm|Mov, DstReg|SrcImm|Mov, DstReg|SrcImm|Mov, DstReg|SrcImm|Mov,
    /* 0xC0 - 0xC7 */
    ByteOp|DstMem|SrcImm|ModRM, DstMem|SrcImmByte|ModRM,
    ImplicitOps, ImplicitOps,
    DstReg|SrcMem|ModRM|Mov, DstReg|SrcMem|ModRM|Mov,
    ByteOp|DstMem|SrcImm|ModRM|Mov, DstMem|SrcImm|ModRM|Mov,
    /* 0xC8 - 0xCF */
    0, 0, ImplicitOps, ImplicitOps,
    ImplicitOps, ImplicitOps, ImplicitOps, ImplicitOps,
    /* 0xD0 - 0xD7 */
    ByteOp|DstMem|SrcImplicit|ModRM, DstMem|SrcImplicit|ModRM,
    ByteOp|DstMem|SrcImplicit|ModRM, DstMem|SrcImplicit|ModRM,
    ImplicitOps, ImplicitOps, ImplicitOps, ImplicitOps,
    /* 0xD8 - 0xDF */
    0, 0, 0, 0, 0, 0, 0, 0,
    /* 0xE0 - 0xE7 */
    ImplicitOps, ImplicitOps, ImplicitOps, ImplicitOps,
    ImplicitOps, ImplicitOps, ImplicitOps, ImplicitOps,
    /* 0xE8 - 0xEF */
    ImplicitOps, ImplicitOps, ImplicitOps, ImplicitOps,
    ImplicitOps, ImplicitOps, ImplicitOps, ImplicitOps,
    /* 0xF0 - 0xF7 */
    0, ImplicitOps, 0, 0,
    ImplicitOps, ImplicitOps,
    ByteOp|DstMem|SrcNone|ModRM, DstMem|SrcNone|ModRM,
    /* 0xF8 - 0xFF */
    ImplicitOps, ImplicitOps, ImplicitOps, ImplicitOps,
    ImplicitOps, ImplicitOps, ByteOp|DstMem|SrcNone|ModRM, DstMem|SrcNone|ModRM
};
/*
 * Decode descriptors for the two-byte (0x0F-prefixed) opcode space.
 * A zero entry means the opcode is not handled by this emulator.
 */
static uint8_t twobyte_table[256] = {
    /* 0x00 - 0x07 */
    0, ImplicitOps|ModRM, 0, 0, 0, ImplicitOps, 0, 0,
    /* 0x08 - 0x0F */
    ImplicitOps, ImplicitOps, 0, 0, 0, ImplicitOps|ModRM, 0, 0,
    /* 0x10 - 0x17 */
    0, 0, 0, 0, 0, 0, 0, 0,
    /* 0x18 - 0x1F */
    ImplicitOps|ModRM, ImplicitOps|ModRM, ImplicitOps|ModRM, ImplicitOps|ModRM,
    ImplicitOps|ModRM, ImplicitOps|ModRM, ImplicitOps|ModRM, ImplicitOps|ModRM,
    /* 0x20 - 0x27 */
    ImplicitOps|ModRM, ImplicitOps|ModRM, ImplicitOps|ModRM, ImplicitOps|ModRM,
    0, 0, 0, 0,
    /* 0x28 - 0x2F */
    0, 0, 0, 0, 0, 0, 0, 0,
    /* 0x30 - 0x37 */
    ImplicitOps, 0, ImplicitOps, 0, 0, 0, 0, 0,
    /* 0x38 - 0x3F */
    0, 0, 0, 0, 0, 0, 0, 0,
    /* 0x40 - 0x47 */
    DstReg|SrcMem|ModRM|Mov, DstReg|SrcMem|ModRM|Mov,
    DstReg|SrcMem|ModRM|Mov, DstReg|SrcMem|ModRM|Mov,
    DstReg|SrcMem|ModRM|Mov, DstReg|SrcMem|ModRM|Mov,
    DstReg|SrcMem|ModRM|Mov, DstReg|SrcMem|ModRM|Mov,
    /* 0x48 - 0x4F */
    DstReg|SrcMem|ModRM|Mov, DstReg|SrcMem|ModRM|Mov,
    DstReg|SrcMem|ModRM|Mov, DstReg|SrcMem|ModRM|Mov,
    DstReg|SrcMem|ModRM|Mov, DstReg|SrcMem|ModRM|Mov,
    DstReg|SrcMem|ModRM|Mov, DstReg|SrcMem|ModRM|Mov,
    /* 0x50 - 0x5F */
    0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
    /* 0x60 - 0x6F */
    0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
    /* 0x70 - 0x7F */
    0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
    /* 0x80 - 0x87 */
    ImplicitOps, ImplicitOps, ImplicitOps, ImplicitOps,
    ImplicitOps, ImplicitOps, ImplicitOps, ImplicitOps,
    /* 0x88 - 0x8F */
    ImplicitOps, ImplicitOps, ImplicitOps, ImplicitOps,
    ImplicitOps, ImplicitOps, ImplicitOps, ImplicitOps,
    /* 0x90 - 0x97 */
    ByteOp|DstMem|SrcNone|ModRM|Mov, ByteOp|DstMem|SrcNone|ModRM|Mov,
    ByteOp|DstMem|SrcNone|ModRM|Mov, ByteOp|DstMem|SrcNone|ModRM|Mov,
    ByteOp|DstMem|SrcNone|ModRM|Mov, ByteOp|DstMem|SrcNone|ModRM|Mov,
    ByteOp|DstMem|SrcNone|ModRM|Mov, ByteOp|DstMem|SrcNone|ModRM|Mov,
    /* 0x98 - 0x9F */
    ByteOp|DstMem|SrcNone|ModRM|Mov, ByteOp|DstMem|SrcNone|ModRM|Mov,
    ByteOp|DstMem|SrcNone|ModRM|Mov, ByteOp|DstMem|SrcNone|ModRM|Mov,
    ByteOp|DstMem|SrcNone|ModRM|Mov, ByteOp|DstMem|SrcNone|ModRM|Mov,
    ByteOp|DstMem|SrcNone|ModRM|Mov, ByteOp|DstMem|SrcNone|ModRM|Mov,
    /* 0xA0 - 0xA7 */
    ImplicitOps, ImplicitOps, ImplicitOps, DstBitBase|SrcReg|ModRM,
    0, 0, 0, 0,
    /* 0xA8 - 0xAF */
    ImplicitOps, ImplicitOps, 0, DstBitBase|SrcReg|ModRM,
    0, 0, 0, DstReg|SrcMem|ModRM,
    /* 0xB0 - 0xB7 */
    ByteOp|DstMem|SrcReg|ModRM, DstMem|SrcReg|ModRM,
    DstReg|SrcMem|ModRM|Mov, DstBitBase|SrcReg|ModRM,
    DstReg|SrcMem|ModRM|Mov, DstReg|SrcMem|ModRM|Mov,
    ByteOp|DstReg|SrcMem|ModRM|Mov, DstReg|SrcMem16|ModRM|Mov,
    /* 0xB8 - 0xBF */
    0, 0, DstBitBase|SrcImmByte|ModRM, DstBitBase|SrcReg|ModRM,
    DstReg|SrcMem|ModRM, DstReg|SrcMem|ModRM,
    ByteOp|DstReg|SrcMem|ModRM|Mov, DstReg|SrcMem16|ModRM|Mov,
    /* 0xC0 - 0xC7 */
    ByteOp|DstMem|SrcReg|ModRM, DstMem|SrcReg|ModRM, 0, 0,
    0, 0, 0, ImplicitOps|ModRM,
    /* 0xC8 - 0xCF */
    ImplicitOps, ImplicitOps, ImplicitOps, ImplicitOps,
    ImplicitOps, ImplicitOps, ImplicitOps, ImplicitOps,
    /* 0xD0 - 0xDF */
    0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
    /* 0xE0 - 0xEF */
    0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
    /* 0xF0 - 0xFF */
    0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0
};
/* Type, address-of, and value of an instruction's operand. */
struct operand {
    enum { OP_REG, OP_MEM, OP_IMM, OP_NONE } type;
    unsigned int  bytes;         /* operand width in bytes */
    unsigned long val, orig_val; /* current and pre-modification values */
    union {
        /* OP_REG: Pointer to register field. */
        unsigned long *reg;
        /* OP_MEM: Segment and offset. */
        struct {
            enum x86_segment seg;
            unsigned long    off;
        } mem;
    };
};
/* EFLAGS bit definitions. */
#define EFLG_VIP  (1<<20)
#define EFLG_VIF  (1<<19)
#define EFLG_AC   (1<<18)
#define EFLG_VM   (1<<17)
#define EFLG_RF   (1<<16)
#define EFLG_NT   (1<<14)
#define EFLG_IOPL (3<<12)
#define EFLG_OF   (1<<11)
#define EFLG_DF   (1<<10)
#define EFLG_IF   (1<<9)
#define EFLG_TF   (1<<8)
#define EFLG_SF   (1<<7)
#define EFLG_ZF   (1<<6)
#define EFLG_AF   (1<<4)
#define EFLG_PF   (1<<2)
#define EFLG_CF   (1<<0)

/* Exception definitions (hardware fault/trap vector numbers). */
#define EXC_DE  0  /* divide error */
#define EXC_DB  1  /* debug */
#define EXC_BP  3  /* breakpoint */
#define EXC_OF  4  /* overflow */
#define EXC_BR  5  /* BOUND range exceeded */
#define EXC_UD  6  /* invalid opcode */
#define EXC_GP 13  /* general protection */
/*
 * Instruction emulation:
 * Most instructions are emulated directly via a fragment of inline assembly
 * code. This allows us to save/restore EFLAGS and thus very easily pick up
 * any modified flags.
 */

#if defined(__x86_64__)
#define _LO32 "k"           /* force 32-bit operand */
#define _STK  "%%rsp"       /* stack pointer */
#define _BYTES_PER_LONG "8"
#elif defined(__i386__)
#define _LO32 ""            /* force 32-bit operand */
#define _STK  "%%esp"       /* stack pointer */
#define _BYTES_PER_LONG "4"
#endif

/*
 * These EFLAGS bits are restored from saved value during emulation, and
 * any changes are written back to the saved value after emulation.
 */
#define EFLAGS_MASK (EFLG_OF|EFLG_SF|EFLG_ZF|EFLG_AF|EFLG_PF|EFLG_CF)
/*
 * Before executing instruction: restore necessary bits in EFLAGS.
 * Splices the guest's flag bits (selected by _msk) from the saved copy
 * _sav into the live EFLAGS via pushf/popf, using register _tmp as
 * scratch, and clears those bits in _sav so _POST_EFLAGS can OR the
 * result flags back in afterwards.
 */
#define _PRE_EFLAGS(_sav, _msk, _tmp)                           \
/* EFLAGS = (_sav & _msk) | (EFLAGS & ~_msk); _sav &= ~_msk; */ \
"movl %"_sav",%"_LO32 _tmp"; "                                  \
"push %"_tmp"; "                                                \
"push %"_tmp"; "                                                \
"movl %"_msk",%"_LO32 _tmp"; "                                  \
"andl %"_LO32 _tmp",("_STK"); "                                 \
"pushf; "                                                       \
"notl %"_LO32 _tmp"; "                                          \
"andl %"_LO32 _tmp",("_STK"); "                                 \
"andl %"_LO32 _tmp",2*"_BYTES_PER_LONG"("_STK"); "              \
"pop %"_tmp"; "                                                 \
"orl %"_LO32 _tmp",("_STK"); "                                  \
"popf; "                                                        \
"pop %"_sav"; "

/* After executing instruction: write-back necessary bits in EFLAGS. */
#define _POST_EFLAGS(_sav, _msk, _tmp)  \
/* _sav |= EFLAGS & _msk; */            \
"pushf; "                               \
"pop %"_tmp"; "                         \
"andl %"_msk",%"_LO32 _tmp"; "          \
"orl %"_LO32 _tmp",%"_sav"; "
/*
 * Raw emulation: instruction has two explicit operands.
 * asm operand numbering (as used by _PRE/_POST_EFLAGS):
 *   %0 = saved eflags, %1 = destination, %2 = scratch reg,
 *   %3 = source, %4 = EFLAGS_MASK.
 */
#define __emulate_2op_nobyte(_op,_src,_dst,_eflags,_wx,_wy,_lx,_ly,_qx,_qy)\
do{ unsigned long _tmp;                                                    \
    switch ( (_dst).bytes )                                                \
    {                                                                      \
    case 2:                                                                \
        asm volatile (                                                     \
            _PRE_EFLAGS("0","4","2")                                       \
            _op"w %"_wx"3,%1; "                                            \
            _POST_EFLAGS("0","4","2")                                      \
            : "=m" (_eflags), "=m" ((_dst).val), "=&r" (_tmp)              \
            : _wy ((_src).val), "i" (EFLAGS_MASK),                         \
              "m" (_eflags), "m" ((_dst).val) );                           \
        break;                                                             \
    case 4:                                                                \
        asm volatile (                                                     \
            _PRE_EFLAGS("0","4","2")                                       \
            _op"l %"_lx"3,%1; "                                            \
            _POST_EFLAGS("0","4","2")                                      \
            : "=m" (_eflags), "=m" ((_dst).val), "=&r" (_tmp)              \
            : _ly ((_src).val), "i" (EFLAGS_MASK),                         \
              "m" (_eflags), "m" ((_dst).val) );                           \
        break;                                                             \
    case 8:                                                                \
        __emulate_2op_8byte(_op, _src, _dst, _eflags, _qx, _qy);           \
        break;                                                             \
    }                                                                      \
} while (0)
/* As __emulate_2op_nobyte, but also handles single-byte destinations. */
#define __emulate_2op(_op,_src,_dst,_eflags,_bx,_by,_wx,_wy,_lx,_ly,_qx,_qy)\
do{ unsigned long _tmp;                                                    \
    switch ( (_dst).bytes )                                                \
    {                                                                      \
    case 1:                                                                \
        asm volatile (                                                     \
            _PRE_EFLAGS("0","4","2")                                       \
            _op"b %"_bx"3,%1; "                                            \
            _POST_EFLAGS("0","4","2")                                      \
            : "=m" (_eflags), "=m" ((_dst).val), "=&r" (_tmp)              \
            : _by ((_src).val), "i" (EFLAGS_MASK),                         \
              "m" (_eflags), "m" ((_dst).val) );                           \
        break;                                                             \
    default:                                                               \
        __emulate_2op_nobyte(_op,_src,_dst,_eflags,_wx,_wy,_lx,_ly,_qx,_qy);\
        break;                                                             \
    }                                                                      \
} while (0)
/* Source operand is byte-sized and may be restricted to just %cl. */
#define emulate_2op_SrcB(_op, _src, _dst, _eflags)                         \
    __emulate_2op(_op, _src, _dst, _eflags,                                \
                  "b", "c", "b", "c", "b", "c", "b", "c")
/* Source operand is byte, word, long or quad sized. */
#define emulate_2op_SrcV(_op, _src, _dst, _eflags)                         \
    __emulate_2op(_op, _src, _dst, _eflags,                                \
                  "b", "q", "w", "r", _LO32, "r", "", "r")
/* Source operand is word, long or quad sized. */
#define emulate_2op_SrcV_nobyte(_op, _src, _dst, _eflags)                  \
    __emulate_2op_nobyte(_op, _src, _dst, _eflags,                         \
                         "w", "r", _LO32, "r", "", "r")
/*
 * Instruction has only one explicit operand (no source operand).
 * asm operand numbering: %0 = saved eflags, %1 = destination,
 * %2 = scratch reg, %3 = EFLAGS_MASK.
 */
#define emulate_1op(_op,_dst,_eflags)                                      \
do{ unsigned long _tmp;                                                    \
    switch ( (_dst).bytes )                                                \
    {                                                                      \
    case 1:                                                                \
        asm volatile (                                                     \
            _PRE_EFLAGS("0","3","2")                                       \
            _op"b %1; "                                                    \
            _POST_EFLAGS("0","3","2")                                      \
            : "=m" (_eflags), "=m" ((_dst).val), "=&r" (_tmp)              \
            : "i" (EFLAGS_MASK), "m" (_eflags), "m" ((_dst).val) );        \
        break;                                                             \
    case 2:                                                                \
        asm volatile (                                                     \
            _PRE_EFLAGS("0","3","2")                                       \
            _op"w %1; "                                                    \
            _POST_EFLAGS("0","3","2")                                      \
            : "=m" (_eflags), "=m" ((_dst).val), "=&r" (_tmp)              \
            : "i" (EFLAGS_MASK), "m" (_eflags), "m" ((_dst).val) );        \
        break;                                                             \
    case 4:                                                                \
        asm volatile (                                                     \
            _PRE_EFLAGS("0","3","2")                                       \
            _op"l %1; "                                                    \
            _POST_EFLAGS("0","3","2")                                      \
            : "=m" (_eflags), "=m" ((_dst).val), "=&r" (_tmp)              \
            : "i" (EFLAGS_MASK), "m" (_eflags), "m" ((_dst).val) );        \
        break;                                                             \
    case 8:                                                                \
        __emulate_1op_8byte(_op, _dst, _eflags);                           \
        break;                                                             \
    }                                                                      \
} while (0)

/* Emulate an instruction with quadword operands (x86/64 only). */
#if defined(__x86_64__)
#define __emulate_2op_8byte(_op, _src, _dst, _eflags, _qx, _qy)            \
do{ asm volatile (                                                         \
        _PRE_EFLAGS("0","4","2")                                           \
        _op"q %"_qx"3,%1; "                                                \
        _POST_EFLAGS("0","4","2")                                          \
        : "=m" (_eflags), "=m" ((_dst).val), "=&r" (_tmp)                  \
        : _qy ((_src).val), "i" (EFLAGS_MASK),                             \
          "m" (_eflags), "m" ((_dst).val) );                               \
} while (0)
#define __emulate_1op_8byte(_op, _dst, _eflags)                            \
do{ asm volatile (                                                         \
        _PRE_EFLAGS("0","3","2")                                           \
        _op"q %1; "                                                        \
        _POST_EFLAGS("0","3","2")                                          \
        : "=m" (_eflags), "=m" ((_dst).val), "=&r" (_tmp)                  \
        : "i" (EFLAGS_MASK), "m" (_eflags), "m" ((_dst).val) );            \
} while (0)
#elif defined(__i386__)
/* 8-byte cases are unreachable on i386: compile them to nothing. */
#define __emulate_2op_8byte(_op, _src, _dst, _eflags, _qx, _qy)
#define __emulate_1op_8byte(_op, _dst, _eflags)
#endif /* __i386__ */
/*
 * Fetch next part of the instruction being emulated. Advances the shadow
 * EIP and raises #GP if the total instruction length would exceed the
 * architectural 15-byte limit. Uses 'rc', '_regs', 'ctxt' and 'ops' from
 * the enclosing x86_emulate() scope; jumps to 'done' on error.
 */
#define insn_fetch_bytes(_size)                                         \
({ unsigned long _x, _eip = _regs.eip;                                  \
   if ( !mode_64bit() ) _eip = (uint32_t)_eip; /* ignore upper dword */ \
   _regs.eip += (_size); /* real hardware doesn't truncate */           \
   generate_exception_if((uint8_t)(_regs.eip - ctxt->regs->eip) > 15,   \
                         EXC_GP);                                       \
   rc = ops->insn_fetch(x86_seg_cs, _eip, &_x, (_size), ctxt);          \
   if ( rc ) goto done;                                                 \
   _x;                                                                  \
})
#define insn_fetch_type(_type) ((_type)insn_fetch_bytes(sizeof(_type)))

/* Truncate an effective address to the given width (no-op at full width). */
#define _truncate_ea(ea, byte_width)            \
({ unsigned long __ea = (ea);                   \
   unsigned int _width = (byte_width);          \
   ((_width == sizeof(unsigned long)) ? __ea :  \
    (__ea & ((1UL << (_width << 3)) - 1)));     \
})
#define truncate_ea(ea) _truncate_ea((ea), ad_bytes)

/* In long mode the default address size is 8 bytes. */
#define mode_64bit() (def_ad_bytes == 8)

/* Abort emulation with X86EMUL_UNHANDLEABLE if @p is true. */
#define fail_if(p)                                      \
do {                                                    \
    rc = (p) ? X86EMUL_UNHANDLEABLE : X86EMUL_OKAY;     \
    if ( rc ) goto done;                                \
} while (0)

/* Inject hardware exception vector @e and stop emulating if @p is true. */
#define generate_exception_if(p, e)                                      \
({  if ( (p) ) {                                                         \
        fail_if(ops->inject_hw_exception == NULL);                       \
        rc = ops->inject_hw_exception(e, ctxt) ? : X86EMUL_EXCEPTION;    \
        goto done;                                                       \
    }                                                                    \
})
/* Given byte has even parity (even number of 1s)? Returns 1 if so, else 0. */
static int even_parity(uint8_t v)
{
    /*
     * Fold the byte onto itself: after the three xor-shifts, bit 0 is
     * the XOR of all eight input bits (1 iff the popcount is odd).
     */
    v ^= v >> 4;
    v ^= v >> 2;
    v ^= v >> 1;
    return !(v & 1);
}
/*
 * Update address held in a register, based on addressing mode. Only the
 * low @byte_width bytes change; in 64-bit mode the result is masked to
 * that width, otherwise the untouched high bits are preserved.
 */
#define _register_address_increment(reg, inc, byte_width)               \
do {                                                                    \
    int _inc = (inc); /* signed type ensures sign extension to long */  \
    unsigned int _width = (byte_width);                                 \
    if ( _width == sizeof(unsigned long) )                              \
        (reg) += _inc;                                                  \
    else if ( mode_64bit() )                                            \
        (reg) = ((reg) + _inc) & ((1UL << (_width << 3)) - 1);          \
    else                                                                \
        (reg) = ((reg) & ~((1UL << (_width << 3)) - 1)) |               \
                (((reg) + _inc) & ((1UL << (_width << 3)) - 1));        \
} while (0)
#define register_address_increment(reg, inc) \
    _register_address_increment((reg), (inc), ad_bytes)

/* Decrement ESP and return the new (truncated) stack offset: for pushes. */
#define sp_pre_dec(dec) ({                                              \
    _register_address_increment(_regs.esp, -(dec), ctxt->sp_size/8);    \
    _truncate_ea(_regs.esp, ctxt->sp_size/8);                           \
})
/* Return the current (truncated) stack offset, then bump ESP: for pops. */
#define sp_post_inc(inc) ({                                             \
    unsigned long __esp = _truncate_ea(_regs.esp, ctxt->sp_size/8);     \
    _register_address_increment(_regs.esp, (inc), ctxt->sp_size/8);     \
    __esp;                                                              \
})

/* Relative jump: adjust EIP, truncating to operand size outside 64-bit mode. */
#define jmp_rel(rel)                                                    \
do {                                                                    \
    _regs.eip += (int)(rel);                                            \
    if ( !mode_64bit() )                                                \
        _regs.eip = ((op_bytes == 2)                                    \
                     ? (uint16_t)_regs.eip : (uint32_t)_regs.eip);      \
} while (0)
546 static int __handle_rep_prefix(
547 struct cpu_user_regs *int_regs,
548 struct cpu_user_regs *ext_regs,
549 int ad_bytes)
550 {
551 unsigned long ecx = ((ad_bytes == 2) ? (uint16_t)int_regs->ecx :
552 (ad_bytes == 4) ? (uint32_t)int_regs->ecx :
553 int_regs->ecx);
555 if ( ecx-- == 0 )
556 {
557 ext_regs->eip = int_regs->eip;
558 return 1;
559 }
561 if ( ad_bytes == 2 )
562 *(uint16_t *)&int_regs->ecx = ecx;
563 else if ( ad_bytes == 4 )
564 int_regs->ecx = (uint32_t)ecx;
565 else
566 int_regs->ecx = ecx;
567 int_regs->eip = ext_regs->eip;
568 return 0;
569 }
/*
 * If the decoded instruction carries a REP prefix, consume one iteration
 * from %ecx; exits emulation ('goto done') when the count is exhausted.
 */
#define handle_rep_prefix()                                                \
do {                                                                       \
    if ( rep_prefix && __handle_rep_prefix(&_regs, ctxt->regs, ad_bytes) ) \
        goto done;                                                         \
} while (0)
/*
 * Unsigned multiplication with double-word result.
 * IN:  Multiplicand=m[0], Multiplier=m[1]
 * OUT: Return CF/OF (overflow status); Result=m[1]:m[0]
 */
static int mul_dbl(unsigned long m[2])
{
    int rc;
    /* One-operand 'mul' widens rAX*operand into rDX:rAX; 'seto' latches
     * the overflow flag into rc. */
    asm ( "mul %4; seto %b2"
          : "=a" (m[0]), "=d" (m[1]), "=q" (rc)
          : "0" (m[0]), "1" (m[1]), "2" (0) );
    return rc;
}
/*
 * Signed multiplication with double-word result.
 * IN:  Multiplicand=m[0], Multiplier=m[1]
 * OUT: Return CF/OF (overflow status); Result=m[1]:m[0]
 */
static int imul_dbl(unsigned long m[2])
{
    int rc;
    /* One-operand 'imul' widens rAX*operand into rDX:rAX; 'seto' latches
     * the overflow flag into rc. */
    asm ( "imul %4; seto %b2"
          : "=a" (m[0]), "=d" (m[1]), "=q" (rc)
          : "0" (m[0]), "1" (m[1]), "2" (0) );
    return rc;
}
/*
 * Unsigned division of double-word dividend.
 * IN:  Dividend=u[1]:u[0], Divisor=v
 * OUT: Return 1: #DE
 *      Return 0: Quotient=u[0], Remainder=u[1]
 */
static int div_dbl(unsigned long u[2], unsigned long v)
{
    /* Hardware 'div' would fault (#DE) on divide-by-zero or quotient
     * overflow (u[1] >= v); reject those cases before executing it. */
    if ( (v == 0) || (u[1] >= v) )
        return 1;
    asm ( "div %4"
          : "=a" (u[0]), "=d" (u[1])
          : "0" (u[0]), "1" (u[1]), "r" (v) );
    return 0;
}
/*
 * Signed division of double-word dividend.
 * IN:  Dividend=u[1]:u[0], Divisor=v
 * OUT: Return 1: #DE
 *      Return 0: Quotient=u[0], Remainder=u[1]
 * NB. We don't use idiv directly as it's moderately hard to work out
 *     ahead of time whether it will #DE, which we cannot allow to happen.
 */
static int idiv_dbl(unsigned long u[2], unsigned long v)
{
    int negu = (long)u[1] < 0, negv = (long)v < 0;

    /* u = abs(u): two's-complement negate of the double-word u[1]:u[0]. */
    if ( negu )
    {
        u[1] = ~u[1];
        if ( (u[0] = -u[0]) == 0 )
            u[1]++;
    }

    /* abs(u) / abs(v) */
    if ( div_dbl(u, negv ? -v : v) )
        return 1;

    /* Remainder has same sign as dividend. It cannot overflow. */
    if ( negu )
        u[1] = -u[1];

    /* Quotient is negative iff exactly one operand was negative; an
     * unsigned quotient with the sign bit set cannot be represented. */
    if ( negu ^ negv )
    {
        if ( (long)u[0] >= 0 )
            u[0] = -u[0];
        else if ( (u[0] << 1) != 0 ) /* == 0x80...0 is okay */
            return 1;
    }
    else if ( (long)u[0] < 0 )
        return 1;

    return 0;
}
663 static int
664 test_cc(
665 unsigned int condition, unsigned int flags)
666 {
667 int rc = 0;
669 switch ( (condition & 15) >> 1 )
670 {
671 case 0: /* o */
672 rc |= (flags & EFLG_OF);
673 break;
674 case 1: /* b/c/nae */
675 rc |= (flags & EFLG_CF);
676 break;
677 case 2: /* z/e */
678 rc |= (flags & EFLG_ZF);
679 break;
680 case 3: /* be/na */
681 rc |= (flags & (EFLG_CF|EFLG_ZF));
682 break;
683 case 4: /* s */
684 rc |= (flags & EFLG_SF);
685 break;
686 case 5: /* p/pe */
687 rc |= (flags & EFLG_PF);
688 break;
689 case 7: /* le/ng */
690 rc |= (flags & EFLG_ZF);
691 /* fall through */
692 case 6: /* l/nge */
693 rc |= (!(flags & EFLG_SF) != !(flags & EFLG_OF));
694 break;
695 }
697 /* Odd condition identifiers (lsb == 1) have inverted sense. */
698 return (!!rc ^ (condition & 1));
699 }
701 static int
702 get_cpl(
703 struct x86_emulate_ctxt *ctxt,
704 struct x86_emulate_ops *ops)
705 {
706 struct segment_register reg;
708 if ( ctxt->regs->eflags & EFLG_VM )
709 return 3;
711 if ( (ops->read_segment == NULL) ||
712 ops->read_segment(x86_seg_ss, &reg, ctxt) )
713 return -1;
715 return reg.attr.fields.dpl;
716 }
718 static int
719 _mode_iopl(
720 struct x86_emulate_ctxt *ctxt,
721 struct x86_emulate_ops *ops)
722 {
723 int cpl = get_cpl(ctxt, ops);
724 return ((cpl >= 0) && (cpl <= ((ctxt->regs->eflags >> 12) & 3)));
725 }
/* Is the guest currently executing at CPL 0? */
#define mode_ring0() (get_cpl(ctxt, ops) == 0)
/* Does the guest's CPL satisfy CPL <= IOPL? */
#define mode_iopl()  _mode_iopl(ctxt, ops)
730 static int
731 in_realmode(
732 struct x86_emulate_ctxt *ctxt,
733 struct x86_emulate_ops *ops)
734 {
735 unsigned long cr0;
736 int rc;
738 if ( ops->read_cr == NULL )
739 return 0;
741 rc = ops->read_cr(0, &cr0, ctxt);
742 return (!rc && !(cr0 & 1));
743 }
745 static int
746 load_seg(
747 enum x86_segment seg,
748 uint16_t sel,
749 struct x86_emulate_ctxt *ctxt,
750 struct x86_emulate_ops *ops)
751 {
752 struct segment_register reg;
753 int rc;
755 if ( !in_realmode(ctxt, ops) ||
756 (ops->read_segment == NULL) ||
757 (ops->write_segment == NULL) )
758 return X86EMUL_UNHANDLEABLE;
760 if ( (rc = ops->read_segment(seg, &reg, ctxt)) != 0 )
761 return rc;
763 reg.sel = sel;
764 reg.base = (uint32_t)sel << 4;
766 return ops->write_segment(seg, &reg, ctxt);
767 }
769 void *
770 decode_register(
771 uint8_t modrm_reg, struct cpu_user_regs *regs, int highbyte_regs)
772 {
773 void *p;
775 switch ( modrm_reg )
776 {
777 case 0: p = &regs->eax; break;
778 case 1: p = &regs->ecx; break;
779 case 2: p = &regs->edx; break;
780 case 3: p = &regs->ebx; break;
781 case 4: p = (highbyte_regs ?
782 ((unsigned char *)&regs->eax + 1) :
783 (unsigned char *)&regs->esp); break;
784 case 5: p = (highbyte_regs ?
785 ((unsigned char *)&regs->ecx + 1) :
786 (unsigned char *)&regs->ebp); break;
787 case 6: p = (highbyte_regs ?
788 ((unsigned char *)&regs->edx + 1) :
789 (unsigned char *)&regs->esi); break;
790 case 7: p = (highbyte_regs ?
791 ((unsigned char *)&regs->ebx + 1) :
792 (unsigned char *)&regs->edi); break;
793 #if defined(__x86_64__)
794 case 8: p = &regs->r8; break;
795 case 9: p = &regs->r9; break;
796 case 10: p = &regs->r10; break;
797 case 11: p = &regs->r11; break;
798 case 12: p = &regs->r12; break;
799 case 13: p = &regs->r13; break;
800 case 14: p = &regs->r14; break;
801 case 15: p = &regs->r15; break;
802 #endif
803 default: p = NULL; break;
804 }
806 return p;
807 }
809 #define decode_segment_failed x86_seg_tr
810 enum x86_segment
811 decode_segment(
812 uint8_t modrm_reg)
813 {
814 switch ( modrm_reg )
815 {
816 case 0: return x86_seg_es;
817 case 1: return x86_seg_cs;
818 case 2: return x86_seg_ss;
819 case 3: return x86_seg_ds;
820 case 4: return x86_seg_fs;
821 case 5: return x86_seg_gs;
822 default: break;
823 }
824 return decode_segment_failed;
825 }
827 int
828 x86_emulate(
829 struct x86_emulate_ctxt *ctxt,
830 struct x86_emulate_ops *ops)
831 {
832 /* Shadow copy of register state. Committed on successful emulation. */
833 struct cpu_user_regs _regs = *ctxt->regs;
835 uint8_t b, d, sib, sib_index, sib_base, twobyte = 0, rex_prefix = 0;
836 uint8_t modrm, modrm_mod = 0, modrm_reg = 0, modrm_rm = 0;
837 unsigned int op_bytes, def_op_bytes, ad_bytes, def_ad_bytes;
838 #define REPE_PREFIX 1
839 #define REPNE_PREFIX 2
840 unsigned int lock_prefix = 0, rep_prefix = 0;
841 int override_seg = -1, rc = X86EMUL_OKAY;
842 struct operand src, dst;
844 /* Data operand effective address (usually computed from ModRM). */
845 struct operand ea;
847 /* Default is a memory operand relative to segment DS. */
848 ea.type = OP_MEM;
849 ea.mem.seg = x86_seg_ds;
850 ea.mem.off = 0;
852 op_bytes = def_op_bytes = ad_bytes = def_ad_bytes = ctxt->addr_size/8;
853 if ( op_bytes == 8 )
854 {
855 op_bytes = def_op_bytes = 4;
856 #ifndef __x86_64__
857 return X86EMUL_UNHANDLEABLE;
858 #endif
859 }
861 /* Prefix bytes. */
862 for ( ; ; )
863 {
864 switch ( b = insn_fetch_type(uint8_t) )
865 {
866 case 0x66: /* operand-size override */
867 op_bytes = def_op_bytes ^ 6;
868 break;
869 case 0x67: /* address-size override */
870 ad_bytes = def_ad_bytes ^ (mode_64bit() ? 12 : 6);
871 break;
872 case 0x2e: /* CS override */
873 override_seg = x86_seg_cs;
874 break;
875 case 0x3e: /* DS override */
876 override_seg = x86_seg_ds;
877 break;
878 case 0x26: /* ES override */
879 override_seg = x86_seg_es;
880 break;
881 case 0x64: /* FS override */
882 override_seg = x86_seg_fs;
883 break;
884 case 0x65: /* GS override */
885 override_seg = x86_seg_gs;
886 break;
887 case 0x36: /* SS override */
888 override_seg = x86_seg_ss;
889 break;
890 case 0xf0: /* LOCK */
891 lock_prefix = 1;
892 break;
893 case 0xf2: /* REPNE/REPNZ */
894 rep_prefix = REPNE_PREFIX;
895 break;
896 case 0xf3: /* REP/REPE/REPZ */
897 rep_prefix = REPE_PREFIX;
898 break;
899 case 0x40 ... 0x4f: /* REX */
900 if ( !mode_64bit() )
901 goto done_prefixes;
902 rex_prefix = b;
903 continue;
904 default:
905 goto done_prefixes;
906 }
908 /* Any legacy prefix after a REX prefix nullifies its effect. */
909 rex_prefix = 0;
910 }
911 done_prefixes:
913 if ( rex_prefix & 8 ) /* REX.W */
914 op_bytes = 8;
916 /* Opcode byte(s). */
917 d = opcode_table[b];
918 if ( d == 0 )
919 {
920 /* Two-byte opcode? */
921 if ( b == 0x0f )
922 {
923 twobyte = 1;
924 b = insn_fetch_type(uint8_t);
925 d = twobyte_table[b];
926 }
928 /* Unrecognised? */
929 if ( d == 0 )
930 goto cannot_emulate;
931 }
933 /* Lock prefix is allowed only on RMW instructions. */
934 generate_exception_if((d & Mov) && lock_prefix, EXC_GP);
936 /* ModRM and SIB bytes. */
937 if ( d & ModRM )
938 {
939 modrm = insn_fetch_type(uint8_t);
940 modrm_mod = (modrm & 0xc0) >> 6;
941 modrm_reg = ((rex_prefix & 4) << 1) | ((modrm & 0x38) >> 3);
942 modrm_rm = modrm & 0x07;
944 if ( modrm_mod == 3 )
945 {
946 modrm_rm |= (rex_prefix & 1) << 3;
947 ea.type = OP_REG;
948 ea.reg = decode_register(
949 modrm_rm, &_regs, (d & ByteOp) && (rex_prefix == 0));
950 }
951 else if ( ad_bytes == 2 )
952 {
953 /* 16-bit ModR/M decode. */
954 switch ( modrm_rm )
955 {
956 case 0:
957 ea.mem.off = _regs.ebx + _regs.esi;
958 break;
959 case 1:
960 ea.mem.off = _regs.ebx + _regs.edi;
961 break;
962 case 2:
963 ea.mem.seg = x86_seg_ss;
964 ea.mem.off = _regs.ebp + _regs.esi;
965 break;
966 case 3:
967 ea.mem.seg = x86_seg_ss;
968 ea.mem.off = _regs.ebp + _regs.edi;
969 break;
970 case 4:
971 ea.mem.off = _regs.esi;
972 break;
973 case 5:
974 ea.mem.off = _regs.edi;
975 break;
976 case 6:
977 if ( modrm_mod == 0 )
978 break;
979 ea.mem.seg = x86_seg_ss;
980 ea.mem.off = _regs.ebp;
981 break;
982 case 7:
983 ea.mem.off = _regs.ebx;
984 break;
985 }
986 switch ( modrm_mod )
987 {
988 case 0:
989 if ( modrm_rm == 6 )
990 ea.mem.off = insn_fetch_type(int16_t);
991 break;
992 case 1:
993 ea.mem.off += insn_fetch_type(int8_t);
994 break;
995 case 2:
996 ea.mem.off += insn_fetch_type(int16_t);
997 break;
998 }
999 ea.mem.off = truncate_ea(ea.mem.off);
1001 else
1003 /* 32/64-bit ModR/M decode. */
1004 if ( modrm_rm == 4 )
1006 sib = insn_fetch_type(uint8_t);
1007 sib_index = ((sib >> 3) & 7) | ((rex_prefix << 2) & 8);
1008 sib_base = (sib & 7) | ((rex_prefix << 3) & 8);
1009 if ( sib_index != 4 )
1010 ea.mem.off = *(long*)decode_register(sib_index, &_regs, 0);
1011 ea.mem.off <<= (sib >> 6) & 3;
1012 if ( (modrm_mod == 0) && ((sib_base & 7) == 5) )
1013 ea.mem.off += insn_fetch_type(int32_t);
1014 else if ( sib_base == 4 )
1016 ea.mem.seg = x86_seg_ss;
1017 ea.mem.off += _regs.esp;
1018 if ( !twobyte && (b == 0x8f) )
1019 /* POP <rm> computes its EA post increment. */
1020 ea.mem.off += ((mode_64bit() && (op_bytes == 4))
1021 ? 8 : op_bytes);
1023 else if ( sib_base == 5 )
1025 ea.mem.seg = x86_seg_ss;
1026 ea.mem.off += _regs.ebp;
1028 else
1029 ea.mem.off += *(long*)decode_register(sib_base, &_regs, 0);
1031 else
1033 modrm_rm |= (rex_prefix & 1) << 3;
1034 ea.mem.off = *(long *)decode_register(modrm_rm, &_regs, 0);
1035 if ( (modrm_rm == 5) && (modrm_mod != 0) )
1036 ea.mem.seg = x86_seg_ss;
1038 switch ( modrm_mod )
1040 case 0:
1041 if ( (modrm_rm & 7) != 5 )
1042 break;
1043 ea.mem.off = insn_fetch_type(int32_t);
1044 if ( !mode_64bit() )
1045 break;
1046 /* Relative to RIP of next instruction. Argh! */
1047 ea.mem.off += _regs.eip;
1048 if ( (d & SrcMask) == SrcImm )
1049 ea.mem.off += (d & ByteOp) ? 1 :
1050 ((op_bytes == 8) ? 4 : op_bytes);
1051 else if ( (d & SrcMask) == SrcImmByte )
1052 ea.mem.off += 1;
1053 else if ( ((b == 0xf6) || (b == 0xf7)) &&
1054 ((modrm_reg & 7) <= 1) )
1055 /* Special case in Grp3: test has immediate operand. */
1056 ea.mem.off += (d & ByteOp) ? 1
1057 : ((op_bytes == 8) ? 4 : op_bytes);
1058 break;
1059 case 1:
1060 ea.mem.off += insn_fetch_type(int8_t);
1061 break;
1062 case 2:
1063 ea.mem.off += insn_fetch_type(int32_t);
1064 break;
1066 ea.mem.off = truncate_ea(ea.mem.off);
1070 if ( override_seg != -1 )
1071 ea.mem.seg = override_seg;
1073 /* Special instructions do their own operand decoding. */
1074 if ( (d & DstMask) == ImplicitOps )
1075 goto special_insn;
1077 /* Decode and fetch the source operand: register, memory or immediate. */
1078 switch ( d & SrcMask )
1080 case SrcNone:
1081 break;
1082 case SrcReg:
1083 src.type = OP_REG;
1084 if ( d & ByteOp )
1086 src.reg = decode_register(modrm_reg, &_regs, (rex_prefix == 0));
1087 src.val = *(uint8_t *)src.reg;
1088 src.bytes = 1;
1090 else
1092 src.reg = decode_register(modrm_reg, &_regs, 0);
1093 switch ( (src.bytes = op_bytes) )
1095 case 2: src.val = *(uint16_t *)src.reg; break;
1096 case 4: src.val = *(uint32_t *)src.reg; break;
1097 case 8: src.val = *(uint64_t *)src.reg; break;
1100 break;
1101 case SrcMem16:
1102 ea.bytes = 2;
1103 goto srcmem_common;
1104 case SrcMem:
1105 ea.bytes = (d & ByteOp) ? 1 : op_bytes;
1106 srcmem_common:
1107 src = ea;
1108 if ( src.type == OP_REG )
1110 switch ( src.bytes )
1112 case 1: src.val = *(uint8_t *)src.reg; break;
1113 case 2: src.val = *(uint16_t *)src.reg; break;
1114 case 4: src.val = *(uint32_t *)src.reg; break;
1115 case 8: src.val = *(uint64_t *)src.reg; break;
1118 else if ( (rc = ops->read(src.mem.seg, src.mem.off,
1119 &src.val, src.bytes, ctxt)) )
1120 goto done;
1121 break;
1122 case SrcImm:
1123 src.type = OP_IMM;
1124 src.bytes = (d & ByteOp) ? 1 : op_bytes;
1125 if ( src.bytes == 8 ) src.bytes = 4;
1126 /* NB. Immediates are sign-extended as necessary. */
1127 switch ( src.bytes )
1129 case 1: src.val = insn_fetch_type(int8_t); break;
1130 case 2: src.val = insn_fetch_type(int16_t); break;
1131 case 4: src.val = insn_fetch_type(int32_t); break;
1133 break;
1134 case SrcImmByte:
1135 src.type = OP_IMM;
1136 src.bytes = 1;
1137 src.val = insn_fetch_type(int8_t);
1138 break;
1141 /* Decode and fetch the destination operand: register or memory. */
1142 switch ( d & DstMask )
1144 case DstReg:
1145 dst.type = OP_REG;
1146 if ( d & ByteOp )
1148 dst.reg = decode_register(modrm_reg, &_regs, (rex_prefix == 0));
1149 dst.val = *(uint8_t *)dst.reg;
1150 dst.bytes = 1;
1152 else
1154 dst.reg = decode_register(modrm_reg, &_regs, 0);
1155 switch ( (dst.bytes = op_bytes) )
1157 case 2: dst.val = *(uint16_t *)dst.reg; break;
1158 case 4: dst.val = *(uint32_t *)dst.reg; break;
1159 case 8: dst.val = *(uint64_t *)dst.reg; break;
1162 break;
1163 case DstBitBase:
1164 if ( ((d & SrcMask) == SrcImmByte) || (ea.type == OP_REG) )
1166 src.val &= (op_bytes << 3) - 1;
1168 else
1170 /*
1171 * EA += BitOffset DIV op_bytes*8
1172 * BitOffset = BitOffset MOD op_bytes*8
1173 * DIV truncates towards negative infinity.
1174 * MOD always produces a positive result.
1175 */
1176 if ( op_bytes == 2 )
1177 src.val = (int16_t)src.val;
1178 else if ( op_bytes == 4 )
1179 src.val = (int32_t)src.val;
1180 if ( (long)src.val < 0 )
1182 unsigned long byte_offset;
1183 byte_offset = op_bytes + (((-src.val-1) >> 3) & ~(op_bytes-1));
1184 ea.mem.off -= byte_offset;
1185 src.val = (byte_offset << 3) + src.val;
1187 else
1189 ea.mem.off += (src.val >> 3) & ~(op_bytes - 1);
1190 src.val &= (op_bytes << 3) - 1;
1193 /* Becomes a normal DstMem operation from here on. */
1194 d = (d & ~DstMask) | DstMem;
1195 case DstMem:
1196 ea.bytes = (d & ByteOp) ? 1 : op_bytes;
1197 dst = ea;
1198 if ( dst.type == OP_REG )
1200 switch ( dst.bytes )
1202 case 1: dst.val = *(uint8_t *)dst.reg; break;
1203 case 2: dst.val = *(uint16_t *)dst.reg; break;
1204 case 4: dst.val = *(uint32_t *)dst.reg; break;
1205 case 8: dst.val = *(uint64_t *)dst.reg; break;
1208 else if ( !(d & Mov) ) /* optimisation - avoid slow emulated read */
1210 if ( (rc = ops->read(dst.mem.seg, dst.mem.off,
1211 &dst.val, dst.bytes, ctxt)) )
1212 goto done;
1213 dst.orig_val = dst.val;
1215 break;
1218 /* LOCK prefix allowed only on instructions with memory destination. */
1219 generate_exception_if(lock_prefix && (dst.type != OP_MEM), EXC_GP);
1221 if ( twobyte )
1222 goto twobyte_insn;
1224 switch ( b )
1226 case 0x04 ... 0x05: /* add imm,%%eax */
1227 dst.reg = (unsigned long *)&_regs.eax;
1228 dst.val = _regs.eax;
1229 case 0x00 ... 0x03: add: /* add */
1230 emulate_2op_SrcV("add", src, dst, _regs.eflags);
1231 break;
1233 case 0x0c ... 0x0d: /* or imm,%%eax */
1234 dst.reg = (unsigned long *)&_regs.eax;
1235 dst.val = _regs.eax;
1236 case 0x08 ... 0x0b: or: /* or */
1237 emulate_2op_SrcV("or", src, dst, _regs.eflags);
1238 break;
1240 case 0x14 ... 0x15: /* adc imm,%%eax */
1241 dst.reg = (unsigned long *)&_regs.eax;
1242 dst.val = _regs.eax;
1243 case 0x10 ... 0x13: adc: /* adc */
1244 emulate_2op_SrcV("adc", src, dst, _regs.eflags);
1245 break;
1247 case 0x1c ... 0x1d: /* sbb imm,%%eax */
1248 dst.reg = (unsigned long *)&_regs.eax;
1249 dst.val = _regs.eax;
1250 case 0x18 ... 0x1b: sbb: /* sbb */
1251 emulate_2op_SrcV("sbb", src, dst, _regs.eflags);
1252 break;
1254 case 0x24 ... 0x25: /* and imm,%%eax */
1255 dst.reg = (unsigned long *)&_regs.eax;
1256 dst.val = _regs.eax;
1257 case 0x20 ... 0x23: and: /* and */
1258 emulate_2op_SrcV("and", src, dst, _regs.eflags);
1259 break;
1261 case 0x2c ... 0x2d: /* sub imm,%%eax */
1262 dst.reg = (unsigned long *)&_regs.eax;
1263 dst.val = _regs.eax;
1264 case 0x28 ... 0x2b: sub: /* sub */
1265 emulate_2op_SrcV("sub", src, dst, _regs.eflags);
1266 break;
1268 case 0x34 ... 0x35: /* xor imm,%%eax */
1269 dst.reg = (unsigned long *)&_regs.eax;
1270 dst.val = _regs.eax;
1271 case 0x30 ... 0x33: xor: /* xor */
1272 emulate_2op_SrcV("xor", src, dst, _regs.eflags);
1273 break;
1275 case 0x3c ... 0x3d: /* cmp imm,%%eax */
1276 dst.reg = (unsigned long *)&_regs.eax;
1277 dst.val = _regs.eax;
1278 case 0x38 ... 0x3b: cmp: /* cmp */
1279 emulate_2op_SrcV("cmp", src, dst, _regs.eflags);
1280 break;
1282 case 0x62: /* bound */ {
1283 unsigned long src_val2;
1284 int lb, ub, idx;
1285 generate_exception_if(mode_64bit() || (src.type != OP_MEM), EXC_UD);
1286 if ( (rc = ops->read(src.mem.seg, src.mem.off + op_bytes,
1287 &src_val2, op_bytes, ctxt)) )
1288 goto done;
1289 ub = (op_bytes == 2) ? (int16_t)src_val2 : (int32_t)src_val2;
1290 lb = (op_bytes == 2) ? (int16_t)src.val : (int32_t)src.val;
1291 idx = (op_bytes == 2) ? (int16_t)dst.val : (int32_t)dst.val;
1292 generate_exception_if((idx < lb) || (idx > ub), EXC_BR);
1293 dst.type = OP_NONE;
1294 break;
1297 case 0x63: /* movsxd (x86/64) / arpl (x86/32) */
1298 if ( mode_64bit() )
1300 /* movsxd */
1301 if ( src.type == OP_REG )
1302 src.val = *(int32_t *)src.reg;
1303 else if ( (rc = ops->read(src.mem.seg, src.mem.off,
1304 &src.val, 4, ctxt)) )
1305 goto done;
1306 dst.val = (int32_t)src.val;
1308 else
1310 /* arpl */
1311 uint16_t src_val = dst.val;
1312 dst = src;
1313 _regs.eflags &= ~EFLG_ZF;
1314 _regs.eflags |= ((src_val & 3) > (dst.val & 3)) ? EFLG_ZF : 0;
1315 if ( _regs.eflags & EFLG_ZF )
1316 dst.val = (dst.val & ~3) | (src_val & 3);
1317 else
1318 dst.type = OP_NONE;
1319 generate_exception_if(in_realmode(ctxt, ops), EXC_UD);
1321 break;
1323 case 0x69: /* imul imm16/32 */
1324 case 0x6b: /* imul imm8 */ {
1325 unsigned long reg = *(long *)decode_register(modrm_reg, &_regs, 0);
1326 _regs.eflags &= ~(EFLG_OF|EFLG_CF);
1327 switch ( dst.bytes )
1329 case 2:
1330 dst.val = ((uint32_t)(int16_t)src.val *
1331 (uint32_t)(int16_t)reg);
1332 if ( (int16_t)dst.val != (uint32_t)dst.val )
1333 _regs.eflags |= EFLG_OF|EFLG_CF;
1334 break;
1335 #ifdef __x86_64__
1336 case 4:
1337 dst.val = ((uint64_t)(int32_t)src.val *
1338 (uint64_t)(int32_t)reg);
1339 if ( (int32_t)dst.val != dst.val )
1340 _regs.eflags |= EFLG_OF|EFLG_CF;
1341 break;
1342 #endif
1343 default: {
1344 unsigned long m[2] = { src.val, reg };
1345 if ( imul_dbl(m) )
1346 _regs.eflags |= EFLG_OF|EFLG_CF;
1347 dst.val = m[0];
1348 break;
1351 dst.type = OP_REG;
1352 dst.reg = decode_register(modrm_reg, &_regs, 0);
1353 break;
1356 case 0x82: /* Grp1 (x86/32 only) */
1357 generate_exception_if(mode_64bit(), EXC_UD);
1358 case 0x80: case 0x81: case 0x83: /* Grp1 */
1359 switch ( modrm_reg & 7 )
1361 case 0: goto add;
1362 case 1: goto or;
1363 case 2: goto adc;
1364 case 3: goto sbb;
1365 case 4: goto and;
1366 case 5: goto sub;
1367 case 6: goto xor;
1368 case 7: goto cmp;
1370 break;
1372 case 0xa8 ... 0xa9: /* test imm,%%eax */
1373 dst.reg = (unsigned long *)&_regs.eax;
1374 dst.val = _regs.eax;
1375 case 0x84 ... 0x85: test: /* test */
1376 emulate_2op_SrcV("test", src, dst, _regs.eflags);
1377 break;
1379 case 0x86 ... 0x87: xchg: /* xchg */
1380 /* Write back the register source. */
1381 switch ( dst.bytes )
1383 case 1: *(uint8_t *)src.reg = (uint8_t)dst.val; break;
1384 case 2: *(uint16_t *)src.reg = (uint16_t)dst.val; break;
1385 case 4: *src.reg = (uint32_t)dst.val; break; /* 64b reg: zero-extend */
1386 case 8: *src.reg = dst.val; break;
1388 /* Write back the memory destination with implicit LOCK prefix. */
1389 dst.val = src.val;
1390 lock_prefix = 1;
1391 break;
1393 case 0xc6 ... 0xc7: /* mov (sole member of Grp11) */
1394 generate_exception_if((modrm_reg & 7) != 0, EXC_UD);
1395 case 0x88 ... 0x8b: /* mov */
1396 dst.val = src.val;
1397 break;
1399 case 0x8c: /* mov Sreg,r/m */ {
1400 struct segment_register reg;
1401 enum x86_segment seg = decode_segment(modrm_reg);
1402 generate_exception_if(seg == decode_segment_failed, EXC_UD);
1403 fail_if(ops->read_segment == NULL);
1404 if ( (rc = ops->read_segment(seg, &reg, ctxt)) != 0 )
1405 goto done;
1406 dst.val = reg.sel;
1407 if ( dst.type == OP_MEM )
1408 dst.bytes = 2;
1409 break;
1412 case 0x8e: /* mov r/m,Sreg */ {
1413 enum x86_segment seg = decode_segment(modrm_reg);
1414 generate_exception_if(seg == decode_segment_failed, EXC_UD);
1415 if ( (rc = load_seg(seg, (uint16_t)src.val, ctxt, ops)) != 0 )
1416 goto done;
1417 dst.type = OP_NONE;
1418 break;
1421 case 0x8d: /* lea */
1422 dst.val = ea.mem.off;
1423 break;
1425 case 0x8f: /* pop (sole member of Grp1a) */
1426 generate_exception_if((modrm_reg & 7) != 0, EXC_UD);
1427 /* 64-bit mode: POP defaults to a 64-bit operand. */
1428 if ( mode_64bit() && (dst.bytes == 4) )
1429 dst.bytes = 8;
1430 if ( (rc = ops->read(x86_seg_ss, sp_post_inc(dst.bytes),
1431 &dst.val, dst.bytes, ctxt)) != 0 )
1432 goto done;
1433 break;
1435 case 0xb0 ... 0xb7: /* mov imm8,r8 */
1436 dst.reg = decode_register(
1437 (b & 7) | ((rex_prefix & 1) << 3), &_regs, (rex_prefix == 0));
1438 dst.val = src.val;
1439 break;
1441 case 0xb8 ... 0xbf: /* mov imm{16,32,64},r{16,32,64} */
1442 if ( dst.bytes == 8 ) /* Fetch more bytes to obtain imm64 */
1443 src.val = ((uint32_t)src.val |
1444 ((uint64_t)insn_fetch_type(uint32_t) << 32));
1445 dst.reg = decode_register(
1446 (b & 7) | ((rex_prefix & 1) << 3), &_regs, 0);
1447 dst.val = src.val;
1448 break;
1450 case 0xc0 ... 0xc1: grp2: /* Grp2 */
1451 switch ( modrm_reg & 7 )
1453 case 0: /* rol */
1454 emulate_2op_SrcB("rol", src, dst, _regs.eflags);
1455 break;
1456 case 1: /* ror */
1457 emulate_2op_SrcB("ror", src, dst, _regs.eflags);
1458 break;
1459 case 2: /* rcl */
1460 emulate_2op_SrcB("rcl", src, dst, _regs.eflags);
1461 break;
1462 case 3: /* rcr */
1463 emulate_2op_SrcB("rcr", src, dst, _regs.eflags);
1464 break;
1465 case 4: /* sal/shl */
1466 case 6: /* sal/shl */
1467 emulate_2op_SrcB("sal", src, dst, _regs.eflags);
1468 break;
1469 case 5: /* shr */
1470 emulate_2op_SrcB("shr", src, dst, _regs.eflags);
1471 break;
1472 case 7: /* sar */
1473 emulate_2op_SrcB("sar", src, dst, _regs.eflags);
1474 break;
1476 break;
1478 case 0xc4: /* les */ {
1479 unsigned long sel;
1480 dst.val = x86_seg_es;
1481 les:
1482 generate_exception_if(src.type != OP_MEM, EXC_UD);
1483 if ( (rc = ops->read(src.mem.seg, src.mem.off + src.bytes,
1484 &sel, 2, ctxt)) != 0 )
1485 goto done;
1486 if ( (rc = load_seg(dst.val, (uint16_t)sel, ctxt, ops)) != 0 )
1487 goto done;
1488 dst.val = src.val;
1489 break;
1492 case 0xc5: /* lds */
1493 dst.val = x86_seg_ds;
1494 goto les;
1496 case 0xd0 ... 0xd1: /* Grp2 */
1497 src.val = 1;
1498 goto grp2;
1500 case 0xd2 ... 0xd3: /* Grp2 */
1501 src.val = _regs.ecx;
1502 goto grp2;
1504 case 0xf6 ... 0xf7: /* Grp3 */
1505 switch ( modrm_reg & 7 )
1507 case 0 ... 1: /* test */
1508 /* Special case in Grp3: test has an immediate source operand. */
1509 src.type = OP_IMM;
1510 src.bytes = (d & ByteOp) ? 1 : op_bytes;
1511 if ( src.bytes == 8 ) src.bytes = 4;
1512 switch ( src.bytes )
1514 case 1: src.val = insn_fetch_type(int8_t); break;
1515 case 2: src.val = insn_fetch_type(int16_t); break;
1516 case 4: src.val = insn_fetch_type(int32_t); break;
1518 goto test;
1519 case 2: /* not */
1520 dst.val = ~dst.val;
1521 break;
1522 case 3: /* neg */
1523 emulate_1op("neg", dst, _regs.eflags);
1524 break;
1525 case 4: /* mul */
1526 src = dst;
1527 dst.type = OP_REG;
1528 dst.reg = (unsigned long *)&_regs.eax;
1529 dst.val = *dst.reg;
1530 _regs.eflags &= ~(EFLG_OF|EFLG_CF);
1531 switch ( src.bytes )
1533 case 1:
1534 dst.val *= src.val;
1535 if ( (uint8_t)dst.val != (uint16_t)dst.val )
1536 _regs.eflags |= EFLG_OF|EFLG_CF;
1537 break;
1538 case 2:
1539 dst.val *= src.val;
1540 if ( (uint16_t)dst.val != (uint32_t)dst.val )
1541 _regs.eflags |= EFLG_OF|EFLG_CF;
1542 *(uint16_t *)&_regs.edx = dst.val >> 16;
1543 break;
1544 #ifdef __x86_64__
1545 case 4:
1546 dst.val *= src.val;
1547 if ( (uint32_t)dst.val != dst.val )
1548 _regs.eflags |= EFLG_OF|EFLG_CF;
1549 _regs.edx = (uint32_t)(dst.val >> 32);
1550 break;
1551 #endif
1552 default: {
1553 unsigned long m[2] = { src.val, dst.val };
1554 if ( mul_dbl(m) )
1555 _regs.eflags |= EFLG_OF|EFLG_CF;
1556 _regs.edx = m[1];
1557 dst.val = m[0];
1558 break;
1561 break;
1562 case 5: /* imul */
1563 src = dst;
1564 dst.type = OP_REG;
1565 dst.reg = (unsigned long *)&_regs.eax;
1566 dst.val = *dst.reg;
1567 _regs.eflags &= ~(EFLG_OF|EFLG_CF);
1568 switch ( src.bytes )
1570 case 1:
1571 dst.val = ((uint16_t)(int8_t)src.val *
1572 (uint16_t)(int8_t)dst.val);
1573 if ( (int8_t)dst.val != (uint16_t)dst.val )
1574 _regs.eflags |= EFLG_OF|EFLG_CF;
1575 break;
1576 case 2:
1577 dst.val = ((uint32_t)(int16_t)src.val *
1578 (uint32_t)(int16_t)dst.val);
1579 if ( (int16_t)dst.val != (uint32_t)dst.val )
1580 _regs.eflags |= EFLG_OF|EFLG_CF;
1581 *(uint16_t *)&_regs.edx = dst.val >> 16;
1582 break;
1583 #ifdef __x86_64__
1584 case 4:
1585 dst.val = ((uint64_t)(int32_t)src.val *
1586 (uint64_t)(int32_t)dst.val);
1587 if ( (int32_t)dst.val != dst.val )
1588 _regs.eflags |= EFLG_OF|EFLG_CF;
1589 _regs.edx = (uint32_t)(dst.val >> 32);
1590 break;
1591 #endif
1592 default: {
1593 unsigned long m[2] = { src.val, dst.val };
1594 if ( imul_dbl(m) )
1595 _regs.eflags |= EFLG_OF|EFLG_CF;
1596 _regs.edx = m[1];
1597 dst.val = m[0];
1598 break;
1601 break;
1602 case 6: /* div */ {
1603 unsigned long u[2], v;
1604 src = dst;
1605 dst.type = OP_REG;
1606 dst.reg = (unsigned long *)&_regs.eax;
1607 switch ( src.bytes )
1609 case 1:
1610 u[0] = (uint16_t)_regs.eax;
1611 u[1] = 0;
1612 v = (uint8_t)src.val;
1613 generate_exception_if(
1614 div_dbl(u, v) || ((uint8_t)u[0] != (uint16_t)u[0]),
1615 EXC_DE);
1616 dst.val = (uint8_t)u[0];
1617 ((uint8_t *)&_regs.eax)[1] = u[1];
1618 break;
1619 case 2:
1620 u[0] = ((uint32_t)_regs.edx << 16) | (uint16_t)_regs.eax;
1621 u[1] = 0;
1622 v = (uint16_t)src.val;
1623 generate_exception_if(
1624 div_dbl(u, v) || ((uint16_t)u[0] != (uint32_t)u[0]),
1625 EXC_DE);
1626 dst.val = (uint16_t)u[0];
1627 *(uint16_t *)&_regs.edx = u[1];
1628 break;
1629 #ifdef __x86_64__
1630 case 4:
1631 u[0] = (_regs.edx << 32) | (uint32_t)_regs.eax;
1632 u[1] = 0;
1633 v = (uint32_t)src.val;
1634 generate_exception_if(
1635 div_dbl(u, v) || ((uint32_t)u[0] != u[0]),
1636 EXC_DE);
1637 dst.val = (uint32_t)u[0];
1638 _regs.edx = (uint32_t)u[1];
1639 break;
1640 #endif
1641 default:
1642 u[0] = _regs.eax;
1643 u[1] = _regs.edx;
1644 v = src.val;
1645 generate_exception_if(div_dbl(u, v), EXC_DE);
1646 dst.val = u[0];
1647 _regs.edx = u[1];
1648 break;
1650 break;
1652 case 7: /* idiv */ {
1653 unsigned long u[2], v;
1654 src = dst;
1655 dst.type = OP_REG;
1656 dst.reg = (unsigned long *)&_regs.eax;
1657 switch ( src.bytes )
1659 case 1:
1660 u[0] = (int16_t)_regs.eax;
1661 u[1] = ((long)u[0] < 0) ? ~0UL : 0UL;
1662 v = (int8_t)src.val;
1663 generate_exception_if(
1664 idiv_dbl(u, v) || ((int8_t)u[0] != (int16_t)u[0]),
1665 EXC_DE);
1666 dst.val = (int8_t)u[0];
1667 ((int8_t *)&_regs.eax)[1] = u[1];
1668 break;
1669 case 2:
1670 u[0] = (int32_t)((_regs.edx << 16) | (uint16_t)_regs.eax);
1671 u[1] = ((long)u[0] < 0) ? ~0UL : 0UL;
1672 v = (int16_t)src.val;
1673 generate_exception_if(
1674 idiv_dbl(u, v) || ((int16_t)u[0] != (int32_t)u[0]),
1675 EXC_DE);
1676 dst.val = (int16_t)u[0];
1677 *(int16_t *)&_regs.edx = u[1];
1678 break;
1679 #ifdef __x86_64__
1680 case 4:
1681 u[0] = (_regs.edx << 32) | (uint32_t)_regs.eax;
1682 u[1] = ((long)u[0] < 0) ? ~0UL : 0UL;
1683 v = (int32_t)src.val;
1684 generate_exception_if(
1685 idiv_dbl(u, v) || ((int32_t)u[0] != u[0]),
1686 EXC_DE);
1687 dst.val = (int32_t)u[0];
1688 _regs.edx = (uint32_t)u[1];
1689 break;
1690 #endif
1691 default:
1692 u[0] = _regs.eax;
1693 u[1] = _regs.edx;
1694 v = src.val;
1695 generate_exception_if(idiv_dbl(u, v), EXC_DE);
1696 dst.val = u[0];
1697 _regs.edx = u[1];
1698 break;
1700 break;
1702 default:
1703 goto cannot_emulate;
1705 break;
1707 case 0xfe: /* Grp4 */
1708 generate_exception_if((modrm_reg & 7) >= 2, EXC_UD);
1709 case 0xff: /* Grp5 */
1710 switch ( modrm_reg & 7 )
1712 case 0: /* inc */
1713 emulate_1op("inc", dst, _regs.eflags);
1714 break;
1715 case 1: /* dec */
1716 emulate_1op("dec", dst, _regs.eflags);
1717 break;
1718 case 2: /* call (near) */
1719 case 4: /* jmp (near) */
1720 if ( ((op_bytes = dst.bytes) != 8) && mode_64bit() )
1722 dst.bytes = op_bytes = 8;
1723 if ( dst.type == OP_REG )
1724 dst.val = *dst.reg;
1725 else if ( (rc = ops->read(dst.mem.seg, dst.mem.off,
1726 &dst.val, 8, ctxt)) != 0 )
1727 goto done;
1729 src.val = _regs.eip;
1730 _regs.eip = dst.val;
1731 if ( (modrm_reg & 7) == 2 )
1732 goto push; /* call */
1733 break;
1734 case 3: /* call (far, absolute indirect) */
1735 case 5: /* jmp (far, absolute indirect) */ {
1736 unsigned long sel, eip = dst.val;
1738 if ( (rc = ops->read(dst.mem.seg, dst.mem.off+dst.bytes,
1739 &sel, 2, ctxt)) )
1740 goto done;
1742 if ( (modrm_reg & 7) == 3 ) /* call */
1744 struct segment_register reg;
1745 fail_if(ops->read_segment == NULL);
1746 if ( (rc = ops->read_segment(x86_seg_cs, &reg, ctxt)) ||
1747 (rc = ops->write(x86_seg_ss, sp_pre_dec(op_bytes),
1748 reg.sel, op_bytes, ctxt)) ||
1749 (rc = ops->write(x86_seg_ss, sp_pre_dec(op_bytes),
1750 _regs.eip, op_bytes, ctxt)) )
1751 goto done;
1754 if ( (rc = load_seg(x86_seg_cs, sel, ctxt, ops)) != 0 )
1755 goto done;
1756 _regs.eip = eip;
1758 dst.type = OP_NONE;
1759 break;
1761 case 6: /* push */
1762 /* 64-bit mode: PUSH defaults to a 64-bit operand. */
1763 if ( mode_64bit() && (dst.bytes == 4) )
1765 dst.bytes = 8;
1766 if ( dst.type == OP_REG )
1767 dst.val = *dst.reg;
1768 else if ( (rc = ops->read(dst.mem.seg, dst.mem.off,
1769 &dst.val, 8, ctxt)) != 0 )
1770 goto done;
1772 if ( (rc = ops->write(x86_seg_ss, sp_pre_dec(dst.bytes),
1773 dst.val, dst.bytes, ctxt)) != 0 )
1774 goto done;
1775 dst.type = OP_NONE;
1776 break;
1777 case 7:
1778 generate_exception_if(1, EXC_UD);
1779 default:
1780 goto cannot_emulate;
1782 break;
1785 writeback:
1786 switch ( dst.type )
1788 case OP_REG:
1789 /* The 4-byte case *is* correct: in 64-bit mode we zero-extend. */
1790 switch ( dst.bytes )
1792 case 1: *(uint8_t *)dst.reg = (uint8_t)dst.val; break;
1793 case 2: *(uint16_t *)dst.reg = (uint16_t)dst.val; break;
1794 case 4: *dst.reg = (uint32_t)dst.val; break; /* 64b: zero-ext */
1795 case 8: *dst.reg = dst.val; break;
1797 break;
1798 case OP_MEM:
1799 if ( !(d & Mov) && (dst.orig_val == dst.val) )
1800 /* nothing to do */;
1801 else if ( lock_prefix )
1802 rc = ops->cmpxchg(
1803 dst.mem.seg, dst.mem.off, dst.orig_val,
1804 dst.val, dst.bytes, ctxt);
1805 else
1806 rc = ops->write(
1807 dst.mem.seg, dst.mem.off, dst.val, dst.bytes, ctxt);
1808 if ( rc != 0 )
1809 goto done;
1810 default:
1811 break;
1814 /* Commit shadow register state. */
1815 _regs.eflags &= ~EFLG_RF;
1816 *ctxt->regs = _regs;
1818 if ( (_regs.eflags & EFLG_TF) &&
1819 (rc == X86EMUL_OKAY) &&
1820 (ops->inject_hw_exception != NULL) )
1821 rc = ops->inject_hw_exception(EXC_DB, ctxt) ? : X86EMUL_EXCEPTION;
1823 done:
1824 return rc;
1826 special_insn:
1827 dst.type = OP_NONE;
1829 /*
1830 * The only implicit-operands instructions allowed a LOCK prefix are
1831 * CMPXCHG{8,16}B, MOV CRn, MOV DRn.
1832 */
1833 generate_exception_if(lock_prefix &&
1834 ((b < 0x20) || (b > 0x23)) && /* MOV CRn/DRn */
1835 (b != 0xc7), /* CMPXCHG{8,16}B */
1836 EXC_GP);
1838 if ( twobyte )
1839 goto twobyte_special_insn;
1841 switch ( b )
1843 case 0x06: /* push %%es */ {
1844 struct segment_register reg;
1845 src.val = x86_seg_es;
1846 push_seg:
1847 fail_if(ops->read_segment == NULL);
1848 if ( (rc = ops->read_segment(src.val, &reg, ctxt)) != 0 )
1849 return rc;
1850 /* 64-bit mode: PUSH defaults to a 64-bit operand. */
1851 if ( mode_64bit() && (op_bytes == 4) )
1852 op_bytes = 8;
1853 if ( (rc = ops->write(x86_seg_ss, sp_pre_dec(op_bytes),
1854 reg.sel, op_bytes, ctxt)) != 0 )
1855 goto done;
1856 break;
1859 case 0x07: /* pop %%es */
1860 src.val = x86_seg_es;
1861 pop_seg:
1862 fail_if(ops->write_segment == NULL);
1863 /* 64-bit mode: PUSH defaults to a 64-bit operand. */
1864 if ( mode_64bit() && (op_bytes == 4) )
1865 op_bytes = 8;
1866 if ( (rc = ops->read(x86_seg_ss, sp_post_inc(op_bytes),
1867 &dst.val, op_bytes, ctxt)) != 0 )
1868 goto done;
1869 if ( (rc = load_seg(src.val, (uint16_t)dst.val, ctxt, ops)) != 0 )
1870 return rc;
1871 break;
1873 case 0x0e: /* push %%cs */
1874 src.val = x86_seg_cs;
1875 goto push_seg;
1877 case 0x16: /* push %%ss */
1878 src.val = x86_seg_ss;
1879 goto push_seg;
1881 case 0x17: /* pop %%ss */
1882 src.val = x86_seg_ss;
1883 goto pop_seg;
1885 case 0x1e: /* push %%ds */
1886 src.val = x86_seg_ds;
1887 goto push_seg;
1889 case 0x1f: /* pop %%ds */
1890 src.val = x86_seg_ds;
1891 goto pop_seg;
1893 case 0x27: /* daa */ {
1894 uint8_t al = _regs.eax;
1895 unsigned long eflags = _regs.eflags;
1896 generate_exception_if(mode_64bit(), EXC_UD);
1897 _regs.eflags &= ~(EFLG_CF|EFLG_AF);
1898 if ( ((al & 0x0f) > 9) || (eflags & EFLG_AF) )
1900 *(uint8_t *)&_regs.eax += 6;
1901 _regs.eflags |= EFLG_AF;
1903 if ( (al > 0x99) || (eflags & EFLG_CF) )
1905 *(uint8_t *)&_regs.eax += 0x60;
1906 _regs.eflags |= EFLG_CF;
1908 _regs.eflags &= ~(EFLG_SF|EFLG_ZF|EFLG_PF);
1909 _regs.eflags |= ((uint8_t)_regs.eax == 0) ? EFLG_ZF : 0;
1910 _regs.eflags |= (( int8_t)_regs.eax < 0) ? EFLG_SF : 0;
1911 _regs.eflags |= even_parity(_regs.eax) ? EFLG_PF : 0;
1912 break;
1915 case 0x2f: /* das */ {
1916 uint8_t al = _regs.eax;
1917 unsigned long eflags = _regs.eflags;
1918 generate_exception_if(mode_64bit(), EXC_UD);
1919 _regs.eflags &= ~(EFLG_CF|EFLG_AF);
1920 if ( ((al & 0x0f) > 9) || (eflags & EFLG_AF) )
1922 _regs.eflags |= EFLG_AF;
1923 if ( (al < 6) || (eflags & EFLG_CF) )
1924 _regs.eflags |= EFLG_CF;
1925 *(uint8_t *)&_regs.eax -= 6;
1927 if ( (al > 0x99) || (eflags & EFLG_CF) )
1929 *(uint8_t *)&_regs.eax -= 0x60;
1930 _regs.eflags |= EFLG_CF;
1932 _regs.eflags &= ~(EFLG_SF|EFLG_ZF|EFLG_PF);
1933 _regs.eflags |= ((uint8_t)_regs.eax == 0) ? EFLG_ZF : 0;
1934 _regs.eflags |= (( int8_t)_regs.eax < 0) ? EFLG_SF : 0;
1935 _regs.eflags |= even_parity(_regs.eax) ? EFLG_PF : 0;
1936 break;
1939 case 0x37: /* aaa */
1940 case 0x3f: /* aas */
1941 generate_exception_if(mode_64bit(), EXC_UD);
1942 _regs.eflags &= ~EFLG_CF;
1943 if ( ((uint8_t)_regs.eax > 9) || (_regs.eflags & EFLG_AF) )
1945 ((uint8_t *)&_regs.eax)[0] += (b == 0x37) ? 6 : -6;
1946 ((uint8_t *)&_regs.eax)[1] += (b == 0x37) ? 1 : -1;
1947 _regs.eflags |= EFLG_CF | EFLG_AF;
1949 ((uint8_t *)&_regs.eax)[0] &= 0x0f;
1950 break;
1952 case 0x40 ... 0x4f: /* inc/dec reg */
1953 dst.type = OP_REG;
1954 dst.reg = decode_register(b & 7, &_regs, 0);
1955 dst.bytes = op_bytes;
1956 dst.val = *dst.reg;
1957 if ( b & 8 )
1958 emulate_1op("dec", dst, _regs.eflags);
1959 else
1960 emulate_1op("inc", dst, _regs.eflags);
1961 break;
1963 case 0x50 ... 0x57: /* push reg */
1964 src.val = *(unsigned long *)decode_register(
1965 (b & 7) | ((rex_prefix & 1) << 3), &_regs, 0);
1966 goto push;
1968 case 0x58 ... 0x5f: /* pop reg */
1969 dst.type = OP_REG;
1970 dst.reg = decode_register(
1971 (b & 7) | ((rex_prefix & 1) << 3), &_regs, 0);
1972 dst.bytes = op_bytes;
1973 if ( mode_64bit() && (dst.bytes == 4) )
1974 dst.bytes = 8;
1975 if ( (rc = ops->read(x86_seg_ss, sp_post_inc(dst.bytes),
1976 &dst.val, dst.bytes, ctxt)) != 0 )
1977 goto done;
1978 break;
1980 case 0x60: /* pusha */ {
1981 int i;
1982 unsigned long regs[] = {
1983 _regs.eax, _regs.ecx, _regs.edx, _regs.ebx,
1984 _regs.esp, _regs.ebp, _regs.esi, _regs.edi };
1985 generate_exception_if(mode_64bit(), EXC_UD);
1986 for ( i = 0; i < 8; i++ )
1987 if ( (rc = ops->write(x86_seg_ss, sp_pre_dec(op_bytes),
1988 regs[i], op_bytes, ctxt)) != 0 )
1989 goto done;
1990 break;
1993 case 0x61: /* popa */ {
1994 int i;
1995 unsigned long dummy_esp, *regs[] = {
1996 (unsigned long *)&_regs.edi, (unsigned long *)&_regs.esi,
1997 (unsigned long *)&_regs.ebp, (unsigned long *)&dummy_esp,
1998 (unsigned long *)&_regs.ebx, (unsigned long *)&_regs.edx,
1999 (unsigned long *)&_regs.ecx, (unsigned long *)&_regs.eax };
2000 generate_exception_if(mode_64bit(), EXC_UD);
2001 for ( i = 0; i < 8; i++ )
2002 if ( (rc = ops->read(x86_seg_ss, sp_post_inc(op_bytes),
2003 regs[i], op_bytes, ctxt)) != 0 )
2004 goto done;
2005 break;
2008 case 0x68: /* push imm{16,32,64} */
2009 src.val = ((op_bytes == 2)
2010 ? (int32_t)insn_fetch_type(int16_t)
2011 : insn_fetch_type(int32_t));
2012 goto push;
2014 case 0x6a: /* push imm8 */
2015 src.val = insn_fetch_type(int8_t);
2016 push:
2017 d |= Mov; /* force writeback */
2018 dst.type = OP_MEM;
2019 dst.bytes = op_bytes;
2020 if ( mode_64bit() && (dst.bytes == 4) )
2021 dst.bytes = 8;
2022 dst.val = src.val;
2023 dst.mem.seg = x86_seg_ss;
2024 dst.mem.off = sp_pre_dec(dst.bytes);
2025 break;
2027 case 0x6c ... 0x6d: /* ins %dx,%es:%edi */
2028 handle_rep_prefix();
2029 generate_exception_if(!mode_iopl(), EXC_GP);
2030 dst.type = OP_MEM;
2031 dst.bytes = !(b & 1) ? 1 : (op_bytes == 8) ? 4 : op_bytes;
2032 dst.mem.seg = x86_seg_es;
2033 dst.mem.off = truncate_ea(_regs.edi);
2034 fail_if(ops->read_io == NULL);
2035 if ( (rc = ops->read_io((uint16_t)_regs.edx, dst.bytes,
2036 &dst.val, ctxt)) != 0 )
2037 goto done;
2038 register_address_increment(
2039 _regs.edi, (_regs.eflags & EFLG_DF) ? -dst.bytes : dst.bytes);
2040 break;
2042 case 0x6e ... 0x6f: /* outs %esi,%dx */
2043 handle_rep_prefix();
2044 generate_exception_if(!mode_iopl(), EXC_GP);
2045 dst.bytes = !(b & 1) ? 1 : (op_bytes == 8) ? 4 : op_bytes;
2046 if ( (rc = ops->read(ea.mem.seg, truncate_ea(_regs.esi),
2047 &dst.val, dst.bytes, ctxt)) != 0 )
2048 goto done;
2049 fail_if(ops->write_io == NULL);
2050 if ( (rc = ops->write_io((uint16_t)_regs.edx, dst.bytes,
2051 dst.val, ctxt)) != 0 )
2052 goto done;
2053 register_address_increment(
2054 _regs.esi, (_regs.eflags & EFLG_DF) ? -dst.bytes : dst.bytes);
2055 break;
2057 case 0x70 ... 0x7f: /* jcc (short) */ {
2058 int rel = insn_fetch_type(int8_t);
2059 if ( test_cc(b, _regs.eflags) )
2060 jmp_rel(rel);
2061 break;
2064 case 0x90: /* nop / xchg %%r8,%%rax */
2065 if ( !(rex_prefix & 1) )
2066 break; /* nop */
2068 case 0x91 ... 0x97: /* xchg reg,%%rax */
2069 src.type = dst.type = OP_REG;
2070 src.bytes = dst.bytes = op_bytes;
2071 src.reg = (unsigned long *)&_regs.eax;
2072 src.val = *src.reg;
2073 dst.reg = decode_register(
2074 (b & 7) | ((rex_prefix & 1) << 3), &_regs, 0);
2075 dst.val = *dst.reg;
2076 goto xchg;
2078 case 0x98: /* cbw/cwde/cdqe */
2079 switch ( op_bytes )
2081 case 2: *(int16_t *)&_regs.eax = (int8_t)_regs.eax; break; /* cbw */
2082 case 4: _regs.eax = (uint32_t)(int16_t)_regs.eax; break; /* cwde */
2083 case 8: _regs.eax = (int32_t)_regs.eax; break; /* cdqe */
2085 break;
2087 case 0x99: /* cwd/cdq/cqo */
2088 switch ( op_bytes )
2090 case 2:
2091 *(int16_t *)&_regs.edx = ((int16_t)_regs.eax < 0) ? -1 : 0;
2092 break;
2093 case 4:
2094 _regs.edx = (uint32_t)(((int32_t)_regs.eax < 0) ? -1 : 0);
2095 break;
2096 case 8:
2097 _regs.edx = (_regs.eax < 0) ? -1 : 0;
2098 break;
2100 break;
2102 case 0x9a: /* call (far, absolute) */ {
2103 struct segment_register reg;
2104 uint16_t sel;
2105 uint32_t eip;
2107 fail_if(ops->read_segment == NULL);
2108 generate_exception_if(mode_64bit(), EXC_UD);
2110 eip = insn_fetch_bytes(op_bytes);
2111 sel = insn_fetch_type(uint16_t);
2113 if ( (rc = ops->read_segment(x86_seg_cs, &reg, ctxt)) ||
2114 (rc = ops->write(x86_seg_ss, sp_pre_dec(op_bytes),
2115 reg.sel, op_bytes, ctxt)) ||
2116 (rc = ops->write(x86_seg_ss, sp_pre_dec(op_bytes),
2117 _regs.eip, op_bytes, ctxt)) )
2118 goto done;
2120 if ( (rc = load_seg(x86_seg_cs, sel, ctxt, ops)) != 0 )
2121 goto done;
2122 _regs.eip = eip;
2123 break;
2126 case 0x9c: /* pushf */
2127 src.val = _regs.eflags;
2128 goto push;
2130 case 0x9d: /* popf */ {
2131 uint32_t mask = EFLG_VIP | EFLG_VIF | EFLG_VM;
2132 if ( !mode_iopl() )
2133 mask |= EFLG_IOPL;
2134 fail_if(ops->write_rflags == NULL);
2135 /* 64-bit mode: POP defaults to a 64-bit operand. */
2136 if ( mode_64bit() && (op_bytes == 4) )
2137 op_bytes = 8;
2138 if ( (rc = ops->read(x86_seg_ss, sp_post_inc(op_bytes),
2139 &dst.val, op_bytes, ctxt)) != 0 )
2140 goto done;
2141 if ( op_bytes == 2 )
2142 dst.val = (uint16_t)dst.val | (_regs.eflags & 0xffff0000u);
2143 dst.val &= 0x257fd5;
2144 _regs.eflags &= mask;
2145 _regs.eflags |= (uint32_t)(dst.val & ~mask) | 0x02;
2146 if ( (rc = ops->write_rflags(_regs.eflags, ctxt)) != 0 )
2147 goto done;
2148 break;
2151 case 0x9e: /* sahf */
2152 *(uint8_t *)_regs.eflags = (((uint8_t *)&_regs.eax)[1] & 0xd7) | 0x02;
2153 break;
    case 0x9f: /* lahf */
        /* Load the low byte of EFLAGS into AH, fixed bits forced. */
        ((uint8_t *)&_regs.eax)[1] = (_regs.eflags & 0xd7) | 0x02;
        break;

    case 0xa0 ... 0xa1: /* mov mem.offs,{%al,%ax,%eax,%rax} */
        /* Source EA is not encoded via ModRM: a moffs follows the opcode. */
        dst.type = OP_REG;
        dst.reg = (unsigned long *)&_regs.eax;
        dst.bytes = (d & ByteOp) ? 1 : op_bytes;
        if ( (rc = ops->read(ea.mem.seg, insn_fetch_bytes(ad_bytes),
                             &dst.val, dst.bytes, ctxt)) != 0 )
            goto done;
        break;

    case 0xa2 ... 0xa3: /* mov {%al,%ax,%eax,%rax},mem.offs */
        /* Destination EA is not encoded via ModRM. */
        dst.type = OP_MEM;
        dst.mem.seg = ea.mem.seg;
        dst.mem.off = insn_fetch_bytes(ad_bytes);
        dst.bytes = (d & ByteOp) ? 1 : op_bytes;
        dst.val = (unsigned long)_regs.eax;
        break;
    case 0xa4 ... 0xa5: /* movs */
        /* Copy one element from seg:ESI to ES:EDI. */
        handle_rep_prefix();
        dst.type = OP_MEM;
        dst.bytes = (d & ByteOp) ? 1 : op_bytes;
        dst.mem.seg = x86_seg_es;
        dst.mem.off = truncate_ea(_regs.edi);
        if ( (rc = ops->read(ea.mem.seg, truncate_ea(_regs.esi),
                             &dst.val, dst.bytes, ctxt)) != 0 )
            goto done;
        /* Advance (or retreat, if DF set) both string pointers. */
        register_address_increment(
            _regs.esi, (_regs.eflags & EFLG_DF) ? -dst.bytes : dst.bytes);
        register_address_increment(
            _regs.edi, (_regs.eflags & EFLG_DF) ? -dst.bytes : dst.bytes);
        break;

    case 0xa6 ... 0xa7: /* cmps */ {
        unsigned long next_eip = _regs.eip;
        handle_rep_prefix();
        src.bytes = dst.bytes = (d & ByteOp) ? 1 : op_bytes;
        if ( (rc = ops->read(ea.mem.seg, truncate_ea(_regs.esi),
                             &dst.val, dst.bytes, ctxt)) ||
             (rc = ops->read(x86_seg_es, truncate_ea(_regs.edi),
                             &src.val, src.bytes, ctxt)) )
            goto done;
        register_address_increment(
            _regs.esi, (_regs.eflags & EFLG_DF) ? -dst.bytes : dst.bytes);
        register_address_increment(
            _regs.edi, (_regs.eflags & EFLG_DF) ? -src.bytes : src.bytes);
        /* cmp: dst - src ==> src=*%%edi,dst=*%%esi ==> *%%esi - *%%edi */
        emulate_2op_SrcV("cmp", src, dst, _regs.eflags);
        /* REPE stops when ZF is clear; REPNE stops when ZF is set. */
        if ( ((rep_prefix == REPE_PREFIX) && !(_regs.eflags & EFLG_ZF)) ||
             ((rep_prefix == REPNE_PREFIX) && (_regs.eflags & EFLG_ZF)) )
            _regs.eip = next_eip;
        break;
    }
    case 0xaa ... 0xab: /* stos */
        /* Store AL/AX/EAX/RAX to ES:EDI. */
        handle_rep_prefix();
        dst.type = OP_MEM;
        dst.bytes = (d & ByteOp) ? 1 : op_bytes;
        dst.mem.seg = x86_seg_es;
        dst.mem.off = truncate_ea(_regs.edi);
        dst.val = _regs.eax;
        register_address_increment(
            _regs.edi, (_regs.eflags & EFLG_DF) ? -dst.bytes : dst.bytes);
        break;

    case 0xac ... 0xad: /* lods */
        /* Load AL/AX/EAX/RAX from seg:ESI. */
        handle_rep_prefix();
        dst.type = OP_REG;
        dst.bytes = (d & ByteOp) ? 1 : op_bytes;
        dst.reg = (unsigned long *)&_regs.eax;
        if ( (rc = ops->read(ea.mem.seg, truncate_ea(_regs.esi),
                             &dst.val, dst.bytes, ctxt)) != 0 )
            goto done;
        register_address_increment(
            _regs.esi, (_regs.eflags & EFLG_DF) ? -dst.bytes : dst.bytes);
        break;

    case 0xae ... 0xaf: /* scas */ {
        unsigned long next_eip = _regs.eip;
        handle_rep_prefix();
        src.bytes = dst.bytes = (d & ByteOp) ? 1 : op_bytes;
        dst.val = _regs.eax;
        if ( (rc = ops->read(x86_seg_es, truncate_ea(_regs.edi),
                             &src.val, src.bytes, ctxt)) != 0 )
            goto done;
        register_address_increment(
            _regs.edi, (_regs.eflags & EFLG_DF) ? -src.bytes : src.bytes);
        /* cmp: dst - src ==> src=*%%edi,dst=%%eax ==> %%eax - *%%edi */
        emulate_2op_SrcV("cmp", src, dst, _regs.eflags);
        /* REPE stops when ZF is clear; REPNE stops when ZF is set. */
        if ( ((rep_prefix == REPE_PREFIX) && !(_regs.eflags & EFLG_ZF)) ||
             ((rep_prefix == REPNE_PREFIX) && (_regs.eflags & EFLG_ZF)) )
            _regs.eip = next_eip;
        break;
    }
    case 0xc2: /* ret imm16 (near) */
    case 0xc3: /* ret (near) */ {
        /* Optional imm16 is extra stack to release after the pop. */
        int offset = (b == 0xc2) ? insn_fetch_type(uint16_t) : 0;
        op_bytes = mode_64bit() ? 8 : op_bytes;
        if ( (rc = ops->read(x86_seg_ss, sp_post_inc(op_bytes + offset),
                             &dst.val, op_bytes, ctxt)) != 0 )
            goto done;
        _regs.eip = dst.val;
        break;
    }

    case 0xca: /* ret imm16 (far) */
    case 0xcb: /* ret (far) */ {
        int offset = (b == 0xca) ? insn_fetch_type(uint16_t) : 0;
        op_bytes = mode_64bit() ? 8 : op_bytes;
        /* Pop EIP, then CS (releasing imm16 bytes), then load the new CS. */
        if ( (rc = ops->read(x86_seg_ss, sp_post_inc(op_bytes),
                             &dst.val, op_bytes, ctxt)) ||
             (rc = ops->read(x86_seg_ss, sp_post_inc(op_bytes + offset),
                             &src.val, op_bytes, ctxt)) ||
             (rc = load_seg(x86_seg_cs, (uint16_t)src.val, ctxt, ops)) )
            goto done;
        _regs.eip = dst.val;
        break;
    }
    case 0xcc: /* int3 */
        src.val = EXC_BP;
        goto swint;

    case 0xcd: /* int imm8 */
        src.val = insn_fetch_type(uint8_t);
    swint:
        /* Delegate software-interrupt injection to the callback. */
        fail_if(ops->inject_sw_interrupt == NULL);
        rc = ops->inject_sw_interrupt(src.val, _regs.eip - ctxt->regs->eip,
                                      ctxt) ? : X86EMUL_EXCEPTION;
        goto done;

    case 0xce: /* into */
        /* INTO is invalid in 64-bit mode; raises #OF only when OF is set. */
        generate_exception_if(mode_64bit(), EXC_UD);
        if ( !(_regs.eflags & EFLG_OF) )
            break;
        src.val = EXC_OF;
        goto swint;
    case 0xcf: /* iret */ {
        unsigned long cs, eip, eflags;
        /* Flag bits the guest may not change at its privilege level. */
        uint32_t mask = EFLG_VIP | EFLG_VIF | EFLG_VM;
        if ( !mode_iopl() )
            mask |= EFLG_IOPL;
        /* Only real-mode IRET is emulated here. */
        fail_if(!in_realmode(ctxt, ops));
        fail_if(ops->write_rflags == NULL);
        /* Pop EIP, CS and EFLAGS, in that order. */
        if ( (rc = ops->read(x86_seg_ss, sp_post_inc(op_bytes),
                             &eip, op_bytes, ctxt)) ||
             (rc = ops->read(x86_seg_ss, sp_post_inc(op_bytes),
                             &cs, op_bytes, ctxt)) ||
             (rc = ops->read(x86_seg_ss, sp_post_inc(op_bytes),
                             &eflags, op_bytes, ctxt)) )
            goto done;
        /* 16-bit IRET leaves the upper flag bits unmodified. */
        if ( op_bytes == 2 )
            eflags = (uint16_t)eflags | (_regs.eflags & 0xffff0000u);
        eflags &= 0x257fd5; /* discard reserved flag bits */
        _regs.eflags &= mask;
        _regs.eflags |= (uint32_t)(eflags & ~mask) | 0x02;
        if ( (rc = ops->write_rflags(_regs.eflags, ctxt)) != 0 )
            goto done;
        _regs.eip = eip;
        if ( (rc = load_seg(x86_seg_cs, (uint16_t)cs, ctxt, ops)) != 0 )
            goto done;
        break;
    }
    case 0xd4: /* aam */ {
        unsigned int base = insn_fetch_type(uint8_t);
        uint8_t al = _regs.eax;
        generate_exception_if(mode_64bit(), EXC_UD);
        generate_exception_if(base == 0, EXC_DE);
        /* AH = AL / base; AL = AL % base. */
        *(uint16_t *)&_regs.eax = ((al / base) << 8) | (al % base);
        /* SF/ZF/PF reflect the new AL. */
        _regs.eflags &= ~(EFLG_SF|EFLG_ZF|EFLG_PF);
        _regs.eflags |= ((uint8_t)_regs.eax == 0) ? EFLG_ZF : 0;
        _regs.eflags |= (( int8_t)_regs.eax < 0) ? EFLG_SF : 0;
        _regs.eflags |= even_parity(_regs.eax) ? EFLG_PF : 0;
        break;
    }

    case 0xd5: /* aad */ {
        unsigned int base = insn_fetch_type(uint8_t);
        uint16_t ax = _regs.eax;
        generate_exception_if(mode_64bit(), EXC_UD);
        /* AL = AL + AH*base; AH = 0. */
        *(uint16_t *)&_regs.eax = (uint8_t)(ax + ((ax >> 8) * base));
        _regs.eflags &= ~(EFLG_SF|EFLG_ZF|EFLG_PF);
        _regs.eflags |= ((uint8_t)_regs.eax == 0) ? EFLG_ZF : 0;
        _regs.eflags |= (( int8_t)_regs.eax < 0) ? EFLG_SF : 0;
        _regs.eflags |= even_parity(_regs.eax) ? EFLG_PF : 0;
        break;
    }

    case 0xd6: /* salc */
        /* Undocumented insn: AL = CF ? 0xff : 0x00. */
        generate_exception_if(mode_64bit(), EXC_UD);
        *(uint8_t *)&_regs.eax = (_regs.eflags & EFLG_CF) ? 0xff : 0x00;
        break;

    case 0xd7: /* xlat */ {
        /* AL = byte at seg:(EBX + zero-extended AL). */
        unsigned long al = (uint8_t)_regs.eax;
        if ( (rc = ops->read(ea.mem.seg, truncate_ea(_regs.ebx + al),
                             &al, 1, ctxt)) != 0 )
            goto done;
        *(uint8_t *)&_regs.eax = al;
        break;
    }
    case 0xe0 ... 0xe2: /* loop{,z,nz} */ {
        int rel = insn_fetch_type(int8_t);
        int do_jmp = !(_regs.eflags & EFLG_ZF); /* loopnz */
        if ( b == 0xe1 )
            do_jmp = !do_jmp; /* loopz */
        else if ( b == 0xe2 )
            do_jmp = 1; /* loop */
        /* Decrement the count register at the current address size. */
        switch ( ad_bytes )
        {
        case 2:
            do_jmp &= --(*(uint16_t *)&_regs.ecx) != 0;
            break;
        case 4:
            do_jmp &= --(*(uint32_t *)&_regs.ecx) != 0;
            _regs.ecx = (uint32_t)_regs.ecx; /* zero extend in x86/64 mode */
            break;
        default: /* case 8: */
            do_jmp &= --_regs.ecx != 0;
            break;
        }
        if ( do_jmp )
            jmp_rel(rel);
        break;
    }

    case 0xe3: /* jcxz/jecxz (short) */ {
        int rel = insn_fetch_type(int8_t);
        /* Branch if the count register, at the current address size, is 0. */
        if ( (ad_bytes == 2) ? !(uint16_t)_regs.ecx :
             (ad_bytes == 4) ? !(uint32_t)_regs.ecx : !_regs.ecx )
            jmp_rel(rel);
        break;
    }
    case 0xe4: /* in imm8,%al */
    case 0xe5: /* in imm8,%eax */
    case 0xe6: /* out %al,imm8 */
    case 0xe7: /* out %eax,imm8 */
    case 0xec: /* in %dx,%al */
    case 0xed: /* in %dx,%eax */
    case 0xee: /* out %al,%dx */
    case 0xef: /* out %eax,%dx */ {
        /* Port is an imm8 for opcodes below 0xe8, otherwise comes from DX. */
        unsigned int port = ((b < 0xe8)
                             ? insn_fetch_type(uint8_t)
                             : (uint16_t)_regs.edx);
        generate_exception_if(!mode_iopl(), EXC_GP);
        /* Opcode bit 0 selects byte vs word/dword; no 8-byte port I/O. */
        op_bytes = !(b & 1) ? 1 : (op_bytes == 8) ? 4 : op_bytes;
        if ( b & 2 )
        {
            /* out */
            fail_if(ops->write_io == NULL);
            rc = ops->write_io(port, op_bytes, _regs.eax, ctxt);
        }
        else
        {
            /* in */
            dst.type = OP_REG;
            dst.bytes = op_bytes;
            dst.reg = (unsigned long *)&_regs.eax;
            fail_if(ops->read_io == NULL);
            rc = ops->read_io(port, dst.bytes, &dst.val, ctxt);
        }
        if ( rc != 0 )
            goto done;
        break;
    }
    case 0xe8: /* call (near) */ {
        /* rel16 only with a 16-bit operand size outside 64-bit mode. */
        int rel = (((op_bytes == 2) && !mode_64bit())
                   ? (int32_t)insn_fetch_type(int16_t)
                   : insn_fetch_type(int32_t));
        op_bytes = mode_64bit() ? 8 : op_bytes;
        /* Push the return address, then branch. */
        src.val = _regs.eip;
        jmp_rel(rel);
        goto push;
    }

    case 0xe9: /* jmp (near) */ {
        int rel = (((op_bytes == 2) && !mode_64bit())
                   ? (int32_t)insn_fetch_type(int16_t)
                   : insn_fetch_type(int32_t));
        jmp_rel(rel);
        break;
    }

    case 0xea: /* jmp (far, absolute) */ {
        uint16_t sel;
        uint32_t eip;
        generate_exception_if(mode_64bit(), EXC_UD);
        eip = insn_fetch_bytes(op_bytes);
        sel = insn_fetch_type(uint16_t);
        if ( (rc = load_seg(x86_seg_cs, sel, ctxt, ops)) != 0 )
            goto done;
        _regs.eip = eip;
        break;
    }

    case 0xeb: /* jmp (short) */
        jmp_rel(insn_fetch_type(int8_t));
        break;
    case 0xf1: /* int1 (icebp) */
        src.val = EXC_DB;
        goto swint;

    case 0xf4: /* hlt */
        fail_if(ops->hlt == NULL);
        if ( (rc = ops->hlt(ctxt)) != 0 )
            goto done;
        break;

    case 0xf5: /* cmc */
        _regs.eflags ^= EFLG_CF;
        break;

    case 0xf8: /* clc */
        _regs.eflags &= ~EFLG_CF;
        break;

    case 0xf9: /* stc */
        _regs.eflags |= EFLG_CF;
        break;

    case 0xfa: /* cli */
    case 0xfb: /* sti */
        /* IF may only be toggled with sufficient I/O privilege. */
        generate_exception_if(!mode_iopl(), EXC_GP);
        fail_if(ops->write_rflags == NULL);
        _regs.eflags &= ~EFLG_IF;
        if ( b == 0xfb ) /* sti */
            _regs.eflags |= EFLG_IF;
        if ( (rc = ops->write_rflags(_regs.eflags, ctxt)) != 0 )
            goto done;
        break;

    case 0xfc: /* cld */
        _regs.eflags &= ~EFLG_DF;
        break;

    case 0xfd: /* std */
        _regs.eflags |= EFLG_DF;
        break;
    }

    goto writeback;
 twobyte_insn:
    switch ( b )
    {
    case 0x40 ... 0x4f: /* cmovcc */
        /* Move unconditionally; suppress writeback if the cc is false. */
        dst.val = src.val;
        if ( !test_cc(b, _regs.eflags) )
            dst.type = OP_NONE;
        break;

    case 0x90 ... 0x9f: /* setcc */
        dst.val = test_cc(b, _regs.eflags);
        break;

    case 0xb0 ... 0xb1: /* cmpxchg */
        /* Save real source value, then compare EAX against destination. */
        src.orig_val = src.val;
        src.val = _regs.eax;
        emulate_2op_SrcV("cmp", src, dst, _regs.eflags);
        /* Always write back. The question is: where to? */
        d |= Mov;
        if ( _regs.eflags & EFLG_ZF )
        {
            /* Success: write back to memory. */
            dst.val = src.orig_val;
        }
        else
        {
            /* Failure: write the value we saw to EAX. */
            dst.type = OP_REG;
            dst.reg = (unsigned long *)&_regs.eax;
        }
        break;

    case 0xa3: bt: /* bt */
        emulate_2op_SrcV_nobyte("bt", src, dst, _regs.eflags);
        break;

    case 0xb3: btr: /* btr */
        emulate_2op_SrcV_nobyte("btr", src, dst, _regs.eflags);
        break;

    case 0xab: bts: /* bts */
        emulate_2op_SrcV_nobyte("bts", src, dst, _regs.eflags);
        break;

    case 0xaf: /* imul */
        /* Two-operand IMUL: OF/CF set when the result overflows dst. */
        _regs.eflags &= ~(EFLG_OF|EFLG_CF);
        switch ( dst.bytes )
        {
        case 2:
            dst.val = ((uint32_t)(int16_t)src.val *
                       (uint32_t)(int16_t)dst.val);
            if ( (int16_t)dst.val != (uint32_t)dst.val )
                _regs.eflags |= EFLG_OF|EFLG_CF;
            break;
#ifdef __x86_64__
        case 4:
            dst.val = ((uint64_t)(int32_t)src.val *
                       (uint64_t)(int32_t)dst.val);
            if ( (int32_t)dst.val != dst.val )
                _regs.eflags |= EFLG_OF|EFLG_CF;
            break;
#endif
        default: {
            /* Full-width multiply helper; nonzero return means overflow. */
            unsigned long m[2] = { src.val, dst.val };
            if ( imul_dbl(m) )
                _regs.eflags |= EFLG_OF|EFLG_CF;
            dst.val = m[0];
            break;
        }
        }
        break;
    case 0xb2: /* lss */
        dst.val = x86_seg_ss;
        goto les;

    case 0xb4: /* lfs */
        dst.val = x86_seg_fs;
        goto les;

    case 0xb5: /* lgs */
        dst.val = x86_seg_gs;
        goto les;

    case 0xb6: /* movzx rm8,r{16,32,64} */
        /* Recompute DstReg as we may have decoded AH/BH/CH/DH. */
        dst.reg = decode_register(modrm_reg, &_regs, 0);
        dst.bytes = op_bytes;
        dst.val = (uint8_t)src.val;
        break;

    case 0xbc: /* bsf */ {
        int zf;
        /* Use the host BSF; only ZF is updated by this emulation. */
        asm ( "bsf %2,%0; setz %b1"
              : "=r" (dst.val), "=q" (zf)
              : "r" (src.val), "1" (0) );
        _regs.eflags &= ~EFLG_ZF;
        _regs.eflags |= zf ? EFLG_ZF : 0;
        break;
    }

    case 0xbd: /* bsr */ {
        int zf;
        asm ( "bsr %2,%0; setz %b1"
              : "=r" (dst.val), "=q" (zf)
              : "r" (src.val), "1" (0) );
        _regs.eflags &= ~EFLG_ZF;
        _regs.eflags |= zf ? EFLG_ZF : 0;
        break;
    }

    case 0xb7: /* movzx rm16,r{16,32,64} */
        dst.val = (uint16_t)src.val;
        break;

    case 0xbb: btc: /* btc */
        emulate_2op_SrcV_nobyte("btc", src, dst, _regs.eflags);
        break;

    case 0xba: /* Grp8 */
        /* ModRM reg field selects the bit operation. */
        switch ( modrm_reg & 7 )
        {
        case 4: goto bt;
        case 5: goto bts;
        case 6: goto btr;
        case 7: goto btc;
        default: generate_exception_if(1, EXC_UD);
        }
        break;

    case 0xbe: /* movsx rm8,r{16,32,64} */
        /* Recompute DstReg as we may have decoded AH/BH/CH/DH. */
        dst.reg = decode_register(modrm_reg, &_regs, 0);
        dst.bytes = op_bytes;
        dst.val = (int8_t)src.val;
        break;

    case 0xbf: /* movsx rm16,r{16,32,64} */
        dst.val = (int16_t)src.val;
        break;

    case 0xc0 ... 0xc1: /* xadd */
        /* Write back the register source. */
        switch ( dst.bytes )
        {
        case 1: *(uint8_t *)src.reg = (uint8_t)dst.val; break;
        case 2: *(uint16_t *)src.reg = (uint16_t)dst.val; break;
        case 4: *src.reg = (uint32_t)dst.val; break; /* 64b reg: zero-extend */
        case 8: *src.reg = dst.val; break;
        }
        goto add;
    }

    goto writeback;
 twobyte_special_insn:
    switch ( b )
    {
    case 0x01: /* Grp7 */ {
        struct segment_register reg;

        switch ( modrm_reg & 7 )
        {
        case 0: /* sgdt */
        case 1: /* sidt */
            generate_exception_if(ea.type != OP_MEM, EXC_UD);
            fail_if(ops->read_segment == NULL);
            if ( (rc = ops->read_segment((modrm_reg & 1) ?
                                         x86_seg_idtr : x86_seg_gdtr,
                                         &reg, ctxt)) )
                goto done;
            /* 16-bit operand size stores only 24 base-address bits. */
            if ( op_bytes == 2 )
                reg.base &= 0xffffff;
            /* Pseudo-descriptor format: 2-byte limit followed by base. */
            if ( (rc = ops->write(ea.mem.seg, ea.mem.off+0,
                                  reg.limit, 2, ctxt)) ||
                 (rc = ops->write(ea.mem.seg, ea.mem.off+2,
                                  reg.base, mode_64bit() ? 8 : 4, ctxt)) )
                goto done;
            break;
        case 2: /* lgdt */
        case 3: /* lidt */
            generate_exception_if(ea.type != OP_MEM, EXC_UD);
            fail_if(ops->write_segment == NULL);
            memset(&reg, 0, sizeof(reg));
            if ( (rc = ops->read(ea.mem.seg, ea.mem.off+0,
                                 (unsigned long *)&reg.limit, 2, ctxt)) ||
                 (rc = ops->read(ea.mem.seg, ea.mem.off+2,
                                 (unsigned long *)&reg.base,
                                 mode_64bit() ? 8 : 4, ctxt)) )
                goto done;
            if ( op_bytes == 2 )
                reg.base &= 0xffffff;
            if ( (rc = ops->write_segment((modrm_reg & 1) ?
                                          x86_seg_idtr : x86_seg_gdtr,
                                          &reg, ctxt)) )
                goto done;
            break;
        default:
            goto cannot_emulate;
        }
        break;
    }
    case 0x06: /* clts */
        generate_exception_if(!mode_ring0(), EXC_GP);
        fail_if((ops->read_cr == NULL) || (ops->write_cr == NULL));
        /* Clear CR0.TS (bit 3). */
        if ( (rc = ops->read_cr(0, &dst.val, ctxt)) ||
             (rc = ops->write_cr(0, dst.val&~8, ctxt)) )
            goto done;
        break;

    case 0x08: /* invd */
    case 0x09: /* wbinvd */
        generate_exception_if(!mode_ring0(), EXC_GP);
        fail_if(ops->wbinvd == NULL);
        if ( (rc = ops->wbinvd(ctxt)) != 0 )
            goto done;
        break;

    case 0x0d: /* GrpP (prefetch) */
    case 0x18: /* Grp16 (prefetch/nop) */
    case 0x19 ... 0x1f: /* nop (amd-defined) */
        /* Hint instructions: nothing to emulate. */
        break;
    case 0x20: /* mov cr,reg */
    case 0x21: /* mov dr,reg */
    case 0x22: /* mov reg,cr */
    case 0x23: /* mov reg,dr */
        generate_exception_if(!mode_ring0(), EXC_GP);
        /* REX.B extends the GPR; a LOCK prefix sets reg bit 3 (CR8). */
        modrm_rm |= (rex_prefix & 1) << 3;
        modrm_reg |= lock_prefix << 3;
        if ( b & 2 )
        {
            /* Write to CR/DR. */
            src.val = *(unsigned long *)decode_register(modrm_rm, &_regs, 0);
            if ( !mode_64bit() )
                src.val = (uint32_t)src.val;
            rc = ((b & 1)
                  ? (ops->write_dr
                     ? ops->write_dr(modrm_reg, src.val, ctxt)
                     : X86EMUL_UNHANDLEABLE)
                  : (ops->write_cr
                     ? ops->write_cr(modrm_reg, src.val, ctxt)
                     : X86EMUL_UNHANDLEABLE));
        }
        else
        {
            /* Read from CR/DR. */
            dst.type = OP_REG;
            dst.bytes = mode_64bit() ? 8 : 4;
            dst.reg = decode_register(modrm_rm, &_regs, 0);
            rc = ((b & 1)
                  ? (ops->read_dr
                     ? ops->read_dr(modrm_reg, &dst.val, ctxt)
                     : X86EMUL_UNHANDLEABLE)
                  : (ops->read_cr
                     ? ops->read_cr(modrm_reg, &dst.val, ctxt)
                     : X86EMUL_UNHANDLEABLE));
        }
        if ( rc != 0 )
            goto done;
        break;
    case 0x30: /* wrmsr */ {
        /* MSR value is EDX:EAX; index is in ECX. */
        uint64_t val = ((uint64_t)_regs.edx << 32) | (uint32_t)_regs.eax;
        generate_exception_if(!mode_ring0(), EXC_GP);
        fail_if(ops->write_msr == NULL);
        if ( (rc = ops->write_msr((uint32_t)_regs.ecx, val, ctxt)) != 0 )
            goto done;
        break;
    }

    case 0x32: /* rdmsr */ {
        uint64_t val;
        generate_exception_if(!mode_ring0(), EXC_GP);
        fail_if(ops->read_msr == NULL);
        if ( (rc = ops->read_msr((uint32_t)_regs.ecx, &val, ctxt)) != 0 )
            goto done;
        _regs.edx = (uint32_t)(val >> 32);
        _regs.eax = (uint32_t)(val >> 0);
        break;
    }

    case 0x80 ... 0x8f: /* jcc (near) */ {
        /* rel16 only with a 16-bit operand size outside 64-bit mode. */
        int rel = (((op_bytes == 2) && !mode_64bit())
                   ? (int32_t)insn_fetch_type(int16_t)
                   : insn_fetch_type(int32_t));
        if ( test_cc(b, _regs.eflags) )
            jmp_rel(rel);
        break;
    }

    case 0xa0: /* push %%fs */
        src.val = x86_seg_fs;
        goto push_seg;

    case 0xa1: /* pop %%fs */
        src.val = x86_seg_fs;
        goto pop_seg;

    case 0xa2: /* cpuid */ {
        unsigned int eax = _regs.eax, ebx = _regs.ebx;
        unsigned int ecx = _regs.ecx, edx = _regs.edx;
        fail_if(ops->cpuid == NULL);
        if ( (rc = ops->cpuid(&eax, &ebx, &ecx, &edx, ctxt)) != 0 )
            goto done;
        _regs.eax = eax; _regs.ebx = ebx;
        _regs.ecx = ecx; _regs.edx = edx;
        break;
    }

    case 0xa8: /* push %%gs */
        src.val = x86_seg_gs;
        goto push_seg;

    case 0xa9: /* pop %%gs */
        src.val = x86_seg_gs;
        goto pop_seg;
    case 0xc7: /* Grp9 (cmpxchg8b) */
#if defined(__i386__)
    {
        unsigned long old_lo, old_hi;
        generate_exception_if((modrm_reg & 7) != 1, EXC_UD);
        if ( (rc = ops->read(ea.mem.seg, ea.mem.off+0, &old_lo, 4, ctxt)) ||
             (rc = ops->read(ea.mem.seg, ea.mem.off+4, &old_hi, 4, ctxt)) )
            goto done;
        if ( (old_lo != _regs.eax) || (old_hi != _regs.edx) )
        {
            /* Mismatch: return the observed value in EDX:EAX, clear ZF. */
            _regs.eax = old_lo;
            _regs.edx = old_hi;
            _regs.eflags &= ~EFLG_ZF;
        }
        else if ( ops->cmpxchg8b == NULL )
        {
            rc = X86EMUL_UNHANDLEABLE;
            goto done;
        }
        else
        {
            /* Match: attempt the atomic exchange with ECX:EBX. */
            if ( (rc = ops->cmpxchg8b(ea.mem.seg, ea.mem.off, old_lo, old_hi,
                                      _regs.ebx, _regs.ecx, ctxt)) != 0 )
                goto done;
            _regs.eflags |= EFLG_ZF;
        }
        break;
    }
#elif defined(__x86_64__)
    {
        unsigned long old, new;
        generate_exception_if((modrm_reg & 7) != 1, EXC_UD);
        /* 64-bit build: emulate via a single 8-byte compare-and-swap. */
        if ( (rc = ops->read(ea.mem.seg, ea.mem.off, &old, 8, ctxt)) != 0 )
            goto done;
        if ( ((uint32_t)(old>>0) != (uint32_t)_regs.eax) ||
             ((uint32_t)(old>>32) != (uint32_t)_regs.edx) )
        {
            _regs.eax = (uint32_t)(old>>0);
            _regs.edx = (uint32_t)(old>>32);
            _regs.eflags &= ~EFLG_ZF;
        }
        else
        {
            new = (_regs.ecx<<32)|(uint32_t)_regs.ebx;
            if ( (rc = ops->cmpxchg(ea.mem.seg, ea.mem.off, old,
                                    new, 8, ctxt)) != 0 )
                goto done;
            _regs.eflags |= EFLG_ZF;
        }
        break;
    }
#endif
    case 0xc8 ... 0xcf: /* bswap */
        dst.type = OP_REG;
        dst.reg = decode_register(
            (b & 7) | ((rex_prefix & 1) << 3), &_regs, 0);
        switch ( dst.bytes = op_bytes )
        {
        default: /* case 2: */
            /* Undefined behaviour. Writes zero on all tested CPUs. */
            dst.val = 0;
            break;
        case 4:
#ifdef __x86_64__
            asm ( "bswap %k0" : "=r" (dst.val) : "0" (*dst.reg) );
            break;
        case 8:
#endif
            asm ( "bswap %0" : "=r" (dst.val) : "0" (*dst.reg) );
            break;
        }
        break;
    }

    goto writeback;
 cannot_emulate:
#if 0
    /* Debug aid (normally compiled out): dump the undecodable bytes. */
    gdprintk(XENLOG_DEBUG, "Instr:");
    for ( ea.mem.off = ctxt->regs->eip; ea.mem.off < _regs.eip; ea.mem.off++ )
    {
        unsigned long x;
        ops->insn_fetch(x86_seg_cs, ea.mem.off, &x, 1, ctxt);
        printk(" %02x", (uint8_t)x);
    }
    printk("\n");
#endif
    return X86EMUL_UNHANDLEABLE;