ia64/xen-unstable

view xen/arch/x86/x86_emulate.c @ 14034:4719e34ed7a7

x86: Fix emulation of REP prefix.
Firstly, it should be ignored when used with any opcode for which it
is undefined. Secondly, the count register (rCX) width depends on
address size.
Signed-off-by: Keir Fraser <keir@xensource.com>
author kfraser@localhost.localdomain
date Tue Feb 20 16:57:50 2007 +0000 (2007-02-20)
parents fec1a6975588
children e7994a122aab
line source
1 /******************************************************************************
2 * x86_emulate.c
3 *
4 * Generic x86 (32-bit and 64-bit) instruction decoder and emulator.
5 *
6 * Copyright (c) 2005-2007 Keir Fraser
7 * Copyright (c) 2005-2007 XenSource Inc.
8 *
9 * This program is free software; you can redistribute it and/or modify
10 * it under the terms of the GNU General Public License as published by
11 * the Free Software Foundation; either version 2 of the License, or
12 * (at your option) any later version.
13 *
14 * This program is distributed in the hope that it will be useful,
15 * but WITHOUT ANY WARRANTY; without even the implied warranty of
16 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
17 * GNU General Public License for more details.
18 *
19 * You should have received a copy of the GNU General Public License
20 * along with this program; if not, write to the Free Software
21 * Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
22 */
24 #ifndef __XEN__
25 #include <stddef.h>
26 #include <stdint.h>
27 #include <public/xen.h>
28 #else
29 #include <xen/config.h>
30 #include <xen/types.h>
31 #include <xen/lib.h>
32 #include <asm/regs.h>
33 #undef cmpxchg
34 #endif
35 #include <asm-x86/x86_emulate.h>
37 /* Operand sizes: 8-bit operands or specified/overridden size. */
38 #define ByteOp (1<<0) /* 8-bit operands. */
39 /* Destination operand type. */
40 #define DstBitBase (0<<1) /* Memory operand, bit string. */
41 #define ImplicitOps (1<<1) /* Implicit in opcode. No generic decode. */
42 #define DstReg (2<<1) /* Register operand. */
43 #define DstMem (3<<1) /* Memory operand. */
44 #define DstMask (3<<1)
45 /* Source operand type. */
46 #define SrcNone (0<<3) /* No source operand. */
47 #define SrcImplicit (0<<3) /* Source operand is implicit in the opcode. */
48 #define SrcReg (1<<3) /* Register operand. */
49 #define SrcMem (2<<3) /* Memory operand. */
50 #define SrcMem16 (3<<3) /* Memory operand (16-bit). */
51 #define SrcImm (4<<3) /* Immediate operand. */
52 #define SrcImmByte (5<<3) /* 8-bit sign-extended immediate operand. */
53 #define SrcMask (7<<3)
54 /* Generic ModRM decode. */
55 #define ModRM (1<<6)
56 /* Destination is only written; never read. */
57 #define Mov (1<<7)
/*
 * Per-opcode decode attributes for the one-byte opcode map: operand width
 * (ByteOp), destination and source operand classes (DstReg, DstMem, SrcImm,
 * etc.), whether a ModRM byte follows (ModRM), and whether the destination
 * is write-only (Mov, which also permits skipping the pre-read).
 * A zero entry means the opcode is not handled by this emulator.
 */
static uint8_t opcode_table[256] = {
    /* 0x00 - 0x07 */
    ByteOp|DstMem|SrcReg|ModRM, DstMem|SrcReg|ModRM,
    ByteOp|DstReg|SrcMem|ModRM, DstReg|SrcMem|ModRM,
    ByteOp|DstReg|SrcImm, DstReg|SrcImm, 0, 0,
    /* 0x08 - 0x0F */
    ByteOp|DstMem|SrcReg|ModRM, DstMem|SrcReg|ModRM,
    ByteOp|DstReg|SrcMem|ModRM, DstReg|SrcMem|ModRM,
    ByteOp|DstReg|SrcImm, DstReg|SrcImm, 0, 0,
    /* 0x10 - 0x17 */
    ByteOp|DstMem|SrcReg|ModRM, DstMem|SrcReg|ModRM,
    ByteOp|DstReg|SrcMem|ModRM, DstReg|SrcMem|ModRM,
    ByteOp|DstReg|SrcImm, DstReg|SrcImm, 0, 0,
    /* 0x18 - 0x1F */
    ByteOp|DstMem|SrcReg|ModRM, DstMem|SrcReg|ModRM,
    ByteOp|DstReg|SrcMem|ModRM, DstReg|SrcMem|ModRM,
    ByteOp|DstReg|SrcImm, DstReg|SrcImm, 0, 0,
    /* 0x20 - 0x27 */
    ByteOp|DstMem|SrcReg|ModRM, DstMem|SrcReg|ModRM,
    ByteOp|DstReg|SrcMem|ModRM, DstReg|SrcMem|ModRM,
    ByteOp|DstReg|SrcImm, DstReg|SrcImm, 0, ImplicitOps,
    /* 0x28 - 0x2F */
    ByteOp|DstMem|SrcReg|ModRM, DstMem|SrcReg|ModRM,
    ByteOp|DstReg|SrcMem|ModRM, DstReg|SrcMem|ModRM,
    ByteOp|DstReg|SrcImm, DstReg|SrcImm, 0, ImplicitOps,
    /* 0x30 - 0x37 */
    ByteOp|DstMem|SrcReg|ModRM, DstMem|SrcReg|ModRM,
    ByteOp|DstReg|SrcMem|ModRM, DstReg|SrcMem|ModRM,
    ByteOp|DstReg|SrcImm, DstReg|SrcImm, 0, ImplicitOps,
    /* 0x38 - 0x3F */
    ByteOp|DstMem|SrcReg|ModRM, DstMem|SrcReg|ModRM,
    ByteOp|DstReg|SrcMem|ModRM, DstReg|SrcMem|ModRM,
    ByteOp|DstReg|SrcImm, DstReg|SrcImm, 0, ImplicitOps,
    /* 0x40 - 0x4F */
    ImplicitOps, ImplicitOps, ImplicitOps, ImplicitOps,
    ImplicitOps, ImplicitOps, ImplicitOps, ImplicitOps,
    ImplicitOps, ImplicitOps, ImplicitOps, ImplicitOps,
    ImplicitOps, ImplicitOps, ImplicitOps, ImplicitOps,
    /* 0x50 - 0x5F */
    ImplicitOps|Mov, ImplicitOps|Mov, ImplicitOps|Mov, ImplicitOps|Mov,
    ImplicitOps|Mov, ImplicitOps|Mov, ImplicitOps|Mov, ImplicitOps|Mov,
    ImplicitOps|Mov, ImplicitOps|Mov, ImplicitOps|Mov, ImplicitOps|Mov,
    ImplicitOps|Mov, ImplicitOps|Mov, ImplicitOps|Mov, ImplicitOps|Mov,
    /* 0x60 - 0x67 */
    ImplicitOps, ImplicitOps, DstReg|SrcMem|ModRM, DstReg|SrcMem16|ModRM|Mov,
    0, 0, 0, 0,
    /* 0x68 - 0x6F */
    ImplicitOps|Mov, DstMem|SrcImm|ModRM|Mov,
    ImplicitOps|Mov, DstMem|SrcImmByte|ModRM|Mov,
    ImplicitOps, ImplicitOps, ImplicitOps, ImplicitOps,
    /* 0x70 - 0x77 */
    ImplicitOps, ImplicitOps, ImplicitOps, ImplicitOps,
    ImplicitOps, ImplicitOps, ImplicitOps, ImplicitOps,
    /* 0x78 - 0x7F */
    ImplicitOps, ImplicitOps, ImplicitOps, ImplicitOps,
    ImplicitOps, ImplicitOps, ImplicitOps, ImplicitOps,
    /* 0x80 - 0x87 */
    ByteOp|DstMem|SrcImm|ModRM, DstMem|SrcImm|ModRM,
    ByteOp|DstMem|SrcImm|ModRM, DstMem|SrcImmByte|ModRM,
    ByteOp|DstMem|SrcReg|ModRM, DstMem|SrcReg|ModRM,
    ByteOp|DstMem|SrcReg|ModRM, DstMem|SrcReg|ModRM,
    /* 0x88 - 0x8F */
    ByteOp|DstMem|SrcReg|ModRM|Mov, DstMem|SrcReg|ModRM|Mov,
    ByteOp|DstReg|SrcMem|ModRM|Mov, DstReg|SrcMem|ModRM|Mov,
    0, DstReg|SrcNone|ModRM, 0, DstMem|SrcNone|ModRM|Mov,
    /* 0x90 - 0x97 */
    ImplicitOps, ImplicitOps, ImplicitOps, ImplicitOps,
    ImplicitOps, ImplicitOps, ImplicitOps, ImplicitOps,
    /* 0x98 - 0x9F */
    ImplicitOps, ImplicitOps, 0, 0, 0, 0, ImplicitOps, ImplicitOps,
    /* 0xA0 - 0xA7 */
    ByteOp|ImplicitOps|Mov, ImplicitOps|Mov,
    ByteOp|ImplicitOps|Mov, ImplicitOps|Mov,
    ByteOp|ImplicitOps|Mov, ImplicitOps|Mov, 0, 0,
    /* 0xA8 - 0xAF */
    ByteOp|DstReg|SrcImm, DstReg|SrcImm,
    ByteOp|ImplicitOps|Mov, ImplicitOps|Mov,
    ByteOp|ImplicitOps|Mov, ImplicitOps|Mov, 0, 0,
    /* 0xB0 - 0xB7 */
    ByteOp|DstReg|SrcImm|Mov, ByteOp|DstReg|SrcImm|Mov,
    ByteOp|DstReg|SrcImm|Mov, ByteOp|DstReg|SrcImm|Mov,
    ByteOp|DstReg|SrcImm|Mov, ByteOp|DstReg|SrcImm|Mov,
    ByteOp|DstReg|SrcImm|Mov, ByteOp|DstReg|SrcImm|Mov,
    /* 0xB8 - 0xBF */
    DstReg|SrcImm|Mov, DstReg|SrcImm|Mov, DstReg|SrcImm|Mov, DstReg|SrcImm|Mov,
    DstReg|SrcImm|Mov, DstReg|SrcImm|Mov, DstReg|SrcImm|Mov, DstReg|SrcImm|Mov,
    /* 0xC0 - 0xC7 */
    ByteOp|DstMem|SrcImm|ModRM, DstMem|SrcImmByte|ModRM,
    ImplicitOps, ImplicitOps,
    0, 0, ByteOp|DstMem|SrcImm|ModRM|Mov, DstMem|SrcImm|ModRM|Mov,
    /* 0xC8 - 0xCF */
    0, 0, 0, 0, 0, 0, 0, 0,
    /* 0xD0 - 0xD7 */
    ByteOp|DstMem|SrcImplicit|ModRM, DstMem|SrcImplicit|ModRM,
    ByteOp|DstMem|SrcImplicit|ModRM, DstMem|SrcImplicit|ModRM,
    ImplicitOps, ImplicitOps, ImplicitOps, ImplicitOps,
    /* 0xD8 - 0xDF */
    0, 0, 0, 0, 0, 0, 0, 0,
    /* 0xE0 - 0xE7 */
    ImplicitOps, ImplicitOps, ImplicitOps, ImplicitOps,
    ImplicitOps, ImplicitOps, ImplicitOps, ImplicitOps,
    /* 0xE8 - 0xEF */
    ImplicitOps, ImplicitOps, 0, ImplicitOps,
    ImplicitOps, ImplicitOps, ImplicitOps, ImplicitOps,
    /* 0xF0 - 0xF7 */
    0, 0, 0, 0,
    0, ImplicitOps, ByteOp|DstMem|SrcNone|ModRM, DstMem|SrcNone|ModRM,
    /* 0xF8 - 0xFF */
    ImplicitOps, ImplicitOps, ImplicitOps, ImplicitOps,
    ImplicitOps, ImplicitOps, ByteOp|DstMem|SrcNone|ModRM, DstMem|SrcNone|ModRM
};
/*
 * Decode attributes for the two-byte (0x0F-prefixed) opcode map; same
 * attribute encoding as opcode_table above. A zero entry means the
 * opcode is not handled by this emulator.
 */
static uint8_t twobyte_table[256] = {
    /* 0x00 - 0x07 */
    0, 0, 0, 0, 0, ImplicitOps, 0, 0,
    /* 0x08 - 0x0F */
    ImplicitOps, ImplicitOps, 0, 0, 0, ImplicitOps|ModRM, 0, 0,
    /* 0x10 - 0x17 */
    0, 0, 0, 0, 0, 0, 0, 0,
    /* 0x18 - 0x1F */
    ImplicitOps|ModRM, ImplicitOps|ModRM, ImplicitOps|ModRM, ImplicitOps|ModRM,
    ImplicitOps|ModRM, ImplicitOps|ModRM, ImplicitOps|ModRM, ImplicitOps|ModRM,
    /* 0x20 - 0x27 */
    ImplicitOps|ModRM, ImplicitOps|ModRM, ImplicitOps|ModRM, ImplicitOps|ModRM,
    0, 0, 0, 0,
    /* 0x28 - 0x2F */
    0, 0, 0, 0, 0, 0, 0, 0,
    /* 0x30 - 0x37 */
    ImplicitOps, 0, ImplicitOps, 0, 0, 0, 0, 0,
    /* 0x38 - 0x3F */
    0, 0, 0, 0, 0, 0, 0, 0,
    /* 0x40 - 0x47 */
    DstReg|SrcMem|ModRM|Mov, DstReg|SrcMem|ModRM|Mov,
    DstReg|SrcMem|ModRM|Mov, DstReg|SrcMem|ModRM|Mov,
    DstReg|SrcMem|ModRM|Mov, DstReg|SrcMem|ModRM|Mov,
    DstReg|SrcMem|ModRM|Mov, DstReg|SrcMem|ModRM|Mov,
    /* 0x48 - 0x4F */
    DstReg|SrcMem|ModRM|Mov, DstReg|SrcMem|ModRM|Mov,
    DstReg|SrcMem|ModRM|Mov, DstReg|SrcMem|ModRM|Mov,
    DstReg|SrcMem|ModRM|Mov, DstReg|SrcMem|ModRM|Mov,
    DstReg|SrcMem|ModRM|Mov, DstReg|SrcMem|ModRM|Mov,
    /* 0x50 - 0x5F */
    0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
    /* 0x60 - 0x6F */
    0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
    /* 0x70 - 0x7F */
    0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
    /* 0x80 - 0x87 */
    ImplicitOps, ImplicitOps, ImplicitOps, ImplicitOps,
    ImplicitOps, ImplicitOps, ImplicitOps, ImplicitOps,
    /* 0x88 - 0x8F */
    ImplicitOps, ImplicitOps, ImplicitOps, ImplicitOps,
    ImplicitOps, ImplicitOps, ImplicitOps, ImplicitOps,
    /* 0x90 - 0x97 */
    ByteOp|DstMem|SrcNone|ModRM|Mov, ByteOp|DstMem|SrcNone|ModRM|Mov,
    ByteOp|DstMem|SrcNone|ModRM|Mov, ByteOp|DstMem|SrcNone|ModRM|Mov,
    ByteOp|DstMem|SrcNone|ModRM|Mov, ByteOp|DstMem|SrcNone|ModRM|Mov,
    ByteOp|DstMem|SrcNone|ModRM|Mov, ByteOp|DstMem|SrcNone|ModRM|Mov,
    /* 0x98 - 0x9F */
    ByteOp|DstMem|SrcNone|ModRM|Mov, ByteOp|DstMem|SrcNone|ModRM|Mov,
    ByteOp|DstMem|SrcNone|ModRM|Mov, ByteOp|DstMem|SrcNone|ModRM|Mov,
    ByteOp|DstMem|SrcNone|ModRM|Mov, ByteOp|DstMem|SrcNone|ModRM|Mov,
    ByteOp|DstMem|SrcNone|ModRM|Mov, ByteOp|DstMem|SrcNone|ModRM|Mov,
    /* 0xA0 - 0xA7 */
    0, 0, 0, DstBitBase|SrcReg|ModRM, 0, 0, 0, 0,
    /* 0xA8 - 0xAF */
    0, 0, 0, DstBitBase|SrcReg|ModRM, 0, 0, 0, DstReg|SrcMem|ModRM,
    /* 0xB0 - 0xB7 */
    ByteOp|DstMem|SrcReg|ModRM, DstMem|SrcReg|ModRM,
    0, DstBitBase|SrcReg|ModRM,
    0, 0, ByteOp|DstReg|SrcMem|ModRM|Mov, DstReg|SrcMem16|ModRM|Mov,
    /* 0xB8 - 0xBF */
    0, 0, DstBitBase|SrcImmByte|ModRM, DstBitBase|SrcReg|ModRM,
    DstReg|SrcMem|ModRM, DstReg|SrcMem|ModRM,
    ByteOp|DstReg|SrcMem|ModRM|Mov, DstReg|SrcMem16|ModRM|Mov,
    /* 0xC0 - 0xC7 */
    ByteOp|DstMem|SrcReg|ModRM, DstMem|SrcReg|ModRM, 0, 0,
    0, 0, 0, ImplicitOps|ModRM,
    /* 0xC8 - 0xCF */
    ImplicitOps, ImplicitOps, ImplicitOps, ImplicitOps,
    ImplicitOps, ImplicitOps, ImplicitOps, ImplicitOps,
    /* 0xD0 - 0xDF */
    0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
    /* 0xE0 - 0xEF */
    0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
    /* 0xF0 - 0xFF */
    0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0
};
/* Type, address-of, and value of an instruction's operand. */
struct operand {
    /* Operand kind: register, memory, immediate, or absent. */
    enum { OP_REG, OP_MEM, OP_IMM, OP_NONE } type;
    /* Operand width in bytes (1, 2, 4 or 8). */
    unsigned int bytes;
    /*
     * val: current operand value.
     * orig_val: presumably the value before modification, for write-back
     * comparison — its users are outside this chunk; confirm there.
     */
    unsigned long val, orig_val;
    union {
        /* OP_REG: Pointer to register field. */
        unsigned long *reg;
        /* OP_MEM: Segment and offset. */
        struct {
            enum x86_segment seg;
            unsigned long off;
        } mem;
    };
};
264 /* EFLAGS bit definitions. */
265 #define EFLG_OF (1<<11)
266 #define EFLG_DF (1<<10)
267 #define EFLG_IF (1<<9)
268 #define EFLG_SF (1<<7)
269 #define EFLG_ZF (1<<6)
270 #define EFLG_AF (1<<4)
271 #define EFLG_PF (1<<2)
272 #define EFLG_CF (1<<0)
274 /* Exception definitions. */
275 #define EXC_DE 0
276 #define EXC_BR 5
277 #define EXC_UD 6
278 #define EXC_GP 13
280 /*
281 * Instruction emulation:
282 * Most instructions are emulated directly via a fragment of inline assembly
283 * code. This allows us to save/restore EFLAGS and thus very easily pick up
284 * any modified flags.
285 */
287 #if defined(__x86_64__)
288 #define _LO32 "k" /* force 32-bit operand */
289 #define _STK "%%rsp" /* stack pointer */
290 #elif defined(__i386__)
291 #define _LO32 "" /* force 32-bit operand */
292 #define _STK "%%esp" /* stack pointer */
293 #endif
295 /*
296 * These EFLAGS bits are restored from saved value during emulation, and
297 * any changes are written back to the saved value after emulation.
298 */
299 #define EFLAGS_MASK (EFLG_OF|EFLG_SF|EFLG_ZF|EFLG_AF|EFLG_PF|EFLG_CF)
301 /* Before executing instruction: restore necessary bits in EFLAGS. */
302 #define _PRE_EFLAGS(_sav, _msk, _tmp) \
303 /* EFLAGS = (_sav & _msk) | (EFLAGS & ~_msk); */\
304 "push %"_sav"; " \
305 "movl %"_msk",%"_LO32 _tmp"; " \
306 "andl %"_LO32 _tmp",("_STK"); " \
307 "pushf; " \
308 "notl %"_LO32 _tmp"; " \
309 "andl %"_LO32 _tmp",("_STK"); " \
310 "pop %"_tmp"; " \
311 "orl %"_LO32 _tmp",("_STK"); " \
312 "popf; " \
313 /* _sav &= ~msk; */ \
314 "movl %"_msk",%"_LO32 _tmp"; " \
315 "notl %"_LO32 _tmp"; " \
316 "andl %"_LO32 _tmp",%"_sav"; "
318 /* After executing instruction: write-back necessary bits in EFLAGS. */
319 #define _POST_EFLAGS(_sav, _msk, _tmp) \
320 /* _sav |= EFLAGS & _msk; */ \
321 "pushf; " \
322 "pop %"_tmp"; " \
323 "andl %"_msk",%"_LO32 _tmp"; " \
324 "orl %"_LO32 _tmp",%"_sav"; "
326 /* Raw emulation: instruction has two explicit operands. */
327 #define __emulate_2op_nobyte(_op,_src,_dst,_eflags,_wx,_wy,_lx,_ly,_qx,_qy)\
328 do{ unsigned long _tmp; \
329 switch ( (_dst).bytes ) \
330 { \
331 case 2: \
332 __asm__ __volatile__ ( \
333 _PRE_EFLAGS("0","4","2") \
334 _op"w %"_wx"3,%1; " \
335 _POST_EFLAGS("0","4","2") \
336 : "=m" (_eflags), "=m" ((_dst).val), "=&r" (_tmp) \
337 : _wy ((_src).val), "i" (EFLAGS_MASK), \
338 "m" (_eflags), "m" ((_dst).val) ); \
339 break; \
340 case 4: \
341 __asm__ __volatile__ ( \
342 _PRE_EFLAGS("0","4","2") \
343 _op"l %"_lx"3,%1; " \
344 _POST_EFLAGS("0","4","2") \
345 : "=m" (_eflags), "=m" ((_dst).val), "=&r" (_tmp) \
346 : _ly ((_src).val), "i" (EFLAGS_MASK), \
347 "m" (_eflags), "m" ((_dst).val) ); \
348 break; \
349 case 8: \
350 __emulate_2op_8byte(_op, _src, _dst, _eflags, _qx, _qy); \
351 break; \
352 } \
353 } while (0)
354 #define __emulate_2op(_op,_src,_dst,_eflags,_bx,_by,_wx,_wy,_lx,_ly,_qx,_qy)\
355 do{ unsigned long _tmp; \
356 switch ( (_dst).bytes ) \
357 { \
358 case 1: \
359 __asm__ __volatile__ ( \
360 _PRE_EFLAGS("0","4","2") \
361 _op"b %"_bx"3,%1; " \
362 _POST_EFLAGS("0","4","2") \
363 : "=m" (_eflags), "=m" ((_dst).val), "=&r" (_tmp) \
364 : _by ((_src).val), "i" (EFLAGS_MASK), \
365 "m" (_eflags), "m" ((_dst).val) ); \
366 break; \
367 default: \
368 __emulate_2op_nobyte(_op,_src,_dst,_eflags,_wx,_wy,_lx,_ly,_qx,_qy);\
369 break; \
370 } \
371 } while (0)
372 /* Source operand is byte-sized and may be restricted to just %cl. */
373 #define emulate_2op_SrcB(_op, _src, _dst, _eflags) \
374 __emulate_2op(_op, _src, _dst, _eflags, \
375 "b", "c", "b", "c", "b", "c", "b", "c")
376 /* Source operand is byte, word, long or quad sized. */
377 #define emulate_2op_SrcV(_op, _src, _dst, _eflags) \
378 __emulate_2op(_op, _src, _dst, _eflags, \
379 "b", "q", "w", "r", _LO32, "r", "", "r")
380 /* Source operand is word, long or quad sized. */
381 #define emulate_2op_SrcV_nobyte(_op, _src, _dst, _eflags) \
382 __emulate_2op_nobyte(_op, _src, _dst, _eflags, \
383 "w", "r", _LO32, "r", "", "r")
385 /* Instruction has only one explicit operand (no source operand). */
386 #define emulate_1op(_op,_dst,_eflags) \
387 do{ unsigned long _tmp; \
388 switch ( (_dst).bytes ) \
389 { \
390 case 1: \
391 __asm__ __volatile__ ( \
392 _PRE_EFLAGS("0","3","2") \
393 _op"b %1; " \
394 _POST_EFLAGS("0","3","2") \
395 : "=m" (_eflags), "=m" ((_dst).val), "=&r" (_tmp) \
396 : "i" (EFLAGS_MASK), "m" (_eflags), "m" ((_dst).val) ); \
397 break; \
398 case 2: \
399 __asm__ __volatile__ ( \
400 _PRE_EFLAGS("0","3","2") \
401 _op"w %1; " \
402 _POST_EFLAGS("0","3","2") \
403 : "=m" (_eflags), "=m" ((_dst).val), "=&r" (_tmp) \
404 : "i" (EFLAGS_MASK), "m" (_eflags), "m" ((_dst).val) ); \
405 break; \
406 case 4: \
407 __asm__ __volatile__ ( \
408 _PRE_EFLAGS("0","3","2") \
409 _op"l %1; " \
410 _POST_EFLAGS("0","3","2") \
411 : "=m" (_eflags), "=m" ((_dst).val), "=&r" (_tmp) \
412 : "i" (EFLAGS_MASK), "m" (_eflags), "m" ((_dst).val) ); \
413 break; \
414 case 8: \
415 __emulate_1op_8byte(_op, _dst, _eflags); \
416 break; \
417 } \
418 } while (0)
420 /* Emulate an instruction with quadword operands (x86/64 only). */
421 #if defined(__x86_64__)
422 #define __emulate_2op_8byte(_op, _src, _dst, _eflags, _qx, _qy) \
423 do{ __asm__ __volatile__ ( \
424 _PRE_EFLAGS("0","4","2") \
425 _op"q %"_qx"3,%1; " \
426 _POST_EFLAGS("0","4","2") \
427 : "=m" (_eflags), "=m" ((_dst).val), "=&r" (_tmp) \
428 : _qy ((_src).val), "i" (EFLAGS_MASK), \
429 "m" (_eflags), "m" ((_dst).val) ); \
430 } while (0)
431 #define __emulate_1op_8byte(_op, _dst, _eflags) \
432 do{ __asm__ __volatile__ ( \
433 _PRE_EFLAGS("0","3","2") \
434 _op"q %1; " \
435 _POST_EFLAGS("0","3","2") \
436 : "=m" (_eflags), "=m" ((_dst).val), "=&r" (_tmp) \
437 : "i" (EFLAGS_MASK), "m" (_eflags), "m" ((_dst).val) ); \
438 } while (0)
439 #elif defined(__i386__)
440 #define __emulate_2op_8byte(_op, _src, _dst, _eflags, _qx, _qy)
441 #define __emulate_1op_8byte(_op, _dst, _eflags)
442 #endif /* __i386__ */
444 /* Fetch next part of the instruction being emulated. */
445 #define insn_fetch_bytes(_size) \
446 ({ unsigned long _x, _eip = _regs.eip; \
447 if ( !mode_64bit() ) _eip = (uint32_t)_eip; /* ignore upper dword */ \
448 _regs.eip += (_size); /* real hardware doesn't truncate */ \
449 generate_exception_if((uint8_t)(_regs.eip - ctxt->regs->eip) > 15, \
450 EXC_GP); \
451 rc = ops->insn_fetch(x86_seg_cs, _eip, &_x, (_size), ctxt); \
452 if ( rc ) goto done; \
453 _x; \
454 })
455 #define insn_fetch_type(_type) ((_type)insn_fetch_bytes(sizeof(_type)))
457 #define _truncate_ea(ea, byte_width) \
458 ({ unsigned long __ea = (ea); \
459 unsigned int _width = (byte_width); \
460 ((_width == sizeof(unsigned long)) ? __ea : \
461 (__ea & ((1UL << (_width << 3)) - 1))); \
462 })
463 #define truncate_ea(ea) _truncate_ea((ea), ad_bytes)
465 #define mode_64bit() (def_ad_bytes == 8)
467 #define fail_if(p) \
468 do { \
469 rc = (p) ? X86EMUL_UNHANDLEABLE : 0; \
470 if ( rc ) goto done; \
471 } while (0)
473 /* In future we will be able to generate arbitrary exceptions. */
474 #define generate_exception_if(p, e) fail_if(p)
476 /* To be done... */
477 #define mode_ring0() (0)
478 #define mode_iopl() (0)
/* Given byte has even parity (even number of 1s)? */
static int even_parity(uint8_t v)
{
    uint8_t folded = v;

    /* XOR-fold the byte down so bit 0 holds the parity of all eight bits. */
    folded ^= folded >> 4;
    folded ^= folded >> 2;
    folded ^= folded >> 1;

    /* Bit 0 is 1 for an odd number of set bits; invert to report evenness. */
    return !(folded & 1);
}
488 /* Update address held in a register, based on addressing mode. */
489 #define _register_address_increment(reg, inc, byte_width) \
490 do { \
491 int _inc = (inc); /* signed type ensures sign extension to long */ \
492 unsigned int _width = (byte_width); \
493 if ( _width == sizeof(unsigned long) ) \
494 (reg) += _inc; \
495 else if ( mode_64bit() ) \
496 (reg) = ((reg) + _inc) & ((1UL << (_width << 3)) - 1); \
497 else \
498 (reg) = ((reg) & ~((1UL << (_width << 3)) - 1)) | \
499 (((reg) + _inc) & ((1UL << (_width << 3)) - 1)); \
500 } while (0)
501 #define register_address_increment(reg, inc) \
502 _register_address_increment((reg), (inc), ad_bytes)
504 #define sp_pre_dec(dec) ({ \
505 _register_address_increment(_regs.esp, -(dec), ctxt->sp_size/8); \
506 _truncate_ea(_regs.esp, ctxt->sp_size/8); \
507 })
508 #define sp_post_inc(inc) ({ \
509 unsigned long __esp = _truncate_ea(_regs.esp, ctxt->sp_size/8); \
510 _register_address_increment(_regs.esp, (inc), ctxt->sp_size/8); \
511 __esp; \
512 })
514 #define jmp_rel(rel) \
515 do { \
516 _regs.eip += (int)(rel); \
517 if ( !mode_64bit() ) \
518 _regs.eip = ((op_bytes == 2) \
519 ? (uint16_t)_regs.eip : (uint32_t)_regs.eip); \
520 } while (0)
522 static int __handle_rep_prefix(
523 struct cpu_user_regs *int_regs,
524 struct cpu_user_regs *ext_regs,
525 int ad_bytes)
526 {
527 unsigned long ecx = ((ad_bytes == 2) ? (uint16_t)int_regs->ecx :
528 (ad_bytes == 4) ? (uint32_t)int_regs->ecx :
529 int_regs->ecx);
531 if ( ecx-- == 0 )
532 {
533 ext_regs->eip = int_regs->eip;
534 return 1;
535 }
537 if ( ad_bytes == 2 )
538 *(uint16_t *)&int_regs->ecx = ecx;
539 else if ( ad_bytes == 4 )
540 int_regs->ecx = (uint32_t)ecx;
541 else
542 int_regs->ecx = ecx;
543 int_regs->eip = ext_regs->eip;
544 return 0;
545 }
547 #define handle_rep_prefix() \
548 do { \
549 if ( rep_prefix && __handle_rep_prefix(&_regs, ctxt->regs, ad_bytes) ) \
550 goto done; \
551 } while (0)
/*
 * Unsigned multiplication with double-word result.
 * IN: Multiplicand=m[0], Multiplier=m[1]
 * OUT: Return CF/OF (overflow status); Result=m[1]:m[0]
 */
static int mul_dbl(unsigned long m[2])
{
    int rc;
    /*
     * One-operand MUL: rAX * m[1] -> rDX:rAX, i.e. low half into m[0] and
     * high half into m[1]. CF/OF are set iff the high half is non-zero;
     * SETO copies that status into rc.
     */
    asm ( "mul %4; seto %b2"
          : "=a" (m[0]), "=d" (m[1]), "=q" (rc)
          : "0" (m[0]), "1" (m[1]), "2" (0) );
    return rc;
}
/*
 * Signed multiplication with double-word result.
 * IN: Multiplicand=m[0], Multiplier=m[1]
 * OUT: Return CF/OF (overflow status); Result=m[1]:m[0]
 */
static int imul_dbl(unsigned long m[2])
{
    int rc;
    /*
     * One-operand IMUL: signed rAX * m[1] -> rDX:rAX. CF/OF indicate that
     * the product does not fit in a single sign-extended word; SETO copies
     * that status into rc.
     */
    asm ( "imul %4; seto %b2"
          : "=a" (m[0]), "=d" (m[1]), "=q" (rc)
          : "0" (m[0]), "1" (m[1]), "2" (0) );
    return rc;
}
581 /*
582 * Unsigned division of double-word dividend.
583 * IN: Dividend=u[1]:u[0], Divisor=v
584 * OUT: Return 1: #DE
585 * Return 0: Quotient=u[0], Remainder=u[1]
586 */
587 static int div_dbl(unsigned long u[2], unsigned long v)
588 {
589 if ( (v == 0) || (u[1] > v) || ((u[1] == v) && (u[0] != 0)) )
590 return 1;
591 asm ( "div %4"
592 : "=a" (u[0]), "=d" (u[1])
593 : "0" (u[0]), "1" (u[1]), "r" (v) );
594 return 0;
595 }
/*
 * Signed division of double-word dividend.
 * IN: Dividend=u[1]:u[0], Divisor=v
 * OUT: Return 1: #DE
 * Return 0: Quotient=u[0], Remainder=u[1]
 * NB. We don't use idiv directly as it's moderately hard to work out
 * ahead of time whether it will #DE, which we cannot allow to happen.
 */
static int idiv_dbl(unsigned long u[2], unsigned long v)
{
    int negu = (long)u[1] < 0, negv = (long)v < 0;

    /* u = abs(u) */
    if ( negu )
    {
        /*
         * Two's-complement negation of the double word: invert the high
         * word, negate the low word, and carry into the high word when
         * the low word was zero.
         */
        u[1] = ~u[1];
        if ( (u[0] = -u[0]) == 0 )
            u[1]++;
    }

    /* abs(u) / abs(v) */
    if ( div_dbl(u, negv ? -v : v) )
        return 1;

    /* Remainder has same sign as dividend. It cannot overflow. */
    if ( negu )
        u[1] = -u[1];

    /* Quotient is overflowed if sign bit is set. */
    if ( negu ^ negv )
    {
        /* Result must be negative: negate, unless already at the minimum. */
        if ( (long)u[0] >= 0 )
            u[0] = -u[0];
        else if ( (u[0] << 1) != 0 ) /* == 0x80...0 is okay */
            return 1;
    }
    else if ( (long)u[0] < 0 )
        /* Result must be positive but the sign bit is set: overflow. */
        return 1;

    return 0;
}
639 static int
640 test_cc(
641 unsigned int condition, unsigned int flags)
642 {
643 int rc = 0;
645 switch ( (condition & 15) >> 1 )
646 {
647 case 0: /* o */
648 rc |= (flags & EFLG_OF);
649 break;
650 case 1: /* b/c/nae */
651 rc |= (flags & EFLG_CF);
652 break;
653 case 2: /* z/e */
654 rc |= (flags & EFLG_ZF);
655 break;
656 case 3: /* be/na */
657 rc |= (flags & (EFLG_CF|EFLG_ZF));
658 break;
659 case 4: /* s */
660 rc |= (flags & EFLG_SF);
661 break;
662 case 5: /* p/pe */
663 rc |= (flags & EFLG_PF);
664 break;
665 case 7: /* le/ng */
666 rc |= (flags & EFLG_ZF);
667 /* fall through */
668 case 6: /* l/nge */
669 rc |= (!(flags & EFLG_SF) != !(flags & EFLG_OF));
670 break;
671 }
673 /* Odd condition identifiers (lsb == 1) have inverted sense. */
674 return (!!rc ^ (condition & 1));
675 }
/*
 * Map a (possibly REX-extended) register number, as found in a ModRM or
 * opcode byte, to a pointer at that register's slot within @regs.
 *
 * @modrm_reg: register number (0-7; 0-15 on x86/64 with a REX prefix).
 * @regs: register file to index into.
 * @highbyte_regs: non-zero selects the legacy byte-register encodings,
 *     where numbers 4-7 name AH/CH/DH/BH — byte 1 of rAX/rCX/rDX/rBX on
 *     this little-endian layout — instead of rSP/rBP/rSI/rDI.
 *
 * Returns NULL for an out-of-range register number.
 */
void *
decode_register(
    uint8_t modrm_reg, struct cpu_user_regs *regs, int highbyte_regs)
{
    void *p;

    switch ( modrm_reg )
    {
    case 0: p = &regs->eax; break;
    case 1: p = &regs->ecx; break;
    case 2: p = &regs->edx; break;
    case 3: p = &regs->ebx; break;
    case 4: p = (highbyte_regs ?
                 ((unsigned char *)&regs->eax + 1) :
                 (unsigned char *)&regs->esp); break;
    case 5: p = (highbyte_regs ?
                 ((unsigned char *)&regs->ecx + 1) :
                 (unsigned char *)&regs->ebp); break;
    case 6: p = (highbyte_regs ?
                 ((unsigned char *)&regs->edx + 1) :
                 (unsigned char *)&regs->esi); break;
    case 7: p = (highbyte_regs ?
                 ((unsigned char *)&regs->ebx + 1) :
                 (unsigned char *)&regs->edi); break;
#if defined(__x86_64__)
    case 8: p = &regs->r8; break;
    case 9: p = &regs->r9; break;
    case 10: p = &regs->r10; break;
    case 11: p = &regs->r11; break;
    case 12: p = &regs->r12; break;
    case 13: p = &regs->r13; break;
    case 14: p = &regs->r14; break;
    case 15: p = &regs->r15; break;
#endif
    default: p = NULL; break;
    }

    return p;
}
717 int
718 x86_emulate(
719 struct x86_emulate_ctxt *ctxt,
720 struct x86_emulate_ops *ops)
721 {
722 /* Shadow copy of register state. Committed on successful emulation. */
723 struct cpu_user_regs _regs = *ctxt->regs;
725 uint8_t b, d, sib, sib_index, sib_base, twobyte = 0, rex_prefix = 0;
726 uint8_t modrm, modrm_mod = 0, modrm_reg = 0, modrm_rm = 0;
727 unsigned int op_bytes, def_op_bytes, ad_bytes, def_ad_bytes;
728 unsigned int lock_prefix = 0, rep_prefix = 0;
729 int rc = 0;
730 struct operand src, dst;
732 /* Data operand effective address (usually computed from ModRM). */
733 struct operand ea;
735 /* Default is a memory operand relative to segment DS. */
736 ea.type = OP_MEM;
737 ea.mem.seg = x86_seg_ds;
738 ea.mem.off = 0;
740 op_bytes = def_op_bytes = ad_bytes = def_ad_bytes = ctxt->addr_size/8;
741 if ( op_bytes == 8 )
742 {
743 op_bytes = def_op_bytes = 4;
744 #ifndef __x86_64__
745 return -1;
746 #endif
747 }
749 /* Prefix bytes. */
750 for ( ; ; )
751 {
752 switch ( b = insn_fetch_type(uint8_t) )
753 {
754 case 0x66: /* operand-size override */
755 op_bytes = def_op_bytes ^ 6;
756 break;
757 case 0x67: /* address-size override */
758 ad_bytes = def_ad_bytes ^ (mode_64bit() ? 12 : 6);
759 break;
760 case 0x2e: /* CS override */
761 ea.mem.seg = x86_seg_cs;
762 break;
763 case 0x3e: /* DS override */
764 ea.mem.seg = x86_seg_ds;
765 break;
766 case 0x26: /* ES override */
767 ea.mem.seg = x86_seg_es;
768 break;
769 case 0x64: /* FS override */
770 ea.mem.seg = x86_seg_fs;
771 break;
772 case 0x65: /* GS override */
773 ea.mem.seg = x86_seg_gs;
774 break;
775 case 0x36: /* SS override */
776 ea.mem.seg = x86_seg_ss;
777 break;
778 case 0xf0: /* LOCK */
779 lock_prefix = 1;
780 break;
781 case 0xf2: /* REPNE/REPNZ */
782 case 0xf3: /* REP/REPE/REPZ */
783 rep_prefix = 1;
784 break;
785 case 0x40 ... 0x4f: /* REX */
786 if ( !mode_64bit() )
787 goto done_prefixes;
788 rex_prefix = b;
789 continue;
790 default:
791 goto done_prefixes;
792 }
794 /* Any legacy prefix after a REX prefix nullifies its effect. */
795 rex_prefix = 0;
796 }
797 done_prefixes:
799 if ( rex_prefix & 8 ) /* REX.W */
800 op_bytes = 8;
802 /* Opcode byte(s). */
803 d = opcode_table[b];
804 if ( d == 0 )
805 {
806 /* Two-byte opcode? */
807 if ( b == 0x0f )
808 {
809 twobyte = 1;
810 b = insn_fetch_type(uint8_t);
811 d = twobyte_table[b];
812 }
814 /* Unrecognised? */
815 if ( d == 0 )
816 goto cannot_emulate;
817 }
819 /* Lock prefix is allowed only on RMW instructions. */
820 generate_exception_if((d & Mov) && lock_prefix, EXC_GP);
822 /* ModRM and SIB bytes. */
823 if ( d & ModRM )
824 {
825 modrm = insn_fetch_type(uint8_t);
826 modrm_mod = (modrm & 0xc0) >> 6;
827 modrm_reg = ((rex_prefix & 4) << 1) | ((modrm & 0x38) >> 3);
828 modrm_rm = modrm & 0x07;
830 if ( modrm_mod == 3 )
831 {
832 modrm_rm |= (rex_prefix & 1) << 3;
833 ea.type = OP_REG;
834 ea.reg = decode_register(
835 modrm_rm, &_regs, (d & ByteOp) && (rex_prefix == 0));
836 }
837 else if ( ad_bytes == 2 )
838 {
839 /* 16-bit ModR/M decode. */
840 switch ( modrm_rm )
841 {
842 case 0: ea.mem.off = _regs.ebx + _regs.esi; break;
843 case 1: ea.mem.off = _regs.ebx + _regs.edi; break;
844 case 2: ea.mem.off = _regs.ebp + _regs.esi; break;
845 case 3: ea.mem.off = _regs.ebp + _regs.edi; break;
846 case 4: ea.mem.off = _regs.esi; break;
847 case 5: ea.mem.off = _regs.edi; break;
848 case 6: ea.mem.off = _regs.ebp; break;
849 case 7: ea.mem.off = _regs.ebx; break;
850 }
851 switch ( modrm_mod )
852 {
853 case 0:
854 if ( modrm_rm == 6 )
855 ea.mem.off = insn_fetch_type(int16_t);
856 break;
857 case 1:
858 ea.mem.off += insn_fetch_type(int8_t);
859 break;
860 case 2:
861 ea.mem.off += insn_fetch_type(int16_t);
862 break;
863 }
864 ea.mem.off = truncate_ea(ea.mem.off);
865 }
866 else
867 {
868 /* 32/64-bit ModR/M decode. */
869 if ( modrm_rm == 4 )
870 {
871 sib = insn_fetch_type(uint8_t);
872 sib_index = ((sib >> 3) & 7) | ((rex_prefix << 2) & 8);
873 sib_base = (sib & 7) | ((rex_prefix << 3) & 8);
874 if ( sib_index != 4 )
875 ea.mem.off = *(long*)decode_register(sib_index, &_regs, 0);
876 ea.mem.off <<= (sib >> 6) & 3;
877 if ( (modrm_mod == 0) && ((sib_base & 7) == 5) )
878 ea.mem.off += insn_fetch_type(int32_t);
879 else if ( (sib_base == 4) && !twobyte && (b == 0x8f) )
880 /* POP <rm> must have its EA calculated post increment. */
881 ea.mem.off += _regs.esp +
882 ((mode_64bit() && (op_bytes == 4)) ? 8 : op_bytes);
883 else
884 ea.mem.off += *(long*)decode_register(sib_base, &_regs, 0);
885 }
886 else
887 {
888 modrm_rm |= (rex_prefix & 1) << 3;
889 ea.mem.off = *(long *)decode_register(modrm_rm, &_regs, 0);
890 }
891 switch ( modrm_mod )
892 {
893 case 0:
894 if ( (modrm_rm & 7) != 5 )
895 break;
896 ea.mem.off = insn_fetch_type(int32_t);
897 if ( !mode_64bit() )
898 break;
899 /* Relative to RIP of next instruction. Argh! */
900 ea.mem.off += _regs.eip;
901 if ( (d & SrcMask) == SrcImm )
902 ea.mem.off += (d & ByteOp) ? 1 :
903 ((op_bytes == 8) ? 4 : op_bytes);
904 else if ( (d & SrcMask) == SrcImmByte )
905 ea.mem.off += 1;
906 else if ( ((b == 0xf6) || (b == 0xf7)) &&
907 ((modrm_reg & 7) <= 1) )
908 /* Special case in Grp3: test has immediate operand. */
909 ea.mem.off += (d & ByteOp) ? 1
910 : ((op_bytes == 8) ? 4 : op_bytes);
911 break;
912 case 1:
913 ea.mem.off += insn_fetch_type(int8_t);
914 break;
915 case 2:
916 ea.mem.off += insn_fetch_type(int32_t);
917 break;
918 }
919 ea.mem.off = truncate_ea(ea.mem.off);
920 }
921 }
923 /* Special instructions do their own operand decoding. */
924 if ( (d & DstMask) == ImplicitOps )
925 goto special_insn;
927 /* Decode and fetch the source operand: register, memory or immediate. */
928 switch ( d & SrcMask )
929 {
930 case SrcNone:
931 break;
932 case SrcReg:
933 src.type = OP_REG;
934 if ( d & ByteOp )
935 {
936 src.reg = decode_register(modrm_reg, &_regs, (rex_prefix == 0));
937 src.val = *(uint8_t *)src.reg;
938 src.bytes = 1;
939 }
940 else
941 {
942 src.reg = decode_register(modrm_reg, &_regs, 0);
943 switch ( (src.bytes = op_bytes) )
944 {
945 case 2: src.val = *(uint16_t *)src.reg; break;
946 case 4: src.val = *(uint32_t *)src.reg; break;
947 case 8: src.val = *(uint64_t *)src.reg; break;
948 }
949 }
950 break;
951 case SrcMem16:
952 ea.bytes = 2;
953 goto srcmem_common;
954 case SrcMem:
955 ea.bytes = (d & ByteOp) ? 1 : op_bytes;
956 srcmem_common:
957 src = ea;
958 if ( src.type == OP_REG )
959 {
960 switch ( src.bytes )
961 {
962 case 1: src.val = *(uint8_t *)src.reg; break;
963 case 2: src.val = *(uint16_t *)src.reg; break;
964 case 4: src.val = *(uint32_t *)src.reg; break;
965 case 8: src.val = *(uint64_t *)src.reg; break;
966 }
967 }
968 else if ( (rc = ops->read(src.mem.seg, src.mem.off,
969 &src.val, src.bytes, ctxt)) )
970 goto done;
971 break;
972 case SrcImm:
973 src.type = OP_IMM;
974 src.bytes = (d & ByteOp) ? 1 : op_bytes;
975 if ( src.bytes == 8 ) src.bytes = 4;
976 /* NB. Immediates are sign-extended as necessary. */
977 switch ( src.bytes )
978 {
979 case 1: src.val = insn_fetch_type(int8_t); break;
980 case 2: src.val = insn_fetch_type(int16_t); break;
981 case 4: src.val = insn_fetch_type(int32_t); break;
982 }
983 break;
984 case SrcImmByte:
985 src.type = OP_IMM;
986 src.bytes = 1;
987 src.val = insn_fetch_type(int8_t);
988 break;
989 }
991 /* Decode and fetch the destination operand: register or memory. */
992 switch ( d & DstMask )
993 {
994 case DstReg:
995 dst.type = OP_REG;
996 if ( d & ByteOp )
997 {
998 dst.reg = decode_register(modrm_reg, &_regs, (rex_prefix == 0));
999 dst.val = *(uint8_t *)dst.reg;
1000 dst.bytes = 1;
1002 else
1004 dst.reg = decode_register(modrm_reg, &_regs, 0);
1005 switch ( (dst.bytes = op_bytes) )
1007 case 2: dst.val = *(uint16_t *)dst.reg; break;
1008 case 4: dst.val = *(uint32_t *)dst.reg; break;
1009 case 8: dst.val = *(uint64_t *)dst.reg; break;
1012 break;
1013 case DstBitBase:
1014 if ( ((d & SrcMask) == SrcImmByte) || (ea.type == OP_REG) )
1016 src.val &= (op_bytes << 3) - 1;
1018 else
1020 /*
1021 * EA += BitOffset DIV op_bytes*8
1022 * BitOffset = BitOffset MOD op_bytes*8
1023 * DIV truncates towards negative infinity.
1024 * MOD always produces a positive result.
1025 */
1026 if ( op_bytes == 2 )
1027 src.val = (int16_t)src.val;
1028 else if ( op_bytes == 4 )
1029 src.val = (int32_t)src.val;
1030 if ( (long)src.val < 0 )
1032 unsigned long byte_offset;
1033 byte_offset = op_bytes + (((-src.val-1) >> 3) & ~(op_bytes-1));
1034 ea.mem.off -= byte_offset;
1035 src.val = (byte_offset << 3) + src.val;
1037 else
1039 ea.mem.off += (src.val >> 3) & ~(op_bytes - 1);
1040 src.val &= (op_bytes << 3) - 1;
1043 /* Becomes a normal DstMem operation from here on. */
1044 d = (d & ~DstMask) | DstMem;
1045 case DstMem:
1046 ea.bytes = (d & ByteOp) ? 1 : op_bytes;
1047 dst = ea;
1048 if ( dst.type == OP_REG )
1050 switch ( dst.bytes )
1052 case 1: dst.val = *(uint8_t *)dst.reg; break;
1053 case 2: dst.val = *(uint16_t *)dst.reg; break;
1054 case 4: dst.val = *(uint32_t *)dst.reg; break;
1055 case 8: dst.val = *(uint64_t *)dst.reg; break;
1058 else if ( !(d & Mov) ) /* optimisation - avoid slow emulated read */
1060 if ( (rc = ops->read(dst.mem.seg, dst.mem.off,
1061 &dst.val, dst.bytes, ctxt)) )
1062 goto done;
1063 dst.orig_val = dst.val;
1065 break;
1068 /* LOCK prefix allowed only on instructions with memory destination. */
1069 generate_exception_if(lock_prefix && (dst.type != OP_MEM), EXC_GP);
1071 if ( twobyte )
1072 goto twobyte_insn;
1074 switch ( b )
1076 case 0x04 ... 0x05: /* add imm,%%eax */
1077 dst.reg = (unsigned long *)&_regs.eax;
1078 dst.val = _regs.eax;
1079 case 0x00 ... 0x03: add: /* add */
1080 emulate_2op_SrcV("add", src, dst, _regs.eflags);
1081 break;
1083 case 0x0c ... 0x0d: /* or imm,%%eax */
1084 dst.reg = (unsigned long *)&_regs.eax;
1085 dst.val = _regs.eax;
1086 case 0x08 ... 0x0b: or: /* or */
1087 emulate_2op_SrcV("or", src, dst, _regs.eflags);
1088 break;
1090 case 0x14 ... 0x15: /* adc imm,%%eax */
1091 dst.reg = (unsigned long *)&_regs.eax;
1092 dst.val = _regs.eax;
1093 case 0x10 ... 0x13: adc: /* adc */
1094 emulate_2op_SrcV("adc", src, dst, _regs.eflags);
1095 break;
1097 case 0x1c ... 0x1d: /* sbb imm,%%eax */
1098 dst.reg = (unsigned long *)&_regs.eax;
1099 dst.val = _regs.eax;
1100 case 0x18 ... 0x1b: sbb: /* sbb */
1101 emulate_2op_SrcV("sbb", src, dst, _regs.eflags);
1102 break;
1104 case 0x24 ... 0x25: /* and imm,%%eax */
1105 dst.reg = (unsigned long *)&_regs.eax;
1106 dst.val = _regs.eax;
1107 case 0x20 ... 0x23: and: /* and */
1108 emulate_2op_SrcV("and", src, dst, _regs.eflags);
1109 break;
1111 case 0x2c ... 0x2d: /* sub imm,%%eax */
1112 dst.reg = (unsigned long *)&_regs.eax;
1113 dst.val = _regs.eax;
1114 case 0x28 ... 0x2b: sub: /* sub */
1115 emulate_2op_SrcV("sub", src, dst, _regs.eflags);
1116 break;
1118 case 0x34 ... 0x35: /* xor imm,%%eax */
1119 dst.reg = (unsigned long *)&_regs.eax;
1120 dst.val = _regs.eax;
1121 case 0x30 ... 0x33: xor: /* xor */
1122 emulate_2op_SrcV("xor", src, dst, _regs.eflags);
1123 break;
1125 case 0x3c ... 0x3d: /* cmp imm,%%eax */
1126 dst.reg = (unsigned long *)&_regs.eax;
1127 dst.val = _regs.eax;
1128 case 0x38 ... 0x3b: cmp: /* cmp */
1129 emulate_2op_SrcV("cmp", src, dst, _regs.eflags);
1130 break;
1132 case 0x62: /* bound */ {
1133 unsigned long src_val2;
1134 int lb, ub, idx;
1135 generate_exception_if(mode_64bit() || (src.type != OP_MEM), EXC_UD);
1136 if ( (rc = ops->read(src.mem.seg, src.mem.off + op_bytes,
1137 &src_val2, op_bytes, ctxt)) )
1138 goto done;
1139 ub = (op_bytes == 2) ? (int16_t)src_val2 : (int32_t)src_val2;
1140 lb = (op_bytes == 2) ? (int16_t)src.val : (int32_t)src.val;
1141 idx = (op_bytes == 2) ? (int16_t)dst.val : (int32_t)dst.val;
1142 generate_exception_if((idx < lb) || (idx > ub), EXC_BR);
1143 dst.type = OP_NONE;
1144 break;
1147 case 0x63: /* movsxd (x86/64) / arpl (x86/32) */
1148 if ( mode_64bit() )
1150 /* movsxd */
1151 if ( src.type == OP_REG )
1152 src.val = *(int32_t *)src.reg;
1153 else if ( (rc = ops->read(src.mem.seg, src.mem.off,
1154 &src.val, 4, ctxt)) )
1155 goto done;
1156 dst.val = (int32_t)src.val;
1158 else
1160 /* arpl */
1161 uint16_t src_val = dst.val;
1162 dst = src;
1163 _regs.eflags &= ~EFLG_ZF;
1164 _regs.eflags |= ((src_val & 3) > (dst.val & 3)) ? EFLG_ZF : 0;
1165 if ( _regs.eflags & EFLG_ZF )
1166 dst.val = (dst.val & ~3) | (src_val & 3);
1167 else
1168 dst.type = OP_NONE;
1170 break;
1172 case 0x69: /* imul imm16/32 */
1173 case 0x6b: /* imul imm8 */ {
1174 unsigned long reg = *(long *)decode_register(modrm_reg, &_regs, 0);
1175 _regs.eflags &= ~(EFLG_OF|EFLG_CF);
1176 switch ( dst.bytes )
1178 case 2:
1179 dst.val = ((uint32_t)(int16_t)src.val *
1180 (uint32_t)(int16_t)reg);
1181 if ( (int16_t)dst.val != (uint32_t)dst.val )
1182 _regs.eflags |= EFLG_OF|EFLG_CF;
1183 break;
1184 #ifdef __x86_64__
1185 case 4:
1186 dst.val = ((uint64_t)(int32_t)src.val *
1187 (uint64_t)(int32_t)reg);
1188 if ( (int32_t)dst.val != dst.val )
1189 _regs.eflags |= EFLG_OF|EFLG_CF;
1190 break;
1191 #endif
1192 default: {
1193 unsigned long m[2] = { src.val, reg };
1194 if ( imul_dbl(m) )
1195 _regs.eflags |= EFLG_OF|EFLG_CF;
1196 dst.val = m[0];
1197 break;
1200 dst.type = OP_REG;
1201 dst.reg = decode_register(modrm_reg, &_regs, 0);
1202 break;
1205 case 0x82: /* Grp1 (x86/32 only) */
1206 generate_exception_if(mode_64bit(), EXC_UD);
1207 case 0x80: case 0x81: case 0x83: /* Grp1 */
1208 switch ( modrm_reg & 7 )
1210 case 0: goto add;
1211 case 1: goto or;
1212 case 2: goto adc;
1213 case 3: goto sbb;
1214 case 4: goto and;
1215 case 5: goto sub;
1216 case 6: goto xor;
1217 case 7: goto cmp;
1219 break;
1221 case 0xa8 ... 0xa9: /* test imm,%%eax */
1222 dst.reg = (unsigned long *)&_regs.eax;
1223 dst.val = _regs.eax;
1224 case 0x84 ... 0x85: test: /* test */
1225 emulate_2op_SrcV("test", src, dst, _regs.eflags);
1226 break;
1228 case 0x86 ... 0x87: xchg: /* xchg */
1229 /* Write back the register source. */
1230 switch ( dst.bytes )
1232 case 1: *(uint8_t *)src.reg = (uint8_t)dst.val; break;
1233 case 2: *(uint16_t *)src.reg = (uint16_t)dst.val; break;
1234 case 4: *src.reg = (uint32_t)dst.val; break; /* 64b reg: zero-extend */
1235 case 8: *src.reg = dst.val; break;
1237 /* Write back the memory destination with implicit LOCK prefix. */
1238 dst.val = src.val;
1239 lock_prefix = 1;
1240 break;
1242 case 0xc6 ... 0xc7: /* mov (sole member of Grp11) */
1243 generate_exception_if((modrm_reg & 7) != 0, EXC_UD);
1244 case 0x88 ... 0x8b: /* mov */
1245 dst.val = src.val;
1246 break;
1248 case 0x8d: /* lea */
1249 dst.val = ea.mem.off;
1250 break;
1252 case 0x8f: /* pop (sole member of Grp1a) */
1253 generate_exception_if((modrm_reg & 7) != 0, EXC_UD);
1254 /* 64-bit mode: POP defaults to a 64-bit operand. */
1255 if ( mode_64bit() && (dst.bytes == 4) )
1256 dst.bytes = 8;
1257 if ( (rc = ops->read(x86_seg_ss, sp_post_inc(dst.bytes),
1258 &dst.val, dst.bytes, ctxt)) != 0 )
1259 goto done;
1260 break;
1262 case 0xb0 ... 0xb7: /* mov imm8,r8 */
1263 dst.reg = decode_register(
1264 (b & 7) | ((rex_prefix & 1) << 3), &_regs, (rex_prefix == 0));
1265 dst.val = src.val;
1266 break;
1268 case 0xb8 ... 0xbf: /* mov imm{16,32,64},r{16,32,64} */
1269 if ( dst.bytes == 8 ) /* Fetch more bytes to obtain imm64 */
1270 src.val = ((uint32_t)src.val |
1271 ((uint64_t)insn_fetch_type(uint32_t) << 32));
1272 dst.reg = decode_register(
1273 (b & 7) | ((rex_prefix & 1) << 3), &_regs, 0);
1274 dst.val = src.val;
1275 break;
1277 case 0xc0 ... 0xc1: grp2: /* Grp2 */
1278 switch ( modrm_reg & 7 )
1280 case 0: /* rol */
1281 emulate_2op_SrcB("rol", src, dst, _regs.eflags);
1282 break;
1283 case 1: /* ror */
1284 emulate_2op_SrcB("ror", src, dst, _regs.eflags);
1285 break;
1286 case 2: /* rcl */
1287 emulate_2op_SrcB("rcl", src, dst, _regs.eflags);
1288 break;
1289 case 3: /* rcr */
1290 emulate_2op_SrcB("rcr", src, dst, _regs.eflags);
1291 break;
1292 case 4: /* sal/shl */
1293 case 6: /* sal/shl */
1294 emulate_2op_SrcB("sal", src, dst, _regs.eflags);
1295 break;
1296 case 5: /* shr */
1297 emulate_2op_SrcB("shr", src, dst, _regs.eflags);
1298 break;
1299 case 7: /* sar */
1300 emulate_2op_SrcB("sar", src, dst, _regs.eflags);
1301 break;
1303 break;
1305 case 0xd0 ... 0xd1: /* Grp2 */
1306 src.val = 1;
1307 goto grp2;
1309 case 0xd2 ... 0xd3: /* Grp2 */
1310 src.val = _regs.ecx;
1311 goto grp2;
1313 case 0xf6 ... 0xf7: /* Grp3 */
1314 switch ( modrm_reg & 7 )
1316 case 0 ... 1: /* test */
1317 /* Special case in Grp3: test has an immediate source operand. */
1318 src.type = OP_IMM;
1319 src.bytes = (d & ByteOp) ? 1 : op_bytes;
1320 if ( src.bytes == 8 ) src.bytes = 4;
1321 switch ( src.bytes )
1323 case 1: src.val = insn_fetch_type(int8_t); break;
1324 case 2: src.val = insn_fetch_type(int16_t); break;
1325 case 4: src.val = insn_fetch_type(int32_t); break;
1327 goto test;
1328 case 2: /* not */
1329 dst.val = ~dst.val;
1330 break;
1331 case 3: /* neg */
1332 emulate_1op("neg", dst, _regs.eflags);
1333 break;
1334 case 4: /* mul */
1335 src = dst;
1336 dst.type = OP_REG;
1337 dst.reg = (unsigned long *)&_regs.eax;
1338 dst.val = *dst.reg;
1339 _regs.eflags &= ~(EFLG_OF|EFLG_CF);
1340 switch ( src.bytes )
1342 case 1:
1343 dst.val *= src.val;
1344 if ( (uint8_t)dst.val != (uint16_t)dst.val )
1345 _regs.eflags |= EFLG_OF|EFLG_CF;
1346 break;
1347 case 2:
1348 dst.val *= src.val;
1349 if ( (uint16_t)dst.val != (uint32_t)dst.val )
1350 _regs.eflags |= EFLG_OF|EFLG_CF;
1351 *(uint16_t *)&_regs.edx = dst.val >> 16;
1352 break;
1353 #ifdef __x86_64__
1354 case 4:
1355 dst.val *= src.val;
1356 if ( (uint32_t)dst.val != dst.val )
1357 _regs.eflags |= EFLG_OF|EFLG_CF;
1358 _regs.edx = (uint32_t)(dst.val >> 32);
1359 break;
1360 #endif
1361 default: {
1362 unsigned long m[2] = { src.val, dst.val };
1363 if ( mul_dbl(m) )
1364 _regs.eflags |= EFLG_OF|EFLG_CF;
1365 _regs.edx = m[1];
1366 dst.val = m[0];
1367 break;
1370 break;
1371 case 5: /* imul */
1372 src = dst;
1373 dst.type = OP_REG;
1374 dst.reg = (unsigned long *)&_regs.eax;
1375 dst.val = *dst.reg;
1376 _regs.eflags &= ~(EFLG_OF|EFLG_CF);
1377 switch ( src.bytes )
1379 case 1:
1380 dst.val = ((uint16_t)(int8_t)src.val *
1381 (uint16_t)(int8_t)dst.val);
1382 if ( (int8_t)dst.val != (uint16_t)dst.val )
1383 _regs.eflags |= EFLG_OF|EFLG_CF;
1384 break;
1385 case 2:
1386 dst.val = ((uint32_t)(int16_t)src.val *
1387 (uint32_t)(int16_t)dst.val);
1388 if ( (int16_t)dst.val != (uint32_t)dst.val )
1389 _regs.eflags |= EFLG_OF|EFLG_CF;
1390 *(uint16_t *)&_regs.edx = dst.val >> 16;
1391 break;
1392 #ifdef __x86_64__
1393 case 4:
1394 dst.val = ((uint64_t)(int32_t)src.val *
1395 (uint64_t)(int32_t)dst.val);
1396 if ( (int32_t)dst.val != dst.val )
1397 _regs.eflags |= EFLG_OF|EFLG_CF;
1398 _regs.edx = (uint32_t)(dst.val >> 32);
1399 break;
1400 #endif
1401 default: {
1402 unsigned long m[2] = { src.val, dst.val };
1403 if ( imul_dbl(m) )
1404 _regs.eflags |= EFLG_OF|EFLG_CF;
1405 _regs.edx = m[1];
1406 dst.val = m[0];
1407 break;
1410 break;
1411 case 6: /* div */ {
1412 unsigned long u[2], v;
1413 src = dst;
1414 dst.type = OP_REG;
1415 dst.reg = (unsigned long *)&_regs.eax;
1416 switch ( src.bytes )
1418 case 1:
1419 u[0] = (uint16_t)_regs.eax;
1420 u[1] = 0;
1421 v = (uint8_t)src.val;
1422 generate_exception_if(
1423 div_dbl(u, v) || ((uint8_t)u[0] != (uint16_t)u[0]),
1424 EXC_DE);
1425 dst.val = (uint8_t)u[0];
1426 ((uint8_t *)&_regs.eax)[1] = u[1];
1427 break;
1428 case 2:
1429 u[0] = ((uint32_t)_regs.edx << 16) | (uint16_t)_regs.eax;
1430 u[1] = 0;
1431 v = (uint16_t)src.val;
1432 generate_exception_if(
1433 div_dbl(u, v) || ((uint16_t)u[0] != (uint32_t)u[0]),
1434 EXC_DE);
1435 dst.val = (uint16_t)u[0];
1436 *(uint16_t *)&_regs.edx = u[1];
1437 break;
1438 #ifdef __x86_64__
1439 case 4:
1440 u[0] = (_regs.edx << 32) | (uint32_t)_regs.eax;
1441 u[1] = 0;
1442 v = (uint32_t)src.val;
1443 generate_exception_if(
1444 div_dbl(u, v) || ((uint32_t)u[0] != u[0]),
1445 EXC_DE);
1446 dst.val = (uint32_t)u[0];
1447 _regs.edx = (uint32_t)u[1];
1448 break;
1449 #endif
1450 default:
1451 u[0] = _regs.eax;
1452 u[1] = _regs.edx;
1453 v = src.val;
1454 generate_exception_if(div_dbl(u, v), EXC_DE);
1455 dst.val = u[0];
1456 _regs.edx = u[1];
1457 break;
1459 break;
1461 case 7: /* idiv */ {
1462 unsigned long u[2], v;
1463 src = dst;
1464 dst.type = OP_REG;
1465 dst.reg = (unsigned long *)&_regs.eax;
1466 switch ( src.bytes )
1468 case 1:
1469 u[0] = (int16_t)_regs.eax;
1470 u[1] = ((long)u[0] < 0) ? ~0UL : 0UL;
1471 v = (int8_t)src.val;
1472 generate_exception_if(
1473 idiv_dbl(u, v) || ((int8_t)u[0] != (int16_t)u[0]),
1474 EXC_DE);
1475 dst.val = (int8_t)u[0];
1476 ((int8_t *)&_regs.eax)[1] = u[1];
1477 break;
1478 case 2:
1479 u[0] = (int32_t)((_regs.edx << 16) | (uint16_t)_regs.eax);
1480 u[1] = ((long)u[0] < 0) ? ~0UL : 0UL;
1481 v = (int16_t)src.val;
1482 generate_exception_if(
1483 idiv_dbl(u, v) || ((int16_t)u[0] != (int32_t)u[0]),
1484 EXC_DE);
1485 dst.val = (int16_t)u[0];
1486 *(int16_t *)&_regs.edx = u[1];
1487 break;
1488 #ifdef __x86_64__
1489 case 4:
1490 u[0] = (_regs.edx << 32) | (uint32_t)_regs.eax;
1491 u[1] = ((long)u[0] < 0) ? ~0UL : 0UL;
1492 v = (int32_t)src.val;
1493 generate_exception_if(
1494 idiv_dbl(u, v) || ((int32_t)u[0] != u[0]),
1495 EXC_DE);
1496 dst.val = (int32_t)u[0];
1497 _regs.edx = (uint32_t)u[1];
1498 break;
1499 #endif
1500 default:
1501 u[0] = _regs.eax;
1502 u[1] = _regs.edx;
1503 v = src.val;
1504 generate_exception_if(idiv_dbl(u, v), EXC_DE);
1505 dst.val = u[0];
1506 _regs.edx = u[1];
1507 break;
1509 break;
1511 default:
1512 goto cannot_emulate;
1514 break;
1516 case 0xfe: /* Grp4 */
1517 generate_exception_if((modrm_reg & 7) >= 2, EXC_UD);
1518 case 0xff: /* Grp5 */
1519 switch ( modrm_reg & 7 )
1521 case 0: /* inc */
1522 emulate_1op("inc", dst, _regs.eflags);
1523 break;
1524 case 1: /* dec */
1525 emulate_1op("dec", dst, _regs.eflags);
1526 break;
1527 case 2: /* call (near) */
1528 case 4: /* jmp (near) */
1529 if ( ((op_bytes = dst.bytes) != 8) && mode_64bit() )
1531 dst.bytes = op_bytes = 8;
1532 if ( (rc = ops->read(dst.mem.seg, dst.mem.off,
1533 &dst.val, 8, ctxt)) != 0 )
1534 goto done;
1536 src.val = _regs.eip;
1537 _regs.eip = dst.val;
1538 if ( (modrm_reg & 7) == 2 )
1539 goto push; /* call */
1540 break;
1541 case 6: /* push */
1542 /* 64-bit mode: PUSH defaults to a 64-bit operand. */
1543 if ( mode_64bit() && (dst.bytes == 4) )
1545 dst.bytes = 8;
1546 if ( (rc = ops->read(dst.mem.seg, dst.mem.off,
1547 &dst.val, 8, ctxt)) != 0 )
1548 goto done;
1550 if ( (rc = ops->write(x86_seg_ss, sp_pre_dec(dst.bytes),
1551 dst.val, dst.bytes, ctxt)) != 0 )
1552 goto done;
1553 dst.type = OP_NONE;
1554 break;
1555 case 7:
1556 generate_exception_if(1, EXC_UD);
1557 default:
1558 goto cannot_emulate;
1560 break;
1563 writeback:
1564 switch ( dst.type )
1566 case OP_REG:
1567 /* The 4-byte case *is* correct: in 64-bit mode we zero-extend. */
1568 switch ( dst.bytes )
1570 case 1: *(uint8_t *)dst.reg = (uint8_t)dst.val; break;
1571 case 2: *(uint16_t *)dst.reg = (uint16_t)dst.val; break;
1572 case 4: *dst.reg = (uint32_t)dst.val; break; /* 64b: zero-ext */
1573 case 8: *dst.reg = dst.val; break;
1575 break;
1576 case OP_MEM:
1577 if ( !(d & Mov) && (dst.orig_val == dst.val) )
1578 /* nothing to do */;
1579 else if ( lock_prefix )
1580 rc = ops->cmpxchg(
1581 dst.mem.seg, dst.mem.off, dst.orig_val,
1582 dst.val, dst.bytes, ctxt);
1583 else
1584 rc = ops->write(
1585 dst.mem.seg, dst.mem.off, dst.val, dst.bytes, ctxt);
1586 if ( rc != 0 )
1587 goto done;
1588 default:
1589 break;
1592 /* Commit shadow register state. */
1593 *ctxt->regs = _regs;
1595 done:
1596 return (rc == X86EMUL_UNHANDLEABLE) ? -1 : 0;
1598 special_insn:
1599 dst.type = OP_NONE;
1601 /*
1602 * The only implicit-operands instructions allowed a LOCK prefix are
1603 * CMPXCHG{8,16}B, MOV CRn, MOV DRn.
1604 */
1605 generate_exception_if(lock_prefix &&
1606 ((b < 0x20) || (b > 0x23)) && /* MOV CRn/DRn */
1607 (b != 0xc7), /* CMPXCHG{8,16}B */
1608 EXC_GP);
1610 if ( twobyte )
1611 goto twobyte_special_insn;
1613 switch ( b )
1615 case 0x27: /* daa */ {
1616 uint8_t al = _regs.eax;
1617 unsigned long eflags = _regs.eflags;
1618 generate_exception_if(mode_64bit(), EXC_UD);
1619 _regs.eflags &= ~(EFLG_CF|EFLG_AF);
1620 if ( ((al & 0x0f) > 9) || (eflags & EFLG_AF) )
1622 *(uint8_t *)&_regs.eax += 6;
1623 _regs.eflags |= EFLG_AF;
1625 if ( (al > 0x99) || (eflags & EFLG_CF) )
1627 *(uint8_t *)&_regs.eax += 0x60;
1628 _regs.eflags |= EFLG_CF;
1630 _regs.eflags &= ~(EFLG_SF|EFLG_ZF|EFLG_PF);
1631 _regs.eflags |= ((uint8_t)_regs.eax == 0) ? EFLG_ZF : 0;
1632 _regs.eflags |= (( int8_t)_regs.eax < 0) ? EFLG_SF : 0;
1633 _regs.eflags |= even_parity(_regs.eax) ? EFLG_PF : 0;
1634 break;
1637 case 0x2f: /* das */ {
1638 uint8_t al = _regs.eax;
1639 unsigned long eflags = _regs.eflags;
1640 generate_exception_if(mode_64bit(), EXC_UD);
1641 _regs.eflags &= ~(EFLG_CF|EFLG_AF);
1642 if ( ((al & 0x0f) > 9) || (eflags & EFLG_AF) )
1644 _regs.eflags |= EFLG_AF;
1645 if ( (al < 6) || (eflags & EFLG_CF) )
1646 _regs.eflags |= EFLG_CF;
1647 *(uint8_t *)&_regs.eax -= 6;
1649 if ( (al > 0x99) || (eflags & EFLG_CF) )
1651 *(uint8_t *)&_regs.eax -= 0x60;
1652 _regs.eflags |= EFLG_CF;
1654 _regs.eflags &= ~(EFLG_SF|EFLG_ZF|EFLG_PF);
1655 _regs.eflags |= ((uint8_t)_regs.eax == 0) ? EFLG_ZF : 0;
1656 _regs.eflags |= (( int8_t)_regs.eax < 0) ? EFLG_SF : 0;
1657 _regs.eflags |= even_parity(_regs.eax) ? EFLG_PF : 0;
1658 break;
1661 case 0x37: /* aaa */
1662 case 0x3f: /* aas */
1663 generate_exception_if(mode_64bit(), EXC_UD);
1664 _regs.eflags &= ~EFLG_CF;
1665 if ( ((uint8_t)_regs.eax > 9) || (_regs.eflags & EFLG_AF) )
1667 ((uint8_t *)&_regs.eax)[0] += (b == 0x37) ? 6 : -6;
1668 ((uint8_t *)&_regs.eax)[1] += (b == 0x37) ? 1 : -1;
1669 _regs.eflags |= EFLG_CF | EFLG_AF;
1671 ((uint8_t *)&_regs.eax)[0] &= 0x0f;
1672 break;
1674 case 0x40 ... 0x4f: /* inc/dec reg */
1675 dst.type = OP_REG;
1676 dst.reg = decode_register(b & 7, &_regs, 0);
1677 dst.bytes = op_bytes;
1678 dst.val = *dst.reg;
1679 if ( b & 8 )
1680 emulate_1op("dec", dst, _regs.eflags);
1681 else
1682 emulate_1op("inc", dst, _regs.eflags);
1683 break;
1685 case 0x50 ... 0x57: /* push reg */
1686 src.val = *(unsigned long *)decode_register(
1687 (b & 7) | ((rex_prefix & 1) << 3), &_regs, 0);
1688 goto push;
1690 case 0x58 ... 0x5f: /* pop reg */
1691 dst.type = OP_REG;
1692 dst.reg = decode_register(
1693 (b & 7) | ((rex_prefix & 1) << 3), &_regs, 0);
1694 dst.bytes = op_bytes;
1695 if ( mode_64bit() && (dst.bytes == 4) )
1696 dst.bytes = 8;
1697 if ( (rc = ops->read(x86_seg_ss, sp_post_inc(dst.bytes),
1698 &dst.val, dst.bytes, ctxt)) != 0 )
1699 goto done;
1700 break;
1702 case 0x60: /* pusha */ {
1703 int i;
1704 unsigned long regs[] = {
1705 _regs.eax, _regs.ecx, _regs.edx, _regs.ebx,
1706 _regs.esp, _regs.ebp, _regs.esi, _regs.edi };
1707 generate_exception_if(mode_64bit(), EXC_UD);
1708 for ( i = 0; i < 8; i++ )
1709 if ( (rc = ops->write(x86_seg_ss, sp_pre_dec(op_bytes),
1710 regs[i], op_bytes, ctxt)) != 0 )
1711 goto done;
1712 break;
1715 case 0x61: /* popa */ {
1716 int i;
1717 unsigned long dummy_esp, *regs[] = {
1718 (unsigned long *)&_regs.edi, (unsigned long *)&_regs.esi,
1719 (unsigned long *)&_regs.ebp, (unsigned long *)&dummy_esp,
1720 (unsigned long *)&_regs.ebx, (unsigned long *)&_regs.edx,
1721 (unsigned long *)&_regs.ecx, (unsigned long *)&_regs.eax };
1722 generate_exception_if(mode_64bit(), EXC_UD);
1723 for ( i = 0; i < 8; i++ )
1724 if ( (rc = ops->read(x86_seg_ss, sp_post_inc(op_bytes),
1725 regs[i], op_bytes, ctxt)) != 0 )
1726 goto done;
1727 break;
1730 case 0x68: /* push imm{16,32,64} */
1731 src.val = ((op_bytes == 2)
1732 ? (int32_t)insn_fetch_type(int16_t)
1733 : insn_fetch_type(int32_t));
1734 goto push;
1736 case 0x6a: /* push imm8 */
1737 src.val = insn_fetch_type(int8_t);
1738 push:
1739 d |= Mov; /* force writeback */
1740 dst.type = OP_MEM;
1741 dst.bytes = op_bytes;
1742 if ( mode_64bit() && (dst.bytes == 4) )
1743 dst.bytes = 8;
1744 dst.val = src.val;
1745 dst.mem.seg = x86_seg_ss;
1746 dst.mem.off = sp_pre_dec(dst.bytes);
1747 break;
1749 case 0x6c ... 0x6d: /* ins %dx,%es:%edi */
1750 handle_rep_prefix();
1751 generate_exception_if(!mode_iopl(), EXC_GP);
1752 dst.type = OP_MEM;
1753 dst.bytes = !(b & 1) ? 1 : (op_bytes == 8) ? 4 : op_bytes;
1754 dst.mem.seg = x86_seg_es;
1755 dst.mem.off = truncate_ea(_regs.edi);
1756 fail_if(ops->read_io == NULL);
1757 if ( (rc = ops->read_io((uint16_t)_regs.edx, dst.bytes,
1758 &dst.val, ctxt)) != 0 )
1759 goto done;
1760 register_address_increment(
1761 _regs.edi, (_regs.eflags & EFLG_DF) ? -dst.bytes : dst.bytes);
1762 break;
1764 case 0x6e ... 0x6f: /* outs %esi,%dx */
1765 handle_rep_prefix();
1766 generate_exception_if(!mode_iopl(), EXC_GP);
1767 dst.bytes = !(b & 1) ? 1 : (op_bytes == 8) ? 4 : op_bytes;
1768 if ( (rc = ops->read(ea.mem.seg, truncate_ea(_regs.esi),
1769 &dst.val, dst.bytes, ctxt)) != 0 )
1770 goto done;
1771 fail_if(ops->write_io == NULL);
1772 if ( (rc = ops->write_io((uint16_t)_regs.edx, dst.bytes,
1773 dst.val, ctxt)) != 0 )
1774 goto done;
1775 register_address_increment(
1776 _regs.esi, (_regs.eflags & EFLG_DF) ? -dst.bytes : dst.bytes);
1777 break;
1779 case 0x70 ... 0x7f: /* jcc (short) */ {
1780 int rel = insn_fetch_type(int8_t);
1781 if ( test_cc(b, _regs.eflags) )
1782 jmp_rel(rel);
1783 break;
1786 case 0x90: /* nop / xchg %%r8,%%rax */
1787 if ( !(rex_prefix & 1) )
1788 break; /* nop */
1790 case 0x91 ... 0x97: /* xchg reg,%%rax */
1791 src.type = dst.type = OP_REG;
1792 src.bytes = dst.bytes = op_bytes;
1793 src.reg = (unsigned long *)&_regs.eax;
1794 src.val = *src.reg;
1795 dst.reg = decode_register(
1796 (b & 7) | ((rex_prefix & 1) << 3), &_regs, 0);
1797 dst.val = *dst.reg;
1798 goto xchg;
1800 case 0x98: /* cbw/cwde/cdqe */
1801 switch ( op_bytes )
1803 case 2: *(int16_t *)&_regs.eax = (int8_t)_regs.eax; break; /* cbw */
1804 case 4: _regs.eax = (uint32_t)(int16_t)_regs.eax; break; /* cwde */
1805 case 8: _regs.eax = (int32_t)_regs.eax; break; /* cdqe */
1807 break;
1809 case 0x99: /* cwd/cdq/cqo */
1810 switch ( op_bytes )
1812 case 2:
1813 *(int16_t *)&_regs.edx = ((int16_t)_regs.eax < 0) ? -1 : 0;
1814 break;
1815 case 4:
1816 _regs.edx = (uint32_t)(((int32_t)_regs.eax < 0) ? -1 : 0);
1817 break;
1818 case 8:
1819 _regs.edx = (_regs.eax < 0) ? -1 : 0;
1820 break;
1822 break;
1824 case 0x9e: /* sahf */
1825 *(uint8_t *)_regs.eflags = (((uint8_t *)&_regs.eax)[1] & 0xd7) | 0x02;
1826 break;
1828 case 0x9f: /* lahf */
1829 ((uint8_t *)&_regs.eax)[1] = (_regs.eflags & 0xd7) | 0x02;
1830 break;
1832 case 0xa0 ... 0xa1: /* mov mem.offs,{%al,%ax,%eax,%rax} */
1833 /* Source EA is not encoded via ModRM. */
1834 dst.type = OP_REG;
1835 dst.reg = (unsigned long *)&_regs.eax;
1836 dst.bytes = (d & ByteOp) ? 1 : op_bytes;
1837 if ( (rc = ops->read(ea.mem.seg, insn_fetch_bytes(ad_bytes),
1838 &dst.val, dst.bytes, ctxt)) != 0 )
1839 goto done;
1840 break;
1842 case 0xa2 ... 0xa3: /* mov {%al,%ax,%eax,%rax},mem.offs */
1843 /* Destination EA is not encoded via ModRM. */
1844 dst.type = OP_MEM;
1845 dst.mem.seg = ea.mem.seg;
1846 dst.mem.off = insn_fetch_bytes(ad_bytes);
1847 dst.bytes = (d & ByteOp) ? 1 : op_bytes;
1848 dst.val = (unsigned long)_regs.eax;
1849 break;
1851 case 0xa4 ... 0xa5: /* movs */
1852 handle_rep_prefix();
1853 dst.type = OP_MEM;
1854 dst.bytes = (d & ByteOp) ? 1 : op_bytes;
1855 dst.mem.seg = x86_seg_es;
1856 dst.mem.off = truncate_ea(_regs.edi);
1857 if ( (rc = ops->read(ea.mem.seg, truncate_ea(_regs.esi),
1858 &dst.val, dst.bytes, ctxt)) != 0 )
1859 goto done;
1860 register_address_increment(
1861 _regs.esi, (_regs.eflags & EFLG_DF) ? -dst.bytes : dst.bytes);
1862 register_address_increment(
1863 _regs.edi, (_regs.eflags & EFLG_DF) ? -dst.bytes : dst.bytes);
1864 break;
1866 case 0xaa ... 0xab: /* stos */
1867 handle_rep_prefix();
1868 dst.type = OP_MEM;
1869 dst.bytes = (d & ByteOp) ? 1 : op_bytes;
1870 dst.mem.seg = x86_seg_es;
1871 dst.mem.off = truncate_ea(_regs.edi);
1872 dst.val = _regs.eax;
1873 register_address_increment(
1874 _regs.edi, (_regs.eflags & EFLG_DF) ? -dst.bytes : dst.bytes);
1875 break;
1877 case 0xac ... 0xad: /* lods */
1878 handle_rep_prefix();
1879 dst.type = OP_REG;
1880 dst.bytes = (d & ByteOp) ? 1 : op_bytes;
1881 dst.reg = (unsigned long *)&_regs.eax;
1882 if ( (rc = ops->read(ea.mem.seg, truncate_ea(_regs.esi),
1883 &dst.val, dst.bytes, ctxt)) != 0 )
1884 goto done;
1885 register_address_increment(
1886 _regs.esi, (_regs.eflags & EFLG_DF) ? -dst.bytes : dst.bytes);
1887 break;
1889 case 0xc2: /* ret imm16 (near) */
1890 case 0xc3: /* ret (near) */ {
1891 int offset = (b == 0xc2) ? insn_fetch_type(uint16_t) : 0;
1892 op_bytes = mode_64bit() ? 8 : op_bytes;
1893 if ( (rc = ops->read(x86_seg_ss, sp_post_inc(op_bytes + offset),
1894 &dst.val, op_bytes, ctxt)) != 0 )
1895 goto done;
1896 _regs.eip = dst.val;
1897 break;
1900 case 0xd4: /* aam */ {
1901 unsigned int base = insn_fetch_type(uint8_t);
1902 uint8_t al = _regs.eax;
1903 generate_exception_if(mode_64bit(), EXC_UD);
1904 generate_exception_if(base == 0, EXC_DE);
1905 *(uint16_t *)&_regs.eax = ((al / base) << 8) | (al % base);
1906 _regs.eflags &= ~(EFLG_SF|EFLG_ZF|EFLG_PF);
1907 _regs.eflags |= ((uint8_t)_regs.eax == 0) ? EFLG_ZF : 0;
1908 _regs.eflags |= (( int8_t)_regs.eax < 0) ? EFLG_SF : 0;
1909 _regs.eflags |= even_parity(_regs.eax) ? EFLG_PF : 0;
1910 break;
1913 case 0xd5: /* aad */ {
1914 unsigned int base = insn_fetch_type(uint8_t);
1915 uint16_t ax = _regs.eax;
1916 generate_exception_if(mode_64bit(), EXC_UD);
1917 *(uint16_t *)&_regs.eax = (uint8_t)(ax + ((ax >> 8) * base));
1918 _regs.eflags &= ~(EFLG_SF|EFLG_ZF|EFLG_PF);
1919 _regs.eflags |= ((uint8_t)_regs.eax == 0) ? EFLG_ZF : 0;
1920 _regs.eflags |= (( int8_t)_regs.eax < 0) ? EFLG_SF : 0;
1921 _regs.eflags |= even_parity(_regs.eax) ? EFLG_PF : 0;
1922 break;
1925 case 0xd6: /* salc */
1926 generate_exception_if(mode_64bit(), EXC_UD);
1927 *(uint8_t *)&_regs.eax = (_regs.eflags & EFLG_CF) ? 0xff : 0x00;
1928 break;
1930 case 0xd7: /* xlat */ {
1931 unsigned long al = (uint8_t)_regs.eax;
1932 if ( (rc = ops->read(ea.mem.seg, truncate_ea(_regs.ebx + al),
1933 &al, 1, ctxt)) != 0 )
1934 goto done;
1935 *(uint8_t *)&_regs.eax = al;
1936 break;
1939 case 0xe0 ... 0xe2: /* loop{,z,nz} */ {
1940 int rel = insn_fetch_type(int8_t);
1941 int do_jmp = !(_regs.eflags & EFLG_ZF); /* loopnz */
1942 if ( b == 0xe1 )
1943 do_jmp = !do_jmp; /* loopz */
1944 else if ( b == 0xe2 )
1945 do_jmp = 1; /* loop */
1946 switch ( ad_bytes )
1948 case 2:
1949 do_jmp &= --(*(uint16_t *)&_regs.ecx) != 0;
1950 break;
1951 case 4:
1952 do_jmp &= --(*(uint32_t *)&_regs.ecx) != 0;
1953 _regs.ecx = (uint32_t)_regs.ecx; /* zero extend in x86/64 mode */
1954 break;
1955 default: /* case 8: */
1956 do_jmp &= --_regs.ecx != 0;
1957 break;
1959 if ( do_jmp )
1960 jmp_rel(rel);
1961 break;
1964 case 0xe3: /* jcxz/jecxz (short) */ {
1965 int rel = insn_fetch_type(int8_t);
1966 if ( (ad_bytes == 2) ? !(uint16_t)_regs.ecx :
1967 (ad_bytes == 4) ? !(uint32_t)_regs.ecx : !_regs.ecx )
1968 jmp_rel(rel);
1969 break;
1972 case 0xe4: /* in imm8,%al */
1973 case 0xe5: /* in imm8,%eax */
1974 case 0xe6: /* out %al,imm8 */
1975 case 0xe7: /* out %eax,imm8 */
1976 case 0xec: /* in %dx,%al */
1977 case 0xed: /* in %dx,%eax */
1978 case 0xee: /* out %al,%dx */
1979 case 0xef: /* out %eax,%dx */ {
1980 unsigned int port = ((b < 0xe8)
1981 ? insn_fetch_type(uint8_t)
1982 : (uint16_t)_regs.edx);
1983 generate_exception_if(!mode_iopl(), EXC_GP);
1984 op_bytes = !(b & 1) ? 1 : (op_bytes == 8) ? 4 : op_bytes;
1985 if ( b & 2 )
1987 /* out */
1988 fail_if(ops->write_io == NULL);
1989 rc = ops->write_io(port, op_bytes, _regs.eax, ctxt);
1992 else
1994 /* in */
1995 dst.type = OP_REG;
1996 dst.bytes = op_bytes;
1997 dst.reg = (unsigned long *)&_regs.eax;
1998 fail_if(ops->read_io == NULL);
1999 rc = ops->read_io(port, dst.bytes, &dst.val, ctxt);
2001 if ( rc != 0 )
2002 goto done;
2003 break;
2006 case 0xe8: /* call (near) */ {
2007 int rel = (((op_bytes == 2) && !mode_64bit())
2008 ? (int32_t)insn_fetch_type(int16_t)
2009 : insn_fetch_type(int32_t));
2010 op_bytes = mode_64bit() ? 8 : op_bytes;
2011 src.val = _regs.eip;
2012 jmp_rel(rel);
2013 goto push;
2016 case 0xe9: /* jmp (near) */ {
2017 int rel = (((op_bytes == 2) && !mode_64bit())
2018 ? (int32_t)insn_fetch_type(int16_t)
2019 : insn_fetch_type(int32_t));
2020 jmp_rel(rel);
2021 break;
2024 case 0xeb: /* jmp (short) */
2025 jmp_rel(insn_fetch_type(int8_t));
2026 break;
2028 case 0xf5: /* cmc */
2029 _regs.eflags ^= EFLG_CF;
2030 break;
2032 case 0xf8: /* clc */
2033 _regs.eflags &= ~EFLG_CF;
2034 break;
2036 case 0xf9: /* stc */
2037 _regs.eflags |= EFLG_CF;
2038 break;
2040 case 0xfa: /* cli */
2041 generate_exception_if(!mode_iopl(), EXC_GP);
2042 fail_if(ops->write_rflags == NULL);
2043 if ( (rc = ops->write_rflags(_regs.eflags & ~EFLG_IF, ctxt)) != 0 )
2044 goto done;
2045 break;
2047 case 0xfb: /* sti */
2048 generate_exception_if(!mode_iopl(), EXC_GP);
2049 fail_if(ops->write_rflags == NULL);
2050 if ( (rc = ops->write_rflags(_regs.eflags | EFLG_IF, ctxt)) != 0 )
2051 goto done;
2052 break;
2054 case 0xfc: /* cld */
2055 _regs.eflags &= ~EFLG_DF;
2056 break;
2058 case 0xfd: /* std */
2059 _regs.eflags |= EFLG_DF;
2060 break;
2062 goto writeback;
/*
 * Two-byte (0F-prefixed) opcodes that go through the generic src/dst
 * operand decode. 'b' is the second opcode byte; writeback at the end
 * commits 'dst' according to dst.type.
 */
2064 twobyte_insn:
2065 switch ( b )
2067 case 0x40 ... 0x4f: /* cmovcc */
2068 dst.val = src.val;
/* Condition false: suppress the register write by discarding dst. */
2069 if ( !test_cc(b, _regs.eflags) )
2070 dst.type = OP_NONE;
2071 break;
2073 case 0x90 ... 0x9f: /* setcc */
2074 dst.val = test_cc(b, _regs.eflags);
2075 break;
2077 case 0xb0 ... 0xb1: /* cmpxchg */
2078 /* Save real source value, then compare EAX against destination. */
2079 src.orig_val = src.val;
2080 src.val = _regs.eax;
2081 emulate_2op_SrcV("cmp", src, dst, _regs.eflags);
2082 /* Always write back. The question is: where to? */
2083 d |= Mov;
2084 if ( _regs.eflags & EFLG_ZF )
2086 /* Success: write back to memory. */
2087 dst.val = src.orig_val;
2089 else
2091 /* Failure: write the value we saw to EAX. */
2092 dst.type = OP_REG;
2093 dst.reg = (unsigned long *)&_regs.eax;
2095 break;
2097 case 0xa3: bt: /* bt */
2098 emulate_2op_SrcV_nobyte("bt", src, dst, _regs.eflags);
2099 break;
2101 case 0xb3: btr: /* btr */
2102 emulate_2op_SrcV_nobyte("btr", src, dst, _regs.eflags);
2103 break;
2105 case 0xab: bts: /* bts */
2106 emulate_2op_SrcV_nobyte("bts", src, dst, _regs.eflags);
2107 break;
/* Two-operand IMUL: OF/CF set iff the product does not fit the operand size. */
2109 case 0xaf: /* imul */
2110 _regs.eflags &= ~(EFLG_OF|EFLG_CF);
2111 switch ( dst.bytes )
2113 case 2:
2114 dst.val = ((uint32_t)(int16_t)src.val *
2115 (uint32_t)(int16_t)dst.val);
2116 if ( (int16_t)dst.val != (uint32_t)dst.val )
2117 _regs.eflags |= EFLG_OF|EFLG_CF;
2118 break;
2119 #ifdef __x86_64__
2120 case 4:
2121 dst.val = ((uint64_t)(int32_t)src.val *
2122 (uint64_t)(int32_t)dst.val);
2123 if ( (int32_t)dst.val != dst.val )
2124 _regs.eflags |= EFLG_OF|EFLG_CF;
2125 break;
2126 #endif
/* Widest form (4 bytes on i386, 8 on x86_64): use the double-width helper. */
2127 default: {
2128 unsigned long m[2] = { src.val, dst.val };
2129 if ( imul_dbl(m) )
2130 _regs.eflags |= EFLG_OF|EFLG_CF;
2131 dst.val = m[0];
2132 break;
2135 break;
2137 case 0xb6: /* movzx rm8,r{16,32,64} */
2138 /* Recompute DstReg as we may have decoded AH/BH/CH/DH. */
2139 dst.reg = decode_register(modrm_reg, &_regs, 0);
2140 dst.bytes = op_bytes;
2141 dst.val = (uint8_t)src.val;
2142 break;
/*
 * BSF/BSR: only ZF is emulated here; the other arithmetic flags are left
 * untouched. NOTE(review): hardware leaves those flags (and dst on a zero
 * source) undefined, so this looks acceptable -- confirm against the SDM.
 */
2144 case 0xbc: /* bsf */ {
2145 int zf;
2146 asm ( "bsf %2,%0; setz %b1"
2147 : "=r" (dst.val), "=q" (zf)
2148 : "r" (src.val), "1" (0) );
2149 _regs.eflags &= ~EFLG_ZF;
2150 _regs.eflags |= zf ? EFLG_ZF : 0;
2151 break;
2154 case 0xbd: /* bsr */ {
2155 int zf;
2156 asm ( "bsr %2,%0; setz %b1"
2157 : "=r" (dst.val), "=q" (zf)
2158 : "r" (src.val), "1" (0) );
2159 _regs.eflags &= ~EFLG_ZF;
2160 _regs.eflags |= zf ? EFLG_ZF : 0;
2161 break;
2164 case 0xb7: /* movzx rm16,r{16,32,64} */
2165 dst.val = (uint16_t)src.val;
2166 break;
2168 case 0xbb: btc: /* btc */
2169 emulate_2op_SrcV_nobyte("btc", src, dst, _regs.eflags);
2170 break;
/* Grp8: BT/BTS/BTR/BTC with an immediate bit offset; reg field selects the op. */
2172 case 0xba: /* Grp8 */
2173 switch ( modrm_reg & 7 )
2175 case 4: goto bt;
2176 case 5: goto bts;
2177 case 6: goto btr;
2178 case 7: goto btc;
2179 default: generate_exception_if(1, EXC_UD);
2181 break;
2183 case 0xbe: /* movsx rm8,r{16,32,64} */
2184 /* Recompute DstReg as we may have decoded AH/BH/CH/DH. */
2185 dst.reg = decode_register(modrm_reg, &_regs, 0);
2186 dst.bytes = op_bytes;
2187 dst.val = (int8_t)src.val;
2188 break;
2190 case 0xbf: /* movsx rm16,r{16,32,64} */
2191 dst.val = (int16_t)src.val;
2192 break;
/* XADD: store old dst into the register source here, then reuse the ADD path. */
2194 case 0xc0 ... 0xc1: /* xadd */
2195 /* Write back the register source. */
2196 switch ( dst.bytes )
2198 case 1: *(uint8_t *)src.reg = (uint8_t)dst.val; break;
2199 case 2: *(uint16_t *)src.reg = (uint16_t)dst.val; break;
2200 case 4: *src.reg = (uint32_t)dst.val; break; /* 64b reg: zero-extend */
2201 case 8: *src.reg = dst.val; break;
2203 goto add;
2205 goto writeback;
/*
 * Two-byte opcodes with no generic operand decode (ImplicitOps etc.).
 * All privileged ops re-check CPL here before touching the callback hooks.
 */
2207 twobyte_special_insn:
2208 switch ( b )
/* CLTS: clear CR0.TS (bit 3, mask 8) via read-modify-write of CR0. */
2210 case 0x06: /* clts */
2211 generate_exception_if(!mode_ring0(), EXC_GP);
2212 fail_if((ops->read_cr == NULL) || (ops->write_cr == NULL));
2213 if ( (rc = ops->read_cr(0, &dst.val, ctxt)) ||
2214 (rc = ops->write_cr(0, dst.val&~8, ctxt)) )
2215 goto done;
2216 break;
/* INVD is mapped onto the same wbinvd hook as WBINVD. */
2218 case 0x08: /* invd */
2219 case 0x09: /* wbinvd */
2220 generate_exception_if(!mode_ring0(), EXC_GP);
2221 fail_if(ops->wbinvd == NULL);
2222 if ( (rc = ops->wbinvd(ctxt)) != 0 )
2223 goto done;
2224 break;
/* Prefetch hints and AMD-defined long NOPs: architecturally ignorable. */
2226 case 0x0d: /* GrpP (prefetch) */
2227 case 0x18: /* Grp16 (prefetch/nop) */
2228 case 0x19 ... 0x1f: /* nop (amd-defined) */
2229 break;
2231 case 0x20: /* mov cr,reg */
2232 case 0x21: /* mov dr,reg */
2233 case 0x22: /* mov reg,cr */
2234 case 0x23: /* mov reg,dr */
2235 generate_exception_if(!mode_ring0(), EXC_GP);
2236 modrm_rm |= (rex_prefix & 1) << 3;
2237 modrm_reg |= lock_prefix << 3;
2238 if ( b & 2 )
2240 /* Write to CR/DR. */
2241 src.val = *(unsigned long *)decode_register(modrm_rm, &_regs, 0);
2242 if ( !mode_64bit() )
2243 src.val = (uint32_t)src.val;
2244 rc = ((b & 1)
2245 ? (ops->write_dr
2246 ? ops->write_dr(modrm_reg, src.val, ctxt)
2247 : X86EMUL_UNHANDLEABLE)
2248 : (ops->write_cr
2249 ? ops->write_dr(modrm_reg, src.val, ctxt)
2250 : X86EMUL_UNHANDLEABLE));
2252 else
2254 /* Read from CR/DR. */
2255 dst.type = OP_REG;
2256 dst.bytes = mode_64bit() ? 8 : 4;
2257 dst.reg = decode_register(modrm_rm, &_regs, 0);
2258 rc = ((b & 1)
2259 ? (ops->read_dr
2260 ? ops->read_dr(modrm_reg, &dst.val, ctxt)
2261 : X86EMUL_UNHANDLEABLE)
2262 : (ops->read_cr
2263 ? ops->read_dr(modrm_reg, &dst.val, ctxt)
2264 : X86EMUL_UNHANDLEABLE));
2266 if ( rc != 0 )
2267 goto done;
2268 break;
/* WRMSR: MSR index in ECX, 64-bit value assembled from EDX:EAX. */
2270 case 0x30: /* wrmsr */ {
2271 uint64_t val = ((uint64_t)_regs.edx << 32) | (uint32_t)_regs.eax;
2272 generate_exception_if(!mode_ring0(), EXC_GP);
2273 fail_if(ops->write_msr == NULL);
2274 if ( (rc = ops->write_msr((uint32_t)_regs.ecx, val, ctxt)) != 0 )
2275 goto done;
2276 break;
/* RDMSR: MSR index in ECX, result split back into EDX:EAX. */
2279 case 0x32: /* rdmsr */ {
2280 uint64_t val;
2281 generate_exception_if(!mode_ring0(), EXC_GP);
2282 fail_if(ops->read_msr == NULL);
2283 if ( (rc = ops->read_msr((uint32_t)_regs.ecx, &val, ctxt)) != 0 )
2284 goto done;
2285 _regs.edx = (uint32_t)(val >> 32);
2286 _regs.eax = (uint32_t)(val >> 0);
2287 break;
/*
 * Near Jcc: rel16 with a 16-bit operand size (never in 64-bit mode,
 * where near-branch displacements are always 32-bit), else rel32.
 */
2290 case 0x80 ... 0x8f: /* jcc (near) */ {
2291 int rel = (((op_bytes == 2) && !mode_64bit())
2292 ? (int32_t)insn_fetch_type(int16_t)
2293 : insn_fetch_type(int32_t));
2294 if ( test_cc(b, _regs.eflags) )
2295 jmp_rel(rel);
2296 break;
/*
 * Grp9 CMPXCHG8B: compare EDX:EAX with the 8-byte memory operand; on match
 * store ECX:EBX and set ZF, else load the operand into EDX:EAX and clear ZF.
 * Only reg field 1 of the ModRM is defined; anything else is #UD.
 */
2299 case 0xc7: /* Grp9 (cmpxchg8b) */
2300 #if defined(__i386__)
/* 32-bit build: read the two halves separately, then use the cmpxchg8b hook. */
2302 unsigned long old_lo, old_hi;
2303 generate_exception_if((modrm_reg & 7) != 1, EXC_UD);
2304 if ( (rc = ops->read(ea.mem.seg, ea.mem.off+0, &old_lo, 4, ctxt)) ||
2305 (rc = ops->read(ea.mem.seg, ea.mem.off+4, &old_hi, 4, ctxt)) )
2306 goto done;
2307 if ( (old_lo != _regs.eax) || (old_hi != _regs.edx) )
2309 _regs.eax = old_lo;
2310 _regs.edx = old_hi;
2311 _regs.eflags &= ~EFLG_ZF;
2313 else if ( ops->cmpxchg8b == NULL )
2315 rc = X86EMUL_UNHANDLEABLE;
2316 goto done;
2318 else
2320 if ( (rc = ops->cmpxchg8b(ea.mem.seg, ea.mem.off, old_lo, old_hi,
2321 _regs.ebx, _regs.ecx, ctxt)) != 0 )
2322 goto done;
2323 _regs.eflags |= EFLG_ZF;
2325 break;
2327 #elif defined(__x86_64__)
/* 64-bit build: a single 8-byte cmpxchg hook covers the whole operand. */
2329 unsigned long old, new;
2330 generate_exception_if((modrm_reg & 7) != 1, EXC_UD);
2331 if ( (rc = ops->read(ea.mem.seg, ea.mem.off, &old, 8, ctxt)) != 0 )
2332 goto done;
2333 if ( ((uint32_t)(old>>0) != (uint32_t)_regs.eax) ||
2334 ((uint32_t)(old>>32) != (uint32_t)_regs.edx) )
2336 _regs.eax = (uint32_t)(old>>0);
2337 _regs.edx = (uint32_t)(old>>32);
2338 _regs.eflags &= ~EFLG_ZF;
2340 else
/* _regs.ecx is 64-bit here, so the shift is well-defined. */
2342 new = (_regs.ecx<<32)|(uint32_t)_regs.ebx;
2343 if ( (rc = ops->cmpxchg(ea.mem.seg, ea.mem.off, old,
2344 new, 8, ctxt)) != 0 )
2345 goto done;
2346 _regs.eflags |= EFLG_ZF;
2348 break;
2350 #endif
/*
 * BSWAP reg: reverse byte order of the register encoded in the low three
 * opcode bits. NOTE(review): the 16-bit form here swaps the low two bytes;
 * real hardware leaves the result undefined for 16-bit operands -- confirm
 * this emulation choice is intentional.
 */
2352 case 0xc8 ... 0xcf: /* bswap */
2353 dst.type = OP_REG;
2354 dst.reg = decode_register(b & 7, &_regs, 0);
2355 dst.val = *dst.reg;
2356 switch ( dst.bytes = op_bytes )
2358 case 2:
2359 dst.val = (((dst.val & 0x00FFUL) << 8) |
2360 ((dst.val & 0xFF00UL) >> 8));
2361 break;
2362 case 4:
2363 dst.val = (((dst.val & 0x000000FFUL) << 24) |
2364 ((dst.val & 0x0000FF00UL) << 8) |
2365 ((dst.val & 0x00FF0000UL) >> 8) |
2366 ((dst.val & 0xFF000000UL) >> 24));
2367 break;
2368 #ifdef __x86_64__
2369 case 8:
2370 dst.val = (((dst.val & 0x00000000000000FFUL) << 56) |
2371 ((dst.val & 0x000000000000FF00UL) << 40) |
2372 ((dst.val & 0x0000000000FF0000UL) << 24) |
2373 ((dst.val & 0x00000000FF000000UL) << 8) |
2374 ((dst.val & 0x000000FF00000000UL) >> 8) |
2375 ((dst.val & 0x0000FF0000000000UL) >> 24) |
2376 ((dst.val & 0x00FF000000000000UL) >> 40) |
2377 ((dst.val & 0xFF00000000000000UL) >> 56));
2378 break;
2379 #endif
2381 break;
2383 goto writeback;
/*
 * Decode/emulation failure path: in Xen builds, dump the raw instruction
 * bytes (from the original EIP up to however far we decoded) for debugging,
 * then report failure to the caller.
 */
2385 cannot_emulate:
2386 #ifdef __XEN__
2387 gdprintk(XENLOG_DEBUG, "Instr:");
2388 for ( ea.mem.off = ctxt->regs->eip; ea.mem.off < _regs.eip; ea.mem.off++ )
2390 unsigned long x;
/* Best-effort fetch: the return value is deliberately ignored here. */
2391 ops->insn_fetch(x86_seg_cs, ea.mem.off, &x, 1, ctxt);
2392 printk(" %02x", (uint8_t)x);
2394 printk("\n");
2395 #endif
2396 return -1;