direct-io.hg

view xen/arch/x86/x86_emulate.c @ 14346:e39964673c6f

xen: Fix emulator to use default segment SS for ESP/EBP references.
Signed-off-by: Keir Fraser <keir@xensource.com>
author kfraser@localhost.localdomain
date Mon Mar 12 11:00:44 2007 +0000 (2007-03-12)
parents b010e556fe2c
children b685bb321d1e
line source
1 /******************************************************************************
2 * x86_emulate.c
3 *
4 * Generic x86 (32-bit and 64-bit) instruction decoder and emulator.
5 *
6 * Copyright (c) 2005-2007 Keir Fraser
7 * Copyright (c) 2005-2007 XenSource Inc.
8 *
9 * This program is free software; you can redistribute it and/or modify
10 * it under the terms of the GNU General Public License as published by
11 * the Free Software Foundation; either version 2 of the License, or
12 * (at your option) any later version.
13 *
14 * This program is distributed in the hope that it will be useful,
15 * but WITHOUT ANY WARRANTY; without even the implied warranty of
16 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
17 * GNU General Public License for more details.
18 *
19 * You should have received a copy of the GNU General Public License
20 * along with this program; if not, write to the Free Software
21 * Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
22 */
24 #ifndef __XEN__
25 #include <stddef.h>
26 #include <stdint.h>
27 #include <public/xen.h>
28 #else
29 #include <xen/config.h>
30 #include <xen/types.h>
31 #include <xen/lib.h>
32 #include <asm/regs.h>
33 #undef cmpxchg
34 #endif
35 #include <asm-x86/x86_emulate.h>
/*
 * Per-opcode decode attributes, ORed together into one byte per opcode in
 * opcode_table[]/twobyte_table[] below.
 */
/* Operand sizes: 8-bit operands or specified/overridden size. */
#define ByteOp      (1<<0) /* 8-bit operands. */
/* Destination operand type (bits 1-2). */
#define DstBitBase  (0<<1) /* Memory operand, bit string. */
#define ImplicitOps (1<<1) /* Implicit in opcode. No generic decode. */
#define DstReg      (2<<1) /* Register operand. */
#define DstMem      (3<<1) /* Memory operand. */
#define DstMask     (3<<1)
/* Source operand type (bits 3-5). */
#define SrcNone     (0<<3) /* No source operand. */
#define SrcImplicit (0<<3) /* Source operand is implicit in the opcode. */
#define SrcReg      (1<<3) /* Register operand. */
#define SrcMem      (2<<3) /* Memory operand. */
#define SrcMem16    (3<<3) /* Memory operand (16-bit). */
#define SrcImm      (4<<3) /* Immediate operand. */
#define SrcImmByte  (5<<3) /* 8-bit sign-extended immediate operand. */
#define SrcMask     (7<<3)
/* Generic ModRM decode. */
#define ModRM       (1<<6)
/* Destination is only written; never read. */
#define Mov         (1<<7)
/*
 * Decode attributes for each one-byte opcode (indexed by opcode value).
 * A zero entry means the opcode is not handled by the generic decoder.
 */
static uint8_t opcode_table[256] = {
    /* 0x00 - 0x07 */
    ByteOp|DstMem|SrcReg|ModRM, DstMem|SrcReg|ModRM,
    ByteOp|DstReg|SrcMem|ModRM, DstReg|SrcMem|ModRM,
    ByteOp|DstReg|SrcImm, DstReg|SrcImm, 0, 0,
    /* 0x08 - 0x0F */
    ByteOp|DstMem|SrcReg|ModRM, DstMem|SrcReg|ModRM,
    ByteOp|DstReg|SrcMem|ModRM, DstReg|SrcMem|ModRM,
    ByteOp|DstReg|SrcImm, DstReg|SrcImm, 0, 0,
    /* 0x10 - 0x17 */
    ByteOp|DstMem|SrcReg|ModRM, DstMem|SrcReg|ModRM,
    ByteOp|DstReg|SrcMem|ModRM, DstReg|SrcMem|ModRM,
    ByteOp|DstReg|SrcImm, DstReg|SrcImm, 0, 0,
    /* 0x18 - 0x1F */
    ByteOp|DstMem|SrcReg|ModRM, DstMem|SrcReg|ModRM,
    ByteOp|DstReg|SrcMem|ModRM, DstReg|SrcMem|ModRM,
    ByteOp|DstReg|SrcImm, DstReg|SrcImm, 0, 0,
    /* 0x20 - 0x27 */
    ByteOp|DstMem|SrcReg|ModRM, DstMem|SrcReg|ModRM,
    ByteOp|DstReg|SrcMem|ModRM, DstReg|SrcMem|ModRM,
    ByteOp|DstReg|SrcImm, DstReg|SrcImm, 0, ImplicitOps,
    /* 0x28 - 0x2F */
    ByteOp|DstMem|SrcReg|ModRM, DstMem|SrcReg|ModRM,
    ByteOp|DstReg|SrcMem|ModRM, DstReg|SrcMem|ModRM,
    ByteOp|DstReg|SrcImm, DstReg|SrcImm, 0, ImplicitOps,
    /* 0x30 - 0x37 */
    ByteOp|DstMem|SrcReg|ModRM, DstMem|SrcReg|ModRM,
    ByteOp|DstReg|SrcMem|ModRM, DstReg|SrcMem|ModRM,
    ByteOp|DstReg|SrcImm, DstReg|SrcImm, 0, ImplicitOps,
    /* 0x38 - 0x3F */
    ByteOp|DstMem|SrcReg|ModRM, DstMem|SrcReg|ModRM,
    ByteOp|DstReg|SrcMem|ModRM, DstReg|SrcMem|ModRM,
    ByteOp|DstReg|SrcImm, DstReg|SrcImm, 0, ImplicitOps,
    /* 0x40 - 0x4F */
    ImplicitOps, ImplicitOps, ImplicitOps, ImplicitOps,
    ImplicitOps, ImplicitOps, ImplicitOps, ImplicitOps,
    ImplicitOps, ImplicitOps, ImplicitOps, ImplicitOps,
    ImplicitOps, ImplicitOps, ImplicitOps, ImplicitOps,
    /* 0x50 - 0x5F */
    ImplicitOps|Mov, ImplicitOps|Mov, ImplicitOps|Mov, ImplicitOps|Mov,
    ImplicitOps|Mov, ImplicitOps|Mov, ImplicitOps|Mov, ImplicitOps|Mov,
    ImplicitOps|Mov, ImplicitOps|Mov, ImplicitOps|Mov, ImplicitOps|Mov,
    ImplicitOps|Mov, ImplicitOps|Mov, ImplicitOps|Mov, ImplicitOps|Mov,
    /* 0x60 - 0x67 */
    ImplicitOps, ImplicitOps, DstReg|SrcMem|ModRM, DstReg|SrcMem16|ModRM|Mov,
    0, 0, 0, 0,
    /* 0x68 - 0x6F */
    ImplicitOps|Mov, DstMem|SrcImm|ModRM|Mov,
    ImplicitOps|Mov, DstMem|SrcImmByte|ModRM|Mov,
    ImplicitOps, ImplicitOps, ImplicitOps, ImplicitOps,
    /* 0x70 - 0x77 */
    ImplicitOps, ImplicitOps, ImplicitOps, ImplicitOps,
    ImplicitOps, ImplicitOps, ImplicitOps, ImplicitOps,
    /* 0x78 - 0x7F */
    ImplicitOps, ImplicitOps, ImplicitOps, ImplicitOps,
    ImplicitOps, ImplicitOps, ImplicitOps, ImplicitOps,
    /* 0x80 - 0x87 */
    ByteOp|DstMem|SrcImm|ModRM, DstMem|SrcImm|ModRM,
    ByteOp|DstMem|SrcImm|ModRM, DstMem|SrcImmByte|ModRM,
    ByteOp|DstMem|SrcReg|ModRM, DstMem|SrcReg|ModRM,
    ByteOp|DstMem|SrcReg|ModRM, DstMem|SrcReg|ModRM,
    /* 0x88 - 0x8F */
    ByteOp|DstMem|SrcReg|ModRM|Mov, DstMem|SrcReg|ModRM|Mov,
    ByteOp|DstReg|SrcMem|ModRM|Mov, DstReg|SrcMem|ModRM|Mov,
    0, DstReg|SrcNone|ModRM, 0, DstMem|SrcNone|ModRM|Mov,
    /* 0x90 - 0x97 */
    ImplicitOps, ImplicitOps, ImplicitOps, ImplicitOps,
    ImplicitOps, ImplicitOps, ImplicitOps, ImplicitOps,
    /* 0x98 - 0x9F */
    ImplicitOps, ImplicitOps, 0, 0, 0, 0, ImplicitOps, ImplicitOps,
    /* 0xA0 - 0xA7 */
    ByteOp|ImplicitOps|Mov, ImplicitOps|Mov,
    ByteOp|ImplicitOps|Mov, ImplicitOps|Mov,
    ByteOp|ImplicitOps|Mov, ImplicitOps|Mov, 0, 0,
    /* 0xA8 - 0xAF */
    ByteOp|DstReg|SrcImm, DstReg|SrcImm,
    ByteOp|ImplicitOps|Mov, ImplicitOps|Mov,
    ByteOp|ImplicitOps|Mov, ImplicitOps|Mov, 0, 0,
    /* 0xB0 - 0xB7 */
    ByteOp|DstReg|SrcImm|Mov, ByteOp|DstReg|SrcImm|Mov,
    ByteOp|DstReg|SrcImm|Mov, ByteOp|DstReg|SrcImm|Mov,
    ByteOp|DstReg|SrcImm|Mov, ByteOp|DstReg|SrcImm|Mov,
    ByteOp|DstReg|SrcImm|Mov, ByteOp|DstReg|SrcImm|Mov,
    /* 0xB8 - 0xBF */
    DstReg|SrcImm|Mov, DstReg|SrcImm|Mov, DstReg|SrcImm|Mov, DstReg|SrcImm|Mov,
    DstReg|SrcImm|Mov, DstReg|SrcImm|Mov, DstReg|SrcImm|Mov, DstReg|SrcImm|Mov,
    /* 0xC0 - 0xC7 */
    ByteOp|DstMem|SrcImm|ModRM, DstMem|SrcImmByte|ModRM,
    ImplicitOps, ImplicitOps,
    0, 0, ByteOp|DstMem|SrcImm|ModRM|Mov, DstMem|SrcImm|ModRM|Mov,
    /* 0xC8 - 0xCF */
    0, 0, 0, 0, 0, 0, 0, 0,
    /* 0xD0 - 0xD7 */
    ByteOp|DstMem|SrcImplicit|ModRM, DstMem|SrcImplicit|ModRM,
    ByteOp|DstMem|SrcImplicit|ModRM, DstMem|SrcImplicit|ModRM,
    ImplicitOps, ImplicitOps, ImplicitOps, ImplicitOps,
    /* 0xD8 - 0xDF */
    0, 0, 0, 0, 0, 0, 0, 0,
    /* 0xE0 - 0xE7 */
    ImplicitOps, ImplicitOps, ImplicitOps, ImplicitOps,
    ImplicitOps, ImplicitOps, ImplicitOps, ImplicitOps,
    /* 0xE8 - 0xEF */
    ImplicitOps, ImplicitOps, 0, ImplicitOps,
    ImplicitOps, ImplicitOps, ImplicitOps, ImplicitOps,
    /* 0xF0 - 0xF7 */
    0, 0, 0, 0,
    0, ImplicitOps, ByteOp|DstMem|SrcNone|ModRM, DstMem|SrcNone|ModRM,
    /* 0xF8 - 0xFF */
    ImplicitOps, ImplicitOps, ImplicitOps, ImplicitOps,
    ImplicitOps, ImplicitOps, ByteOp|DstMem|SrcNone|ModRM, DstMem|SrcNone|ModRM
};
/*
 * Decode attributes for each two-byte (0x0F-prefixed) opcode, indexed by the
 * second opcode byte. Zero means the opcode is not handled.
 */
static uint8_t twobyte_table[256] = {
    /* 0x00 - 0x07 */
    0, 0, 0, 0, 0, ImplicitOps, 0, 0,
    /* 0x08 - 0x0F */
    ImplicitOps, ImplicitOps, 0, 0, 0, ImplicitOps|ModRM, 0, 0,
    /* 0x10 - 0x17 */
    0, 0, 0, 0, 0, 0, 0, 0,
    /* 0x18 - 0x1F */
    ImplicitOps|ModRM, ImplicitOps|ModRM, ImplicitOps|ModRM, ImplicitOps|ModRM,
    ImplicitOps|ModRM, ImplicitOps|ModRM, ImplicitOps|ModRM, ImplicitOps|ModRM,
    /* 0x20 - 0x27 */
    ImplicitOps|ModRM, ImplicitOps|ModRM, ImplicitOps|ModRM, ImplicitOps|ModRM,
    0, 0, 0, 0,
    /* 0x28 - 0x2F */
    0, 0, 0, 0, 0, 0, 0, 0,
    /* 0x30 - 0x37 */
    ImplicitOps, 0, ImplicitOps, 0, 0, 0, 0, 0,
    /* 0x38 - 0x3F */
    0, 0, 0, 0, 0, 0, 0, 0,
    /* 0x40 - 0x47 */
    DstReg|SrcMem|ModRM|Mov, DstReg|SrcMem|ModRM|Mov,
    DstReg|SrcMem|ModRM|Mov, DstReg|SrcMem|ModRM|Mov,
    DstReg|SrcMem|ModRM|Mov, DstReg|SrcMem|ModRM|Mov,
    DstReg|SrcMem|ModRM|Mov, DstReg|SrcMem|ModRM|Mov,
    /* 0x48 - 0x4F */
    DstReg|SrcMem|ModRM|Mov, DstReg|SrcMem|ModRM|Mov,
    DstReg|SrcMem|ModRM|Mov, DstReg|SrcMem|ModRM|Mov,
    DstReg|SrcMem|ModRM|Mov, DstReg|SrcMem|ModRM|Mov,
    DstReg|SrcMem|ModRM|Mov, DstReg|SrcMem|ModRM|Mov,
    /* 0x50 - 0x5F */
    0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
    /* 0x60 - 0x6F */
    0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
    /* 0x70 - 0x7F */
    0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
    /* 0x80 - 0x87 */
    ImplicitOps, ImplicitOps, ImplicitOps, ImplicitOps,
    ImplicitOps, ImplicitOps, ImplicitOps, ImplicitOps,
    /* 0x88 - 0x8F */
    ImplicitOps, ImplicitOps, ImplicitOps, ImplicitOps,
    ImplicitOps, ImplicitOps, ImplicitOps, ImplicitOps,
    /* 0x90 - 0x97 */
    ByteOp|DstMem|SrcNone|ModRM|Mov, ByteOp|DstMem|SrcNone|ModRM|Mov,
    ByteOp|DstMem|SrcNone|ModRM|Mov, ByteOp|DstMem|SrcNone|ModRM|Mov,
    ByteOp|DstMem|SrcNone|ModRM|Mov, ByteOp|DstMem|SrcNone|ModRM|Mov,
    ByteOp|DstMem|SrcNone|ModRM|Mov, ByteOp|DstMem|SrcNone|ModRM|Mov,
    /* 0x98 - 0x9F */
    ByteOp|DstMem|SrcNone|ModRM|Mov, ByteOp|DstMem|SrcNone|ModRM|Mov,
    ByteOp|DstMem|SrcNone|ModRM|Mov, ByteOp|DstMem|SrcNone|ModRM|Mov,
    ByteOp|DstMem|SrcNone|ModRM|Mov, ByteOp|DstMem|SrcNone|ModRM|Mov,
    ByteOp|DstMem|SrcNone|ModRM|Mov, ByteOp|DstMem|SrcNone|ModRM|Mov,
    /* 0xA0 - 0xA7 */
    0, 0, 0, DstBitBase|SrcReg|ModRM, 0, 0, 0, 0,
    /* 0xA8 - 0xAF */
    0, 0, 0, DstBitBase|SrcReg|ModRM, 0, 0, 0, DstReg|SrcMem|ModRM,
    /* 0xB0 - 0xB7 */
    ByteOp|DstMem|SrcReg|ModRM, DstMem|SrcReg|ModRM,
    0, DstBitBase|SrcReg|ModRM,
    0, 0, ByteOp|DstReg|SrcMem|ModRM|Mov, DstReg|SrcMem16|ModRM|Mov,
    /* 0xB8 - 0xBF */
    0, 0, DstBitBase|SrcImmByte|ModRM, DstBitBase|SrcReg|ModRM,
    DstReg|SrcMem|ModRM, DstReg|SrcMem|ModRM,
    ByteOp|DstReg|SrcMem|ModRM|Mov, DstReg|SrcMem16|ModRM|Mov,
    /* 0xC0 - 0xC7 */
    ByteOp|DstMem|SrcReg|ModRM, DstMem|SrcReg|ModRM, 0, 0,
    0, 0, 0, ImplicitOps|ModRM,
    /* 0xC8 - 0xCF */
    ImplicitOps, ImplicitOps, ImplicitOps, ImplicitOps,
    ImplicitOps, ImplicitOps, ImplicitOps, ImplicitOps,
    /* 0xD0 - 0xDF */
    0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
    /* 0xE0 - 0xEF */
    0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
    /* 0xF0 - 0xFF */
    0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0
};
/* Type, address-of, and value of an instruction's operand. */
struct operand {
    enum { OP_REG, OP_MEM, OP_IMM, OP_NONE } type;
    /* Operand size in bytes. */
    unsigned int bytes;
    /*
     * Operand value, and its original value (presumably for write-back of
     * read-modify-write destinations -- confirm against the emulator body).
     */
    unsigned long val, orig_val;
    union {
        /* OP_REG: Pointer to register field. */
        unsigned long *reg;
        /* OP_MEM: Segment and offset. */
        struct {
            enum x86_segment seg;
            unsigned long off;
        } mem;
    };
};
/* EFLAGS bit definitions. */
#define EFLG_OF (1<<11) /* overflow */
#define EFLG_DF (1<<10) /* direction */
#define EFLG_IF (1<<9)  /* interrupt enable */
#define EFLG_SF (1<<7)  /* sign */
#define EFLG_ZF (1<<6)  /* zero */
#define EFLG_AF (1<<4)  /* auxiliary carry */
#define EFLG_PF (1<<2)  /* parity */
#define EFLG_CF (1<<0)  /* carry */

/* Exception definitions (x86 exception vector numbers). */
#define EXC_DE  0  /* divide error */
#define EXC_BR  5  /* BOUND range exceeded */
#define EXC_UD  6  /* invalid opcode */
#define EXC_GP 13  /* general protection */
/*
 * Instruction emulation:
 * Most instructions are emulated directly via a fragment of inline assembly
 * code. This allows us to save/restore EFLAGS and thus very easily pick up
 * any modified flags.
 */

#if defined(__x86_64__)
#define _LO32 "k"     /* force 32-bit operand */
#define _STK  "%%rsp" /* stack pointer */
#elif defined(__i386__)
#define _LO32 ""      /* force 32-bit operand */
#define _STK  "%%esp" /* stack pointer */
#endif

/*
 * These EFLAGS bits are restored from saved value during emulation, and
 * any changes are written back to the saved value after emulation.
 */
#define EFLAGS_MASK (EFLG_OF|EFLG_SF|EFLG_ZF|EFLG_AF|EFLG_PF|EFLG_CF)

/*
 * Before executing instruction: restore necessary bits in EFLAGS.
 * _sav/_msk/_tmp are asm operand numbers (as strings): the saved flags,
 * the EFLAGS_MASK immediate, and a scratch register respectively.
 */
#define _PRE_EFLAGS(_sav, _msk, _tmp) \
/* EFLAGS = (_sav & _msk) | (EFLAGS & ~_msk); */\
"push %"_sav"; "                                \
"movl %"_msk",%"_LO32 _tmp"; "                  \
"andl %"_LO32 _tmp",("_STK"); "                 \
"pushf; "                                       \
"notl %"_LO32 _tmp"; "                          \
"andl %"_LO32 _tmp",("_STK"); "                 \
"pop %"_tmp"; "                                 \
"orl %"_LO32 _tmp",("_STK"); "                  \
"popf; "                                        \
/* _sav &= ~msk; */                             \
"movl %"_msk",%"_LO32 _tmp"; "                  \
"notl %"_LO32 _tmp"; "                          \
"andl %"_LO32 _tmp",%"_sav"; "

/* After executing instruction: write-back necessary bits in EFLAGS. */
#define _POST_EFLAGS(_sav, _msk, _tmp) \
/* _sav |= EFLAGS & _msk; */           \
"pushf; "                              \
"pop %"_tmp"; "                        \
"andl %"_msk",%"_LO32 _tmp"; "         \
"orl %"_LO32 _tmp",%"_sav"; "
/*
 * Raw emulation: instruction has two explicit operands.
 * Asm operands: %0=saved eflags, %1=destination, %2=scratch, %3=source,
 * %4=EFLAGS_MASK. The _?x/_?y argument pairs give the per-width operand
 * size suffix and register constraint for the source.
 */
#define __emulate_2op_nobyte(_op,_src,_dst,_eflags,_wx,_wy,_lx,_ly,_qx,_qy)\
do{ unsigned long _tmp;                                                    \
    switch ( (_dst).bytes )                                                \
    {                                                                      \
    case 2:                                                                \
        __asm__ __volatile__ (                                             \
            _PRE_EFLAGS("0","4","2")                                       \
            _op"w %"_wx"3,%1; "                                            \
            _POST_EFLAGS("0","4","2")                                      \
            : "=m" (_eflags), "=m" ((_dst).val), "=&r" (_tmp)              \
            : _wy ((_src).val), "i" (EFLAGS_MASK),                         \
              "m" (_eflags), "m" ((_dst).val) );                           \
        break;                                                             \
    case 4:                                                                \
        __asm__ __volatile__ (                                             \
            _PRE_EFLAGS("0","4","2")                                       \
            _op"l %"_lx"3,%1; "                                            \
            _POST_EFLAGS("0","4","2")                                      \
            : "=m" (_eflags), "=m" ((_dst).val), "=&r" (_tmp)              \
            : _ly ((_src).val), "i" (EFLAGS_MASK),                         \
              "m" (_eflags), "m" ((_dst).val) );                           \
        break;                                                             \
    case 8:                                                                \
        /* 64-bit operands: no-op on i386 builds. */                       \
        __emulate_2op_8byte(_op, _src, _dst, _eflags, _qx, _qy);           \
        break;                                                             \
    }                                                                      \
} while (0)
/* As above, but additionally handling byte-sized destinations. */
#define __emulate_2op(_op,_src,_dst,_eflags,_bx,_by,_wx,_wy,_lx,_ly,_qx,_qy)\
do{ unsigned long _tmp;                                                    \
    switch ( (_dst).bytes )                                                \
    {                                                                      \
    case 1:                                                                \
        __asm__ __volatile__ (                                             \
            _PRE_EFLAGS("0","4","2")                                       \
            _op"b %"_bx"3,%1; "                                            \
            _POST_EFLAGS("0","4","2")                                      \
            : "=m" (_eflags), "=m" ((_dst).val), "=&r" (_tmp)              \
            : _by ((_src).val), "i" (EFLAGS_MASK),                         \
              "m" (_eflags), "m" ((_dst).val) );                           \
        break;                                                             \
    default:                                                               \
        __emulate_2op_nobyte(_op,_src,_dst,_eflags,_wx,_wy,_lx,_ly,_qx,_qy);\
        break;                                                             \
    }                                                                      \
} while (0)
/* Source operand is byte-sized and may be restricted to just %cl. */
#define emulate_2op_SrcB(_op, _src, _dst, _eflags)                         \
    __emulate_2op(_op, _src, _dst, _eflags,                                \
                  "b", "c", "b", "c", "b", "c", "b", "c")
/* Source operand is byte, word, long or quad sized. */
#define emulate_2op_SrcV(_op, _src, _dst, _eflags)                         \
    __emulate_2op(_op, _src, _dst, _eflags,                                \
                  "b", "q", "w", "r", _LO32, "r", "", "r")
/* Source operand is word, long or quad sized. */
#define emulate_2op_SrcV_nobyte(_op, _src, _dst, _eflags)                  \
    __emulate_2op_nobyte(_op, _src, _dst, _eflags,                         \
                         "w", "r", _LO32, "r", "", "r")
/*
 * Instruction has only one explicit operand (no source operand).
 * Asm operands: %0=saved eflags, %1=destination, %2=scratch, %3=EFLAGS_MASK.
 */
#define emulate_1op(_op,_dst,_eflags)                                      \
do{ unsigned long _tmp;                                                    \
    switch ( (_dst).bytes )                                                \
    {                                                                      \
    case 1:                                                                \
        __asm__ __volatile__ (                                             \
            _PRE_EFLAGS("0","3","2")                                       \
            _op"b %1; "                                                    \
            _POST_EFLAGS("0","3","2")                                      \
            : "=m" (_eflags), "=m" ((_dst).val), "=&r" (_tmp)              \
            : "i" (EFLAGS_MASK), "m" (_eflags), "m" ((_dst).val) );        \
        break;                                                             \
    case 2:                                                                \
        __asm__ __volatile__ (                                             \
            _PRE_EFLAGS("0","3","2")                                       \
            _op"w %1; "                                                    \
            _POST_EFLAGS("0","3","2")                                      \
            : "=m" (_eflags), "=m" ((_dst).val), "=&r" (_tmp)              \
            : "i" (EFLAGS_MASK), "m" (_eflags), "m" ((_dst).val) );        \
        break;                                                             \
    case 4:                                                                \
        __asm__ __volatile__ (                                             \
            _PRE_EFLAGS("0","3","2")                                       \
            _op"l %1; "                                                    \
            _POST_EFLAGS("0","3","2")                                      \
            : "=m" (_eflags), "=m" ((_dst).val), "=&r" (_tmp)              \
            : "i" (EFLAGS_MASK), "m" (_eflags), "m" ((_dst).val) );        \
        break;                                                             \
    case 8:                                                                \
        /* 64-bit operand: no-op on i386 builds. */                        \
        __emulate_1op_8byte(_op, _dst, _eflags);                           \
        break;                                                             \
    }                                                                      \
} while (0)
/* Emulate an instruction with quadword operands (x86/64 only). */
#if defined(__x86_64__)
#define __emulate_2op_8byte(_op, _src, _dst, _eflags, _qx, _qy)            \
do{ __asm__ __volatile__ (                                                 \
        _PRE_EFLAGS("0","4","2")                                           \
        _op"q %"_qx"3,%1; "                                                \
        _POST_EFLAGS("0","4","2")                                          \
        : "=m" (_eflags), "=m" ((_dst).val), "=&r" (_tmp)                  \
        : _qy ((_src).val), "i" (EFLAGS_MASK),                             \
          "m" (_eflags), "m" ((_dst).val) );                               \
} while (0)
#define __emulate_1op_8byte(_op, _dst, _eflags)                            \
do{ __asm__ __volatile__ (                                                 \
        _PRE_EFLAGS("0","3","2")                                           \
        _op"q %1; "                                                        \
        _POST_EFLAGS("0","3","2")                                          \
        : "=m" (_eflags), "=m" ((_dst).val), "=&r" (_tmp)                  \
        : "i" (EFLAGS_MASK), "m" (_eflags), "m" ((_dst).val) );            \
} while (0)
#elif defined(__i386__)
/* No quadword GPR operands on i386: these expand to nothing. */
#define __emulate_2op_8byte(_op, _src, _dst, _eflags, _qx, _qy)
#define __emulate_1op_8byte(_op, _dst, _eflags)
#endif /* __i386__ */
/*
 * Fetch next part of the instruction being emulated. Uses x86_emulate()
 * locals (_regs, rc, ctxt, ops, the 'done' label). Evaluates to the
 * fetched value; on fetch failure, jumps to 'done' with rc set.
 */
#define insn_fetch_bytes(_size)                                            \
({ unsigned long _x, _eip = _regs.eip;                                     \
   if ( !mode_64bit() ) _eip = (uint32_t)_eip; /* ignore upper dword */    \
   _regs.eip += (_size); /* real hardware doesn't truncate */              \
   /* Instructions longer than 15 bytes fault (#GP) on real hardware. */   \
   generate_exception_if((uint8_t)(_regs.eip - ctxt->regs->eip) > 15,      \
                         EXC_GP);                                          \
   rc = ops->insn_fetch(x86_seg_cs, _eip, &_x, (_size), ctxt);             \
   if ( rc ) goto done;                                                    \
   _x;                                                                     \
})
#define insn_fetch_type(_type) ((_type)insn_fetch_bytes(sizeof(_type)))

/* Truncate an effective address to the given address width in bytes. */
#define _truncate_ea(ea, byte_width)                                       \
({ unsigned long __ea = (ea);                                              \
   unsigned int _width = (byte_width);                                     \
   ((_width == sizeof(unsigned long)) ? __ea :                             \
    (__ea & ((1UL << (_width << 3)) - 1)));                                \
})
#define truncate_ea(ea) _truncate_ea((ea), ad_bytes)
/* 64-bit mode is active iff the default address size is 8 bytes. */
#define mode_64bit() (def_ad_bytes == 8)

/* Bail out of emulation (via the 'done' label) if predicate _p holds. */
#define fail_if(p)                                      \
do {                                                    \
    rc = (p) ? X86EMUL_UNHANDLEABLE : X86EMUL_OKAY;     \
    if ( rc ) goto done;                                \
} while (0)

/* In future we will be able to generate arbitrary exceptions. */
#define generate_exception_if(p, e) fail_if(p)

/* To be done... */
#define mode_ring0() (0)
#define mode_iopl() (0)
/*
 * Given byte has even parity (even number of 1s)?
 * Returns 1 iff @v contains an even number of set bits (the x86 PF
 * definition, which considers only the low byte of a result).
 * Implemented as a portable bit-fold instead of the previous inline
 * "test/setp" assembly; behavior is identical.
 */
static int even_parity(uint8_t v)
{
    /* XOR-fold: bit 0 ends up as the XOR of all eight bits of v. */
    v ^= v >> 4;
    v ^= v >> 2;
    v ^= v >> 1;
    /* XOR of all bits is 0 exactly when the popcount is even. */
    return !(v & 1);
}
/*
 * Update address held in a register, based on addressing mode:
 * full-width registers are adjusted directly; narrower widths wrap within
 * the low _width bytes (64-bit mode zero-extends, legacy modes preserve
 * the untouched upper bits).
 */
#define _register_address_increment(reg, inc, byte_width)                  \
do {                                                                       \
    int _inc = (inc); /* signed type ensures sign extension to long */     \
    unsigned int _width = (byte_width);                                    \
    if ( _width == sizeof(unsigned long) )                                 \
        (reg) += _inc;                                                     \
    else if ( mode_64bit() )                                               \
        (reg) = ((reg) + _inc) & ((1UL << (_width << 3)) - 1);             \
    else                                                                   \
        (reg) = ((reg) & ~((1UL << (_width << 3)) - 1)) |                  \
                (((reg) + _inc) & ((1UL << (_width << 3)) - 1));           \
} while (0)
#define register_address_increment(reg, inc) \
    _register_address_increment((reg), (inc), ad_bytes)

/* Pre-decrement the stack pointer; evaluates to the new truncated eSP. */
#define sp_pre_dec(dec) ({                                                 \
    _register_address_increment(_regs.esp, -(dec), ctxt->sp_size/8);       \
    _truncate_ea(_regs.esp, ctxt->sp_size/8);                              \
})
/* Evaluate to the current truncated eSP, then post-increment it. */
#define sp_post_inc(inc) ({                                                \
    unsigned long __esp = _truncate_ea(_regs.esp, ctxt->sp_size/8);        \
    _register_address_increment(_regs.esp, (inc), ctxt->sp_size/8);        \
    __esp;                                                                 \
})

/* Relative jump: outside 64-bit mode, EIP truncates to the operand size. */
#define jmp_rel(rel)                                                       \
do {                                                                       \
    _regs.eip += (int)(rel);                                               \
    if ( !mode_64bit() )                                                   \
        _regs.eip = ((op_bytes == 2)                                       \
                     ? (uint16_t)_regs.eip : (uint32_t)_regs.eip);         \
} while (0)
522 static int __handle_rep_prefix(
523 struct cpu_user_regs *int_regs,
524 struct cpu_user_regs *ext_regs,
525 int ad_bytes)
526 {
527 unsigned long ecx = ((ad_bytes == 2) ? (uint16_t)int_regs->ecx :
528 (ad_bytes == 4) ? (uint32_t)int_regs->ecx :
529 int_regs->ecx);
531 if ( ecx-- == 0 )
532 {
533 ext_regs->eip = int_regs->eip;
534 return 1;
535 }
537 if ( ad_bytes == 2 )
538 *(uint16_t *)&int_regs->ecx = ecx;
539 else if ( ad_bytes == 4 )
540 int_regs->ecx = (uint32_t)ecx;
541 else
542 int_regs->ecx = ecx;
543 int_regs->eip = ext_regs->eip;
544 return 0;
545 }
/*
 * If a REP prefix was decoded, account for one iteration; when the count
 * was already exhausted, __handle_rep_prefix() has committed EIP and we
 * bail out of emulation via the 'done' label. Uses x86_emulate() locals.
 */
#define handle_rep_prefix()                                                \
do {                                                                       \
    if ( rep_prefix && __handle_rep_prefix(&_regs, ctxt->regs, ad_bytes) ) \
        goto done;                                                         \
} while (0)
/*
 * Unsigned multiplication with double-word result.
 * IN:  Multiplicand=m[0], Multiplier=m[1]
 * OUT: Return CF/OF (overflow status); Result=m[1]:m[0]
 */
static int mul_dbl(unsigned long m[2])
{
    int rc;
    /*
     * MUL leaves the low half of the product in rAX (m[0]) and the high
     * half in rDX (m[1]); SETO captures OF, which MUL sets (with CF) iff
     * the high half is non-zero.
     */
    asm ( "mul %4; seto %b2"
          : "=a" (m[0]), "=d" (m[1]), "=q" (rc)
          : "0" (m[0]), "1" (m[1]), "2" (0) );
    return rc;
}
/*
 * Signed multiplication with double-word result.
 * IN:  Multiplicand=m[0], Multiplier=m[1]
 * OUT: Return CF/OF (overflow status); Result=m[1]:m[0]
 */
static int imul_dbl(unsigned long m[2])
{
    int rc;
    /*
     * One-operand IMUL: rDX:rAX = rAX * operand. SETO captures OF, which
     * is set (with CF) when the product does not fit in a single word.
     */
    asm ( "imul %4; seto %b2"
          : "=a" (m[0]), "=d" (m[1]), "=q" (rc)
          : "0" (m[0]), "1" (m[1]), "2" (0) );
    return rc;
}
581 /*
582 * Unsigned division of double-word dividend.
583 * IN: Dividend=u[1]:u[0], Divisor=v
584 * OUT: Return 1: #DE
585 * Return 0: Quotient=u[0], Remainder=u[1]
586 */
587 static int div_dbl(unsigned long u[2], unsigned long v)
588 {
589 if ( (v == 0) || (u[1] > v) || ((u[1] == v) && (u[0] != 0)) )
590 return 1;
591 asm ( "div %4"
592 : "=a" (u[0]), "=d" (u[1])
593 : "0" (u[0]), "1" (u[1]), "r" (v) );
594 return 0;
595 }
/*
 * Signed division of double-word dividend.
 * IN:  Dividend=u[1]:u[0], Divisor=v
 * OUT: Return 1: #DE
 *      Return 0: Quotient=u[0], Remainder=u[1]
 * NB. We don't use idiv directly as it's moderately hard to work out
 * ahead of time whether it will #DE, which we cannot allow to happen.
 */
static int idiv_dbl(unsigned long u[2], unsigned long v)
{
    int negu = (long)u[1] < 0, negv = (long)v < 0;

    /* u = abs(u) */
    if ( negu )
    {
        /* Two's-complement negate of the double word u[1]:u[0]. */
        u[1] = ~u[1];
        if ( (u[0] = -u[0]) == 0 )
            u[1]++;
    }

    /* abs(u) / abs(v) */
    if ( div_dbl(u, negv ? -v : v) )
        return 1;

    /* Remainder has same sign as dividend. It cannot overflow. */
    if ( negu )
        u[1] = -u[1];

    /* Quotient is overflowed if sign bit is set. */
    if ( negu ^ negv )
    {
        /* Result must be negative: negate, allowing exactly -2^(N-1). */
        if ( (long)u[0] >= 0 )
            u[0] = -u[0];
        else if ( (u[0] << 1) != 0 ) /* == 0x80...0 is okay */
            return 1;
    }
    else if ( (long)u[0] < 0 )
        /* Result must be positive but the sign bit is set: overflow. */
        return 1;

    return 0;
}
639 static int
640 test_cc(
641 unsigned int condition, unsigned int flags)
642 {
643 int rc = 0;
645 switch ( (condition & 15) >> 1 )
646 {
647 case 0: /* o */
648 rc |= (flags & EFLG_OF);
649 break;
650 case 1: /* b/c/nae */
651 rc |= (flags & EFLG_CF);
652 break;
653 case 2: /* z/e */
654 rc |= (flags & EFLG_ZF);
655 break;
656 case 3: /* be/na */
657 rc |= (flags & (EFLG_CF|EFLG_ZF));
658 break;
659 case 4: /* s */
660 rc |= (flags & EFLG_SF);
661 break;
662 case 5: /* p/pe */
663 rc |= (flags & EFLG_PF);
664 break;
665 case 7: /* le/ng */
666 rc |= (flags & EFLG_ZF);
667 /* fall through */
668 case 6: /* l/nge */
669 rc |= (!(flags & EFLG_SF) != !(flags & EFLG_OF));
670 break;
671 }
673 /* Odd condition identifiers (lsb == 1) have inverted sense. */
674 return (!!rc ^ (condition & 1));
675 }
/*
 * Map a register number from ModRM/SIB/opcode encoding to a pointer into
 * the register file. If @highbyte_regs is non-zero, numbers 4-7 select the
 * legacy AH/CH/DH/BH registers: the returned pointer addresses byte 1 of
 * eAX/eCX/eDX/eBX (valid on little-endian x86). Numbers 8-15 (REX) are
 * only decoded on 64-bit builds. Returns NULL for an unknown number.
 */
void *
decode_register(
    uint8_t modrm_reg, struct cpu_user_regs *regs, int highbyte_regs)
{
    void *p;

    switch ( modrm_reg )
    {
    case 0: p = &regs->eax; break;
    case 1: p = &regs->ecx; break;
    case 2: p = &regs->edx; break;
    case 3: p = &regs->ebx; break;
    case 4: p = (highbyte_regs ?
                 ((unsigned char *)&regs->eax + 1) :
                 (unsigned char *)&regs->esp); break;
    case 5: p = (highbyte_regs ?
                 ((unsigned char *)&regs->ecx + 1) :
                 (unsigned char *)&regs->ebp); break;
    case 6: p = (highbyte_regs ?
                 ((unsigned char *)&regs->edx + 1) :
                 (unsigned char *)&regs->esi); break;
    case 7: p = (highbyte_regs ?
                 ((unsigned char *)&regs->ebx + 1) :
                 (unsigned char *)&regs->edi); break;
#if defined(__x86_64__)
    case  8: p = &regs->r8;  break;
    case  9: p = &regs->r9;  break;
    case 10: p = &regs->r10; break;
    case 11: p = &regs->r11; break;
    case 12: p = &regs->r12; break;
    case 13: p = &regs->r13; break;
    case 14: p = &regs->r14; break;
    case 15: p = &regs->r15; break;
#endif
    default: p = NULL; break;
    }

    return p;
}
717 int
718 x86_emulate(
719 struct x86_emulate_ctxt *ctxt,
720 struct x86_emulate_ops *ops)
721 {
722 /* Shadow copy of register state. Committed on successful emulation. */
723 struct cpu_user_regs _regs = *ctxt->regs;
725 uint8_t b, d, sib, sib_index, sib_base, twobyte = 0, rex_prefix = 0;
726 uint8_t modrm, modrm_mod = 0, modrm_reg = 0, modrm_rm = 0;
727 unsigned int op_bytes, def_op_bytes, ad_bytes, def_ad_bytes;
728 unsigned int lock_prefix = 0, rep_prefix = 0;
729 int override_seg = -1, rc = X86EMUL_OKAY;
730 struct operand src, dst;
732 /* Data operand effective address (usually computed from ModRM). */
733 struct operand ea;
735 /* Default is a memory operand relative to segment DS. */
736 ea.type = OP_MEM;
737 ea.mem.seg = x86_seg_ds;
738 ea.mem.off = 0;
740 op_bytes = def_op_bytes = ad_bytes = def_ad_bytes = ctxt->addr_size/8;
741 if ( op_bytes == 8 )
742 {
743 op_bytes = def_op_bytes = 4;
744 #ifndef __x86_64__
745 return X86EMUL_UNHANDLEABLE;
746 #endif
747 }
749 /* Prefix bytes. */
750 for ( ; ; )
751 {
752 switch ( b = insn_fetch_type(uint8_t) )
753 {
754 case 0x66: /* operand-size override */
755 op_bytes = def_op_bytes ^ 6;
756 break;
757 case 0x67: /* address-size override */
758 ad_bytes = def_ad_bytes ^ (mode_64bit() ? 12 : 6);
759 break;
760 case 0x2e: /* CS override */
761 override_seg = x86_seg_cs;
762 break;
763 case 0x3e: /* DS override */
764 override_seg = x86_seg_ds;
765 break;
766 case 0x26: /* ES override */
767 override_seg = x86_seg_es;
768 break;
769 case 0x64: /* FS override */
770 override_seg = x86_seg_fs;
771 break;
772 case 0x65: /* GS override */
773 override_seg = x86_seg_gs;
774 break;
775 case 0x36: /* SS override */
776 override_seg = x86_seg_ss;
777 break;
778 case 0xf0: /* LOCK */
779 lock_prefix = 1;
780 break;
781 case 0xf2: /* REPNE/REPNZ */
782 case 0xf3: /* REP/REPE/REPZ */
783 rep_prefix = 1;
784 break;
785 case 0x40 ... 0x4f: /* REX */
786 if ( !mode_64bit() )
787 goto done_prefixes;
788 rex_prefix = b;
789 continue;
790 default:
791 goto done_prefixes;
792 }
794 /* Any legacy prefix after a REX prefix nullifies its effect. */
795 rex_prefix = 0;
796 }
797 done_prefixes:
799 if ( rex_prefix & 8 ) /* REX.W */
800 op_bytes = 8;
802 /* Opcode byte(s). */
803 d = opcode_table[b];
804 if ( d == 0 )
805 {
806 /* Two-byte opcode? */
807 if ( b == 0x0f )
808 {
809 twobyte = 1;
810 b = insn_fetch_type(uint8_t);
811 d = twobyte_table[b];
812 }
814 /* Unrecognised? */
815 if ( d == 0 )
816 goto cannot_emulate;
817 }
819 /* Lock prefix is allowed only on RMW instructions. */
820 generate_exception_if((d & Mov) && lock_prefix, EXC_GP);
822 /* ModRM and SIB bytes. */
823 if ( d & ModRM )
824 {
825 modrm = insn_fetch_type(uint8_t);
826 modrm_mod = (modrm & 0xc0) >> 6;
827 modrm_reg = ((rex_prefix & 4) << 1) | ((modrm & 0x38) >> 3);
828 modrm_rm = modrm & 0x07;
830 if ( modrm_mod == 3 )
831 {
832 modrm_rm |= (rex_prefix & 1) << 3;
833 ea.type = OP_REG;
834 ea.reg = decode_register(
835 modrm_rm, &_regs, (d & ByteOp) && (rex_prefix == 0));
836 }
837 else if ( ad_bytes == 2 )
838 {
839 /* 16-bit ModR/M decode. */
840 switch ( modrm_rm )
841 {
842 case 0:
843 ea.mem.off = _regs.ebx + _regs.esi;
844 break;
845 case 1:
846 ea.mem.off = _regs.ebx + _regs.edi;
847 break;
848 case 2:
849 ea.mem.seg = x86_seg_ss;
850 ea.mem.off = _regs.ebp + _regs.esi;
851 break;
852 case 3:
853 ea.mem.seg = x86_seg_ss;
854 ea.mem.off = _regs.ebp + _regs.edi;
855 break;
856 case 4:
857 ea.mem.off = _regs.esi;
858 break;
859 case 5:
860 ea.mem.off = _regs.edi;
861 break;
862 case 6:
863 if ( modrm_mod == 0 )
864 break;
865 ea.mem.seg = x86_seg_ss;
866 ea.mem.off = _regs.ebp;
867 break;
868 case 7:
869 ea.mem.off = _regs.ebx;
870 break;
871 }
872 switch ( modrm_mod )
873 {
874 case 0:
875 if ( modrm_rm == 6 )
876 ea.mem.off = insn_fetch_type(int16_t);
877 break;
878 case 1:
879 ea.mem.off += insn_fetch_type(int8_t);
880 break;
881 case 2:
882 ea.mem.off += insn_fetch_type(int16_t);
883 break;
884 }
885 ea.mem.off = truncate_ea(ea.mem.off);
886 }
887 else
888 {
889 /* 32/64-bit ModR/M decode. */
890 if ( modrm_rm == 4 )
891 {
892 sib = insn_fetch_type(uint8_t);
893 sib_index = ((sib >> 3) & 7) | ((rex_prefix << 2) & 8);
894 sib_base = (sib & 7) | ((rex_prefix << 3) & 8);
895 if ( sib_index != 4 )
896 ea.mem.off = *(long*)decode_register(sib_index, &_regs, 0);
897 ea.mem.off <<= (sib >> 6) & 3;
898 if ( (modrm_mod == 0) && ((sib_base & 7) == 5) )
899 ea.mem.off += insn_fetch_type(int32_t);
900 else if ( sib_base == 4 )
901 {
902 ea.mem.seg = x86_seg_ss;
903 ea.mem.off += _regs.esp;
904 if ( !twobyte && (b == 0x8f) )
905 /* POP <rm> computes its EA post increment. */
906 ea.mem.off += ((mode_64bit() && (op_bytes == 4))
907 ? 8 : op_bytes);
908 }
909 else if ( sib_base == 5 )
910 {
911 ea.mem.seg = x86_seg_ss;
912 ea.mem.off += _regs.ebp;
913 }
914 else
915 ea.mem.off += *(long*)decode_register(sib_base, &_regs, 0);
916 }
917 else
918 {
919 modrm_rm |= (rex_prefix & 1) << 3;
920 ea.mem.off = *(long *)decode_register(modrm_rm, &_regs, 0);
921 if ( (modrm_rm == 5) && (modrm_mod != 0) )
922 ea.mem.seg = x86_seg_ss;
923 }
924 switch ( modrm_mod )
925 {
926 case 0:
927 if ( (modrm_rm & 7) != 5 )
928 break;
929 ea.mem.off = insn_fetch_type(int32_t);
930 if ( !mode_64bit() )
931 break;
932 /* Relative to RIP of next instruction. Argh! */
933 ea.mem.off += _regs.eip;
934 if ( (d & SrcMask) == SrcImm )
935 ea.mem.off += (d & ByteOp) ? 1 :
936 ((op_bytes == 8) ? 4 : op_bytes);
937 else if ( (d & SrcMask) == SrcImmByte )
938 ea.mem.off += 1;
939 else if ( ((b == 0xf6) || (b == 0xf7)) &&
940 ((modrm_reg & 7) <= 1) )
941 /* Special case in Grp3: test has immediate operand. */
942 ea.mem.off += (d & ByteOp) ? 1
943 : ((op_bytes == 8) ? 4 : op_bytes);
944 break;
945 case 1:
946 ea.mem.off += insn_fetch_type(int8_t);
947 break;
948 case 2:
949 ea.mem.off += insn_fetch_type(int32_t);
950 break;
951 }
952 ea.mem.off = truncate_ea(ea.mem.off);
953 }
954 }
956 if ( override_seg != -1 )
957 ea.mem.seg = override_seg;
959 /* Special instructions do their own operand decoding. */
960 if ( (d & DstMask) == ImplicitOps )
961 goto special_insn;
963 /* Decode and fetch the source operand: register, memory or immediate. */
964 switch ( d & SrcMask )
965 {
966 case SrcNone:
967 break;
968 case SrcReg:
969 src.type = OP_REG;
970 if ( d & ByteOp )
971 {
972 src.reg = decode_register(modrm_reg, &_regs, (rex_prefix == 0));
973 src.val = *(uint8_t *)src.reg;
974 src.bytes = 1;
975 }
976 else
977 {
978 src.reg = decode_register(modrm_reg, &_regs, 0);
979 switch ( (src.bytes = op_bytes) )
980 {
981 case 2: src.val = *(uint16_t *)src.reg; break;
982 case 4: src.val = *(uint32_t *)src.reg; break;
983 case 8: src.val = *(uint64_t *)src.reg; break;
984 }
985 }
986 break;
987 case SrcMem16:
988 ea.bytes = 2;
989 goto srcmem_common;
990 case SrcMem:
991 ea.bytes = (d & ByteOp) ? 1 : op_bytes;
992 srcmem_common:
993 src = ea;
994 if ( src.type == OP_REG )
995 {
996 switch ( src.bytes )
997 {
998 case 1: src.val = *(uint8_t *)src.reg; break;
999 case 2: src.val = *(uint16_t *)src.reg; break;
1000 case 4: src.val = *(uint32_t *)src.reg; break;
1001 case 8: src.val = *(uint64_t *)src.reg; break;
1004 else if ( (rc = ops->read(src.mem.seg, src.mem.off,
1005 &src.val, src.bytes, ctxt)) )
1006 goto done;
1007 break;
1008 case SrcImm:
1009 src.type = OP_IMM;
1010 src.bytes = (d & ByteOp) ? 1 : op_bytes;
1011 if ( src.bytes == 8 ) src.bytes = 4;
1012 /* NB. Immediates are sign-extended as necessary. */
1013 switch ( src.bytes )
1015 case 1: src.val = insn_fetch_type(int8_t); break;
1016 case 2: src.val = insn_fetch_type(int16_t); break;
1017 case 4: src.val = insn_fetch_type(int32_t); break;
1019 break;
1020 case SrcImmByte:
1021 src.type = OP_IMM;
1022 src.bytes = 1;
1023 src.val = insn_fetch_type(int8_t);
1024 break;
1027 /* Decode and fetch the destination operand: register or memory. */
1028 switch ( d & DstMask )
1030 case DstReg:
1031 dst.type = OP_REG;
1032 if ( d & ByteOp )
1034 dst.reg = decode_register(modrm_reg, &_regs, (rex_prefix == 0));
1035 dst.val = *(uint8_t *)dst.reg;
1036 dst.bytes = 1;
1038 else
1040 dst.reg = decode_register(modrm_reg, &_regs, 0);
1041 switch ( (dst.bytes = op_bytes) )
1043 case 2: dst.val = *(uint16_t *)dst.reg; break;
1044 case 4: dst.val = *(uint32_t *)dst.reg; break;
1045 case 8: dst.val = *(uint64_t *)dst.reg; break;
1048 break;
1049 case DstBitBase:
1050 if ( ((d & SrcMask) == SrcImmByte) || (ea.type == OP_REG) )
1052 src.val &= (op_bytes << 3) - 1;
1054 else
1056 /*
1057 * EA += BitOffset DIV op_bytes*8
1058 * BitOffset = BitOffset MOD op_bytes*8
1059 * DIV truncates towards negative infinity.
1060 * MOD always produces a positive result.
1061 */
1062 if ( op_bytes == 2 )
1063 src.val = (int16_t)src.val;
1064 else if ( op_bytes == 4 )
1065 src.val = (int32_t)src.val;
1066 if ( (long)src.val < 0 )
1068 unsigned long byte_offset;
1069 byte_offset = op_bytes + (((-src.val-1) >> 3) & ~(op_bytes-1));
1070 ea.mem.off -= byte_offset;
1071 src.val = (byte_offset << 3) + src.val;
1073 else
1075 ea.mem.off += (src.val >> 3) & ~(op_bytes - 1);
1076 src.val &= (op_bytes << 3) - 1;
1079 /* Becomes a normal DstMem operation from here on. */
1080 d = (d & ~DstMask) | DstMem;
1081 case DstMem:
1082 ea.bytes = (d & ByteOp) ? 1 : op_bytes;
1083 dst = ea;
1084 if ( dst.type == OP_REG )
1086 switch ( dst.bytes )
1088 case 1: dst.val = *(uint8_t *)dst.reg; break;
1089 case 2: dst.val = *(uint16_t *)dst.reg; break;
1090 case 4: dst.val = *(uint32_t *)dst.reg; break;
1091 case 8: dst.val = *(uint64_t *)dst.reg; break;
1094 else if ( !(d & Mov) ) /* optimisation - avoid slow emulated read */
1096 if ( (rc = ops->read(dst.mem.seg, dst.mem.off,
1097 &dst.val, dst.bytes, ctxt)) )
1098 goto done;
1099 dst.orig_val = dst.val;
1101 break;
1104 /* LOCK prefix allowed only on instructions with memory destination. */
1105 generate_exception_if(lock_prefix && (dst.type != OP_MEM), EXC_GP);
1107 if ( twobyte )
1108 goto twobyte_insn;
1110 switch ( b )
1112 case 0x04 ... 0x05: /* add imm,%%eax */
1113 dst.reg = (unsigned long *)&_regs.eax;
1114 dst.val = _regs.eax;
1115 case 0x00 ... 0x03: add: /* add */
1116 emulate_2op_SrcV("add", src, dst, _regs.eflags);
1117 break;
1119 case 0x0c ... 0x0d: /* or imm,%%eax */
1120 dst.reg = (unsigned long *)&_regs.eax;
1121 dst.val = _regs.eax;
1122 case 0x08 ... 0x0b: or: /* or */
1123 emulate_2op_SrcV("or", src, dst, _regs.eflags);
1124 break;
1126 case 0x14 ... 0x15: /* adc imm,%%eax */
1127 dst.reg = (unsigned long *)&_regs.eax;
1128 dst.val = _regs.eax;
1129 case 0x10 ... 0x13: adc: /* adc */
1130 emulate_2op_SrcV("adc", src, dst, _regs.eflags);
1131 break;
1133 case 0x1c ... 0x1d: /* sbb imm,%%eax */
1134 dst.reg = (unsigned long *)&_regs.eax;
1135 dst.val = _regs.eax;
1136 case 0x18 ... 0x1b: sbb: /* sbb */
1137 emulate_2op_SrcV("sbb", src, dst, _regs.eflags);
1138 break;
1140 case 0x24 ... 0x25: /* and imm,%%eax */
1141 dst.reg = (unsigned long *)&_regs.eax;
1142 dst.val = _regs.eax;
1143 case 0x20 ... 0x23: and: /* and */
1144 emulate_2op_SrcV("and", src, dst, _regs.eflags);
1145 break;
1147 case 0x2c ... 0x2d: /* sub imm,%%eax */
1148 dst.reg = (unsigned long *)&_regs.eax;
1149 dst.val = _regs.eax;
1150 case 0x28 ... 0x2b: sub: /* sub */
1151 emulate_2op_SrcV("sub", src, dst, _regs.eflags);
1152 break;
1154 case 0x34 ... 0x35: /* xor imm,%%eax */
1155 dst.reg = (unsigned long *)&_regs.eax;
1156 dst.val = _regs.eax;
1157 case 0x30 ... 0x33: xor: /* xor */
1158 emulate_2op_SrcV("xor", src, dst, _regs.eflags);
1159 break;
1161 case 0x3c ... 0x3d: /* cmp imm,%%eax */
1162 dst.reg = (unsigned long *)&_regs.eax;
1163 dst.val = _regs.eax;
1164 case 0x38 ... 0x3b: cmp: /* cmp */
1165 emulate_2op_SrcV("cmp", src, dst, _regs.eflags);
1166 break;
1168 case 0x62: /* bound */ {
1169 unsigned long src_val2;
1170 int lb, ub, idx;
1171 generate_exception_if(mode_64bit() || (src.type != OP_MEM), EXC_UD);
1172 if ( (rc = ops->read(src.mem.seg, src.mem.off + op_bytes,
1173 &src_val2, op_bytes, ctxt)) )
1174 goto done;
1175 ub = (op_bytes == 2) ? (int16_t)src_val2 : (int32_t)src_val2;
1176 lb = (op_bytes == 2) ? (int16_t)src.val : (int32_t)src.val;
1177 idx = (op_bytes == 2) ? (int16_t)dst.val : (int32_t)dst.val;
1178 generate_exception_if((idx < lb) || (idx > ub), EXC_BR);
1179 dst.type = OP_NONE;
1180 break;
1183 case 0x63: /* movsxd (x86/64) / arpl (x86/32) */
1184 if ( mode_64bit() )
1186 /* movsxd */
1187 if ( src.type == OP_REG )
1188 src.val = *(int32_t *)src.reg;
1189 else if ( (rc = ops->read(src.mem.seg, src.mem.off,
1190 &src.val, 4, ctxt)) )
1191 goto done;
1192 dst.val = (int32_t)src.val;
1194 else
1196 /* arpl */
1197 uint16_t src_val = dst.val;
1198 dst = src;
1199 _regs.eflags &= ~EFLG_ZF;
1200 _regs.eflags |= ((src_val & 3) > (dst.val & 3)) ? EFLG_ZF : 0;
1201 if ( _regs.eflags & EFLG_ZF )
1202 dst.val = (dst.val & ~3) | (src_val & 3);
1203 else
1204 dst.type = OP_NONE;
1206 break;
1208 case 0x69: /* imul imm16/32 */
1209 case 0x6b: /* imul imm8 */ {
1210 unsigned long reg = *(long *)decode_register(modrm_reg, &_regs, 0);
1211 _regs.eflags &= ~(EFLG_OF|EFLG_CF);
1212 switch ( dst.bytes )
1214 case 2:
1215 dst.val = ((uint32_t)(int16_t)src.val *
1216 (uint32_t)(int16_t)reg);
1217 if ( (int16_t)dst.val != (uint32_t)dst.val )
1218 _regs.eflags |= EFLG_OF|EFLG_CF;
1219 break;
1220 #ifdef __x86_64__
1221 case 4:
1222 dst.val = ((uint64_t)(int32_t)src.val *
1223 (uint64_t)(int32_t)reg);
1224 if ( (int32_t)dst.val != dst.val )
1225 _regs.eflags |= EFLG_OF|EFLG_CF;
1226 break;
1227 #endif
1228 default: {
1229 unsigned long m[2] = { src.val, reg };
1230 if ( imul_dbl(m) )
1231 _regs.eflags |= EFLG_OF|EFLG_CF;
1232 dst.val = m[0];
1233 break;
1236 dst.type = OP_REG;
1237 dst.reg = decode_register(modrm_reg, &_regs, 0);
1238 break;
1241 case 0x82: /* Grp1 (x86/32 only) */
1242 generate_exception_if(mode_64bit(), EXC_UD);
1243 case 0x80: case 0x81: case 0x83: /* Grp1 */
1244 switch ( modrm_reg & 7 )
1246 case 0: goto add;
1247 case 1: goto or;
1248 case 2: goto adc;
1249 case 3: goto sbb;
1250 case 4: goto and;
1251 case 5: goto sub;
1252 case 6: goto xor;
1253 case 7: goto cmp;
1255 break;
1257 case 0xa8 ... 0xa9: /* test imm,%%eax */
1258 dst.reg = (unsigned long *)&_regs.eax;
1259 dst.val = _regs.eax;
1260 case 0x84 ... 0x85: test: /* test */
1261 emulate_2op_SrcV("test", src, dst, _regs.eflags);
1262 break;
1264 case 0x86 ... 0x87: xchg: /* xchg */
1265 /* Write back the register source. */
1266 switch ( dst.bytes )
1268 case 1: *(uint8_t *)src.reg = (uint8_t)dst.val; break;
1269 case 2: *(uint16_t *)src.reg = (uint16_t)dst.val; break;
1270 case 4: *src.reg = (uint32_t)dst.val; break; /* 64b reg: zero-extend */
1271 case 8: *src.reg = dst.val; break;
1273 /* Write back the memory destination with implicit LOCK prefix. */
1274 dst.val = src.val;
1275 lock_prefix = 1;
1276 break;
1278 case 0xc6 ... 0xc7: /* mov (sole member of Grp11) */
1279 generate_exception_if((modrm_reg & 7) != 0, EXC_UD);
1280 case 0x88 ... 0x8b: /* mov */
1281 dst.val = src.val;
1282 break;
1284 case 0x8d: /* lea */
1285 dst.val = ea.mem.off;
1286 break;
1288 case 0x8f: /* pop (sole member of Grp1a) */
1289 generate_exception_if((modrm_reg & 7) != 0, EXC_UD);
1290 /* 64-bit mode: POP defaults to a 64-bit operand. */
1291 if ( mode_64bit() && (dst.bytes == 4) )
1292 dst.bytes = 8;
1293 if ( (rc = ops->read(x86_seg_ss, sp_post_inc(dst.bytes),
1294 &dst.val, dst.bytes, ctxt)) != 0 )
1295 goto done;
1296 break;
1298 case 0xb0 ... 0xb7: /* mov imm8,r8 */
1299 dst.reg = decode_register(
1300 (b & 7) | ((rex_prefix & 1) << 3), &_regs, (rex_prefix == 0));
1301 dst.val = src.val;
1302 break;
1304 case 0xb8 ... 0xbf: /* mov imm{16,32,64},r{16,32,64} */
1305 if ( dst.bytes == 8 ) /* Fetch more bytes to obtain imm64 */
1306 src.val = ((uint32_t)src.val |
1307 ((uint64_t)insn_fetch_type(uint32_t) << 32));
1308 dst.reg = decode_register(
1309 (b & 7) | ((rex_prefix & 1) << 3), &_regs, 0);
1310 dst.val = src.val;
1311 break;
1313 case 0xc0 ... 0xc1: grp2: /* Grp2 */
1314 switch ( modrm_reg & 7 )
1316 case 0: /* rol */
1317 emulate_2op_SrcB("rol", src, dst, _regs.eflags);
1318 break;
1319 case 1: /* ror */
1320 emulate_2op_SrcB("ror", src, dst, _regs.eflags);
1321 break;
1322 case 2: /* rcl */
1323 emulate_2op_SrcB("rcl", src, dst, _regs.eflags);
1324 break;
1325 case 3: /* rcr */
1326 emulate_2op_SrcB("rcr", src, dst, _regs.eflags);
1327 break;
1328 case 4: /* sal/shl */
1329 case 6: /* sal/shl */
1330 emulate_2op_SrcB("sal", src, dst, _regs.eflags);
1331 break;
1332 case 5: /* shr */
1333 emulate_2op_SrcB("shr", src, dst, _regs.eflags);
1334 break;
1335 case 7: /* sar */
1336 emulate_2op_SrcB("sar", src, dst, _regs.eflags);
1337 break;
1339 break;
1341 case 0xd0 ... 0xd1: /* Grp2 */
1342 src.val = 1;
1343 goto grp2;
1345 case 0xd2 ... 0xd3: /* Grp2 */
1346 src.val = _regs.ecx;
1347 goto grp2;
1349 case 0xf6 ... 0xf7: /* Grp3 */
1350 switch ( modrm_reg & 7 )
1352 case 0 ... 1: /* test */
1353 /* Special case in Grp3: test has an immediate source operand. */
1354 src.type = OP_IMM;
1355 src.bytes = (d & ByteOp) ? 1 : op_bytes;
1356 if ( src.bytes == 8 ) src.bytes = 4;
1357 switch ( src.bytes )
1359 case 1: src.val = insn_fetch_type(int8_t); break;
1360 case 2: src.val = insn_fetch_type(int16_t); break;
1361 case 4: src.val = insn_fetch_type(int32_t); break;
1363 goto test;
1364 case 2: /* not */
1365 dst.val = ~dst.val;
1366 break;
1367 case 3: /* neg */
1368 emulate_1op("neg", dst, _regs.eflags);
1369 break;
1370 case 4: /* mul */
1371 src = dst;
1372 dst.type = OP_REG;
1373 dst.reg = (unsigned long *)&_regs.eax;
1374 dst.val = *dst.reg;
1375 _regs.eflags &= ~(EFLG_OF|EFLG_CF);
1376 switch ( src.bytes )
1378 case 1:
1379 dst.val *= src.val;
1380 if ( (uint8_t)dst.val != (uint16_t)dst.val )
1381 _regs.eflags |= EFLG_OF|EFLG_CF;
1382 break;
1383 case 2:
1384 dst.val *= src.val;
1385 if ( (uint16_t)dst.val != (uint32_t)dst.val )
1386 _regs.eflags |= EFLG_OF|EFLG_CF;
1387 *(uint16_t *)&_regs.edx = dst.val >> 16;
1388 break;
1389 #ifdef __x86_64__
1390 case 4:
1391 dst.val *= src.val;
1392 if ( (uint32_t)dst.val != dst.val )
1393 _regs.eflags |= EFLG_OF|EFLG_CF;
1394 _regs.edx = (uint32_t)(dst.val >> 32);
1395 break;
1396 #endif
1397 default: {
1398 unsigned long m[2] = { src.val, dst.val };
1399 if ( mul_dbl(m) )
1400 _regs.eflags |= EFLG_OF|EFLG_CF;
1401 _regs.edx = m[1];
1402 dst.val = m[0];
1403 break;
1406 break;
1407 case 5: /* imul */
1408 src = dst;
1409 dst.type = OP_REG;
1410 dst.reg = (unsigned long *)&_regs.eax;
1411 dst.val = *dst.reg;
1412 _regs.eflags &= ~(EFLG_OF|EFLG_CF);
1413 switch ( src.bytes )
1415 case 1:
1416 dst.val = ((uint16_t)(int8_t)src.val *
1417 (uint16_t)(int8_t)dst.val);
1418 if ( (int8_t)dst.val != (uint16_t)dst.val )
1419 _regs.eflags |= EFLG_OF|EFLG_CF;
1420 break;
1421 case 2:
1422 dst.val = ((uint32_t)(int16_t)src.val *
1423 (uint32_t)(int16_t)dst.val);
1424 if ( (int16_t)dst.val != (uint32_t)dst.val )
1425 _regs.eflags |= EFLG_OF|EFLG_CF;
1426 *(uint16_t *)&_regs.edx = dst.val >> 16;
1427 break;
1428 #ifdef __x86_64__
1429 case 4:
1430 dst.val = ((uint64_t)(int32_t)src.val *
1431 (uint64_t)(int32_t)dst.val);
1432 if ( (int32_t)dst.val != dst.val )
1433 _regs.eflags |= EFLG_OF|EFLG_CF;
1434 _regs.edx = (uint32_t)(dst.val >> 32);
1435 break;
1436 #endif
1437 default: {
1438 unsigned long m[2] = { src.val, dst.val };
1439 if ( imul_dbl(m) )
1440 _regs.eflags |= EFLG_OF|EFLG_CF;
1441 _regs.edx = m[1];
1442 dst.val = m[0];
1443 break;
1446 break;
1447 case 6: /* div */ {
1448 unsigned long u[2], v;
1449 src = dst;
1450 dst.type = OP_REG;
1451 dst.reg = (unsigned long *)&_regs.eax;
1452 switch ( src.bytes )
1454 case 1:
1455 u[0] = (uint16_t)_regs.eax;
1456 u[1] = 0;
1457 v = (uint8_t)src.val;
1458 generate_exception_if(
1459 div_dbl(u, v) || ((uint8_t)u[0] != (uint16_t)u[0]),
1460 EXC_DE);
1461 dst.val = (uint8_t)u[0];
1462 ((uint8_t *)&_regs.eax)[1] = u[1];
1463 break;
1464 case 2:
1465 u[0] = ((uint32_t)_regs.edx << 16) | (uint16_t)_regs.eax;
1466 u[1] = 0;
1467 v = (uint16_t)src.val;
1468 generate_exception_if(
1469 div_dbl(u, v) || ((uint16_t)u[0] != (uint32_t)u[0]),
1470 EXC_DE);
1471 dst.val = (uint16_t)u[0];
1472 *(uint16_t *)&_regs.edx = u[1];
1473 break;
1474 #ifdef __x86_64__
1475 case 4:
1476 u[0] = (_regs.edx << 32) | (uint32_t)_regs.eax;
1477 u[1] = 0;
1478 v = (uint32_t)src.val;
1479 generate_exception_if(
1480 div_dbl(u, v) || ((uint32_t)u[0] != u[0]),
1481 EXC_DE);
1482 dst.val = (uint32_t)u[0];
1483 _regs.edx = (uint32_t)u[1];
1484 break;
1485 #endif
1486 default:
1487 u[0] = _regs.eax;
1488 u[1] = _regs.edx;
1489 v = src.val;
1490 generate_exception_if(div_dbl(u, v), EXC_DE);
1491 dst.val = u[0];
1492 _regs.edx = u[1];
1493 break;
1495 break;
1497 case 7: /* idiv */ {
1498 unsigned long u[2], v;
1499 src = dst;
1500 dst.type = OP_REG;
1501 dst.reg = (unsigned long *)&_regs.eax;
1502 switch ( src.bytes )
1504 case 1:
1505 u[0] = (int16_t)_regs.eax;
1506 u[1] = ((long)u[0] < 0) ? ~0UL : 0UL;
1507 v = (int8_t)src.val;
1508 generate_exception_if(
1509 idiv_dbl(u, v) || ((int8_t)u[0] != (int16_t)u[0]),
1510 EXC_DE);
1511 dst.val = (int8_t)u[0];
1512 ((int8_t *)&_regs.eax)[1] = u[1];
1513 break;
1514 case 2:
1515 u[0] = (int32_t)((_regs.edx << 16) | (uint16_t)_regs.eax);
1516 u[1] = ((long)u[0] < 0) ? ~0UL : 0UL;
1517 v = (int16_t)src.val;
1518 generate_exception_if(
1519 idiv_dbl(u, v) || ((int16_t)u[0] != (int32_t)u[0]),
1520 EXC_DE);
1521 dst.val = (int16_t)u[0];
1522 *(int16_t *)&_regs.edx = u[1];
1523 break;
1524 #ifdef __x86_64__
1525 case 4:
1526 u[0] = (_regs.edx << 32) | (uint32_t)_regs.eax;
1527 u[1] = ((long)u[0] < 0) ? ~0UL : 0UL;
1528 v = (int32_t)src.val;
1529 generate_exception_if(
1530 idiv_dbl(u, v) || ((int32_t)u[0] != u[0]),
1531 EXC_DE);
1532 dst.val = (int32_t)u[0];
1533 _regs.edx = (uint32_t)u[1];
1534 break;
1535 #endif
1536 default:
1537 u[0] = _regs.eax;
1538 u[1] = _regs.edx;
1539 v = src.val;
1540 generate_exception_if(idiv_dbl(u, v), EXC_DE);
1541 dst.val = u[0];
1542 _regs.edx = u[1];
1543 break;
1545 break;
1547 default:
1548 goto cannot_emulate;
1550 break;
1552 case 0xfe: /* Grp4 */
1553 generate_exception_if((modrm_reg & 7) >= 2, EXC_UD);
1554 case 0xff: /* Grp5 */
1555 switch ( modrm_reg & 7 )
1557 case 0: /* inc */
1558 emulate_1op("inc", dst, _regs.eflags);
1559 break;
1560 case 1: /* dec */
1561 emulate_1op("dec", dst, _regs.eflags);
1562 break;
1563 case 2: /* call (near) */
1564 case 4: /* jmp (near) */
1565 if ( ((op_bytes = dst.bytes) != 8) && mode_64bit() )
1567 dst.bytes = op_bytes = 8;
1568 if ( (rc = ops->read(dst.mem.seg, dst.mem.off,
1569 &dst.val, 8, ctxt)) != 0 )
1570 goto done;
1572 src.val = _regs.eip;
1573 _regs.eip = dst.val;
1574 if ( (modrm_reg & 7) == 2 )
1575 goto push; /* call */
1576 break;
1577 case 6: /* push */
1578 /* 64-bit mode: PUSH defaults to a 64-bit operand. */
1579 if ( mode_64bit() && (dst.bytes == 4) )
1581 dst.bytes = 8;
1582 if ( (rc = ops->read(dst.mem.seg, dst.mem.off,
1583 &dst.val, 8, ctxt)) != 0 )
1584 goto done;
1586 if ( (rc = ops->write(x86_seg_ss, sp_pre_dec(dst.bytes),
1587 dst.val, dst.bytes, ctxt)) != 0 )
1588 goto done;
1589 dst.type = OP_NONE;
1590 break;
1591 case 7:
1592 generate_exception_if(1, EXC_UD);
1593 default:
1594 goto cannot_emulate;
1596 break;
1599 writeback:
1600 switch ( dst.type )
1602 case OP_REG:
1603 /* The 4-byte case *is* correct: in 64-bit mode we zero-extend. */
1604 switch ( dst.bytes )
1606 case 1: *(uint8_t *)dst.reg = (uint8_t)dst.val; break;
1607 case 2: *(uint16_t *)dst.reg = (uint16_t)dst.val; break;
1608 case 4: *dst.reg = (uint32_t)dst.val; break; /* 64b: zero-ext */
1609 case 8: *dst.reg = dst.val; break;
1611 break;
1612 case OP_MEM:
1613 if ( !(d & Mov) && (dst.orig_val == dst.val) )
1614 /* nothing to do */;
1615 else if ( lock_prefix )
1616 rc = ops->cmpxchg(
1617 dst.mem.seg, dst.mem.off, dst.orig_val,
1618 dst.val, dst.bytes, ctxt);
1619 else
1620 rc = ops->write(
1621 dst.mem.seg, dst.mem.off, dst.val, dst.bytes, ctxt);
1622 if ( rc != 0 )
1623 goto done;
1624 default:
1625 break;
1628 /* Commit shadow register state. */
1629 *ctxt->regs = _regs;
1631 done:
1632 return rc;
1634 special_insn:
1635 dst.type = OP_NONE;
1637 /*
1638 * The only implicit-operands instructions allowed a LOCK prefix are
1639 * CMPXCHG{8,16}B, MOV CRn, MOV DRn.
1640 */
1641 generate_exception_if(lock_prefix &&
1642 ((b < 0x20) || (b > 0x23)) && /* MOV CRn/DRn */
1643 (b != 0xc7), /* CMPXCHG{8,16}B */
1644 EXC_GP);
1646 if ( twobyte )
1647 goto twobyte_special_insn;
1649 switch ( b )
1651 case 0x27: /* daa */ {
1652 uint8_t al = _regs.eax;
1653 unsigned long eflags = _regs.eflags;
1654 generate_exception_if(mode_64bit(), EXC_UD);
1655 _regs.eflags &= ~(EFLG_CF|EFLG_AF);
1656 if ( ((al & 0x0f) > 9) || (eflags & EFLG_AF) )
1658 *(uint8_t *)&_regs.eax += 6;
1659 _regs.eflags |= EFLG_AF;
1661 if ( (al > 0x99) || (eflags & EFLG_CF) )
1663 *(uint8_t *)&_regs.eax += 0x60;
1664 _regs.eflags |= EFLG_CF;
1666 _regs.eflags &= ~(EFLG_SF|EFLG_ZF|EFLG_PF);
1667 _regs.eflags |= ((uint8_t)_regs.eax == 0) ? EFLG_ZF : 0;
1668 _regs.eflags |= (( int8_t)_regs.eax < 0) ? EFLG_SF : 0;
1669 _regs.eflags |= even_parity(_regs.eax) ? EFLG_PF : 0;
1670 break;
1673 case 0x2f: /* das */ {
1674 uint8_t al = _regs.eax;
1675 unsigned long eflags = _regs.eflags;
1676 generate_exception_if(mode_64bit(), EXC_UD);
1677 _regs.eflags &= ~(EFLG_CF|EFLG_AF);
1678 if ( ((al & 0x0f) > 9) || (eflags & EFLG_AF) )
1680 _regs.eflags |= EFLG_AF;
1681 if ( (al < 6) || (eflags & EFLG_CF) )
1682 _regs.eflags |= EFLG_CF;
1683 *(uint8_t *)&_regs.eax -= 6;
1685 if ( (al > 0x99) || (eflags & EFLG_CF) )
1687 *(uint8_t *)&_regs.eax -= 0x60;
1688 _regs.eflags |= EFLG_CF;
1690 _regs.eflags &= ~(EFLG_SF|EFLG_ZF|EFLG_PF);
1691 _regs.eflags |= ((uint8_t)_regs.eax == 0) ? EFLG_ZF : 0;
1692 _regs.eflags |= (( int8_t)_regs.eax < 0) ? EFLG_SF : 0;
1693 _regs.eflags |= even_parity(_regs.eax) ? EFLG_PF : 0;
1694 break;
1697 case 0x37: /* aaa */
1698 case 0x3f: /* aas */
1699 generate_exception_if(mode_64bit(), EXC_UD);
1700 _regs.eflags &= ~EFLG_CF;
1701 if ( ((uint8_t)_regs.eax > 9) || (_regs.eflags & EFLG_AF) )
1703 ((uint8_t *)&_regs.eax)[0] += (b == 0x37) ? 6 : -6;
1704 ((uint8_t *)&_regs.eax)[1] += (b == 0x37) ? 1 : -1;
1705 _regs.eflags |= EFLG_CF | EFLG_AF;
1707 ((uint8_t *)&_regs.eax)[0] &= 0x0f;
1708 break;
1710 case 0x40 ... 0x4f: /* inc/dec reg */
1711 dst.type = OP_REG;
1712 dst.reg = decode_register(b & 7, &_regs, 0);
1713 dst.bytes = op_bytes;
1714 dst.val = *dst.reg;
1715 if ( b & 8 )
1716 emulate_1op("dec", dst, _regs.eflags);
1717 else
1718 emulate_1op("inc", dst, _regs.eflags);
1719 break;
1721 case 0x50 ... 0x57: /* push reg */
1722 src.val = *(unsigned long *)decode_register(
1723 (b & 7) | ((rex_prefix & 1) << 3), &_regs, 0);
1724 goto push;
1726 case 0x58 ... 0x5f: /* pop reg */
1727 dst.type = OP_REG;
1728 dst.reg = decode_register(
1729 (b & 7) | ((rex_prefix & 1) << 3), &_regs, 0);
1730 dst.bytes = op_bytes;
1731 if ( mode_64bit() && (dst.bytes == 4) )
1732 dst.bytes = 8;
1733 if ( (rc = ops->read(x86_seg_ss, sp_post_inc(dst.bytes),
1734 &dst.val, dst.bytes, ctxt)) != 0 )
1735 goto done;
1736 break;
1738 case 0x60: /* pusha */ {
1739 int i;
1740 unsigned long regs[] = {
1741 _regs.eax, _regs.ecx, _regs.edx, _regs.ebx,
1742 _regs.esp, _regs.ebp, _regs.esi, _regs.edi };
1743 generate_exception_if(mode_64bit(), EXC_UD);
1744 for ( i = 0; i < 8; i++ )
1745 if ( (rc = ops->write(x86_seg_ss, sp_pre_dec(op_bytes),
1746 regs[i], op_bytes, ctxt)) != 0 )
1747 goto done;
1748 break;
1751 case 0x61: /* popa */ {
1752 int i;
1753 unsigned long dummy_esp, *regs[] = {
1754 (unsigned long *)&_regs.edi, (unsigned long *)&_regs.esi,
1755 (unsigned long *)&_regs.ebp, (unsigned long *)&dummy_esp,
1756 (unsigned long *)&_regs.ebx, (unsigned long *)&_regs.edx,
1757 (unsigned long *)&_regs.ecx, (unsigned long *)&_regs.eax };
1758 generate_exception_if(mode_64bit(), EXC_UD);
1759 for ( i = 0; i < 8; i++ )
1760 if ( (rc = ops->read(x86_seg_ss, sp_post_inc(op_bytes),
1761 regs[i], op_bytes, ctxt)) != 0 )
1762 goto done;
1763 break;
1766 case 0x68: /* push imm{16,32,64} */
1767 src.val = ((op_bytes == 2)
1768 ? (int32_t)insn_fetch_type(int16_t)
1769 : insn_fetch_type(int32_t));
1770 goto push;
1772 case 0x6a: /* push imm8 */
1773 src.val = insn_fetch_type(int8_t);
1774 push:
1775 d |= Mov; /* force writeback */
1776 dst.type = OP_MEM;
1777 dst.bytes = op_bytes;
1778 if ( mode_64bit() && (dst.bytes == 4) )
1779 dst.bytes = 8;
1780 dst.val = src.val;
1781 dst.mem.seg = x86_seg_ss;
1782 dst.mem.off = sp_pre_dec(dst.bytes);
1783 break;
1785 case 0x6c ... 0x6d: /* ins %dx,%es:%edi */
1786 handle_rep_prefix();
1787 generate_exception_if(!mode_iopl(), EXC_GP);
1788 dst.type = OP_MEM;
1789 dst.bytes = !(b & 1) ? 1 : (op_bytes == 8) ? 4 : op_bytes;
1790 dst.mem.seg = x86_seg_es;
1791 dst.mem.off = truncate_ea(_regs.edi);
1792 fail_if(ops->read_io == NULL);
1793 if ( (rc = ops->read_io((uint16_t)_regs.edx, dst.bytes,
1794 &dst.val, ctxt)) != 0 )
1795 goto done;
1796 register_address_increment(
1797 _regs.edi, (_regs.eflags & EFLG_DF) ? -dst.bytes : dst.bytes);
1798 break;
1800 case 0x6e ... 0x6f: /* outs %esi,%dx */
1801 handle_rep_prefix();
1802 generate_exception_if(!mode_iopl(), EXC_GP);
1803 dst.bytes = !(b & 1) ? 1 : (op_bytes == 8) ? 4 : op_bytes;
1804 if ( (rc = ops->read(ea.mem.seg, truncate_ea(_regs.esi),
1805 &dst.val, dst.bytes, ctxt)) != 0 )
1806 goto done;
1807 fail_if(ops->write_io == NULL);
1808 if ( (rc = ops->write_io((uint16_t)_regs.edx, dst.bytes,
1809 dst.val, ctxt)) != 0 )
1810 goto done;
1811 register_address_increment(
1812 _regs.esi, (_regs.eflags & EFLG_DF) ? -dst.bytes : dst.bytes);
1813 break;
1815 case 0x70 ... 0x7f: /* jcc (short) */ {
1816 int rel = insn_fetch_type(int8_t);
1817 if ( test_cc(b, _regs.eflags) )
1818 jmp_rel(rel);
1819 break;
1822 case 0x90: /* nop / xchg %%r8,%%rax */
1823 if ( !(rex_prefix & 1) )
1824 break; /* nop */
1826 case 0x91 ... 0x97: /* xchg reg,%%rax */
1827 src.type = dst.type = OP_REG;
1828 src.bytes = dst.bytes = op_bytes;
1829 src.reg = (unsigned long *)&_regs.eax;
1830 src.val = *src.reg;
1831 dst.reg = decode_register(
1832 (b & 7) | ((rex_prefix & 1) << 3), &_regs, 0);
1833 dst.val = *dst.reg;
1834 goto xchg;
1836 case 0x98: /* cbw/cwde/cdqe */
1837 switch ( op_bytes )
1839 case 2: *(int16_t *)&_regs.eax = (int8_t)_regs.eax; break; /* cbw */
1840 case 4: _regs.eax = (uint32_t)(int16_t)_regs.eax; break; /* cwde */
1841 case 8: _regs.eax = (int32_t)_regs.eax; break; /* cdqe */
1843 break;
1845 case 0x99: /* cwd/cdq/cqo */
1846 switch ( op_bytes )
1848 case 2:
1849 *(int16_t *)&_regs.edx = ((int16_t)_regs.eax < 0) ? -1 : 0;
1850 break;
1851 case 4:
1852 _regs.edx = (uint32_t)(((int32_t)_regs.eax < 0) ? -1 : 0);
1853 break;
1854 case 8:
1855 _regs.edx = (_regs.eax < 0) ? -1 : 0;
1856 break;
1858 break;
1860 case 0x9e: /* sahf */
1861 *(uint8_t *)_regs.eflags = (((uint8_t *)&_regs.eax)[1] & 0xd7) | 0x02;
1862 break;
1864 case 0x9f: /* lahf */
1865 ((uint8_t *)&_regs.eax)[1] = (_regs.eflags & 0xd7) | 0x02;
1866 break;
1868 case 0xa0 ... 0xa1: /* mov mem.offs,{%al,%ax,%eax,%rax} */
1869 /* Source EA is not encoded via ModRM. */
1870 dst.type = OP_REG;
1871 dst.reg = (unsigned long *)&_regs.eax;
1872 dst.bytes = (d & ByteOp) ? 1 : op_bytes;
1873 if ( (rc = ops->read(ea.mem.seg, insn_fetch_bytes(ad_bytes),
1874 &dst.val, dst.bytes, ctxt)) != 0 )
1875 goto done;
1876 break;
1878 case 0xa2 ... 0xa3: /* mov {%al,%ax,%eax,%rax},mem.offs */
1879 /* Destination EA is not encoded via ModRM. */
1880 dst.type = OP_MEM;
1881 dst.mem.seg = ea.mem.seg;
1882 dst.mem.off = insn_fetch_bytes(ad_bytes);
1883 dst.bytes = (d & ByteOp) ? 1 : op_bytes;
1884 dst.val = (unsigned long)_regs.eax;
1885 break;
1887 case 0xa4 ... 0xa5: /* movs */
1888 handle_rep_prefix();
1889 dst.type = OP_MEM;
1890 dst.bytes = (d & ByteOp) ? 1 : op_bytes;
1891 dst.mem.seg = x86_seg_es;
1892 dst.mem.off = truncate_ea(_regs.edi);
1893 if ( (rc = ops->read(ea.mem.seg, truncate_ea(_regs.esi),
1894 &dst.val, dst.bytes, ctxt)) != 0 )
1895 goto done;
1896 register_address_increment(
1897 _regs.esi, (_regs.eflags & EFLG_DF) ? -dst.bytes : dst.bytes);
1898 register_address_increment(
1899 _regs.edi, (_regs.eflags & EFLG_DF) ? -dst.bytes : dst.bytes);
1900 break;
1902 case 0xaa ... 0xab: /* stos */
1903 handle_rep_prefix();
1904 dst.type = OP_MEM;
1905 dst.bytes = (d & ByteOp) ? 1 : op_bytes;
1906 dst.mem.seg = x86_seg_es;
1907 dst.mem.off = truncate_ea(_regs.edi);
1908 dst.val = _regs.eax;
1909 register_address_increment(
1910 _regs.edi, (_regs.eflags & EFLG_DF) ? -dst.bytes : dst.bytes);
1911 break;
1913 case 0xac ... 0xad: /* lods */
1914 handle_rep_prefix();
1915 dst.type = OP_REG;
1916 dst.bytes = (d & ByteOp) ? 1 : op_bytes;
1917 dst.reg = (unsigned long *)&_regs.eax;
1918 if ( (rc = ops->read(ea.mem.seg, truncate_ea(_regs.esi),
1919 &dst.val, dst.bytes, ctxt)) != 0 )
1920 goto done;
1921 register_address_increment(
1922 _regs.esi, (_regs.eflags & EFLG_DF) ? -dst.bytes : dst.bytes);
1923 break;
1925 case 0xc2: /* ret imm16 (near) */
1926 case 0xc3: /* ret (near) */ {
1927 int offset = (b == 0xc2) ? insn_fetch_type(uint16_t) : 0;
1928 op_bytes = mode_64bit() ? 8 : op_bytes;
1929 if ( (rc = ops->read(x86_seg_ss, sp_post_inc(op_bytes + offset),
1930 &dst.val, op_bytes, ctxt)) != 0 )
1931 goto done;
1932 _regs.eip = dst.val;
1933 break;
1936 case 0xd4: /* aam */ {
1937 unsigned int base = insn_fetch_type(uint8_t);
1938 uint8_t al = _regs.eax;
1939 generate_exception_if(mode_64bit(), EXC_UD);
1940 generate_exception_if(base == 0, EXC_DE);
1941 *(uint16_t *)&_regs.eax = ((al / base) << 8) | (al % base);
1942 _regs.eflags &= ~(EFLG_SF|EFLG_ZF|EFLG_PF);
1943 _regs.eflags |= ((uint8_t)_regs.eax == 0) ? EFLG_ZF : 0;
1944 _regs.eflags |= (( int8_t)_regs.eax < 0) ? EFLG_SF : 0;
1945 _regs.eflags |= even_parity(_regs.eax) ? EFLG_PF : 0;
1946 break;
1949 case 0xd5: /* aad */ {
1950 unsigned int base = insn_fetch_type(uint8_t);
1951 uint16_t ax = _regs.eax;
1952 generate_exception_if(mode_64bit(), EXC_UD);
1953 *(uint16_t *)&_regs.eax = (uint8_t)(ax + ((ax >> 8) * base));
1954 _regs.eflags &= ~(EFLG_SF|EFLG_ZF|EFLG_PF);
1955 _regs.eflags |= ((uint8_t)_regs.eax == 0) ? EFLG_ZF : 0;
1956 _regs.eflags |= (( int8_t)_regs.eax < 0) ? EFLG_SF : 0;
1957 _regs.eflags |= even_parity(_regs.eax) ? EFLG_PF : 0;
1958 break;
1961 case 0xd6: /* salc */
1962 generate_exception_if(mode_64bit(), EXC_UD);
1963 *(uint8_t *)&_regs.eax = (_regs.eflags & EFLG_CF) ? 0xff : 0x00;
1964 break;
1966 case 0xd7: /* xlat */ {
1967 unsigned long al = (uint8_t)_regs.eax;
1968 if ( (rc = ops->read(ea.mem.seg, truncate_ea(_regs.ebx + al),
1969 &al, 1, ctxt)) != 0 )
1970 goto done;
1971 *(uint8_t *)&_regs.eax = al;
1972 break;
1975 case 0xe0 ... 0xe2: /* loop{,z,nz} */ {
1976 int rel = insn_fetch_type(int8_t);
1977 int do_jmp = !(_regs.eflags & EFLG_ZF); /* loopnz */
1978 if ( b == 0xe1 )
1979 do_jmp = !do_jmp; /* loopz */
1980 else if ( b == 0xe2 )
1981 do_jmp = 1; /* loop */
1982 switch ( ad_bytes )
1984 case 2:
1985 do_jmp &= --(*(uint16_t *)&_regs.ecx) != 0;
1986 break;
1987 case 4:
1988 do_jmp &= --(*(uint32_t *)&_regs.ecx) != 0;
1989 _regs.ecx = (uint32_t)_regs.ecx; /* zero extend in x86/64 mode */
1990 break;
1991 default: /* case 8: */
1992 do_jmp &= --_regs.ecx != 0;
1993 break;
1995 if ( do_jmp )
1996 jmp_rel(rel);
1997 break;
2000 case 0xe3: /* jcxz/jecxz (short) */ {
2001 int rel = insn_fetch_type(int8_t);
2002 if ( (ad_bytes == 2) ? !(uint16_t)_regs.ecx :
2003 (ad_bytes == 4) ? !(uint32_t)_regs.ecx : !_regs.ecx )
2004 jmp_rel(rel);
2005 break;
2008 case 0xe4: /* in imm8,%al */
2009 case 0xe5: /* in imm8,%eax */
2010 case 0xe6: /* out %al,imm8 */
2011 case 0xe7: /* out %eax,imm8 */
2012 case 0xec: /* in %dx,%al */
2013 case 0xed: /* in %dx,%eax */
2014 case 0xee: /* out %al,%dx */
2015 case 0xef: /* out %eax,%dx */ {
2016 unsigned int port = ((b < 0xe8)
2017 ? insn_fetch_type(uint8_t)
2018 : (uint16_t)_regs.edx);
2019 generate_exception_if(!mode_iopl(), EXC_GP);
2020 op_bytes = !(b & 1) ? 1 : (op_bytes == 8) ? 4 : op_bytes;
2021 if ( b & 2 )
2023 /* out */
2024 fail_if(ops->write_io == NULL);
2025 rc = ops->write_io(port, op_bytes, _regs.eax, ctxt);
2028 else
2030 /* in */
2031 dst.type = OP_REG;
2032 dst.bytes = op_bytes;
2033 dst.reg = (unsigned long *)&_regs.eax;
2034 fail_if(ops->read_io == NULL);
2035 rc = ops->read_io(port, dst.bytes, &dst.val, ctxt);
2037 if ( rc != 0 )
2038 goto done;
2039 break;
2042 case 0xe8: /* call (near) */ {
2043 int rel = (((op_bytes == 2) && !mode_64bit())
2044 ? (int32_t)insn_fetch_type(int16_t)
2045 : insn_fetch_type(int32_t));
2046 op_bytes = mode_64bit() ? 8 : op_bytes;
2047 src.val = _regs.eip;
2048 jmp_rel(rel);
2049 goto push;
2052 case 0xe9: /* jmp (near) */ {
2053 int rel = (((op_bytes == 2) && !mode_64bit())
2054 ? (int32_t)insn_fetch_type(int16_t)
2055 : insn_fetch_type(int32_t));
2056 jmp_rel(rel);
2057 break;
2060 case 0xeb: /* jmp (short) */
2061 jmp_rel(insn_fetch_type(int8_t));
2062 break;
2064 case 0xf5: /* cmc */
2065 _regs.eflags ^= EFLG_CF;
2066 break;
2068 case 0xf8: /* clc */
2069 _regs.eflags &= ~EFLG_CF;
2070 break;
2072 case 0xf9: /* stc */
2073 _regs.eflags |= EFLG_CF;
2074 break;
2076 case 0xfa: /* cli */
2077 generate_exception_if(!mode_iopl(), EXC_GP);
2078 fail_if(ops->write_rflags == NULL);
2079 if ( (rc = ops->write_rflags(_regs.eflags & ~EFLG_IF, ctxt)) != 0 )
2080 goto done;
2081 break;
2083 case 0xfb: /* sti */
2084 generate_exception_if(!mode_iopl(), EXC_GP);
2085 fail_if(ops->write_rflags == NULL);
2086 if ( (rc = ops->write_rflags(_regs.eflags | EFLG_IF, ctxt)) != 0 )
2087 goto done;
2088 break;
2090 case 0xfc: /* cld */
2091 _regs.eflags &= ~EFLG_DF;
2092 break;
2094 case 0xfd: /* std */
2095 _regs.eflags |= EFLG_DF;
2096 break;
2098 goto writeback;
2100 twobyte_insn:
2101 switch ( b )
2103 case 0x40 ... 0x4f: /* cmovcc */
2104 dst.val = src.val;
2105 if ( !test_cc(b, _regs.eflags) )
2106 dst.type = OP_NONE;
2107 break;
2109 case 0x90 ... 0x9f: /* setcc */
2110 dst.val = test_cc(b, _regs.eflags);
2111 break;
2113 case 0xb0 ... 0xb1: /* cmpxchg */
2114 /* Save real source value, then compare EAX against destination. */
2115 src.orig_val = src.val;
2116 src.val = _regs.eax;
2117 emulate_2op_SrcV("cmp", src, dst, _regs.eflags);
2118 /* Always write back. The question is: where to? */
2119 d |= Mov;
2120 if ( _regs.eflags & EFLG_ZF )
2122 /* Success: write back to memory. */
2123 dst.val = src.orig_val;
2125 else
2127 /* Failure: write the value we saw to EAX. */
2128 dst.type = OP_REG;
2129 dst.reg = (unsigned long *)&_regs.eax;
2131 break;
2133 case 0xa3: bt: /* bt */
2134 emulate_2op_SrcV_nobyte("bt", src, dst, _regs.eflags);
2135 break;
2137 case 0xb3: btr: /* btr */
2138 emulate_2op_SrcV_nobyte("btr", src, dst, _regs.eflags);
2139 break;
2141 case 0xab: bts: /* bts */
2142 emulate_2op_SrcV_nobyte("bts", src, dst, _regs.eflags);
2143 break;
2145 case 0xaf: /* imul */
2146 _regs.eflags &= ~(EFLG_OF|EFLG_CF);
2147 switch ( dst.bytes )
2149 case 2:
2150 dst.val = ((uint32_t)(int16_t)src.val *
2151 (uint32_t)(int16_t)dst.val);
2152 if ( (int16_t)dst.val != (uint32_t)dst.val )
2153 _regs.eflags |= EFLG_OF|EFLG_CF;
2154 break;
2155 #ifdef __x86_64__
2156 case 4:
2157 dst.val = ((uint64_t)(int32_t)src.val *
2158 (uint64_t)(int32_t)dst.val);
2159 if ( (int32_t)dst.val != dst.val )
2160 _regs.eflags |= EFLG_OF|EFLG_CF;
2161 break;
2162 #endif
2163 default: {
2164 unsigned long m[2] = { src.val, dst.val };
2165 if ( imul_dbl(m) )
2166 _regs.eflags |= EFLG_OF|EFLG_CF;
2167 dst.val = m[0];
2168 break;
2171 break;
2173 case 0xb6: /* movzx rm8,r{16,32,64} */
2174 /* Recompute DstReg as we may have decoded AH/BH/CH/DH. */
2175 dst.reg = decode_register(modrm_reg, &_regs, 0);
2176 dst.bytes = op_bytes;
2177 dst.val = (uint8_t)src.val;
2178 break;
2180 case 0xbc: /* bsf */ {
2181 int zf;
2182 asm ( "bsf %2,%0; setz %b1"
2183 : "=r" (dst.val), "=q" (zf)
2184 : "r" (src.val), "1" (0) );
2185 _regs.eflags &= ~EFLG_ZF;
2186 _regs.eflags |= zf ? EFLG_ZF : 0;
2187 break;
2190 case 0xbd: /* bsr */ {
2191 int zf;
2192 asm ( "bsr %2,%0; setz %b1"
2193 : "=r" (dst.val), "=q" (zf)
2194 : "r" (src.val), "1" (0) );
2195 _regs.eflags &= ~EFLG_ZF;
2196 _regs.eflags |= zf ? EFLG_ZF : 0;
2197 break;
2200 case 0xb7: /* movzx rm16,r{16,32,64} */
2201 dst.val = (uint16_t)src.val;
2202 break;
2204 case 0xbb: btc: /* btc */
2205 emulate_2op_SrcV_nobyte("btc", src, dst, _regs.eflags);
2206 break;
2208 case 0xba: /* Grp8 */
2209 switch ( modrm_reg & 7 )
2211 case 4: goto bt;
2212 case 5: goto bts;
2213 case 6: goto btr;
2214 case 7: goto btc;
2215 default: generate_exception_if(1, EXC_UD);
2217 break;
2219 case 0xbe: /* movsx rm8,r{16,32,64} */
2220 /* Recompute DstReg as we may have decoded AH/BH/CH/DH. */
2221 dst.reg = decode_register(modrm_reg, &_regs, 0);
2222 dst.bytes = op_bytes;
2223 dst.val = (int8_t)src.val;
2224 break;
2226 case 0xbf: /* movsx rm16,r{16,32,64} */
2227 dst.val = (int16_t)src.val;
2228 break;
2230 case 0xc0 ... 0xc1: /* xadd */
2231 /* Write back the register source. */
2232 switch ( dst.bytes )
2234 case 1: *(uint8_t *)src.reg = (uint8_t)dst.val; break;
2235 case 2: *(uint16_t *)src.reg = (uint16_t)dst.val; break;
2236 case 4: *src.reg = (uint32_t)dst.val; break; /* 64b reg: zero-extend */
2237 case 8: *src.reg = dst.val; break;
2239 goto add;
2241 goto writeback;
2243 twobyte_special_insn:
2244 switch ( b )
2246 case 0x06: /* clts */
2247 generate_exception_if(!mode_ring0(), EXC_GP);
2248 fail_if((ops->read_cr == NULL) || (ops->write_cr == NULL));
2249 if ( (rc = ops->read_cr(0, &dst.val, ctxt)) ||
2250 (rc = ops->write_cr(0, dst.val&~8, ctxt)) )
2251 goto done;
2252 break;
2254 case 0x08: /* invd */
2255 case 0x09: /* wbinvd */
2256 generate_exception_if(!mode_ring0(), EXC_GP);
2257 fail_if(ops->wbinvd == NULL);
2258 if ( (rc = ops->wbinvd(ctxt)) != 0 )
2259 goto done;
2260 break;
2262 case 0x0d: /* GrpP (prefetch) */
2263 case 0x18: /* Grp16 (prefetch/nop) */
2264 case 0x19 ... 0x1f: /* nop (amd-defined) */
2265 break;
2267 case 0x20: /* mov cr,reg */
2268 case 0x21: /* mov dr,reg */
2269 case 0x22: /* mov reg,cr */
2270 case 0x23: /* mov reg,dr */
2271 generate_exception_if(!mode_ring0(), EXC_GP);
2272 modrm_rm |= (rex_prefix & 1) << 3;
2273 modrm_reg |= lock_prefix << 3;
2274 if ( b & 2 )
2276 /* Write to CR/DR. */
2277 src.val = *(unsigned long *)decode_register(modrm_rm, &_regs, 0);
2278 if ( !mode_64bit() )
2279 src.val = (uint32_t)src.val;
2280 rc = ((b & 1)
2281 ? (ops->write_dr
2282 ? ops->write_dr(modrm_reg, src.val, ctxt)
2283 : X86EMUL_UNHANDLEABLE)
2284 : (ops->write_cr
2285 ? ops->write_dr(modrm_reg, src.val, ctxt)
2286 : X86EMUL_UNHANDLEABLE));
2288 else
2290 /* Read from CR/DR. */
2291 dst.type = OP_REG;
2292 dst.bytes = mode_64bit() ? 8 : 4;
2293 dst.reg = decode_register(modrm_rm, &_regs, 0);
2294 rc = ((b & 1)
2295 ? (ops->read_dr
2296 ? ops->read_dr(modrm_reg, &dst.val, ctxt)
2297 : X86EMUL_UNHANDLEABLE)
2298 : (ops->read_cr
2299 ? ops->read_dr(modrm_reg, &dst.val, ctxt)
2300 : X86EMUL_UNHANDLEABLE));
2302 if ( rc != 0 )
2303 goto done;
2304 break;
2306 case 0x30: /* wrmsr */ {
2307 uint64_t val = ((uint64_t)_regs.edx << 32) | (uint32_t)_regs.eax;
2308 generate_exception_if(!mode_ring0(), EXC_GP);
2309 fail_if(ops->write_msr == NULL);
2310 if ( (rc = ops->write_msr((uint32_t)_regs.ecx, val, ctxt)) != 0 )
2311 goto done;
2312 break;
2315 case 0x32: /* rdmsr */ {
2316 uint64_t val;
2317 generate_exception_if(!mode_ring0(), EXC_GP);
2318 fail_if(ops->read_msr == NULL);
2319 if ( (rc = ops->read_msr((uint32_t)_regs.ecx, &val, ctxt)) != 0 )
2320 goto done;
2321 _regs.edx = (uint32_t)(val >> 32);
2322 _regs.eax = (uint32_t)(val >> 0);
2323 break;
2326 case 0x80 ... 0x8f: /* jcc (near) */ {
2327 int rel = (((op_bytes == 2) && !mode_64bit())
2328 ? (int32_t)insn_fetch_type(int16_t)
2329 : insn_fetch_type(int32_t));
2330 if ( test_cc(b, _regs.eflags) )
2331 jmp_rel(rel);
2332 break;
2335 case 0xc7: /* Grp9 (cmpxchg8b) */
2336 #if defined(__i386__)
2338 unsigned long old_lo, old_hi;
2339 generate_exception_if((modrm_reg & 7) != 1, EXC_UD);
2340 if ( (rc = ops->read(ea.mem.seg, ea.mem.off+0, &old_lo, 4, ctxt)) ||
2341 (rc = ops->read(ea.mem.seg, ea.mem.off+4, &old_hi, 4, ctxt)) )
2342 goto done;
2343 if ( (old_lo != _regs.eax) || (old_hi != _regs.edx) )
2345 _regs.eax = old_lo;
2346 _regs.edx = old_hi;
2347 _regs.eflags &= ~EFLG_ZF;
2349 else if ( ops->cmpxchg8b == NULL )
2351 rc = X86EMUL_UNHANDLEABLE;
2352 goto done;
2354 else
2356 if ( (rc = ops->cmpxchg8b(ea.mem.seg, ea.mem.off, old_lo, old_hi,
2357 _regs.ebx, _regs.ecx, ctxt)) != 0 )
2358 goto done;
2359 _regs.eflags |= EFLG_ZF;
2361 break;
2363 #elif defined(__x86_64__)
2365 unsigned long old, new;
2366 generate_exception_if((modrm_reg & 7) != 1, EXC_UD);
2367 if ( (rc = ops->read(ea.mem.seg, ea.mem.off, &old, 8, ctxt)) != 0 )
2368 goto done;
2369 if ( ((uint32_t)(old>>0) != (uint32_t)_regs.eax) ||
2370 ((uint32_t)(old>>32) != (uint32_t)_regs.edx) )
2372 _regs.eax = (uint32_t)(old>>0);
2373 _regs.edx = (uint32_t)(old>>32);
2374 _regs.eflags &= ~EFLG_ZF;
2376 else
2378 new = (_regs.ecx<<32)|(uint32_t)_regs.ebx;
2379 if ( (rc = ops->cmpxchg(ea.mem.seg, ea.mem.off, old,
2380 new, 8, ctxt)) != 0 )
2381 goto done;
2382 _regs.eflags |= EFLG_ZF;
2384 break;
2386 #endif
2388 case 0xc8 ... 0xcf: /* bswap */
2389 dst.type = OP_REG;
2390 dst.reg = decode_register(
2391 (b & 7) | ((rex_prefix & 1) << 3), &_regs, 0);
2392 switch ( dst.bytes = op_bytes )
2394 default: /* case 2: */
2395 /* Undefined behaviour. Writes zero on all tested CPUs. */
2396 dst.val = 0;
2397 break;
2398 case 4:
2399 #ifdef __x86_64__
2400 __asm__ ( "bswap %k0" : "=r" (dst.val) : "0" (*dst.reg) );
2401 break;
2402 case 8:
2403 #endif
2404 __asm__ ( "bswap %0" : "=r" (dst.val) : "0" (*dst.reg) );
2405 break;
2407 break;
2409 goto writeback;
2411 cannot_emulate:
2412 #ifdef __XEN__
2413 gdprintk(XENLOG_DEBUG, "Instr:");
2414 for ( ea.mem.off = ctxt->regs->eip; ea.mem.off < _regs.eip; ea.mem.off++ )
2416 unsigned long x;
2417 ops->insn_fetch(x86_seg_cs, ea.mem.off, &x, 1, ctxt);
2418 printk(" %02x", (uint8_t)x);
2420 printk("\n");
2421 #endif
2422 return X86EMUL_UNHANDLEABLE;