xen/arch/x86/x86_emulate.c @ 11281:d389123fad85 (ia64/xen-unstable)

Don't emulate faulting writes to stack. May fix a number of recent hvm
bugs with the new shadow code. A more complete clean-up of the emulation
code will follow.

Signed-off-by: Steven Hand <steven@xensource.com>
Author: Steven Hand <steven@xensource.com>
Date:   Wed Aug 23 17:25:11 2006 +0100

/******************************************************************************
 * x86_emulate.c
 *
 * Generic x86 (32-bit and 64-bit) instruction decoder and emulator.
 *
 * Copyright (c) 2005 Keir Fraser
 */

#ifndef __XEN__
#include <stdio.h>
#include <stdint.h>
#include <public/xen.h>
#define DPRINTF(_f, _a...) printf( _f , ## _a )
#else
#include <xen/config.h>
#include <xen/types.h>
#include <xen/lib.h>
#include <xen/mm.h>
#include <asm/regs.h>
#define DPRINTF DPRINTK
#endif
#include <asm-x86/x86_emulate.h>

/*
 * Opcode effective-address decode tables.
 * Note that we only emulate instructions that have at least one memory
 * operand (excluding implicit stack references). We assume that stack
 * references and instruction fetches will never occur in special memory
 * areas that require emulation. So, for example, 'mov <imm>,<reg>' need
 * not be handled.
 */

/* Operand sizes: 8-bit operands or specified/overridden size. */
#define ByteOp      (1<<0) /* 8-bit operands. */
/* Destination operand type. */
#define ImplicitOps (1<<1) /* Implicit in opcode. No generic decode. */
#define DstReg      (2<<1) /* Register operand. */
#define DstMem      (3<<1) /* Memory operand. */
#define DstMask     (3<<1)
/* Source operand type. */
#define SrcNone     (0<<3) /* No source operand. */
#define SrcImplicit (0<<3) /* Source operand is implicit in the opcode. */
#define SrcReg      (1<<3) /* Register operand. */
#define SrcMem      (2<<3) /* Memory operand. */
#define SrcMem16    (3<<3) /* Memory operand (16-bit). */
#define SrcMem32    (4<<3) /* Memory operand (32-bit). */
#define SrcImm      (5<<3) /* Immediate operand. */
#define SrcImmByte  (6<<3) /* 8-bit sign-extended immediate operand. */
#define SrcMask     (7<<3)
/* Generic ModRM decode. */
#define ModRM       (1<<6)
/* Destination is only written; never read. */
#define Mov         (1<<7)
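
/*
 * Example: the entry for opcode 0x89 (MOV r/m,reg) below is
 * DstMem|SrcReg|ModRM|Mov, i.e. a ModRM-encoded instruction whose source
 * is a register and whose memory destination is written without first
 * being read.
 */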

static uint8_t opcode_table[256] = {
    /* 0x00 - 0x07 */
    ByteOp|DstMem|SrcReg|ModRM, DstMem|SrcReg|ModRM,
    ByteOp|DstReg|SrcMem|ModRM, DstReg|SrcMem|ModRM,
    0, 0, 0, 0,
    /* 0x08 - 0x0F */
    ByteOp|DstMem|SrcReg|ModRM, DstMem|SrcReg|ModRM,
    ByteOp|DstReg|SrcMem|ModRM, DstReg|SrcMem|ModRM,
    0, 0, 0, 0,
    /* 0x10 - 0x17 */
    ByteOp|DstMem|SrcReg|ModRM, DstMem|SrcReg|ModRM,
    ByteOp|DstReg|SrcMem|ModRM, DstReg|SrcMem|ModRM,
    0, 0, 0, 0,
    /* 0x18 - 0x1F */
    ByteOp|DstMem|SrcReg|ModRM, DstMem|SrcReg|ModRM,
    ByteOp|DstReg|SrcMem|ModRM, DstReg|SrcMem|ModRM,
    0, 0, 0, 0,
    /* 0x20 - 0x27 */
    ByteOp|DstMem|SrcReg|ModRM, DstMem|SrcReg|ModRM,
    ByteOp|DstReg|SrcMem|ModRM, DstReg|SrcMem|ModRM,
    0, 0, 0, 0,
    /* 0x28 - 0x2F */
    ByteOp|DstMem|SrcReg|ModRM, DstMem|SrcReg|ModRM,
    ByteOp|DstReg|SrcMem|ModRM, DstReg|SrcMem|ModRM,
    0, 0, 0, 0,
    /* 0x30 - 0x37 */
    ByteOp|DstMem|SrcReg|ModRM, DstMem|SrcReg|ModRM,
    ByteOp|DstReg|SrcMem|ModRM, DstReg|SrcMem|ModRM,
    0, 0, 0, 0,
    /* 0x38 - 0x3F */
    ByteOp|DstMem|SrcReg|ModRM, DstMem|SrcReg|ModRM,
    ByteOp|DstReg|SrcMem|ModRM, DstReg|SrcMem|ModRM,
    0, 0, 0, 0,
    /* 0x40 - 0x4F */
    0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
    /* 0x50 - 0x5F */
    0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
    /* 0x60 - 0x6F */
    0, 0, 0, DstReg|SrcMem32|ModRM|Mov /* movsxd (x86/64) */,
    0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
    /* 0x70 - 0x7F */
    0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
    /* 0x80 - 0x87 */
    ByteOp|DstMem|SrcImm|ModRM, DstMem|SrcImm|ModRM,
    ByteOp|DstMem|SrcImm|ModRM, DstMem|SrcImmByte|ModRM,
    ByteOp|DstMem|SrcReg|ModRM, DstMem|SrcReg|ModRM,
    ByteOp|DstMem|SrcReg|ModRM, DstMem|SrcReg|ModRM,
    /* 0x88 - 0x8F */
    ByteOp|DstMem|SrcReg|ModRM|Mov, DstMem|SrcReg|ModRM|Mov,
    ByteOp|DstReg|SrcMem|ModRM|Mov, DstReg|SrcMem|ModRM|Mov,
    0, 0, 0, DstMem|SrcNone|ModRM|Mov,
    /* 0x90 - 0x9F */
    0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
    /* 0xA0 - 0xA7 */
    ByteOp|DstReg|SrcMem|Mov, DstReg|SrcMem|Mov,
    ByteOp|DstMem|SrcReg|Mov, DstMem|SrcReg|Mov,
    ByteOp|ImplicitOps|Mov, ImplicitOps|Mov,
    ByteOp|ImplicitOps, ImplicitOps,
    /* 0xA8 - 0xAF */
    0, 0, ByteOp|ImplicitOps|Mov, ImplicitOps|Mov,
    ByteOp|ImplicitOps|Mov, ImplicitOps|Mov,
    ByteOp|ImplicitOps, ImplicitOps,
    /* 0xB0 - 0xBF */
    0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
    /* 0xC0 - 0xC7 */
    ByteOp|DstMem|SrcImm|ModRM, DstMem|SrcImmByte|ModRM, 0, 0,
    0, 0, ByteOp|DstMem|SrcImm|ModRM|Mov, DstMem|SrcImm|ModRM|Mov,
    /* 0xC8 - 0xCF */
    0, 0, 0, 0, 0, 0, 0, 0,
    /* 0xD0 - 0xD7 */
    ByteOp|DstMem|SrcImplicit|ModRM, DstMem|SrcImplicit|ModRM,
    ByteOp|DstMem|SrcImplicit|ModRM, DstMem|SrcImplicit|ModRM,
    0, 0, 0, 0,
    /* 0xD8 - 0xDF */
    0, 0, 0, 0, 0, 0, 0, 0,
    /* 0xE0 - 0xEF */
    0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
    /* 0xF0 - 0xF7 */
    0, 0, 0, 0,
    0, 0, ByteOp|DstMem|SrcNone|ModRM, DstMem|SrcNone|ModRM,
    /* 0xF8 - 0xFF */
    0, 0, 0, 0,
    0, 0, ByteOp|DstMem|SrcNone|ModRM, DstMem|SrcNone|ModRM
};

static uint8_t twobyte_table[256] = {
    /* 0x00 - 0x0F */
    0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, ImplicitOps|ModRM, 0, 0,
    /* 0x10 - 0x1F */
    0, 0, 0, 0, 0, 0, 0, 0, ImplicitOps|ModRM, 0, 0, 0, 0, 0, 0, 0,
    /* 0x20 - 0x2F */
    0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
    /* 0x30 - 0x3F */
    0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
    /* 0x40 - 0x47 */
    DstReg|SrcMem|ModRM|Mov, DstReg|SrcMem|ModRM|Mov,
    DstReg|SrcMem|ModRM|Mov, DstReg|SrcMem|ModRM|Mov,
    DstReg|SrcMem|ModRM|Mov, DstReg|SrcMem|ModRM|Mov,
    DstReg|SrcMem|ModRM|Mov, DstReg|SrcMem|ModRM|Mov,
    /* 0x48 - 0x4F */
    DstReg|SrcMem|ModRM|Mov, DstReg|SrcMem|ModRM|Mov,
    DstReg|SrcMem|ModRM|Mov, DstReg|SrcMem|ModRM|Mov,
    DstReg|SrcMem|ModRM|Mov, DstReg|SrcMem|ModRM|Mov,
    DstReg|SrcMem|ModRM|Mov, DstReg|SrcMem|ModRM|Mov,
    /* 0x50 - 0x5F */
    0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
    /* 0x60 - 0x6F */
    0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
    /* 0x70 - 0x7F */
    0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
    /* 0x80 - 0x8F */
    0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
    /* 0x90 - 0x9F */
    0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
    /* 0xA0 - 0xA7 */
    0, 0, 0, DstMem|SrcReg|ModRM, 0, 0, 0, 0,
    /* 0xA8 - 0xAF */
    0, 0, 0, DstMem|SrcReg|ModRM, 0, 0, 0, 0,
    /* 0xB0 - 0xB7 */
    ByteOp|DstMem|SrcReg|ModRM, DstMem|SrcReg|ModRM, 0, DstMem|SrcReg|ModRM,
    0, 0, ByteOp|DstReg|SrcMem|ModRM|Mov, DstReg|SrcMem16|ModRM|Mov,
    /* 0xB8 - 0xBF */
    0, 0, DstMem|SrcImmByte|ModRM, DstMem|SrcReg|ModRM,
    0, 0, ByteOp|DstReg|SrcMem|ModRM|Mov, DstReg|SrcMem16|ModRM|Mov,
    /* 0xC0 - 0xCF */
    0, 0, 0, 0, 0, 0, 0, ImplicitOps|ModRM, 0, 0, 0, 0, 0, 0, 0, 0,
    /* 0xD0 - 0xDF */
    0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
    /* 0xE0 - 0xEF */
    0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
    /* 0xF0 - 0xFF */
    0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0
};

/* Type, address-of, and value of an instruction's operand. */
struct operand {
    enum { OP_REG, OP_MEM, OP_IMM } type;
    unsigned int  bytes;
    unsigned long val, orig_val, *ptr;
};

/* EFLAGS bit definitions. */
#define EFLG_OF (1<<11)
#define EFLG_DF (1<<10)
#define EFLG_SF (1<<7)
#define EFLG_ZF (1<<6)
#define EFLG_AF (1<<4)
#define EFLG_PF (1<<2)
#define EFLG_CF (1<<0)

/*
 * Instruction emulation:
 * Most instructions are emulated directly via a fragment of inline assembly
 * code. This allows us to save/restore EFLAGS and thus very easily pick up
 * any modified flags.
 */

#if defined(__x86_64__)
#define _LO32 "k"          /* force 32-bit operand */
#define _STK  "%%rsp"      /* stack pointer */
#elif defined(__i386__)
#define _LO32 ""           /* force 32-bit operand */
#define _STK  "%%esp"      /* stack pointer */
#endif

/*
 * These EFLAGS bits are restored from saved value during emulation, and
 * any changes are written back to the saved value after emulation.
 */
#define EFLAGS_MASK (EFLG_OF|EFLG_SF|EFLG_ZF|EFLG_AF|EFLG_PF|EFLG_CF)

/* Before executing instruction: restore necessary bits in EFLAGS. */
#define _PRE_EFLAGS(_sav, _msk, _tmp)                   \
    /* EFLAGS = (_sav & _msk) | (EFLAGS & ~_msk); */    \
    "push %"_sav"; "                                    \
    "movl %"_msk",%"_LO32 _tmp"; "                      \
    "andl %"_LO32 _tmp",("_STK"); "                     \
    "pushf; "                                           \
    "notl %"_LO32 _tmp"; "                              \
    "andl %"_LO32 _tmp",("_STK"); "                     \
    "pop %"_tmp"; "                                     \
    "orl %"_LO32 _tmp",("_STK"); "                      \
    "popf; "                                            \
    /* _sav &= ~_msk; */                                \
    "movl %"_msk",%"_LO32 _tmp"; "                      \
    "notl %"_LO32 _tmp"; "                              \
    "andl %"_LO32 _tmp",%"_sav"; "

/* After executing instruction: write-back necessary bits in EFLAGS. */
#define _POST_EFLAGS(_sav, _msk, _tmp)                  \
    /* _sav |= EFLAGS & _msk; */                        \
    "pushf; "                                           \
    "pop %"_tmp"; "                                     \
    "andl %"_msk",%"_LO32 _tmp"; "                      \
    "orl %"_LO32 _tmp",%"_sav"; "
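
/*
 * Net effect: _PRE_EFLAGS() copies the bits in _msk from the saved flags
 * into the hardware EFLAGS (leaving all other hardware bits alone) and then
 * clears those bits in the saved copy; _POST_EFLAGS() merges the flag bits
 * produced by the executed instruction back into the saved copy. Bits
 * outside _msk are never disturbed in either direction.
 */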

/* Raw emulation: instruction has two explicit operands. */
#define __emulate_2op_nobyte(_op,_src,_dst,_eflags,_wx,_wy,_lx,_ly,_qx,_qy)\
do{ unsigned long _tmp;                                                    \
    switch ( (_dst).bytes )                                                \
    {                                                                      \
    case 2:                                                                \
        __asm__ __volatile__ (                                             \
            _PRE_EFLAGS("0","4","2")                                       \
            _op"w %"_wx"3,%1; "                                            \
            _POST_EFLAGS("0","4","2")                                      \
            : "=m" (_eflags), "=m" ((_dst).val), "=&r" (_tmp)              \
            : _wy ((_src).val), "i" (EFLAGS_MASK) );                       \
        break;                                                             \
    case 4:                                                                \
        __asm__ __volatile__ (                                             \
            _PRE_EFLAGS("0","4","2")                                       \
            _op"l %"_lx"3,%1; "                                            \
            _POST_EFLAGS("0","4","2")                                      \
            : "=m" (_eflags), "=m" ((_dst).val), "=&r" (_tmp)              \
            : _ly ((_src).val), "i" (EFLAGS_MASK) );                       \
        break;                                                             \
    case 8:                                                                \
        __emulate_2op_8byte(_op, _src, _dst, _eflags, _qx, _qy);           \
        break;                                                             \
    }                                                                      \
} while (0)

#define __emulate_2op(_op,_src,_dst,_eflags,_bx,_by,_wx,_wy,_lx,_ly,_qx,_qy)\
do{ unsigned long _tmp;                                                    \
    switch ( (_dst).bytes )                                                \
    {                                                                      \
    case 1:                                                                \
        __asm__ __volatile__ (                                             \
            _PRE_EFLAGS("0","4","2")                                       \
            _op"b %"_bx"3,%1; "                                            \
            _POST_EFLAGS("0","4","2")                                      \
            : "=m" (_eflags), "=m" ((_dst).val), "=&r" (_tmp)              \
            : _by ((_src).val), "i" (EFLAGS_MASK) );                       \
        break;                                                             \
    default:                                                               \
        __emulate_2op_nobyte(_op,_src,_dst,_eflags,_wx,_wy,_lx,_ly,_qx,_qy);\
        break;                                                             \
    }                                                                      \
} while (0)

/* Source operand is byte-sized and may be restricted to just %cl. */
#define emulate_2op_SrcB(_op, _src, _dst, _eflags)      \
    __emulate_2op(_op, _src, _dst, _eflags,             \
                  "b", "c", "b", "c", "b", "c", "b", "c")
/* Source operand is byte, word, long or quad sized. */
#define emulate_2op_SrcV(_op, _src, _dst, _eflags)      \
    __emulate_2op(_op, _src, _dst, _eflags,             \
                  "b", "q", "w", "r", _LO32, "r", "", "r")
/* Source operand is word, long or quad sized. */
#define emulate_2op_SrcV_nobyte(_op, _src, _dst, _eflags)   \
    __emulate_2op_nobyte(_op, _src, _dst, _eflags,          \
                         "w", "r", _LO32, "r", "", "r")
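
/*
 * For example, with (_dst).bytes == 4 on x86/64,
 * emulate_2op_SrcV("add", src, dst, _regs.eflags) boils down to an
 * "addl %k3,%1" fragment bracketed by the _PRE_EFLAGS/_POST_EFLAGS
 * sequences above; the size suffix and register constraints are selected
 * by the switch on (_dst).bytes.
 */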

/* Instruction has only one explicit operand (no source operand). */
#define emulate_1op(_op,_dst,_eflags)                                      \
do{ unsigned long _tmp;                                                    \
    switch ( (_dst).bytes )                                                \
    {                                                                      \
    case 1:                                                                \
        __asm__ __volatile__ (                                             \
            _PRE_EFLAGS("0","3","2")                                       \
            _op"b %1; "                                                    \
            _POST_EFLAGS("0","3","2")                                      \
            : "=m" (_eflags), "=m" ((_dst).val), "=&r" (_tmp)              \
            : "i" (EFLAGS_MASK) );                                         \
        break;                                                             \
    case 2:                                                                \
        __asm__ __volatile__ (                                             \
            _PRE_EFLAGS("0","3","2")                                       \
            _op"w %1; "                                                    \
            _POST_EFLAGS("0","3","2")                                      \
            : "=m" (_eflags), "=m" ((_dst).val), "=&r" (_tmp)              \
            : "i" (EFLAGS_MASK) );                                         \
        break;                                                             \
    case 4:                                                                \
        __asm__ __volatile__ (                                             \
            _PRE_EFLAGS("0","3","2")                                       \
            _op"l %1; "                                                    \
            _POST_EFLAGS("0","3","2")                                      \
            : "=m" (_eflags), "=m" ((_dst).val), "=&r" (_tmp)              \
            : "i" (EFLAGS_MASK) );                                         \
        break;                                                             \
    case 8:                                                                \
        __emulate_1op_8byte(_op, _dst, _eflags);                           \
        break;                                                             \
    }                                                                      \
} while (0)

/* Emulate an instruction with quadword operands (x86/64 only). */
#if defined(__x86_64__)
#define __emulate_2op_8byte(_op, _src, _dst, _eflags, _qx, _qy)            \
do{ __asm__ __volatile__ (                                                 \
        _PRE_EFLAGS("0","4","2")                                           \
        _op"q %"_qx"3,%1; "                                                \
        _POST_EFLAGS("0","4","2")                                          \
        : "=m" (_eflags), "=m" ((_dst).val), "=&r" (_tmp)                  \
        : _qy ((_src).val), "i" (EFLAGS_MASK) );                           \
} while (0)
#define __emulate_1op_8byte(_op, _dst, _eflags)                            \
do{ __asm__ __volatile__ (                                                 \
        _PRE_EFLAGS("0","3","2")                                           \
        _op"q %1; "                                                        \
        _POST_EFLAGS("0","3","2")                                          \
        : "=m" (_eflags), "=m" ((_dst).val), "=&r" (_tmp)                  \
        : "i" (EFLAGS_MASK) );                                             \
} while (0)
#elif defined(__i386__)
#define __emulate_2op_8byte(_op, _src, _dst, _eflags, _qx, _qy)
#define __emulate_1op_8byte(_op, _dst, _eflags)
#endif /* __i386__ */

/* Fetch next part of the instruction being emulated. */
#define insn_fetch(_type, _size, _eip)                                  \
({ unsigned long _x;                                                    \
   rc = ops->read_std((unsigned long)(_eip), &_x, (_size), ctxt);       \
   if ( rc != 0 )                                                       \
       goto done;                                                       \
   (_eip) += (_size);                                                   \
   (_type)_x;                                                           \
})
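
/*
 * NB. insn_fetch() is deliberately non-hygienic: it relies on 'rc', 'ops',
 * 'ctxt' and a 'done' label being in scope at the call site, and exits via
 * 'done' if the fetch faults.
 */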

/* Access/update address held in a register, based on addressing mode. */
#define register_address(sel, reg)                                      \
    (((mode == X86EMUL_MODE_REAL) ? ((unsigned long)(sel) << 4) : 0) +  \
     ((ad_bytes == sizeof(unsigned long)) ? (reg) :                     \
      ((reg) & ((1UL << (ad_bytes << 3)) - 1))))
#define register_address_increment(reg, inc)                            \
do {                                                                    \
    int _inc = (inc); /* signed type ensures sign extension to long */  \
    if ( ad_bytes == sizeof(unsigned long) )                            \
        (reg) += _inc;                                                  \
    else                                                                \
        (reg) = ((reg) & ~((1UL << (ad_bytes << 3)) - 1)) |             \
                (((reg) + _inc) & ((1UL << (ad_bytes << 3)) - 1));      \
} while (0)
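
/*
 * Example: with ad_bytes == 2 and reg == 0x00010000,
 * register_address_increment(reg, -2) yields 0x0001fffe. Only the low
 * 16 bits take part in the arithmetic, mimicking 16-bit wraparound, while
 * the upper bits are preserved.
 */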

void *
decode_register(
    uint8_t modrm_reg, struct cpu_user_regs *regs, int highbyte_regs)
{
    void *p;

    switch ( modrm_reg )
    {
    case  0: p = &regs->eax; break;
    case  1: p = &regs->ecx; break;
    case  2: p = &regs->edx; break;
    case  3: p = &regs->ebx; break;
    case  4: p = (highbyte_regs ?
                  ((unsigned char *)&regs->eax + 1) :
                  (unsigned char *)&regs->esp); break;
    case  5: p = (highbyte_regs ?
                  ((unsigned char *)&regs->ecx + 1) :
                  (unsigned char *)&regs->ebp); break;
    case  6: p = (highbyte_regs ?
                  ((unsigned char *)&regs->edx + 1) :
                  (unsigned char *)&regs->esi); break;
    case  7: p = (highbyte_regs ?
                  ((unsigned char *)&regs->ebx + 1) :
                  (unsigned char *)&regs->edi); break;
#if defined(__x86_64__)
    case  8: p = &regs->r8;  break;
    case  9: p = &regs->r9;  break;
    case 10: p = &regs->r10; break;
    case 11: p = &regs->r11; break;
    case 12: p = &regs->r12; break;
    case 13: p = &regs->r13; break;
    case 14: p = &regs->r14; break;
    case 15: p = &regs->r15; break;
#endif
    default: p = NULL; break;
    }

    return p;
}

int
x86_emulate_memop(
    struct x86_emulate_ctxt *ctxt,
    struct x86_emulate_ops  *ops)
{
    uint8_t b, d, sib, twobyte = 0, rex_prefix = 0;
    uint8_t modrm, modrm_mod = 0, modrm_reg = 0, modrm_rm = 0;
    uint16_t *seg = NULL; /* override segment */
    unsigned int op_bytes, ad_bytes, lock_prefix = 0, rep_prefix = 0, i;
    int rc = 0;
    struct operand src, dst;
    unsigned long cr2 = ctxt->cr2;
    int mode = ctxt->mode;

    /* Shadow copy of register state. Committed on successful emulation. */
    struct cpu_user_regs _regs = *ctxt->regs;

    switch ( mode )
    {
    case X86EMUL_MODE_REAL:
    case X86EMUL_MODE_PROT16:
        op_bytes = ad_bytes = 2;
        break;
    case X86EMUL_MODE_PROT32:
        op_bytes = ad_bytes = 4;
        break;
#ifdef __x86_64__
    case X86EMUL_MODE_PROT64:
        op_bytes = 4;
        ad_bytes = 8;
        break;
#endif
    default:
        return -1;
    }
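
    /*
     * NB. In 64-bit mode the default operand size is 4 bytes; it is
     * widened to 8 bytes only by a REX.W prefix (decoded below).
     */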

    /* Legacy prefixes. */
    for ( i = 0; i < 8; i++ )
    {
        switch ( b = insn_fetch(uint8_t, 1, _regs.eip) )
        {
        case 0x66: /* operand-size override */
            op_bytes ^= 6;      /* switch between 2/4 bytes */
            break;
        case 0x67: /* address-size override */
            if ( mode == X86EMUL_MODE_PROT64 )
                ad_bytes ^= 12; /* switch between 4/8 bytes */
            else
                ad_bytes ^= 6;  /* switch between 2/4 bytes */
            break;
        case 0x2e: /* CS override */
            seg = &_regs.cs;
            break;
        case 0x3e: /* DS override */
            seg = &_regs.ds;
            break;
        case 0x26: /* ES override */
            seg = &_regs.es;
            break;
        case 0x64: /* FS override */
            seg = &_regs.fs;
            break;
        case 0x65: /* GS override */
            seg = &_regs.gs;
            break;
        case 0x36: /* SS override */
            seg = &_regs.ss;
            break;
        case 0xf0: /* LOCK */
            lock_prefix = 1;
            break;
        case 0xf3: /* REP/REPE/REPZ */
            rep_prefix = 1;
            break;
        case 0xf2: /* REPNE/REPNZ */
            break;
        default:
            goto done_prefixes;
        }
    }
 done_prefixes:

    /* REX prefix. */
    if ( (mode == X86EMUL_MODE_PROT64) && ((b & 0xf0) == 0x40) )
    {
        rex_prefix = b;
        if ( b & 8 )
            op_bytes = 8;          /* REX.W */
        modrm_reg = (b & 4) << 1;  /* REX.R */
        /* REX.B and REX.X do not need to be decoded. */
        b = insn_fetch(uint8_t, 1, _regs.eip);
    }

    /* Opcode byte(s). */
    d = opcode_table[b];
    if ( d == 0 )
    {
        /* Two-byte opcode? */
        if ( b == 0x0f )
        {
            twobyte = 1;
            b = insn_fetch(uint8_t, 1, _regs.eip);
            d = twobyte_table[b];
        }

        /* Unrecognised? */
        if ( d == 0 )
            goto cannot_emulate;
    }

    /* ModRM and SIB bytes. */
    if ( d & ModRM )
    {
        modrm = insn_fetch(uint8_t, 1, _regs.eip);
        modrm_mod |= (modrm & 0xc0) >> 6;
        modrm_reg |= (modrm & 0x38) >> 3;
        modrm_rm  |= (modrm & 0x07);

        if ( modrm_mod == 3 )
        {
            DPRINTF("Cannot parse ModRM.mod == 3.\n");
            goto cannot_emulate;
        }

        if ( ad_bytes == 2 )
        {
            /* 16-bit ModR/M decode. */
            switch ( modrm_mod )
            {
            case 0:
                if ( modrm_rm == 6 )
                    _regs.eip += 2; /* skip disp16 */
                break;
            case 1:
                _regs.eip += 1; /* skip disp8 */
                break;
            case 2:
                _regs.eip += 2; /* skip disp16 */
                break;
            }
        }
        else
        {
            /* 32/64-bit ModR/M decode. */
            switch ( modrm_mod )
            {
            case 0:
                if ( (modrm_rm == 4) &&
                     (((sib = insn_fetch(uint8_t, 1, _regs.eip)) & 7) == 5) )
                    _regs.eip += 4; /* skip disp32 specified by SIB.base */
                else if ( modrm_rm == 5 )
                    _regs.eip += 4; /* skip disp32 */
                break;
            case 1:
                if ( modrm_rm == 4 )
                    sib = insn_fetch(uint8_t, 1, _regs.eip);
                _regs.eip += 1; /* skip disp8 */
                break;
            case 2:
                if ( modrm_rm == 4 )
                    sib = insn_fetch(uint8_t, 1, _regs.eip);
                _regs.eip += 4; /* skip disp32 */
                break;
            }
        }
    }
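
    /*
     * NB. We never compute an effective address ourselves: the faulting
     * address is already available in cr2. ModRM/SIB decoding above only
     * needs to advance _regs.eip past any SIB byte and displacement.
     */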

    /* Decode and fetch the destination operand: register or memory. */
    switch ( d & DstMask )
    {
    case ImplicitOps:
        /* Special instructions do their own operand decoding. */
        goto special_insn;
    case DstReg:
        dst.type = OP_REG;
        if ( d & ByteOp )
        {
            dst.ptr = decode_register(modrm_reg, &_regs, (rex_prefix == 0));
            dst.val = *(uint8_t *)dst.ptr;
            dst.bytes = 1;
        }
        else
        {
            dst.ptr = decode_register(modrm_reg, &_regs, 0);
            switch ( (dst.bytes = op_bytes) )
            {
            case 2: dst.val = *(uint16_t *)dst.ptr; break;
            case 4: dst.val = *(uint32_t *)dst.ptr; break;
            case 8: dst.val = *(uint64_t *)dst.ptr; break;
            }
        }
        break;
    case DstMem:
        dst.type  = OP_MEM;
        dst.ptr   = (unsigned long *)cr2;
        dst.bytes = (d & ByteOp) ? 1 : op_bytes;
        if ( !(d & Mov) && /* optimisation - avoid slow emulated read */
             ((rc = ops->read_emulated((unsigned long)dst.ptr,
                                       &dst.val, dst.bytes, ctxt)) != 0) )
            goto done;
        break;
    }
    dst.orig_val = dst.val;

    /* Decode and fetch the source operand: register, memory or immediate. */
    switch ( d & SrcMask )
    {
    case SrcNone:
        break;
    case SrcReg:
        src.type = OP_REG;
        if ( d & ByteOp )
        {
            src.ptr = decode_register(modrm_reg, &_regs, (rex_prefix == 0));
            src.val = src.orig_val = *(uint8_t *)src.ptr;
            src.bytes = 1;
        }
        else
        {
            src.ptr = decode_register(modrm_reg, &_regs, 0);
            switch ( (src.bytes = op_bytes) )
            {
            case 2: src.val = src.orig_val = *(uint16_t *)src.ptr; break;
            case 4: src.val = src.orig_val = *(uint32_t *)src.ptr; break;
            case 8: src.val = src.orig_val = *(uint64_t *)src.ptr; break;
            }
        }
        break;
    case SrcMem16:
        src.bytes = 2;
        goto srcmem_common;
    case SrcMem32:
        src.bytes = 4;
        goto srcmem_common;
    case SrcMem:
        src.bytes = (d & ByteOp) ? 1 : op_bytes;
    srcmem_common:
        src.type = OP_MEM;
        src.ptr  = (unsigned long *)cr2;
        if ( (rc = ops->read_emulated((unsigned long)src.ptr,
                                      &src.val, src.bytes, ctxt)) != 0 )
            goto done;
        src.orig_val = src.val;
        break;
    case SrcImm:
        src.type  = OP_IMM;
        src.ptr   = (unsigned long *)_regs.eip;
        src.bytes = (d & ByteOp) ? 1 : op_bytes;
        if ( src.bytes == 8 ) src.bytes = 4;
        /* NB. Immediates are sign-extended as necessary. */
        switch ( src.bytes )
        {
        case 1: src.val = insn_fetch(int8_t,  1, _regs.eip); break;
        case 2: src.val = insn_fetch(int16_t, 2, _regs.eip); break;
        case 4: src.val = insn_fetch(int32_t, 4, _regs.eip); break;
        }
        break;
    case SrcImmByte:
        src.type  = OP_IMM;
        src.ptr   = (unsigned long *)_regs.eip;
        src.bytes = 1;
        src.val   = insn_fetch(int8_t, 1, _regs.eip);
        break;
    }

    if ( twobyte )
        goto twobyte_insn;

    switch ( b )
    {
    case 0x00 ... 0x05: add: /* add */
        emulate_2op_SrcV("add", src, dst, _regs.eflags);
        break;
    case 0x08 ... 0x0d: or:  /* or */
        emulate_2op_SrcV("or", src, dst, _regs.eflags);
        break;
    case 0x10 ... 0x15: adc: /* adc */
        emulate_2op_SrcV("adc", src, dst, _regs.eflags);
        break;
    case 0x18 ... 0x1d: sbb: /* sbb */
        emulate_2op_SrcV("sbb", src, dst, _regs.eflags);
        break;
    case 0x20 ... 0x25: and: /* and */
        emulate_2op_SrcV("and", src, dst, _regs.eflags);
        break;
    case 0x28 ... 0x2d: sub: /* sub */
        emulate_2op_SrcV("sub", src, dst, _regs.eflags);
        break;
    case 0x30 ... 0x35: xor: /* xor */
        emulate_2op_SrcV("xor", src, dst, _regs.eflags);
        break;
    case 0x38 ... 0x3d: cmp: /* cmp */
        emulate_2op_SrcV("cmp", src, dst, _regs.eflags);
        break;
    case 0x63: /* movsxd */
        if ( mode != X86EMUL_MODE_PROT64 )
            goto cannot_emulate;
        dst.val = (int32_t)src.val;
        break;
    case 0x80 ... 0x83: /* Grp1 */
        switch ( modrm_reg )
        {
        case 0: goto add;
        case 1: goto or;
        case 2: goto adc;
        case 3: goto sbb;
        case 4: goto and;
        case 5: goto sub;
        case 6: goto xor;
        case 7: goto cmp;
        }
        break;
    case 0x84 ... 0x85: test: /* test */
        emulate_2op_SrcV("test", src, dst, _regs.eflags);
        break;
    case 0x86 ... 0x87: /* xchg */
        /* Write back the register source. */
        switch ( dst.bytes )
        {
        case 1: *(uint8_t  *)src.ptr = (uint8_t)dst.val; break;
        case 2: *(uint16_t *)src.ptr = (uint16_t)dst.val; break;
        case 4: *src.ptr = (uint32_t)dst.val; break; /* 64b reg: zero-extend */
        case 8: *src.ptr = dst.val; break;
        }
        /* Write back the memory destination with implicit LOCK prefix. */
        dst.val = src.val;
        lock_prefix = 1;
        break;
    case 0xa0 ... 0xa1: /* mov */
        dst.ptr = (unsigned long *)&_regs.eax;
        dst.val = src.val;
        _regs.eip += ad_bytes; /* skip src displacement */
        break;
    case 0xa2 ... 0xa3: /* mov */
        dst.val = (unsigned long)_regs.eax;
        _regs.eip += ad_bytes; /* skip dst displacement */
        break;
    case 0x88 ... 0x8b: /* mov */
    case 0xc6 ... 0xc7: /* mov (sole member of Grp11) */
        dst.val = src.val;
        break;
    case 0x8f: /* pop (sole member of Grp1a) */
        /* 64-bit mode: POP always pops a 64-bit operand. */
        if ( mode == X86EMUL_MODE_PROT64 )
            dst.bytes = 8;
        if ( (rc = ops->read_std(register_address(_regs.ss, _regs.esp),
                                 &dst.val, dst.bytes, ctxt)) != 0 )
            goto done;
        register_address_increment(_regs.esp, dst.bytes);
        break;
    case 0xc0 ... 0xc1: grp2: /* Grp2 */
        switch ( modrm_reg )
        {
        case 0: /* rol */
            emulate_2op_SrcB("rol", src, dst, _regs.eflags);
            break;
        case 1: /* ror */
            emulate_2op_SrcB("ror", src, dst, _regs.eflags);
            break;
        case 2: /* rcl */
            emulate_2op_SrcB("rcl", src, dst, _regs.eflags);
            break;
        case 3: /* rcr */
            emulate_2op_SrcB("rcr", src, dst, _regs.eflags);
            break;
        case 4: /* sal/shl */
        case 6: /* sal/shl */
            emulate_2op_SrcB("sal", src, dst, _regs.eflags);
            break;
        case 5: /* shr */
            emulate_2op_SrcB("shr", src, dst, _regs.eflags);
            break;
        case 7: /* sar */
            emulate_2op_SrcB("sar", src, dst, _regs.eflags);
            break;
        }
        break;
    case 0xd0 ... 0xd1: /* Grp2 */
        src.val = 1;
        goto grp2;
    case 0xd2 ... 0xd3: /* Grp2 */
        src.val = _regs.ecx;
        goto grp2;
    case 0xf6 ... 0xf7: /* Grp3 */
        switch ( modrm_reg )
        {
        case 0 ... 1: /* test */
            /* Special case in Grp3: test has an immediate source operand. */
            src.type  = OP_IMM;
            src.ptr   = (unsigned long *)_regs.eip;
            src.bytes = (d & ByteOp) ? 1 : op_bytes;
            if ( src.bytes == 8 ) src.bytes = 4;
            switch ( src.bytes )
            {
            case 1: src.val = insn_fetch(int8_t,  1, _regs.eip); break;
            case 2: src.val = insn_fetch(int16_t, 2, _regs.eip); break;
            case 4: src.val = insn_fetch(int32_t, 4, _regs.eip); break;
            }
            goto test;
        case 2: /* not */
            dst.val = ~dst.val;
            break;
        case 3: /* neg */
            emulate_1op("neg", dst, _regs.eflags);
            break;
        default:
            goto cannot_emulate;
        }
        break;
    case 0xfe ... 0xff: /* Grp4/Grp5 */
        switch ( modrm_reg )
        {
        case 0: /* inc */
            emulate_1op("inc", dst, _regs.eflags);
            break;
        case 1: /* dec */
            emulate_1op("dec", dst, _regs.eflags);
            break;
        case 6: /* push */
            /* Don't emulate if fault was on stack */
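            /*
             * Bit 1 of the page-fault error code is set for write accesses:
             * if it is set here, the fault was taken on the stack write
             * itself, which this code declines to emulate.
             */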
            if ( _regs.error_code & 2 )
                goto cannot_emulate;
            /* 64-bit mode: PUSH always pushes a 64-bit operand. */
            if ( mode == X86EMUL_MODE_PROT64 )
            {
                dst.bytes = 8;
                if ( (rc = ops->read_std((unsigned long)dst.ptr,
                                         &dst.val, 8, ctxt)) != 0 )
                    goto done;
            }
            register_address_increment(_regs.esp, -dst.bytes);
            if ( (rc = ops->write_std(register_address(_regs.ss, _regs.esp),
                                      dst.val, dst.bytes, ctxt)) != 0 )
                goto done;
            dst.val = dst.orig_val; /* skanky: disable writeback */
            break;
        default:
            goto cannot_emulate;
        }
        break;
    }

 writeback:
    if ( (d & Mov) || (dst.orig_val != dst.val) )
    {
        switch ( dst.type )
        {
        case OP_REG:
            /* The 4-byte case *is* correct: in 64-bit mode we zero-extend. */
            switch ( dst.bytes )
            {
            case 1: *(uint8_t  *)dst.ptr = (uint8_t)dst.val; break;
            case 2: *(uint16_t *)dst.ptr = (uint16_t)dst.val; break;
            case 4: *dst.ptr = (uint32_t)dst.val; break; /* 64b: zero-ext */
            case 8: *dst.ptr = dst.val; break;
            }
            break;
        case OP_MEM:
            if ( lock_prefix )
                rc = ops->cmpxchg_emulated(
                    (unsigned long)dst.ptr, dst.orig_val,
                    dst.val, dst.bytes, ctxt);
            else
                rc = ops->write_emulated(
                    (unsigned long)dst.ptr, dst.val, dst.bytes, ctxt);
            if ( rc != 0 )
                goto done;
        default:
            break;
        }
    }

    /* Commit shadow register state. */
    *ctxt->regs = _regs;

 done:
    return (rc == X86EMUL_UNHANDLEABLE) ? -1 : 0;

 special_insn:
    if ( twobyte )
        goto twobyte_special_insn;
    if ( rep_prefix )
    {
        if ( _regs.ecx == 0 )
        {
            ctxt->regs->eip = _regs.eip;
            goto done;
        }
        _regs.ecx--;
        _regs.eip = ctxt->regs->eip;
    }
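    /*
     * NB. One string iteration per emulation round: ECX is decremented and
     * EIP is rewound to the start of the instruction, so a REP-prefixed
     * instruction simply refaults and re-enters the emulator until ECX
     * reaches zero.
     */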
    switch ( b )
    {
    case 0xa4 ... 0xa5: /* movs */
        dst.type  = OP_MEM;
        dst.bytes = (d & ByteOp) ? 1 : op_bytes;
        if ( _regs.error_code & 2 )
        {
            /* Write fault: destination is special memory. */
            dst.ptr = (unsigned long *)cr2;
            if ( (rc = ops->read_std(register_address(seg ? *seg : _regs.ds,
                                                      _regs.esi),
                                     &dst.val, dst.bytes, ctxt)) != 0 )
                goto done;
        }
        else
        {
            /* Read fault: source is special memory. */
            dst.ptr = (unsigned long *)register_address(_regs.es, _regs.edi);
            if ( (rc = ops->read_emulated(cr2, &dst.val,
                                          dst.bytes, ctxt)) != 0 )
                goto done;
        }
        register_address_increment(
            _regs.esi, (_regs.eflags & EFLG_DF) ? -dst.bytes : dst.bytes);
        register_address_increment(
            _regs.edi, (_regs.eflags & EFLG_DF) ? -dst.bytes : dst.bytes);
        break;
    case 0xa6 ... 0xa7: /* cmps */
        DPRINTF("Urk! I don't handle CMPS.\n");
        goto cannot_emulate;
    case 0xaa ... 0xab: /* stos */
        dst.type  = OP_MEM;
        dst.bytes = (d & ByteOp) ? 1 : op_bytes;
        dst.ptr   = (unsigned long *)cr2;
        dst.val   = _regs.eax;
        register_address_increment(
            _regs.edi, (_regs.eflags & EFLG_DF) ? -dst.bytes : dst.bytes);
        break;
    case 0xac ... 0xad: /* lods */
        dst.type  = OP_REG;
        dst.bytes = (d & ByteOp) ? 1 : op_bytes;
        dst.ptr   = (unsigned long *)&_regs.eax;
        if ( (rc = ops->read_emulated(cr2, &dst.val, dst.bytes, ctxt)) != 0 )
            goto done;
        register_address_increment(
            _regs.esi, (_regs.eflags & EFLG_DF) ? -dst.bytes : dst.bytes);
        break;
    case 0xae ... 0xaf: /* scas */
        DPRINTF("Urk! I don't handle SCAS.\n");
        goto cannot_emulate;
    }
    goto writeback;

 twobyte_insn:
    switch ( b )
    {
    case 0x40 ... 0x4f: /* cmov */
        dst.val = dst.orig_val = src.val;
        d &= ~Mov; /* default to no move */
        /* First, assume we're decoding an even cmov opcode (lsb == 0). */
        switch ( (b & 15) >> 1 )
        {
        case 0: /* cmovo */
            d |= (_regs.eflags & EFLG_OF) ? Mov : 0;
            break;
        case 1: /* cmovb/cmovc/cmovnae */
            d |= (_regs.eflags & EFLG_CF) ? Mov : 0;
            break;
        case 2: /* cmovz/cmove */
            d |= (_regs.eflags & EFLG_ZF) ? Mov : 0;
            break;
        case 3: /* cmovbe/cmovna */
            d |= (_regs.eflags & (EFLG_CF|EFLG_ZF)) ? Mov : 0;
            break;
        case 4: /* cmovs */
            d |= (_regs.eflags & EFLG_SF) ? Mov : 0;
            break;
        case 5: /* cmovp/cmovpe */
            d |= (_regs.eflags & EFLG_PF) ? Mov : 0;
            break;
        case 7: /* cmovle/cmovng */
            d |= (_regs.eflags & EFLG_ZF) ? Mov : 0;
            /* fall through */
        case 6: /* cmovl/cmovnge */
            d |= (!(_regs.eflags & EFLG_SF) != !(_regs.eflags & EFLG_OF)) ?
                Mov : 0;
            break;
        }
        /* Odd cmov opcodes (lsb == 1) have inverted sense. */
        d ^= (b & 1) ? Mov : 0;
        break;
    case 0xb0 ... 0xb1: /* cmpxchg */
        /* Save real source value, then compare EAX against destination. */
        src.orig_val = src.val;
        src.val = _regs.eax;
        emulate_2op_SrcV("cmp", src, dst, _regs.eflags);
        /* Always write back. The question is: where to? */
        d |= Mov;
        if ( _regs.eflags & EFLG_ZF )
        {
            /* Success: write back to memory. */
            dst.val = src.orig_val;
        }
        else
        {
            /* Failure: write the value we saw to EAX. */
            dst.type = OP_REG;
            dst.ptr  = (unsigned long *)&_regs.eax;
        }
        break;
    case 0xa3: bt: /* bt */
        src.val &= (dst.bytes << 3) - 1; /* only subword offset */
        emulate_2op_SrcV_nobyte("bt", src, dst, _regs.eflags);
        break;
    case 0xb3: btr: /* btr */
        src.val &= (dst.bytes << 3) - 1; /* only subword offset */
        emulate_2op_SrcV_nobyte("btr", src, dst, _regs.eflags);
        break;
    case 0xab: bts: /* bts */
        src.val &= (dst.bytes << 3) - 1; /* only subword offset */
        emulate_2op_SrcV_nobyte("bts", src, dst, _regs.eflags);
        break;
    case 0xb6 ... 0xb7: /* movzx */
        dst.bytes = op_bytes;
        dst.val = (d & ByteOp) ? (uint8_t)src.val : (uint16_t)src.val;
        break;
    case 0xbb: btc: /* btc */
        src.val &= (dst.bytes << 3) - 1; /* only subword offset */
        emulate_2op_SrcV_nobyte("btc", src, dst, _regs.eflags);
        break;
    case 0xba: /* Grp8 */
        switch ( modrm_reg & 3 )
        {
        case 0: goto bt;
        case 1: goto bts;
        case 2: goto btr;
        case 3: goto btc;
        }
        break;
    case 0xbe ... 0xbf: /* movsx */
        dst.bytes = op_bytes;
        dst.val = (d & ByteOp) ? (int8_t)src.val : (int16_t)src.val;
        break;
    }
    goto writeback;

 twobyte_special_insn:
    /* Disable writeback. */
    dst.orig_val = dst.val;
    switch ( b )
    {
    case 0x0d: /* GrpP (prefetch) */
    case 0x18: /* Grp16 (prefetch/nop) */
        break;
    case 0xc7: /* Grp9 (cmpxchg8b) */
#if defined(__i386__)
    {
        unsigned long old_lo, old_hi;
        if ( ((rc = ops->read_emulated(cr2+0, &old_lo, 4, ctxt)) != 0) ||
             ((rc = ops->read_emulated(cr2+4, &old_hi, 4, ctxt)) != 0) )
            goto done;
        if ( (old_lo != _regs.eax) || (old_hi != _regs.edx) )
        {
            _regs.eax = old_lo;
            _regs.edx = old_hi;
            _regs.eflags &= ~EFLG_ZF;
        }
        else if ( ops->cmpxchg8b_emulated == NULL )
        {
            rc = X86EMUL_UNHANDLEABLE;
            goto done;
        }
        else
        {
            if ( (rc = ops->cmpxchg8b_emulated(cr2, old_lo, old_hi, _regs.ebx,
                                               _regs.ecx, ctxt)) != 0 )
                goto done;
            _regs.eflags |= EFLG_ZF;
        }
        break;
    }
#elif defined(__x86_64__)
    {
        unsigned long old, new;
        if ( (rc = ops->read_emulated(cr2, &old, 8, ctxt)) != 0 )
            goto done;
        if ( ((uint32_t)(old>>0) != (uint32_t)_regs.eax) ||
             ((uint32_t)(old>>32) != (uint32_t)_regs.edx) )
        {
            _regs.eax = (uint32_t)(old>>0);
            _regs.edx = (uint32_t)(old>>32);
            _regs.eflags &= ~EFLG_ZF;
        }
        else
        {
            new = (_regs.ecx<<32)|(uint32_t)_regs.ebx;
            if ( (rc = ops->cmpxchg_emulated(cr2, old, new, 8, ctxt)) != 0 )
                goto done;
            _regs.eflags |= EFLG_ZF;
        }
        break;
    }
#endif
    }
    goto writeback;

 cannot_emulate:
    DPRINTF("Cannot emulate %02x\n", b);
    return -1;
}

#ifdef __XEN__

#include <asm/mm.h>
#include <asm/uaccess.h>

int
x86_emulate_read_std(
    unsigned long addr,
    unsigned long *val,
    unsigned int bytes,
    struct x86_emulate_ctxt *ctxt)
{
    unsigned int rc;

    *val = 0;

    if ( (rc = copy_from_user((void *)val, (void *)addr, bytes)) != 0 )
    {
        propagate_page_fault(addr + bytes - rc, 0); /* read fault */
        return X86EMUL_PROPAGATE_FAULT;
    }

    return X86EMUL_CONTINUE;
}

int
x86_emulate_write_std(
    unsigned long addr,
    unsigned long val,
    unsigned int bytes,
    struct x86_emulate_ctxt *ctxt)
{
    unsigned int rc;

    if ( (rc = copy_to_user((void *)addr, (void *)&val, bytes)) != 0 )
    {
        propagate_page_fault(addr + bytes - rc, PGERR_write_access);
        return X86EMUL_PROPAGATE_FAULT;
    }

    return X86EMUL_CONTINUE;
}

#endif