ia64/xen-unstable

view xen/arch/x86/x86_emulate.c @ 10181:f4f4dd936103

Fix x86/64 build after previous changeset.
Signed-off-by: Keir Fraser <keir@xensource.com>
author kaf24@firebug.cl.cam.ac.uk
date Thu May 25 16:00:09 2006 +0100 (2006-05-25)
parents 41de9cd7971b
children 2dc7c2712700
line source
1 /******************************************************************************
2 * x86_emulate.c
3 *
4 * Generic x86 (32-bit and 64-bit) instruction decoder and emulator.
5 *
6 * Copyright (c) 2005 Keir Fraser
7 */
9 #ifndef __XEN__
10 #include <stdio.h>
11 #include <stdint.h>
12 #include <public/xen.h>
13 #define DPRINTF(_f, _a...) printf( _f , ## _a )
14 #else
15 #include <xen/config.h>
16 #include <xen/types.h>
17 #include <xen/lib.h>
18 #include <xen/mm.h>
19 #include <asm/regs.h>
20 #define DPRINTF DPRINTK
21 #endif
22 #include <asm-x86/x86_emulate.h>
24 /*
25 * Opcode effective-address decode tables.
26 * Note that we only emulate instructions that have at least one memory
27 * operand (excluding implicit stack references). We assume that stack
28 * references and instruction fetches will never occur in special memory
29 * areas that require emulation. So, for example, 'mov <imm>,<reg>' need
30 * not be handled.
31 */
/*
 * Per-opcode decode attributes, OR'ed together in the opcode tables below.
 * Bits 0-2 describe operand width and destination; bits 3-5 the source;
 * bits 6-7 whether a ModRM byte follows and whether the destination is
 * write-only.
 */
/* Operand sizes: 8-bit operands or specified/overridden size. */
#define ByteOp      (1<<0) /* 8-bit operands. */
/* Destination operand type. */
#define ImplicitOps (1<<1) /* Implicit in opcode. No generic decode. */
#define DstReg      (2<<1) /* Register operand. */
#define DstMem      (3<<1) /* Memory operand. */
#define DstMask     (3<<1)
/* Source operand type. */
#define SrcNone     (0<<3) /* No source operand. */
#define SrcImplicit (0<<3) /* Source operand is implicit in the opcode. */
#define SrcReg      (1<<3) /* Register operand. */
#define SrcMem      (2<<3) /* Memory operand. */
#define SrcMem16    (3<<3) /* Memory operand (16-bit). */
#define SrcMem32    (4<<3) /* Memory operand (32-bit). */
#define SrcImm      (5<<3) /* Immediate operand. */
#define SrcImmByte  (6<<3) /* 8-bit sign-extended immediate operand. */
#define SrcMask     (7<<3)
/* Generic ModRM decode. */
#define ModRM       (1<<6)
/* Destination is only written; never read. */
#define Mov         (1<<7)
/*
 * Decode attributes for one-byte opcodes, indexed by opcode byte.
 * A zero entry means the opcode is not emulated here (0x0f, the two-byte
 * escape, is special-cased by the decoder and falls into twobyte_table).
 */
static uint8_t opcode_table[256] = {
    /* 0x00 - 0x07 (add) */
    ByteOp|DstMem|SrcReg|ModRM, DstMem|SrcReg|ModRM,
    ByteOp|DstReg|SrcMem|ModRM, DstReg|SrcMem|ModRM,
    0, 0, 0, 0,
    /* 0x08 - 0x0F (or) */
    ByteOp|DstMem|SrcReg|ModRM, DstMem|SrcReg|ModRM,
    ByteOp|DstReg|SrcMem|ModRM, DstReg|SrcMem|ModRM,
    0, 0, 0, 0,
    /* 0x10 - 0x17 (adc) */
    ByteOp|DstMem|SrcReg|ModRM, DstMem|SrcReg|ModRM,
    ByteOp|DstReg|SrcMem|ModRM, DstReg|SrcMem|ModRM,
    0, 0, 0, 0,
    /* 0x18 - 0x1F (sbb) */
    ByteOp|DstMem|SrcReg|ModRM, DstMem|SrcReg|ModRM,
    ByteOp|DstReg|SrcMem|ModRM, DstReg|SrcMem|ModRM,
    0, 0, 0, 0,
    /* 0x20 - 0x27 (and) */
    ByteOp|DstMem|SrcReg|ModRM, DstMem|SrcReg|ModRM,
    ByteOp|DstReg|SrcMem|ModRM, DstReg|SrcMem|ModRM,
    0, 0, 0, 0,
    /* 0x28 - 0x2F (sub) */
    ByteOp|DstMem|SrcReg|ModRM, DstMem|SrcReg|ModRM,
    ByteOp|DstReg|SrcMem|ModRM, DstReg|SrcMem|ModRM,
    0, 0, 0, 0,
    /* 0x30 - 0x37 (xor) */
    ByteOp|DstMem|SrcReg|ModRM, DstMem|SrcReg|ModRM,
    ByteOp|DstReg|SrcMem|ModRM, DstReg|SrcMem|ModRM,
    0, 0, 0, 0,
    /* 0x38 - 0x3F (cmp) */
    ByteOp|DstMem|SrcReg|ModRM, DstMem|SrcReg|ModRM,
    ByteOp|DstReg|SrcMem|ModRM, DstReg|SrcMem|ModRM,
    0, 0, 0, 0,
    /* 0x40 - 0x4F */
    0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
    /* 0x50 - 0x5F */
    0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
    /* 0x60 - 0x6F */
    0, 0, 0, DstReg|SrcMem32|ModRM|Mov /* movsxd (x86/64) */,
    0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
    /* 0x70 - 0x7F */
    0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
    /* 0x80 - 0x87 (Grp1 imm ops; test; xchg) */
    ByteOp|DstMem|SrcImm|ModRM, DstMem|SrcImm|ModRM,
    ByteOp|DstMem|SrcImm|ModRM, DstMem|SrcImmByte|ModRM,
    ByteOp|DstMem|SrcReg|ModRM, DstMem|SrcReg|ModRM,
    ByteOp|DstMem|SrcReg|ModRM, DstMem|SrcReg|ModRM,
    /* 0x88 - 0x8F (mov; 0x8f = pop) */
    ByteOp|DstMem|SrcReg|ModRM, DstMem|SrcReg|ModRM,
    ByteOp|DstReg|SrcMem|ModRM, DstReg|SrcMem|ModRM,
    0, 0, 0, DstMem|SrcNone|ModRM|Mov,
    /* 0x90 - 0x9F */
    0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
    /* 0xA0 - 0xA7 (mov moffs; movs; cmps) */
    ByteOp|DstReg|SrcMem|Mov, DstReg|SrcMem|Mov,
    ByteOp|DstMem|SrcReg|Mov, DstMem|SrcReg|Mov,
    ByteOp|ImplicitOps|Mov, ImplicitOps|Mov,
    ByteOp|ImplicitOps, ImplicitOps,
    /* 0xA8 - 0xAF (stos; lods; scas) */
    0, 0, ByteOp|ImplicitOps|Mov, ImplicitOps|Mov,
    ByteOp|ImplicitOps|Mov, ImplicitOps|Mov,
    ByteOp|ImplicitOps, ImplicitOps,
    /* 0xB0 - 0xBF */
    0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
    /* 0xC0 - 0xC7 (Grp2 shifts/rotates; mov imm) */
    ByteOp|DstMem|SrcImm|ModRM, DstMem|SrcImmByte|ModRM, 0, 0,
    0, 0, ByteOp|DstMem|SrcImm|ModRM, DstMem|SrcImm|ModRM,
    /* 0xC8 - 0xCF */
    0, 0, 0, 0, 0, 0, 0, 0,
    /* 0xD0 - 0xD7 (Grp2, shift count 1 or CL) */
    ByteOp|DstMem|SrcImplicit|ModRM, DstMem|SrcImplicit|ModRM,
    ByteOp|DstMem|SrcImplicit|ModRM, DstMem|SrcImplicit|ModRM,
    0, 0, 0, 0,
    /* 0xD8 - 0xDF */
    0, 0, 0, 0, 0, 0, 0, 0,
    /* 0xE0 - 0xEF */
    0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
    /* 0xF0 - 0xF7 (Grp3) */
    0, 0, 0, 0,
    0, 0, ByteOp|DstMem|SrcNone|ModRM, DstMem|SrcNone|ModRM,
    /* 0xF8 - 0xFF (Grp4/Grp5) */
    0, 0, 0, 0,
    0, 0, ByteOp|DstMem|SrcNone|ModRM, DstMem|SrcNone|ModRM
};
/*
 * Decode attributes for 0x0f-prefixed (two-byte) opcodes, indexed by the
 * second opcode byte. A zero entry means the opcode is not emulated.
 */
static uint8_t twobyte_table[256] = {
    /* 0x00 - 0x0F (0x0d = GrpP prefetch) */
    0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, ImplicitOps|ModRM, 0, 0,
    /* 0x10 - 0x1F (0x18 = Grp16 prefetch/nop) */
    0, 0, 0, 0, 0, 0, 0, 0, ImplicitOps|ModRM, 0, 0, 0, 0, 0, 0, 0,
    /* 0x20 - 0x2F */
    0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
    /* 0x30 - 0x3F */
    0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
    /* 0x40 - 0x47 (cmov) */
    DstReg|SrcMem|ModRM|Mov, DstReg|SrcMem|ModRM|Mov,
    DstReg|SrcMem|ModRM|Mov, DstReg|SrcMem|ModRM|Mov,
    DstReg|SrcMem|ModRM|Mov, DstReg|SrcMem|ModRM|Mov,
    DstReg|SrcMem|ModRM|Mov, DstReg|SrcMem|ModRM|Mov,
    /* 0x48 - 0x4F (cmov) */
    DstReg|SrcMem|ModRM|Mov, DstReg|SrcMem|ModRM|Mov,
    DstReg|SrcMem|ModRM|Mov, DstReg|SrcMem|ModRM|Mov,
    DstReg|SrcMem|ModRM|Mov, DstReg|SrcMem|ModRM|Mov,
    DstReg|SrcMem|ModRM|Mov, DstReg|SrcMem|ModRM|Mov,
    /* 0x50 - 0x5F */
    0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
    /* 0x60 - 0x6F */
    0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
    /* 0x70 - 0x7F */
    0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
    /* 0x80 - 0x8F */
    0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
    /* 0x90 - 0x9F */
    0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
    /* 0xA0 - 0xA7 (0xa3 = bt) */
    0, 0, 0, DstMem|SrcReg|ModRM, 0, 0, 0, 0,
    /* 0xA8 - 0xAF (0xab = bts) */
    0, 0, 0, DstMem|SrcReg|ModRM, 0, 0, 0, 0,
    /* 0xB0 - 0xB7 (cmpxchg; btr; movzx) */
    ByteOp|DstMem|SrcReg|ModRM, DstMem|SrcReg|ModRM, 0, DstMem|SrcReg|ModRM,
    0, 0, ByteOp|DstReg|SrcMem|ModRM|Mov, DstReg|SrcMem16|ModRM|Mov,
    /* 0xB8 - 0xBF (Grp8; btc; movsx) */
    0, 0, DstMem|SrcImmByte|ModRM, DstMem|SrcReg|ModRM,
    0, 0, ByteOp|DstReg|SrcMem|ModRM|Mov, DstReg|SrcMem16|ModRM|Mov,
    /* 0xC0 - 0xCF (0xc7 = Grp9 cmpxchg8b) */
    0, 0, 0, 0, 0, 0, 0, ImplicitOps|ModRM, 0, 0, 0, 0, 0, 0, 0, 0,
    /* 0xD0 - 0xDF */
    0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
    /* 0xE0 - 0xEF */
    0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
    /* 0xF0 - 0xFF */
    0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0
};
/* Type, address-of, and value of an instruction's operand. */
struct operand {
    /* Operand location: register, memory, or instruction immediate. */
    enum { OP_REG, OP_MEM, OP_IMM } type;
    /* Operand width in bytes (1, 2, 4 or 8). */
    unsigned int  bytes;
    /*
     * 'val' is the working value, 'orig_val' the value before emulation
     * (writeback compares the two), 'ptr' the register slot or the
     * faulting linear address the operand lives at.
     */
    unsigned long val, orig_val, *ptr;
};
/* EFLAGS bit definitions. */
#define EFLG_OF (1<<11) /* Overflow */
#define EFLG_DF (1<<10) /* Direction (string ops count down when set) */
#define EFLG_SF (1<<7)  /* Sign */
#define EFLG_ZF (1<<6)  /* Zero */
#define EFLG_AF (1<<4)  /* Auxiliary carry */
#define EFLG_PF (1<<2)  /* Parity */
#define EFLG_CF (1<<0)  /* Carry */
/*
 * Instruction emulation:
 * Most instructions are emulated directly via a fragment of inline assembly
 * code. This allows us to save/restore EFLAGS and thus very easily pick up
 * any modified flags.
 */

#if defined(__x86_64__)
#define _LO32 "k"          /* force 32-bit operand */
#define _STK  "%%rsp"      /* stack pointer */
#elif defined(__i386__)
#define _LO32 ""           /* force 32-bit operand (no-op on i386) */
#define _STK  "%%esp"      /* stack pointer */
#endif
/*
 * These EFLAGS bits are restored from saved value during emulation, and
 * any changes are written back to the saved value after emulation.
 */
#define EFLAGS_MASK (EFLG_OF|EFLG_SF|EFLG_ZF|EFLG_AF|EFLG_PF|EFLG_CF)

/*
 * Before executing instruction: restore necessary bits in EFLAGS.
 * _sav/_msk/_tmp are strings naming positional asm operands: _sav holds the
 * saved flags, _msk the EFLAGS_MASK constant, _tmp a scratch register.
 */
#define _PRE_EFLAGS(_sav, _msk, _tmp)           \
/* EFLAGS = (_sav & _msk) | (EFLAGS & ~_msk); */\
"push %"_sav"; "                                \
"movl %"_msk",%"_LO32 _tmp"; "                  \
"andl %"_LO32 _tmp",("_STK"); "                 \
"pushf; "                                       \
"notl %"_LO32 _tmp"; "                          \
"andl %"_LO32 _tmp",("_STK"); "                 \
"pop  %"_tmp"; "                                \
"orl  %"_LO32 _tmp",("_STK"); "                 \
"popf; "                                        \
/* _sav &= ~msk; */                             \
"movl %"_msk",%"_LO32 _tmp"; "                  \
"notl %"_LO32 _tmp"; "                          \
"andl %"_LO32 _tmp",%"_sav"; "

/* After executing instruction: write-back necessary bits in EFLAGS. */
#define _POST_EFLAGS(_sav, _msk, _tmp) \
/* _sav |= EFLAGS & _msk; */           \
"pushf; "                              \
"pop  %"_tmp"; "                       \
"andl %"_msk",%"_LO32 _tmp"; "         \
"orl  %"_LO32 _tmp",%"_sav"; "
/*
 * Raw emulation: instruction has two explicit operands.
 * The _wx/_wy, _lx/_ly, _qx/_qy argument pairs are asm operand-constraint
 * strings for the source at word/long/quad width respectively (the "x"
 * string modifies the instruction operand, the "y" string the input
 * constraint).
 */
#define __emulate_2op_nobyte(_op,_src,_dst,_eflags,_wx,_wy,_lx,_ly,_qx,_qy)\
do{ unsigned long _tmp;                                                    \
    switch ( (_dst).bytes )                                                \
    {                                                                      \
    case 2:                                                                \
        __asm__ __volatile__ (                                             \
            _PRE_EFLAGS("0","4","2")                                       \
            _op"w %"_wx"3,%1; "                                            \
            _POST_EFLAGS("0","4","2")                                      \
            : "=m" (_eflags), "=m" ((_dst).val), "=&r" (_tmp)              \
            : _wy ((_src).val), "i" (EFLAGS_MASK) );                       \
        break;                                                             \
    case 4:                                                                \
        __asm__ __volatile__ (                                             \
            _PRE_EFLAGS("0","4","2")                                       \
            _op"l %"_lx"3,%1; "                                            \
            _POST_EFLAGS("0","4","2")                                      \
            : "=m" (_eflags), "=m" ((_dst).val), "=&r" (_tmp)              \
            : _ly ((_src).val), "i" (EFLAGS_MASK) );                       \
        break;                                                             \
    case 8:                                                                \
        __emulate_2op_8byte(_op, _src, _dst, _eflags, _qx, _qy);           \
        break;                                                             \
    }                                                                      \
} while (0)

/* As above, but additionally handles a 1-byte destination. */
#define __emulate_2op(_op,_src,_dst,_eflags,_bx,_by,_wx,_wy,_lx,_ly,_qx,_qy)\
do{ unsigned long _tmp;                                                    \
    switch ( (_dst).bytes )                                                \
    {                                                                      \
    case 1:                                                                \
        __asm__ __volatile__ (                                             \
            _PRE_EFLAGS("0","4","2")                                       \
            _op"b %"_bx"3,%1; "                                            \
            _POST_EFLAGS("0","4","2")                                      \
            : "=m" (_eflags), "=m" ((_dst).val), "=&r" (_tmp)              \
            : _by ((_src).val), "i" (EFLAGS_MASK) );                       \
        break;                                                             \
    default:                                                               \
        __emulate_2op_nobyte(_op,_src,_dst,_eflags,_wx,_wy,_lx,_ly,_qx,_qy);\
        break;                                                             \
    }                                                                      \
} while (0)

/* Source operand is byte-sized and may be restricted to just %cl. */
#define emulate_2op_SrcB(_op, _src, _dst, _eflags)                         \
    __emulate_2op(_op, _src, _dst, _eflags,                                \
                  "b", "c", "b", "c", "b", "c", "b", "c")

/* Source operand is byte, word, long or quad sized. */
#define emulate_2op_SrcV(_op, _src, _dst, _eflags)                         \
    __emulate_2op(_op, _src, _dst, _eflags,                                \
                  "b", "q", "w", "r", _LO32, "r", "", "r")

/* Source operand is word, long or quad sized. */
#define emulate_2op_SrcV_nobyte(_op, _src, _dst, _eflags)                  \
    __emulate_2op_nobyte(_op, _src, _dst, _eflags,                         \
                         "w", "r", _LO32, "r", "", "r")
/*
 * Instruction has only one explicit operand (no source operand).
 * Dispatches on destination width; flags handling as for the 2-op macros.
 */
#define emulate_1op(_op,_dst,_eflags)                                      \
do{ unsigned long _tmp;                                                    \
    switch ( (_dst).bytes )                                                \
    {                                                                      \
    case 1:                                                                \
        __asm__ __volatile__ (                                             \
            _PRE_EFLAGS("0","3","2")                                       \
            _op"b %1; "                                                    \
            _POST_EFLAGS("0","3","2")                                      \
            : "=m" (_eflags), "=m" ((_dst).val), "=&r" (_tmp)              \
            : "i" (EFLAGS_MASK) );                                         \
        break;                                                             \
    case 2:                                                                \
        __asm__ __volatile__ (                                             \
            _PRE_EFLAGS("0","3","2")                                       \
            _op"w %1; "                                                    \
            _POST_EFLAGS("0","3","2")                                      \
            : "=m" (_eflags), "=m" ((_dst).val), "=&r" (_tmp)              \
            : "i" (EFLAGS_MASK) );                                         \
        break;                                                             \
    case 4:                                                                \
        __asm__ __volatile__ (                                             \
            _PRE_EFLAGS("0","3","2")                                       \
            _op"l %1; "                                                    \
            _POST_EFLAGS("0","3","2")                                      \
            : "=m" (_eflags), "=m" ((_dst).val), "=&r" (_tmp)              \
            : "i" (EFLAGS_MASK) );                                         \
        break;                                                             \
    case 8:                                                                \
        __emulate_1op_8byte(_op, _dst, _eflags);                           \
        break;                                                             \
    }                                                                      \
} while (0)
/* Emulate an instruction with quadword operands (x86/64 only). */
#if defined(__x86_64__)
#define __emulate_2op_8byte(_op, _src, _dst, _eflags, _qx, _qy)            \
do{ __asm__ __volatile__ (                                                 \
        _PRE_EFLAGS("0","4","2")                                           \
        _op"q %"_qx"3,%1; "                                                \
        _POST_EFLAGS("0","4","2")                                          \
        : "=m" (_eflags), "=m" ((_dst).val), "=&r" (_tmp)                  \
        : _qy ((_src).val), "i" (EFLAGS_MASK) );                           \
} while (0)
#define __emulate_1op_8byte(_op, _dst, _eflags)                            \
do{ __asm__ __volatile__ (                                                 \
        _PRE_EFLAGS("0","3","2")                                           \
        _op"q %1; "                                                        \
        _POST_EFLAGS("0","3","2")                                          \
        : "=m" (_eflags), "=m" ((_dst).val), "=&r" (_tmp)                  \
        : "i" (EFLAGS_MASK) );                                             \
} while (0)
#elif defined(__i386__)
/* No quadword operands on i386: the 8-byte cases compile to nothing. */
#define __emulate_2op_8byte(_op, _src, _dst, _eflags, _qx, _qy)
#define __emulate_1op_8byte(_op, _dst, _eflags)
#endif /* __i386__ */
/*
 * Fetch next part of the instruction being emulated.
 * NB. Relies on 'rc', 'ops' and 'ctxt' plus a 'done' label being in scope
 * at the expansion site; jumps to 'done' on a failed fetch. Advances _eip
 * past the fetched bytes and yields the value cast to _type.
 */
#define insn_fetch(_type, _size, _eip)                                  \
({ unsigned long _x;                                                    \
   rc = ops->read_std((unsigned long)(_eip), &_x, (_size), ctxt);       \
   if ( rc != 0 )                                                       \
       goto done;                                                       \
   (_eip) += (_size);                                                   \
   (_type)_x;                                                           \
})
/*
 * Access/update address held in a register, based on addressing mode.
 * NB. Relies on 'ad_bytes' (and 'mode' for register_address) being in
 * scope at the expansion site.
 */
#define register_address(sel, reg)                                      \
    ((ad_bytes == sizeof(unsigned long)) ? (reg) :                      \
     ((mode == X86EMUL_MODE_REAL) ? /* implies ad_bytes == 2 */         \
      /* Real mode: classic seg:offset linear address. */               \
      (((unsigned long)(sel) << 4) + ((reg) & 0xffff)) :                \
      /* Otherwise truncate to the address width. */                    \
      ((reg) & ((1UL << (ad_bytes << 3)) - 1))))

/* Add 'inc' to 'reg', wrapping within the current address width. */
#define register_address_increment(reg, inc)                            \
do {                                                                    \
    if ( ad_bytes == sizeof(unsigned long) )                            \
        (reg) += (inc);                                                 \
    else                                                                \
        (reg) = ((reg) & ~((1UL << (ad_bytes << 3)) - 1)) |             \
                (((reg) + (inc)) & ((1UL << (ad_bytes << 3)) - 1));     \
} while (0)
390 void *
391 decode_register(
392 uint8_t modrm_reg, struct cpu_user_regs *regs, int highbyte_regs)
393 {
394 void *p;
396 switch ( modrm_reg )
397 {
398 case 0: p = &regs->eax; break;
399 case 1: p = &regs->ecx; break;
400 case 2: p = &regs->edx; break;
401 case 3: p = &regs->ebx; break;
402 case 4: p = (highbyte_regs ?
403 ((unsigned char *)&regs->eax + 1) :
404 (unsigned char *)&regs->esp); break;
405 case 5: p = (highbyte_regs ?
406 ((unsigned char *)&regs->ecx + 1) :
407 (unsigned char *)&regs->ebp); break;
408 case 6: p = (highbyte_regs ?
409 ((unsigned char *)&regs->edx + 1) :
410 (unsigned char *)&regs->esi); break;
411 case 7: p = (highbyte_regs ?
412 ((unsigned char *)&regs->ebx + 1) :
413 (unsigned char *)&regs->edi); break;
414 #if defined(__x86_64__)
415 case 8: p = &regs->r8; break;
416 case 9: p = &regs->r9; break;
417 case 10: p = &regs->r10; break;
418 case 11: p = &regs->r11; break;
419 case 12: p = &regs->r12; break;
420 case 13: p = &regs->r13; break;
421 case 14: p = &regs->r14; break;
422 case 15: p = &regs->r15; break;
423 #endif
424 default: p = NULL; break;
425 }
427 return p;
428 }
/*
 * Decode and emulate a single instruction with a memory operand at the
 * faulting address ctxt->cr2. On success the shadow register state is
 * committed to ctxt->regs and 0 is returned; -1 means the instruction
 * cannot be emulated (or a callback reported X86EMUL_UNHANDLEABLE).
 */
int
x86_emulate_memop(
    struct x86_emulate_ctxt *ctxt,
    struct x86_emulate_ops *ops)
{
    uint8_t b, d, sib, twobyte = 0, rex_prefix = 0;
    uint8_t modrm, modrm_mod = 0, modrm_reg = 0, modrm_rm = 0;
    uint16_t *seg = NULL; /* override segment */
    unsigned int op_bytes, ad_bytes, lock_prefix = 0, rep_prefix = 0, i;
    int rc = 0;
    struct operand src, dst;
    unsigned long cr2 = ctxt->cr2;
    int mode = ctxt->mode;

    /* Shadow copy of register state. Committed on successful emulation. */
    struct cpu_user_regs _regs = *ctxt->regs;

    /* Default operand/address sizes for the execution mode. */
    switch ( mode )
    {
    case X86EMUL_MODE_REAL:
    case X86EMUL_MODE_PROT16:
        op_bytes = ad_bytes = 2;
        break;
    case X86EMUL_MODE_PROT32:
        op_bytes = ad_bytes = 4;
        break;
#ifdef __x86_64__
    case X86EMUL_MODE_PROT64:
        op_bytes = 4;
        ad_bytes = 8;
        break;
#endif
    default:
        return -1;
    }

    /* Legacy prefixes (at most 8 are consumed). */
    for ( i = 0; i < 8; i++ )
    {
        switch ( b = insn_fetch(uint8_t, 1, _regs.eip) )
        {
        case 0x66: /* operand-size override */
            op_bytes ^= 6;      /* switch between 2/4 bytes */
            break;
        case 0x67: /* address-size override */
            if ( mode == X86EMUL_MODE_PROT64 )
                ad_bytes ^= 12; /* switch between 4/8 bytes */
            else
                ad_bytes ^= 6;  /* switch between 2/4 bytes */
            break;
        case 0x2e: /* CS override */
            seg = &_regs.cs;
            break;
        case 0x3e: /* DS override */
            seg = &_regs.ds;
            break;
        case 0x26: /* ES override */
            seg = &_regs.es;
            break;
        case 0x64: /* FS override */
            seg = &_regs.fs;
            break;
        case 0x65: /* GS override */
            seg = &_regs.gs;
            break;
        case 0x36: /* SS override */
            seg = &_regs.ss;
            break;
        case 0xf0: /* LOCK */
            lock_prefix = 1;
            break;
        case 0xf3: /* REP/REPE/REPZ */
            rep_prefix = 1;
            break;
        case 0xf2: /* REPNE/REPNZ */
            break;
        default:
            goto done_prefixes;
        }
    }
 done_prefixes:

    /* Not quite the same as 80386 real mode, but hopefully good enough. */
    if ( (mode == X86EMUL_MODE_REAL) && (ad_bytes != 2) )
        goto cannot_emulate;

    /* REX prefix. */
    if ( (mode == X86EMUL_MODE_PROT64) && ((b & 0xf0) == 0x40) )
    {
        rex_prefix = b;
        if ( b & 8 )
            op_bytes = 8;          /* REX.W */
        modrm_reg = (b & 4) << 1;  /* REX.R */
        /* REX.B and REX.X do not need to be decoded. */
        b = insn_fetch(uint8_t, 1, _regs.eip);
    }

    /* Opcode byte(s). */
    d = opcode_table[b];
    if ( d == 0 )
    {
        /* Two-byte opcode? */
        if ( b == 0x0f )
        {
            twobyte = 1;
            b = insn_fetch(uint8_t, 1, _regs.eip);
            d = twobyte_table[b];
        }

        /* Unrecognised? */
        if ( d == 0 )
            goto cannot_emulate;
    }

    /* ModRM and SIB bytes. Only displacement sizes matter here: the      */
    /* effective address itself is known (it is the faulting cr2).        */
    if ( d & ModRM )
    {
        modrm = insn_fetch(uint8_t, 1, _regs.eip);
        modrm_mod |= (modrm & 0xc0) >> 6;
        modrm_reg |= (modrm & 0x38) >> 3; /* OR: REX.R already in bit 3 */
        modrm_rm  |= (modrm & 0x07);

        if ( modrm_mod == 3 )
        {
            DPRINTF("Cannot parse ModRM.mod == 3.\n");
            goto cannot_emulate;
        }

        if ( ad_bytes == 2 )
        {
            /* 16-bit ModR/M decode. */
            switch ( modrm_mod )
            {
            case 0:
                if ( modrm_rm == 6 )
                    _regs.eip += 2; /* skip disp16 */
                break;
            case 1:
                _regs.eip += 1; /* skip disp8 */
                break;
            case 2:
                _regs.eip += 2; /* skip disp16 */
                break;
            }
        }
        else
        {
            /* 32/64-bit ModR/M decode. */
            switch ( modrm_mod )
            {
            case 0:
                if ( (modrm_rm == 4) &&
                     (((sib = insn_fetch(uint8_t, 1, _regs.eip)) & 7) == 5) )
                    _regs.eip += 4; /* skip disp32 specified by SIB.base */
                else if ( modrm_rm == 5 )
                    _regs.eip += 4; /* skip disp32 */
                break;
            case 1:
                if ( modrm_rm == 4 )
                    sib = insn_fetch(uint8_t, 1, _regs.eip);
                _regs.eip += 1; /* skip disp8 */
                break;
            case 2:
                if ( modrm_rm == 4 )
                    sib = insn_fetch(uint8_t, 1, _regs.eip);
                _regs.eip += 4; /* skip disp32 */
                break;
            }
        }
    }

    /* Decode and fetch the destination operand: register or memory. */
    switch ( d & DstMask )
    {
    case ImplicitOps:
        /* Special instructions do their own operand decoding. */
        goto special_insn;
    case DstReg:
        dst.type = OP_REG;
        if ( d & ByteOp )
        {
            /* Without a REX prefix, byte regs 4-7 are AH/CH/DH/BH. */
            dst.ptr = decode_register(modrm_reg, &_regs, (rex_prefix == 0));
            dst.val = *(uint8_t *)dst.ptr;
            dst.bytes = 1;
        }
        else
        {
            dst.ptr = decode_register(modrm_reg, &_regs, 0);
            switch ( (dst.bytes = op_bytes) )
            {
            case 2: dst.val = *(uint16_t *)dst.ptr; break;
            case 4: dst.val = *(uint32_t *)dst.ptr; break;
            case 8: dst.val = *(uint64_t *)dst.ptr; break;
            }
        }
        break;
    case DstMem:
        dst.type  = OP_MEM;
        dst.ptr   = (unsigned long *)cr2;
        dst.bytes = (d & ByteOp) ? 1 : op_bytes;
        if ( !(d & Mov) && /* optimisation - avoid slow emulated read */
             ((rc = ops->read_emulated((unsigned long)dst.ptr,
                                       &dst.val, dst.bytes, ctxt)) != 0) )
            goto done;
        break;
    }
    dst.orig_val = dst.val;

    /* Decode and fetch the source operand: register, memory or immediate. */
    switch ( d & SrcMask )
    {
    case SrcNone:
        break;
    case SrcReg:
        src.type = OP_REG;
        if ( d & ByteOp )
        {
            src.ptr = decode_register(modrm_reg, &_regs, (rex_prefix == 0));
            src.val = src.orig_val = *(uint8_t *)src.ptr;
            src.bytes = 1;
        }
        else
        {
            src.ptr = decode_register(modrm_reg, &_regs, 0);
            switch ( (src.bytes = op_bytes) )
            {
            case 2: src.val = src.orig_val = *(uint16_t *)src.ptr; break;
            case 4: src.val = src.orig_val = *(uint32_t *)src.ptr; break;
            case 8: src.val = src.orig_val = *(uint64_t *)src.ptr; break;
            }
        }
        break;
    case SrcMem16:
        src.bytes = 2;
        goto srcmem_common;
    case SrcMem32:
        src.bytes = 4;
        goto srcmem_common;
    case SrcMem:
        src.bytes = (d & ByteOp) ? 1 : op_bytes;
    srcmem_common:
        src.type = OP_MEM;
        src.ptr  = (unsigned long *)cr2;
        if ( (rc = ops->read_emulated((unsigned long)src.ptr,
                                      &src.val, src.bytes, ctxt)) != 0 )
            goto done;
        src.orig_val = src.val;
        break;
    case SrcImm:
        src.type = OP_IMM;
        src.ptr  = (unsigned long *)_regs.eip;
        src.bytes = (d & ByteOp) ? 1 : op_bytes;
        if ( src.bytes == 8 ) src.bytes = 4; /* imm64 never encoded here */
        /* NB. Immediates are sign-extended as necessary. */
        switch ( src.bytes )
        {
        case 1: src.val = insn_fetch(int8_t,  1, _regs.eip); break;
        case 2: src.val = insn_fetch(int16_t, 2, _regs.eip); break;
        case 4: src.val = insn_fetch(int32_t, 4, _regs.eip); break;
        }
        break;
    case SrcImmByte:
        src.type = OP_IMM;
        src.ptr  = (unsigned long *)_regs.eip;
        src.bytes = 1;
        src.val = insn_fetch(int8_t, 1, _regs.eip);
        break;
    }

    if ( twobyte )
        goto twobyte_insn;

    /* One-byte opcode execution. */
    switch ( b )
    {
    case 0x00 ... 0x05: add: /* add */
        emulate_2op_SrcV("add", src, dst, _regs.eflags);
        break;
    case 0x08 ... 0x0d: or:  /* or */
        emulate_2op_SrcV("or", src, dst, _regs.eflags);
        break;
    case 0x10 ... 0x15: adc: /* adc */
        emulate_2op_SrcV("adc", src, dst, _regs.eflags);
        break;
    case 0x18 ... 0x1d: sbb: /* sbb */
        emulate_2op_SrcV("sbb", src, dst, _regs.eflags);
        break;
    case 0x20 ... 0x25: and: /* and */
        emulate_2op_SrcV("and", src, dst, _regs.eflags);
        break;
    case 0x28 ... 0x2d: sub: /* sub */
        emulate_2op_SrcV("sub", src, dst, _regs.eflags);
        break;
    case 0x30 ... 0x35: xor: /* xor */
        emulate_2op_SrcV("xor", src, dst, _regs.eflags);
        break;
    case 0x38 ... 0x3d: cmp: /* cmp */
        emulate_2op_SrcV("cmp", src, dst, _regs.eflags);
        break;
    case 0x63: /* movsxd */
        if ( mode != X86EMUL_MODE_PROT64 )
            goto cannot_emulate;
        dst.val = (int32_t)src.val;
        break;
    case 0x80 ... 0x83: /* Grp1: immediate forms of the ALU ops above */
        switch ( modrm_reg )
        {
        case 0: goto add;
        case 1: goto or;
        case 2: goto adc;
        case 3: goto sbb;
        case 4: goto and;
        case 5: goto sub;
        case 6: goto xor;
        case 7: goto cmp;
        }
        break;
    case 0x84 ... 0x85: test: /* test */
        emulate_2op_SrcV("test", src, dst, _regs.eflags);
        break;
    case 0x86 ... 0x87: /* xchg */
        /* Write back the register source. */
        switch ( dst.bytes )
        {
        case 1: *(uint8_t  *)src.ptr = (uint8_t)dst.val;  break;
        case 2: *(uint16_t *)src.ptr = (uint16_t)dst.val; break;
        case 4: *src.ptr = (uint32_t)dst.val; break; /* 64b reg: zero-extend */
        case 8: *src.ptr = dst.val; break;
        }
        /* Write back the memory destination with implicit LOCK prefix. */
        dst.val = src.val;
        lock_prefix = 1;
        break;
    case 0xa0 ... 0xa1: /* mov AL/eAX,moffs */
        dst.ptr = (unsigned long *)&_regs.eax;
        dst.val = src.val;
        _regs.eip += ad_bytes; /* skip src displacement */
        break;
    case 0xa2 ... 0xa3: /* mov moffs,AL/eAX */
        dst.val = (unsigned long)_regs.eax;
        _regs.eip += ad_bytes; /* skip dst displacement */
        break;
    case 0x88 ... 0x8b: /* mov */
    case 0xc6 ... 0xc7: /* mov (sole member of Grp11) */
        dst.val = src.val;
        break;
    case 0x8f: /* pop (sole member of Grp1a) */
        /* 64-bit mode: POP always pops a 64-bit operand. */
        if ( mode == X86EMUL_MODE_PROT64 )
            dst.bytes = 8;
        if ( (rc = ops->read_std(register_address(_regs.ss, _regs.esp),
                                 &dst.val, dst.bytes, ctxt)) != 0 )
            goto done;
        register_address_increment(_regs.esp, dst.bytes);
        break;
    case 0xc0 ... 0xc1: grp2: /* Grp2: shifts/rotates; count in src.val */
        switch ( modrm_reg )
        {
        case 0: /* rol */
            emulate_2op_SrcB("rol", src, dst, _regs.eflags);
            break;
        case 1: /* ror */
            emulate_2op_SrcB("ror", src, dst, _regs.eflags);
            break;
        case 2: /* rcl */
            emulate_2op_SrcB("rcl", src, dst, _regs.eflags);
            break;
        case 3: /* rcr */
            emulate_2op_SrcB("rcr", src, dst, _regs.eflags);
            break;
        case 4: /* sal/shl */
        case 6: /* sal/shl */
            emulate_2op_SrcB("sal", src, dst, _regs.eflags);
            break;
        case 5: /* shr */
            emulate_2op_SrcB("shr", src, dst, _regs.eflags);
            break;
        case 7: /* sar */
            emulate_2op_SrcB("sar", src, dst, _regs.eflags);
            break;
        }
        break;
    case 0xd0 ... 0xd1: /* Grp2, shift count == 1 */
        src.val = 1;
        goto grp2;
    case 0xd2 ... 0xd3: /* Grp2, shift count == CL */
        src.val = _regs.ecx;
        goto grp2;
    case 0xf6 ... 0xf7: /* Grp3 */
        switch ( modrm_reg )
        {
        case 0 ... 1: /* test */
            /* Special case in Grp3: test has an immediate source operand. */
            src.type = OP_IMM;
            src.ptr = (unsigned long *)_regs.eip;
            src.bytes = (d & ByteOp) ? 1 : op_bytes;
            if ( src.bytes == 8 ) src.bytes = 4;
            switch ( src.bytes )
            {
            case 1: src.val = insn_fetch(int8_t,  1, _regs.eip); break;
            case 2: src.val = insn_fetch(int16_t, 2, _regs.eip); break;
            case 4: src.val = insn_fetch(int32_t, 4, _regs.eip); break;
            }
            goto test;
        case 2: /* not */
            dst.val = ~dst.val;
            break;
        case 3: /* neg */
            emulate_1op("neg", dst, _regs.eflags);
            break;
        default:
            goto cannot_emulate;
        }
        break;
    case 0xfe ... 0xff: /* Grp4/Grp5 */
        switch ( modrm_reg )
        {
        case 0: /* inc */
            emulate_1op("inc", dst, _regs.eflags);
            break;
        case 1: /* dec */
            emulate_1op("dec", dst, _regs.eflags);
            break;
        case 6: /* push */
            /* 64-bit mode: PUSH always pushes a 64-bit operand. */
            if ( mode == X86EMUL_MODE_PROT64 )
            {
                dst.bytes = 8;
                if ( (rc = ops->read_std((unsigned long)dst.ptr,
                                         &dst.val, 8, ctxt)) != 0 )
                    goto done;
            }
            register_address_increment(_regs.esp, -dst.bytes);
            if ( (rc = ops->write_std(register_address(_regs.ss, _regs.esp),
                                      dst.val, dst.bytes, ctxt)) != 0 )
                goto done;
            dst.val = dst.orig_val; /* skanky: disable writeback */
            break;
        default:
            goto cannot_emulate;
        }
        break;
    }

 writeback:
    /* Commit the destination operand, unless it is unchanged and not Mov. */
    if ( (d & Mov) || (dst.orig_val != dst.val) )
    {
        switch ( dst.type )
        {
        case OP_REG:
            /* The 4-byte case *is* correct: in 64-bit mode we zero-extend. */
            switch ( dst.bytes )
            {
            case 1: *(uint8_t  *)dst.ptr = (uint8_t)dst.val;  break;
            case 2: *(uint16_t *)dst.ptr = (uint16_t)dst.val; break;
            case 4: *dst.ptr = (uint32_t)dst.val; break; /* 64b: zero-ext */
            case 8: *dst.ptr = dst.val; break;
            }
            break;
        case OP_MEM:
            if ( lock_prefix )
                rc = ops->cmpxchg_emulated(
                    (unsigned long)dst.ptr, dst.orig_val,
                    dst.val, dst.bytes, ctxt);
            else
                rc = ops->write_emulated(
                    (unsigned long)dst.ptr, dst.val, dst.bytes, ctxt);
            if ( rc != 0 )
                goto done;
            /* fall through */
        default:
            break;
        }
    }

    /* Commit shadow register state. */
    *ctxt->regs = _regs;

 done:
    return (rc == X86EMUL_UNHANDLEABLE) ? -1 : 0;

 special_insn:
    if ( twobyte )
        goto twobyte_special_insn;
    if ( rep_prefix )
    {
        if ( _regs.ecx == 0 )
        {
            /* Count exhausted: skip the instruction and finish. */
            ctxt->regs->eip = _regs.eip;
            goto done;
        }
        _regs.ecx--;
        _regs.eip = ctxt->regs->eip; /* rewind EIP: re-fault per iteration */
    }
    switch ( b )
    {
    case 0xa4 ... 0xa5: /* movs */
        dst.type  = OP_MEM;
        dst.bytes = (d & ByteOp) ? 1 : op_bytes;
        if ( _regs.error_code & 2 )
        {
            /* Write fault: destination is special memory. */
            dst.ptr = (unsigned long *)cr2;
            if ( (rc = ops->read_std(register_address(seg ? *seg : _regs.ds,
                                                      _regs.esi),
                                     &dst.val, dst.bytes, ctxt)) != 0 )
                goto done;
        }
        else
        {
            /* Read fault: source is special memory. */
            dst.ptr = (unsigned long *)register_address(_regs.es, _regs.edi);
            if ( (rc = ops->read_emulated(cr2, &dst.val,
                                          dst.bytes, ctxt)) != 0 )
                goto done;
        }
        register_address_increment(
            _regs.esi, (_regs.eflags & EFLG_DF) ? -dst.bytes : dst.bytes);
        register_address_increment(
            _regs.edi, (_regs.eflags & EFLG_DF) ? -dst.bytes : dst.bytes);
        break;
    case 0xa6 ... 0xa7: /* cmps */
        DPRINTF("Urk! I don't handle CMPS.\n");
        goto cannot_emulate;
    case 0xaa ... 0xab: /* stos */
        dst.type  = OP_MEM;
        dst.bytes = (d & ByteOp) ? 1 : op_bytes;
        dst.ptr   = (unsigned long *)cr2;
        dst.val   = _regs.eax;
        register_address_increment(
            _regs.edi, (_regs.eflags & EFLG_DF) ? -dst.bytes : dst.bytes);
        break;
    case 0xac ... 0xad: /* lods */
        dst.type  = OP_REG;
        dst.bytes = (d & ByteOp) ? 1 : op_bytes;
        dst.ptr   = (unsigned long *)&_regs.eax;
        if ( (rc = ops->read_emulated(cr2, &dst.val, dst.bytes, ctxt)) != 0 )
            goto done;
        register_address_increment(
            _regs.esi, (_regs.eflags & EFLG_DF) ? -dst.bytes : dst.bytes);
        break;
    case 0xae ... 0xaf: /* scas */
        DPRINTF("Urk! I don't handle SCAS.\n");
        goto cannot_emulate;
    }
    goto writeback;

 twobyte_insn:
    switch ( b )
    {
    case 0x40 ... 0x4f: /* cmov */
        dst.val = dst.orig_val = src.val;
        d &= ~Mov; /* default to no move */
        /* First, assume we're decoding an even cmov opcode (lsb == 0). */
        switch ( (b & 15) >> 1 )
        {
        case 0: /* cmovo */
            d |= (_regs.eflags & EFLG_OF) ? Mov : 0;
            break;
        case 1: /* cmovb/cmovc/cmovnae */
            d |= (_regs.eflags & EFLG_CF) ? Mov : 0;
            break;
        case 2: /* cmovz/cmove */
            d |= (_regs.eflags & EFLG_ZF) ? Mov : 0;
            break;
        case 3: /* cmovbe/cmovna */
            d |= (_regs.eflags & (EFLG_CF|EFLG_ZF)) ? Mov : 0;
            break;
        case 4: /* cmovs */
            d |= (_regs.eflags & EFLG_SF) ? Mov : 0;
            break;
        case 5: /* cmovp/cmovpe */
            d |= (_regs.eflags & EFLG_PF) ? Mov : 0;
            break;
        case 7: /* cmovle/cmovng */
            d |= (_regs.eflags & EFLG_ZF) ? Mov : 0;
            /* fall through */
        case 6: /* cmovl/cmovnge */
            d |= (!(_regs.eflags & EFLG_SF) != !(_regs.eflags & EFLG_OF)) ?
                Mov : 0;
            break;
        }
        /* Odd cmov opcodes (lsb == 1) have inverted sense. */
        d ^= (b & 1) ? Mov : 0;
        break;
    case 0xb0 ... 0xb1: /* cmpxchg */
        /* Save real source value, then compare EAX against destination. */
        src.orig_val = src.val;
        src.val = _regs.eax;
        emulate_2op_SrcV("cmp", src, dst, _regs.eflags);
        /* Always write back. The question is: where to? */
        d |= Mov;
        if ( _regs.eflags & EFLG_ZF )
        {
            /* Success: write back to memory. */
            dst.val = src.orig_val;
        }
        else
        {
            /* Failure: write the value we saw to EAX. */
            dst.type = OP_REG;
            dst.ptr  = (unsigned long *)&_regs.eax;
        }
        break;
    case 0xa3: bt: /* bt */
        src.val &= (dst.bytes << 3) - 1; /* only subword offset */
        emulate_2op_SrcV_nobyte("bt", src, dst, _regs.eflags);
        break;
    case 0xb3: btr: /* btr */
        src.val &= (dst.bytes << 3) - 1; /* only subword offset */
        emulate_2op_SrcV_nobyte("btr", src, dst, _regs.eflags);
        break;
    case 0xab: bts: /* bts */
        src.val &= (dst.bytes << 3) - 1; /* only subword offset */
        emulate_2op_SrcV_nobyte("bts", src, dst, _regs.eflags);
        break;
    case 0xb6 ... 0xb7: /* movzx */
        dst.bytes = op_bytes;
        dst.val = (d & ByteOp) ? (uint8_t)src.val : (uint16_t)src.val;
        break;
    case 0xbb: btc: /* btc */
        src.val &= (dst.bytes << 3) - 1; /* only subword offset */
        emulate_2op_SrcV_nobyte("btc", src, dst, _regs.eflags);
        break;
    case 0xba: /* Grp8: bit ops with immediate bit offset (/4../7) */
        switch ( modrm_reg & 3 )
        {
        case 0: goto bt;
        case 1: goto bts;
        case 2: goto btr;
        case 3: goto btc;
        }
        break;
    case 0xbe ... 0xbf: /* movsx */
        dst.bytes = op_bytes;
        dst.val = (d & ByteOp) ? (int8_t)src.val : (int16_t)src.val;
        break;
    }
    goto writeback;

 twobyte_special_insn:
    /* Disable writeback. */
    dst.orig_val = dst.val;
    switch ( b )
    {
    case 0x0d: /* GrpP (prefetch) */
    case 0x18: /* Grp16 (prefetch/nop) */
        break;
    case 0xc7: /* Grp9 (cmpxchg8b) */
#if defined(__i386__)
    {
        unsigned long old_lo, old_hi;
        if ( ((rc = ops->read_emulated(cr2+0, &old_lo, 4, ctxt)) != 0) ||
             ((rc = ops->read_emulated(cr2+4, &old_hi, 4, ctxt)) != 0) )
            goto done;
        if ( (old_lo != _regs.eax) || (old_hi != _regs.edx) )
        {
            /* Mismatch: report the observed value in EDX:EAX. */
            _regs.eax = old_lo;
            _regs.edx = old_hi;
            _regs.eflags &= ~EFLG_ZF;
        }
        else if ( ops->cmpxchg8b_emulated == NULL )
        {
            rc = X86EMUL_UNHANDLEABLE;
            goto done;
        }
        else
        {
            /* Exchange succeeded (pending the atomic callback's verdict). */
            if ( (rc = ops->cmpxchg8b_emulated(cr2, old_lo, old_hi, _regs.ebx,
                                               _regs.ecx, ctxt)) != 0 )
                goto done;
            _regs.eflags |= EFLG_ZF;
        }
        break;
    }
#elif defined(__x86_64__)
    {
        unsigned long old, new;
        if ( (rc = ops->read_emulated(cr2, &old, 8, ctxt)) != 0 )
            goto done;
        if ( ((uint32_t)(old>>0) != (uint32_t)_regs.eax) ||
             ((uint32_t)(old>>32) != (uint32_t)_regs.edx) )
        {
            /* Mismatch: report the observed value in EDX:EAX. */
            _regs.eax = (uint32_t)(old>>0);
            _regs.edx = (uint32_t)(old>>32);
            _regs.eflags &= ~EFLG_ZF;
        }
        else
        {
            new = (_regs.ecx<<32)|(uint32_t)_regs.ebx;
            if ( (rc = ops->cmpxchg_emulated(cr2, old, new, 8, ctxt)) != 0 )
                goto done;
            _regs.eflags |= EFLG_ZF;
        }
        break;
    }
#endif
    }
    goto writeback;

 cannot_emulate:
    DPRINTF("Cannot emulate %02x\n", b);
    return -1;
}
1133 #ifdef __XEN__
1135 #include <asm/mm.h>
1136 #include <asm/uaccess.h>
/*
 * Default read_std callback: copy 'bytes' bytes from guest-virtual 'addr'
 * into *val via copy_from_user. On fault, injects a read page fault into
 * the guest and returns X86EMUL_PROPAGATE_FAULT.
 */
int
x86_emulate_read_std(
    unsigned long addr,
    unsigned long *val,
    unsigned int bytes,
    struct x86_emulate_ctxt *ctxt)
{
    /* Zero the full word first: 'bytes' may be narrower than *val. */
    *val = 0;
    if ( copy_from_user((void *)val, (void *)addr, bytes) )
    {
        propagate_page_fault(addr, 4); /* user mode, read fault */
        return X86EMUL_PROPAGATE_FAULT;
    }
    return X86EMUL_CONTINUE;
}
/*
 * Default write_std callback: copy 'bytes' bytes of 'val' to guest-virtual
 * 'addr' via copy_to_user. On fault, injects a write page fault into the
 * guest and returns X86EMUL_PROPAGATE_FAULT.
 */
int
x86_emulate_write_std(
    unsigned long addr,
    unsigned long val,
    unsigned int bytes,
    struct x86_emulate_ctxt *ctxt)
{
    if ( copy_to_user((void *)addr, (void *)&val, bytes) )
    {
        propagate_page_fault(addr, 6); /* user mode, write fault */
        return X86EMUL_PROPAGATE_FAULT;
    }
    return X86EMUL_CONTINUE;
}
1169 #endif