xen-vtx-unstable

view xen/arch/x86/x86_emulate.c @ 6774:4d899a738d59

merge?
author cl349@firebug.cl.cam.ac.uk
date Tue Sep 13 15:05:49 2005 +0000 (2005-09-13)
parents 65b28c74cec2
children
line source
1 /******************************************************************************
2 * x86_emulate.c
3 *
4 * Generic x86 (32-bit and 64-bit) instruction decoder and emulator.
5 *
6 * Copyright (c) 2005 Keir Fraser
7 */
9 #ifdef __TEST_HARNESS__
10 #include <stdio.h>
11 #include <stdint.h>
12 typedef uint8_t u8;
13 typedef uint16_t u16;
14 typedef uint32_t u32;
15 typedef uint64_t u64;
16 typedef int8_t s8;
17 typedef int16_t s16;
18 typedef int32_t s32;
19 typedef int64_t s64;
20 #include <public/xen.h>
21 #define DPRINTF(_f, _a...) printf( _f , ## _a )
22 #else
23 #include <xen/config.h>
24 #include <xen/types.h>
25 #include <xen/lib.h>
26 #include <xen/mm.h>
27 #include <asm/regs.h>
28 #define DPRINTF DPRINTK
29 #endif
30 #include <asm-x86/x86_emulate.h>
/*
 * Opcode effective-address decode tables.
 * Note that we only emulate instructions that have at least one memory
 * operand (excluding implicit stack references). We assume that stack
 * references and instruction fetches will never occur in special memory
 * areas that require emulation. So, for example, 'mov <imm>,<reg>' need
 * not be handled.
 */

/* Operand sizes: 8-bit operands or specified/overridden size. */
#define ByteOp      (1<<0) /* 8-bit operands. */
/* Destination operand type (2-bit field at bits 1-2). */
#define ImplicitOps (1<<1) /* Implicit in opcode. No generic decode. */
#define DstReg      (2<<1) /* Register operand. */
#define DstMem      (3<<1) /* Memory operand. */
#define DstMask     (3<<1)
/* Source operand type (3-bit field at bits 3-5). */
#define SrcNone     (0<<3) /* No source operand. */
#define SrcImplicit (0<<3) /* Source operand is implicit in the opcode. */
#define SrcReg      (1<<3) /* Register operand. */
#define SrcMem      (2<<3) /* Memory operand. */
#define SrcImm      (3<<3) /* Immediate operand. */
#define SrcImmByte  (4<<3) /* 8-bit sign-extended immediate operand. */
#define SrcMask     (7<<3)
/* Generic ModRM decode. */
#define ModRM       (1<<6)
/* Destination is only written; never read. */
#define Mov         (1<<7)
61 static u8 opcode_table[256] = {
62 /* 0x00 - 0x07 */
63 ByteOp|DstMem|SrcReg|ModRM, DstMem|SrcReg|ModRM,
64 ByteOp|DstReg|SrcMem|ModRM, DstReg|SrcMem|ModRM,
65 0, 0, 0, 0,
66 /* 0x08 - 0x0F */
67 ByteOp|DstMem|SrcReg|ModRM, DstMem|SrcReg|ModRM,
68 ByteOp|DstReg|SrcMem|ModRM, DstReg|SrcMem|ModRM,
69 0, 0, 0, 0,
70 /* 0x10 - 0x17 */
71 ByteOp|DstMem|SrcReg|ModRM, DstMem|SrcReg|ModRM,
72 ByteOp|DstReg|SrcMem|ModRM, DstReg|SrcMem|ModRM,
73 0, 0, 0, 0,
74 /* 0x18 - 0x1F */
75 ByteOp|DstMem|SrcReg|ModRM, DstMem|SrcReg|ModRM,
76 ByteOp|DstReg|SrcMem|ModRM, DstReg|SrcMem|ModRM,
77 0, 0, 0, 0,
78 /* 0x20 - 0x27 */
79 ByteOp|DstMem|SrcReg|ModRM, DstMem|SrcReg|ModRM,
80 ByteOp|DstReg|SrcMem|ModRM, DstReg|SrcMem|ModRM,
81 0, 0, 0, 0,
82 /* 0x28 - 0x2F */
83 ByteOp|DstMem|SrcReg|ModRM, DstMem|SrcReg|ModRM,
84 ByteOp|DstReg|SrcMem|ModRM, DstReg|SrcMem|ModRM,
85 0, 0, 0, 0,
86 /* 0x30 - 0x37 */
87 ByteOp|DstMem|SrcReg|ModRM, DstMem|SrcReg|ModRM,
88 ByteOp|DstReg|SrcMem|ModRM, DstReg|SrcMem|ModRM,
89 0, 0, 0, 0,
90 /* 0x38 - 0x3F */
91 ByteOp|DstMem|SrcReg|ModRM, DstMem|SrcReg|ModRM,
92 ByteOp|DstReg|SrcMem|ModRM, DstReg|SrcMem|ModRM,
93 0, 0, 0, 0,
94 /* 0x40 - 0x4F */
95 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
96 /* 0x50 - 0x5F */
97 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
98 /* 0x60 - 0x6F */
99 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
100 /* 0x70 - 0x7F */
101 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
102 /* 0x80 - 0x87 */
103 ByteOp|DstMem|SrcImm|ModRM, DstMem|SrcImm|ModRM,
104 ByteOp|DstMem|SrcImm|ModRM, DstMem|SrcImmByte|ModRM,
105 ByteOp|DstMem|SrcReg|ModRM, DstMem|SrcReg|ModRM,
106 ByteOp|DstMem|SrcReg|ModRM, DstMem|SrcReg|ModRM,
107 /* 0x88 - 0x8F */
108 ByteOp|DstMem|SrcReg|ModRM, DstMem|SrcReg|ModRM,
109 ByteOp|DstReg|SrcMem|ModRM, DstReg|SrcMem|ModRM,
110 0, 0, 0, DstMem|SrcNone|ModRM|Mov,
111 /* 0x90 - 0x9F */
112 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
113 /* 0xA0 - 0xA7 */
114 ByteOp|DstReg|SrcMem|Mov, DstReg|SrcMem|Mov,
115 ByteOp|DstMem|SrcReg|Mov, DstMem|SrcReg|Mov,
116 ByteOp|ImplicitOps|Mov, ImplicitOps|Mov,
117 ByteOp|ImplicitOps, ImplicitOps,
118 /* 0xA8 - 0xAF */
119 0, 0, ByteOp|ImplicitOps|Mov, ImplicitOps|Mov,
120 ByteOp|ImplicitOps|Mov, ImplicitOps|Mov,
121 ByteOp|ImplicitOps, ImplicitOps,
122 /* 0xB0 - 0xBF */
123 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
124 /* 0xC0 - 0xC7 */
125 ByteOp|DstMem|SrcImm|ModRM, DstMem|SrcImmByte|ModRM, 0, 0,
126 0, 0, ByteOp|DstMem|SrcImm|ModRM, DstMem|SrcImm|ModRM,
127 /* 0xC8 - 0xCF */
128 0, 0, 0, 0, 0, 0, 0, 0,
129 /* 0xD0 - 0xD7 */
130 ByteOp|DstMem|SrcImplicit|ModRM, DstMem|SrcImplicit|ModRM,
131 ByteOp|DstMem|SrcImplicit|ModRM, DstMem|SrcImplicit|ModRM,
132 0, 0, 0, 0,
133 /* 0xD8 - 0xDF */
134 0, 0, 0, 0, 0, 0, 0, 0,
135 /* 0xE0 - 0xEF */
136 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
137 /* 0xF0 - 0xF7 */
138 0, 0, 0, 0,
139 0, 0, ByteOp|DstMem|SrcNone|ModRM, DstMem|SrcNone|ModRM,
140 /* 0xF8 - 0xFF */
141 0, 0, 0, 0,
142 0, 0, ByteOp|DstMem|SrcNone|ModRM, DstMem|SrcNone|ModRM
143 };
145 static u8 twobyte_table[256] = {
146 /* 0x00 - 0x0F */
147 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, ImplicitOps|ModRM, 0, 0,
148 /* 0x10 - 0x1F */
149 0, 0, 0, 0, 0, 0, 0, 0, ImplicitOps|ModRM, 0, 0, 0, 0, 0, 0, 0,
150 /* 0x20 - 0x2F */
151 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
152 /* 0x30 - 0x3F */
153 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
154 /* 0x40 - 0x47 */
155 DstReg|SrcMem|ModRM|Mov, DstReg|SrcMem|ModRM|Mov,
156 DstReg|SrcMem|ModRM|Mov, DstReg|SrcMem|ModRM|Mov,
157 DstReg|SrcMem|ModRM|Mov, DstReg|SrcMem|ModRM|Mov,
158 DstReg|SrcMem|ModRM|Mov, DstReg|SrcMem|ModRM|Mov,
159 /* 0x48 - 0x4F */
160 DstReg|SrcMem|ModRM|Mov, DstReg|SrcMem|ModRM|Mov,
161 DstReg|SrcMem|ModRM|Mov, DstReg|SrcMem|ModRM|Mov,
162 DstReg|SrcMem|ModRM|Mov, DstReg|SrcMem|ModRM|Mov,
163 DstReg|SrcMem|ModRM|Mov, DstReg|SrcMem|ModRM|Mov,
164 /* 0x50 - 0x5F */
165 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
166 /* 0x60 - 0x6F */
167 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
168 /* 0x70 - 0x7F */
169 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
170 /* 0x80 - 0x8F */
171 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
172 /* 0x90 - 0x9F */
173 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
174 /* 0xA0 - 0xA7 */
175 0, 0, 0, DstMem|SrcReg|ModRM, 0, 0, 0, 0,
176 /* 0xA8 - 0xAF */
177 0, 0, 0, DstMem|SrcReg|ModRM, 0, 0, 0, 0,
178 /* 0xB0 - 0xB7 */
179 ByteOp|DstMem|SrcReg|ModRM, DstMem|SrcReg|ModRM, 0, DstMem|SrcReg|ModRM,
180 0, 0, 0, 0,
181 /* 0xB8 - 0xBF */
182 0, 0, DstMem|SrcImmByte|ModRM, DstMem|SrcReg|ModRM, 0, 0, 0, 0,
183 /* 0xC0 - 0xCF */
184 0, 0, 0, 0, 0, 0, 0, ImplicitOps|ModRM, 0, 0, 0, 0, 0, 0, 0, 0,
185 /* 0xD0 - 0xDF */
186 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
187 /* 0xE0 - 0xEF */
188 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
189 /* 0xF0 - 0xFF */
190 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0
191 };
/* Type, address-of, and value of an instruction's operand. */
struct operand {
    /* OP_REG: CPU register; OP_MEM: memory operand; OP_IMM: immediate. */
    enum { OP_REG, OP_MEM, OP_IMM } type;
    unsigned int bytes;                /* operand width: 1, 2, 4 or 8 bytes */
    /* val: fetched value; orig_val: pre-execution value (writeback compare);
     * ptr: location of the operand (register field or memory address). */
    unsigned long val, orig_val, *ptr;
};
/* EFLAGS bit definitions (bit positions per the x86 architecture). */
#define EFLG_OF (1<<11) /* overflow */
#define EFLG_DF (1<<10) /* direction (string ops increment/decrement) */
#define EFLG_SF (1<<7)  /* sign */
#define EFLG_ZF (1<<6)  /* zero */
#define EFLG_AF (1<<4)  /* auxiliary carry */
#define EFLG_PF (1<<2)  /* parity */
#define EFLG_CF (1<<0)  /* carry */
/*
 * Instruction emulation:
 * Most instructions are emulated directly via a fragment of inline assembly
 * code. This allows us to save/restore EFLAGS and thus very easily pick up
 * any modified flags.
 */

#if defined(__x86_64__)
#define _LO32 "k"          /* force 32-bit operand */
#define _STK  "%%rsp"      /* stack pointer */
#elif defined(__i386__)
#define _LO32 ""           /* force 32-bit operand */
#define _STK  "%%esp"      /* stack pointer */
#endif

/*
 * These EFLAGS bits are restored from saved value during emulation, and
 * any changes are written back to the saved value after emulation.
 */
#define EFLAGS_MASK (EFLG_OF|EFLG_SF|EFLG_ZF|EFLG_AF|EFLG_PF|EFLG_CF)

/*
 * Before executing instruction: restore necessary bits in EFLAGS.
 * _sav/_msk/_tmp are asm operand numbers (as strings): saved flags,
 * the EFLAGS_MASK immediate, and a scratch register respectively.
 */
#define _PRE_EFLAGS(_sav, _msk, _tmp) \
/* EFLAGS = (_sav & _msk) | (EFLAGS & ~_msk); */\
"push %"_sav"; "                                \
"movl %"_msk",%"_LO32 _tmp"; "                  \
"andl %"_LO32 _tmp",("_STK"); "                 \
"pushf; "                                       \
"notl %"_LO32 _tmp"; "                          \
"andl %"_LO32 _tmp",("_STK"); "                 \
"pop  %"_tmp"; "                                \
"orl  %"_LO32 _tmp",("_STK"); "                 \
"popf; "                                        \
/* _sav &= ~msk; */                             \
"movl %"_msk",%"_LO32 _tmp"; "                  \
"notl %"_LO32 _tmp"; "                          \
"andl %"_LO32 _tmp",%"_sav"; "

/* After executing instruction: write-back necessary bits in EFLAGS. */
#define _POST_EFLAGS(_sav, _msk, _tmp) \
/* _sav |= EFLAGS & _msk; */           \
"pushf; "                              \
"pop  %"_tmp"; "                       \
"andl %"_msk",%"_LO32 _tmp"; "         \
"orl  %"_LO32 _tmp",%"_sav"; "
/*
 * Raw emulation: instruction has two explicit operands.
 * The _?x/_?y pairs give, per operand width, the asm register-name prefix
 * for the source operand and the matching constraint letter.
 */
#define __emulate_2op_nobyte(_op,_src,_dst,_eflags,_wx,_wy,_lx,_ly,_qx,_qy)\
do{ unsigned long _tmp;                                                    \
    switch ( (_dst).bytes )                                                \
    {                                                                      \
    case 2:                                                                \
        __asm__ __volatile__ (                                             \
            _PRE_EFLAGS("0","4","2")                                       \
            _op"w %"_wx"3,%1; "                                            \
            _POST_EFLAGS("0","4","2")                                      \
            : "=m" (_eflags), "=m" ((_dst).val), "=&r" (_tmp)              \
            : _wy ((_src).val), "i" (EFLAGS_MASK) );                       \
        break;                                                             \
    case 4:                                                                \
        __asm__ __volatile__ (                                             \
            _PRE_EFLAGS("0","4","2")                                       \
            _op"l %"_lx"3,%1; "                                            \
            _POST_EFLAGS("0","4","2")                                      \
            : "=m" (_eflags), "=m" ((_dst).val), "=&r" (_tmp)              \
            : _ly ((_src).val), "i" (EFLAGS_MASK) );                       \
        break;                                                             \
    case 8:                                                                \
        /* 64-bit only; a no-op stub on i386. */                           \
        __emulate_2op_8byte(_op, _src, _dst, _eflags, _qx, _qy);           \
        break;                                                             \
    }                                                                      \
} while (0)
#define __emulate_2op(_op,_src,_dst,_eflags,_bx,_by,_wx,_wy,_lx,_ly,_qx,_qy)\
do{ unsigned long _tmp;                                                    \
    switch ( (_dst).bytes )                                                \
    {                                                                      \
    case 1:                                                                \
        __asm__ __volatile__ (                                             \
            _PRE_EFLAGS("0","4","2")                                       \
            _op"b %"_bx"3,%1; "                                            \
            _POST_EFLAGS("0","4","2")                                      \
            : "=m" (_eflags), "=m" ((_dst).val), "=&r" (_tmp)              \
            : _by ((_src).val), "i" (EFLAGS_MASK) );                       \
        break;                                                             \
    default:                                                               \
        __emulate_2op_nobyte(_op,_src,_dst,_eflags,_wx,_wy,_lx,_ly,_qx,_qy);\
        break;                                                             \
    }                                                                      \
} while (0)
/* Source operand is byte-sized and may be restricted to just %cl. */
#define emulate_2op_SrcB(_op, _src, _dst, _eflags)      \
    __emulate_2op(_op, _src, _dst, _eflags,             \
                  "b", "c", "b", "c", "b", "c", "b", "c")
/* Source operand is byte, word, long or quad sized. */
#define emulate_2op_SrcV(_op, _src, _dst, _eflags)      \
    __emulate_2op(_op, _src, _dst, _eflags,             \
                  "b", "q", "w", "r", _LO32, "r", "", "r")
/* Source operand is word, long or quad sized. */
#define emulate_2op_SrcV_nobyte(_op, _src, _dst, _eflags) \
    __emulate_2op_nobyte(_op, _src, _dst, _eflags,        \
                         "w", "r", _LO32, "r", "", "r")
/*
 * Instruction has only one explicit operand (no source operand).
 * Executes _op on (_dst).val at the operand's width, merging arithmetic
 * flags into _eflags via the _PRE/_POST_EFLAGS protocol.
 */
#define emulate_1op(_op,_dst,_eflags)                                      \
do{ unsigned long _tmp;                                                    \
    switch ( (_dst).bytes )                                                \
    {                                                                      \
    case 1:                                                                \
        __asm__ __volatile__ (                                             \
            _PRE_EFLAGS("0","3","2")                                       \
            _op"b %1; "                                                    \
            _POST_EFLAGS("0","3","2")                                      \
            : "=m" (_eflags), "=m" ((_dst).val), "=&r" (_tmp)              \
            : "i" (EFLAGS_MASK) );                                         \
        break;                                                             \
    case 2:                                                                \
        __asm__ __volatile__ (                                             \
            _PRE_EFLAGS("0","3","2")                                       \
            _op"w %1; "                                                    \
            _POST_EFLAGS("0","3","2")                                      \
            : "=m" (_eflags), "=m" ((_dst).val), "=&r" (_tmp)              \
            : "i" (EFLAGS_MASK) );                                         \
        break;                                                             \
    case 4:                                                                \
        __asm__ __volatile__ (                                             \
            _PRE_EFLAGS("0","3","2")                                       \
            _op"l %1; "                                                    \
            _POST_EFLAGS("0","3","2")                                      \
            : "=m" (_eflags), "=m" ((_dst).val), "=&r" (_tmp)              \
            : "i" (EFLAGS_MASK) );                                         \
        break;                                                             \
    case 8:                                                                \
        /* 64-bit only; a no-op stub on i386. */                           \
        __emulate_1op_8byte(_op, _dst, _eflags);                           \
        break;                                                             \
    }                                                                      \
} while (0)
/* Emulate an instruction with quadword operands (x86/64 only). */
#if defined(__x86_64__)
#define __emulate_2op_8byte(_op, _src, _dst, _eflags, _qx, _qy)   \
do{ __asm__ __volatile__ (                                        \
        _PRE_EFLAGS("0","4","2")                                  \
        _op"q %"_qx"3,%1; "                                       \
        _POST_EFLAGS("0","4","2")                                 \
        : "=m" (_eflags), "=m" ((_dst).val), "=&r" (_tmp)         \
        : _qy ((_src).val), "i" (EFLAGS_MASK) );                  \
} while (0)
#define __emulate_1op_8byte(_op, _dst, _eflags)                   \
do{ __asm__ __volatile__ (                                        \
        _PRE_EFLAGS("0","3","2")                                  \
        _op"q %1; "                                               \
        _POST_EFLAGS("0","3","2")                                 \
        : "=m" (_eflags), "=m" ((_dst).val), "=&r" (_tmp)         \
        : "i" (EFLAGS_MASK) );                                    \
} while (0)
#elif defined(__i386__)
/* Quadword operands cannot occur in 32-bit mode: expand to nothing. */
#define __emulate_2op_8byte(_op, _src, _dst, _eflags, _qx, _qy)
#define __emulate_1op_8byte(_op, _dst, _eflags)
#endif /* __i386__ */
/*
 * Fetch next part of the instruction being emulated. Evaluates to the
 * fetched value cast to _type, and advances _eip by _size. Relies on
 * 'rc', 'ops' and the 'done' label in the enclosing function scope.
 */
#define insn_fetch(_type, _size, _eip)                                  \
({ unsigned long _x;                                                    \
   if ( (rc = ops->read_std((unsigned long)(_eip), &_x, (_size))) != 0 ) \
       goto done;                                                       \
   (_eip) += (_size);                                                   \
   (_type)_x;                                                           \
})
378 void *
379 decode_register(
380 u8 modrm_reg, struct cpu_user_regs *regs, int highbyte_regs)
381 {
382 void *p;
384 switch ( modrm_reg )
385 {
386 case 0: p = &regs->eax; break;
387 case 1: p = &regs->ecx; break;
388 case 2: p = &regs->edx; break;
389 case 3: p = &regs->ebx; break;
390 case 4: p = (highbyte_regs ?
391 ((unsigned char *)&regs->eax + 1) :
392 (unsigned char *)&regs->esp); break;
393 case 5: p = (highbyte_regs ?
394 ((unsigned char *)&regs->ecx + 1) :
395 (unsigned char *)&regs->ebp); break;
396 case 6: p = (highbyte_regs ?
397 ((unsigned char *)&regs->edx + 1) :
398 (unsigned char *)&regs->esi); break;
399 case 7: p = (highbyte_regs ?
400 ((unsigned char *)&regs->ebx + 1) :
401 (unsigned char *)&regs->edi); break;
402 #if defined(__x86_64__)
403 case 8: p = &regs->r8; break;
404 case 9: p = &regs->r9; break;
405 case 10: p = &regs->r10; break;
406 case 11: p = &regs->r11; break;
407 case 12: p = &regs->r12; break;
408 case 13: p = &regs->r13; break;
409 case 14: p = &regs->r14; break;
410 case 15: p = &regs->r15; break;
411 #endif
412 default: p = NULL; break;
413 }
415 return p;
416 }
418 int
419 x86_emulate_memop(
420 struct cpu_user_regs *regs,
421 unsigned long cr2,
422 struct x86_mem_emulator *ops,
423 int mode)
424 {
425 u8 b, d, sib, twobyte = 0, rex_prefix = 0;
426 u8 modrm, modrm_mod = 0, modrm_reg = 0, modrm_rm = 0;
427 unsigned int op_bytes = (mode == 8) ? 4 : mode, ad_bytes = mode;
428 unsigned int lock_prefix = 0, rep_prefix = 0, i;
429 int rc = 0;
430 struct operand src, dst;
432 /* Shadow copy of register state. Committed on successful emulation. */
433 struct cpu_user_regs _regs = *regs;
435 /* Legacy prefixes. */
436 for ( i = 0; i < 8; i++ )
437 {
438 switch ( b = insn_fetch(u8, 1, _regs.eip) )
439 {
440 case 0x66: /* operand-size override */
441 op_bytes ^= 6; /* switch between 2/4 bytes */
442 break;
443 case 0x67: /* address-size override */
444 ad_bytes ^= (mode == 8) ? 12 : 6; /* switch between 2/4/8 bytes */
445 break;
446 case 0x2e: /* CS override */
447 case 0x3e: /* DS override */
448 case 0x26: /* ES override */
449 case 0x64: /* FS override */
450 case 0x65: /* GS override */
451 case 0x36: /* SS override */
452 DPRINTF("Warning: ignoring a segment override.\n");
453 break;
454 case 0xf0: /* LOCK */
455 lock_prefix = 1;
456 break;
457 case 0xf3: /* REP/REPE/REPZ */
458 rep_prefix = 1;
459 break;
460 case 0xf2: /* REPNE/REPNZ */
461 break;
462 default:
463 goto done_prefixes;
464 }
465 }
466 done_prefixes:
468 if ( ad_bytes == 2 )
469 {
470 DPRINTF("Cannot parse 16-bit effective addresses.\n");
471 goto cannot_emulate;
472 }
474 /* REX prefix. */
475 if ( (mode == 8) && ((b & 0xf0) == 0x40) )
476 {
477 rex_prefix = b;
478 if ( b & 8 )
479 op_bytes = 8; /* REX.W */
480 modrm_reg = (b & 4) << 1; /* REX.R */
481 /* REX.B and REX.X do not need to be decoded. */
482 b = insn_fetch(u8, 1, _regs.eip);
483 }
485 /* Opcode byte(s). */
486 d = opcode_table[b];
487 if ( d == 0 )
488 {
489 /* Two-byte opcode? */
490 if ( b == 0x0f )
491 {
492 twobyte = 1;
493 b = insn_fetch(u8, 1, _regs.eip);
494 d = twobyte_table[b];
495 }
497 /* Unrecognised? */
498 if ( d == 0 )
499 goto cannot_emulate;
500 }
502 /* ModRM and SIB bytes. */
503 if ( d & ModRM )
504 {
505 modrm = insn_fetch(u8, 1, _regs.eip);
506 modrm_mod |= (modrm & 0xc0) >> 6;
507 modrm_reg |= (modrm & 0x38) >> 3;
508 modrm_rm |= (modrm & 0x07);
509 switch ( modrm_mod )
510 {
511 case 0:
512 if ( (modrm_rm == 4) &&
513 (((sib = insn_fetch(u8, 1, _regs.eip)) & 7) == 5) )
514 _regs.eip += 4; /* skip disp32 specified by SIB.base */
515 else if ( modrm_rm == 5 )
516 _regs.eip += 4; /* skip disp32 */
517 break;
518 case 1:
519 if ( modrm_rm == 4 )
520 sib = insn_fetch(u8, 1, _regs.eip);
521 _regs.eip += 1; /* skip disp8 */
522 break;
523 case 2:
524 if ( modrm_rm == 4 )
525 sib = insn_fetch(u8, 1, _regs.eip);
526 _regs.eip += 4; /* skip disp32 */
527 break;
528 case 3:
529 DPRINTF("Cannot parse ModRM.mod == 3.\n");
530 goto cannot_emulate;
531 }
532 }
534 /* Decode and fetch the destination operand: register or memory. */
535 switch ( d & DstMask )
536 {
537 case ImplicitOps:
538 /* Special instructions do their own operand decoding. */
539 goto special_insn;
540 case DstReg:
541 dst.type = OP_REG;
542 if ( d & ByteOp )
543 {
544 dst.ptr = decode_register(modrm_reg, &_regs, (rex_prefix == 0));
545 dst.val = *(u8 *)dst.ptr;
546 dst.bytes = 1;
547 }
548 else
549 {
550 dst.ptr = decode_register(modrm_reg, &_regs, 0);
551 switch ( (dst.bytes = op_bytes) )
552 {
553 case 2: dst.val = *(u16 *)dst.ptr; break;
554 case 4: dst.val = *(u32 *)dst.ptr; break;
555 case 8: dst.val = *(u64 *)dst.ptr; break;
556 }
557 }
558 break;
559 case DstMem:
560 dst.type = OP_MEM;
561 dst.ptr = (unsigned long *)cr2;
562 dst.bytes = (d & ByteOp) ? 1 : op_bytes;
563 if ( !(d & Mov) && /* optimisation - avoid slow emulated read */
564 ((rc = ops->read_emulated((unsigned long)dst.ptr,
565 &dst.val, dst.bytes)) != 0) )
566 goto done;
567 break;
568 }
569 dst.orig_val = dst.val;
571 /* Decode and fetch the source operand: register, memory or immediate. */
572 switch ( d & SrcMask )
573 {
574 case SrcNone:
575 break;
576 case SrcReg:
577 src.type = OP_REG;
578 if ( d & ByteOp )
579 {
580 src.ptr = decode_register(modrm_reg, &_regs, (rex_prefix == 0));
581 src.val = src.orig_val = *(u8 *)src.ptr;
582 src.bytes = 1;
583 }
584 else
585 {
586 src.ptr = decode_register(modrm_reg, &_regs, 0);
587 switch ( (src.bytes = op_bytes) )
588 {
589 case 2: src.val = src.orig_val = *(u16 *)src.ptr; break;
590 case 4: src.val = src.orig_val = *(u32 *)src.ptr; break;
591 case 8: src.val = src.orig_val = *(u64 *)src.ptr; break;
592 }
593 }
594 break;
595 case SrcMem:
596 src.type = OP_MEM;
597 src.ptr = (unsigned long *)cr2;
598 src.bytes = (d & ByteOp) ? 1 : op_bytes;
599 if ( (rc = ops->read_emulated((unsigned long)src.ptr,
600 &src.val, src.bytes)) != 0 )
601 goto done;
602 src.orig_val = src.val;
603 break;
604 case SrcImm:
605 src.type = OP_IMM;
606 src.ptr = (unsigned long *)_regs.eip;
607 src.bytes = (d & ByteOp) ? 1 : op_bytes;
608 if ( src.bytes == 8 ) src.bytes = 4;
609 /* NB. Immediates are sign-extended as necessary. */
610 switch ( src.bytes )
611 {
612 case 1: src.val = insn_fetch(s8, 1, _regs.eip); break;
613 case 2: src.val = insn_fetch(s16, 2, _regs.eip); break;
614 case 4: src.val = insn_fetch(s32, 4, _regs.eip); break;
615 }
616 break;
617 case SrcImmByte:
618 src.type = OP_IMM;
619 src.ptr = (unsigned long *)_regs.eip;
620 src.bytes = 1;
621 src.val = insn_fetch(s8, 1, _regs.eip);
622 break;
623 }
625 if ( twobyte )
626 goto twobyte_insn;
628 switch ( b )
629 {
630 case 0x00 ... 0x05: add: /* add */
631 emulate_2op_SrcV("add", src, dst, _regs.eflags);
632 break;
633 case 0x08 ... 0x0d: or: /* or */
634 emulate_2op_SrcV("or", src, dst, _regs.eflags);
635 break;
636 case 0x10 ... 0x15: adc: /* adc */
637 emulate_2op_SrcV("adc", src, dst, _regs.eflags);
638 break;
639 case 0x18 ... 0x1d: sbb: /* sbb */
640 emulate_2op_SrcV("sbb", src, dst, _regs.eflags);
641 break;
642 case 0x20 ... 0x25: and: /* and */
643 emulate_2op_SrcV("and", src, dst, _regs.eflags);
644 break;
645 case 0x28 ... 0x2d: sub: /* sub */
646 emulate_2op_SrcV("sub", src, dst, _regs.eflags);
647 break;
648 case 0x30 ... 0x35: xor: /* xor */
649 emulate_2op_SrcV("xor", src, dst, _regs.eflags);
650 break;
651 case 0x38 ... 0x3d: cmp: /* cmp */
652 emulate_2op_SrcV("cmp", src, dst, _regs.eflags);
653 break;
654 case 0x80 ... 0x83: /* Grp1 */
655 switch ( modrm_reg )
656 {
657 case 0: goto add;
658 case 1: goto or;
659 case 2: goto adc;
660 case 3: goto sbb;
661 case 4: goto and;
662 case 5: goto sub;
663 case 6: goto xor;
664 case 7: goto cmp;
665 }
666 break;
667 case 0x84 ... 0x85: test: /* test */
668 emulate_2op_SrcV("test", src, dst, _regs.eflags);
669 break;
670 case 0x86 ... 0x87: /* xchg */
671 /* Write back the register source. */
672 switch ( dst.bytes )
673 {
674 case 1: *(u8 *)src.ptr = (u8)dst.val; break;
675 case 2: *(u16 *)src.ptr = (u16)dst.val; break;
676 case 4: *src.ptr = (u32)dst.val; break; /* 64b mode: zero-extend */
677 case 8: *src.ptr = dst.val; break;
678 }
679 /* Write back the memory destination with implicit LOCK prefix. */
680 dst.val = src.val;
681 lock_prefix = 1;
682 break;
683 case 0xa0 ... 0xa1: /* mov */
684 dst.ptr = (unsigned long *)&_regs.eax;
685 dst.val = src.val;
686 _regs.eip += ad_bytes; /* skip src displacement */
687 break;
688 case 0xa2 ... 0xa3: /* mov */
689 dst.val = (unsigned long)_regs.eax;
690 _regs.eip += ad_bytes; /* skip dst displacement */
691 break;
692 case 0x88 ... 0x8b: /* mov */
693 case 0xc6 ... 0xc7: /* mov (sole member of Grp11) */
694 dst.val = src.val;
695 break;
696 case 0x8f: /* pop (sole member of Grp1a) */
697 /* 64-bit mode: POP defaults to 64-bit operands. */
698 if ( (mode == 8) && (dst.bytes == 4) )
699 dst.bytes = 8;
700 if ( (rc = ops->read_std(_regs.esp, &dst.val, dst.bytes)) != 0 )
701 goto done;
702 _regs.esp += dst.bytes;
703 break;
704 case 0xc0 ... 0xc1: grp2: /* Grp2 */
705 switch ( modrm_reg )
706 {
707 case 0: /* rol */
708 emulate_2op_SrcB("rol", src, dst, _regs.eflags);
709 break;
710 case 1: /* ror */
711 emulate_2op_SrcB("ror", src, dst, _regs.eflags);
712 break;
713 case 2: /* rcl */
714 emulate_2op_SrcB("rcl", src, dst, _regs.eflags);
715 break;
716 case 3: /* rcr */
717 emulate_2op_SrcB("rcr", src, dst, _regs.eflags);
718 break;
719 case 4: /* sal/shl */
720 case 6: /* sal/shl */
721 emulate_2op_SrcB("sal", src, dst, _regs.eflags);
722 break;
723 case 5: /* shr */
724 emulate_2op_SrcB("shr", src, dst, _regs.eflags);
725 break;
726 case 7: /* sar */
727 emulate_2op_SrcB("sar", src, dst, _regs.eflags);
728 break;
729 }
730 break;
731 case 0xd0 ... 0xd1: /* Grp2 */
732 src.val = 1;
733 goto grp2;
734 case 0xd2 ... 0xd3: /* Grp2 */
735 src.val = _regs.ecx;
736 goto grp2;
737 case 0xf6 ... 0xf7: /* Grp3 */
738 switch ( modrm_reg )
739 {
740 case 0 ... 1: /* test */
741 /* Special case in Grp3: test has an immediate source operand. */
742 src.type = OP_IMM;
743 src.ptr = (unsigned long *)_regs.eip;
744 src.bytes = (d & ByteOp) ? 1 : op_bytes;
745 if ( src.bytes == 8 ) src.bytes = 4;
746 switch ( src.bytes )
747 {
748 case 1: src.val = insn_fetch(s8, 1, _regs.eip); break;
749 case 2: src.val = insn_fetch(s16, 2, _regs.eip); break;
750 case 4: src.val = insn_fetch(s32, 4, _regs.eip); break;
751 }
752 goto test;
753 case 2: /* not */
754 dst.val = ~dst.val;
755 break;
756 case 3: /* neg */
757 emulate_1op("neg", dst, _regs.eflags);
758 break;
759 default:
760 goto cannot_emulate;
761 }
762 break;
763 case 0xfe ... 0xff: /* Grp4/Grp5 */
764 switch ( modrm_reg )
765 {
766 case 0: /* inc */
767 emulate_1op("inc", dst, _regs.eflags);
768 break;
769 case 1: /* dec */
770 emulate_1op("dec", dst, _regs.eflags);
771 break;
772 case 6: /* push */
773 /* 64-bit mode: PUSH defaults to 64-bit operands. */
774 if ( (mode == 8) && (dst.bytes == 4) )
775 {
776 dst.bytes = 8;
777 if ( (rc = ops->read_std((unsigned long)dst.ptr,
778 &dst.val, 8)) != 0 )
779 goto done;
780 }
781 _regs.esp -= dst.bytes;
782 if ( (rc = ops->write_std(_regs.esp, dst.val, dst.bytes)) != 0 )
783 goto done;
784 dst.val = dst.orig_val; /* skanky: disable writeback */
785 break;
786 default:
787 goto cannot_emulate;
788 }
789 break;
790 }
792 writeback:
793 if ( (d & Mov) || (dst.orig_val != dst.val) )
794 {
795 switch ( dst.type )
796 {
797 case OP_REG:
798 /* The 4-byte case *is* correct: in 64-bit mode we zero-extend. */
799 switch ( dst.bytes )
800 {
801 case 1: *(u8 *)dst.ptr = (u8)dst.val; break;
802 case 2: *(u16 *)dst.ptr = (u16)dst.val; break;
803 case 4: *dst.ptr = (u32)dst.val; break; /* 64b mode: zero-extend */
804 case 8: *dst.ptr = dst.val; break;
805 }
806 break;
807 case OP_MEM:
808 if ( lock_prefix )
809 rc = ops->cmpxchg_emulated(
810 (unsigned long)dst.ptr, dst.orig_val, dst.val, dst.bytes);
811 else
812 rc = ops->write_emulated(
813 (unsigned long)dst.ptr, dst.val, dst.bytes);
814 if ( rc != 0 )
815 goto done;
816 default:
817 break;
818 }
819 }
821 /* Commit shadow register state. */
822 *regs = _regs;
824 done:
825 return (rc == X86EMUL_UNHANDLEABLE) ? -1 : 0;
827 special_insn:
828 if ( twobyte )
829 goto twobyte_special_insn;
830 if ( rep_prefix )
831 {
832 if ( _regs.ecx == 0 )
833 {
834 regs->eip = _regs.eip;
835 goto done;
836 }
837 _regs.ecx--;
838 _regs.eip = regs->eip;
839 }
840 switch ( b )
841 {
842 case 0xa4 ... 0xa5: /* movs */
843 dst.type = OP_MEM;
844 dst.bytes = (d & ByteOp) ? 1 : op_bytes;
845 if ( _regs.error_code & 2 )
846 {
847 /* Write fault: destination is special memory. */
848 dst.ptr = (unsigned long *)cr2;
849 if ( (rc = ops->read_std(_regs.esi - _regs.edi + cr2,
850 &dst.val, dst.bytes)) != 0 )
851 goto done;
852 }
853 else
854 {
855 /* Read fault: source is special memory. */
856 dst.ptr = (unsigned long *)(_regs.edi - _regs.esi + cr2);
857 if ( (rc = ops->read_emulated(cr2, &dst.val, dst.bytes)) != 0 )
858 goto done;
859 }
860 _regs.esi += (_regs.eflags & EFLG_DF) ? -dst.bytes : dst.bytes;
861 _regs.edi += (_regs.eflags & EFLG_DF) ? -dst.bytes : dst.bytes;
862 break;
863 case 0xa6 ... 0xa7: /* cmps */
864 DPRINTF("Urk! I don't handle CMPS.\n");
865 goto cannot_emulate;
866 case 0xaa ... 0xab: /* stos */
867 dst.type = OP_MEM;
868 dst.bytes = (d & ByteOp) ? 1 : op_bytes;
869 dst.ptr = (unsigned long *)cr2;
870 dst.val = _regs.eax;
871 _regs.edi += (_regs.eflags & EFLG_DF) ? -dst.bytes : dst.bytes;
872 break;
873 case 0xac ... 0xad: /* lods */
874 dst.type = OP_REG;
875 dst.bytes = (d & ByteOp) ? 1 : op_bytes;
876 dst.ptr = (unsigned long *)&_regs.eax;
877 if ( (rc = ops->read_emulated(cr2, &dst.val, dst.bytes)) != 0 )
878 goto done;
879 _regs.esi += (_regs.eflags & EFLG_DF) ? -dst.bytes : dst.bytes;
880 break;
881 case 0xae ... 0xaf: /* scas */
882 DPRINTF("Urk! I don't handle SCAS.\n");
883 goto cannot_emulate;
884 }
885 goto writeback;
887 twobyte_insn:
888 switch ( b )
889 {
890 case 0x40 ... 0x4f: /* cmov */
891 dst.val = dst.orig_val = src.val;
892 d &= ~Mov; /* default to no move */
893 /* First, assume we're decoding an even cmov opcode (lsb == 0). */
894 switch ( (b & 15) >> 1 )
895 {
896 case 0: /* cmovo */
897 d |= (_regs.eflags & EFLG_OF) ? Mov : 0;
898 break;
899 case 1: /* cmovb/cmovc/cmovnae */
900 d |= (_regs.eflags & EFLG_CF) ? Mov : 0;
901 break;
902 case 2: /* cmovz/cmove */
903 d |= (_regs.eflags & EFLG_ZF) ? Mov : 0;
904 break;
905 case 3: /* cmovbe/cmovna */
906 d |= (_regs.eflags & (EFLG_CF|EFLG_ZF)) ? Mov : 0;
907 break;
908 case 4: /* cmovs */
909 d |= (_regs.eflags & EFLG_SF) ? Mov : 0;
910 break;
911 case 5: /* cmovp/cmovpe */
912 d |= (_regs.eflags & EFLG_PF) ? Mov : 0;
913 break;
914 case 7: /* cmovle/cmovng */
915 d |= (_regs.eflags & EFLG_ZF) ? Mov : 0;
916 /* fall through */
917 case 6: /* cmovl/cmovnge */
918 d |= (!(_regs.eflags & EFLG_SF) != !(_regs.eflags & EFLG_OF)) ?
919 Mov : 0;
920 break;
921 }
922 /* Odd cmov opcodes (lsb == 1) have inverted sense. */
923 d ^= (b & 1) ? Mov : 0;
924 break;
925 case 0xb0 ... 0xb1: /* cmpxchg */
926 /* Save real source value, then compare EAX against destination. */
927 src.orig_val = src.val;
928 src.val = _regs.eax;
929 emulate_2op_SrcV("cmp", src, dst, _regs.eflags);
930 /* Always write back. The question is: where to? */
931 d |= Mov;
932 if ( _regs.eflags & EFLG_ZF )
933 {
934 /* Success: write back to memory. */
935 dst.val = src.orig_val;
936 }
937 else
938 {
939 /* Failure: write the value we saw to EAX. */
940 dst.type = OP_REG;
941 dst.ptr = (unsigned long *)&_regs.eax;
942 }
943 break;
944 case 0xa3: bt: /* bt */
945 src.val &= (dst.bytes << 3) - 1; /* only subword offset */
946 emulate_2op_SrcV_nobyte("bt", src, dst, _regs.eflags);
947 break;
948 case 0xb3: btr: /* btr */
949 src.val &= (dst.bytes << 3) - 1; /* only subword offset */
950 emulate_2op_SrcV_nobyte("btr", src, dst, _regs.eflags);
951 break;
952 case 0xab: bts: /* bts */
953 src.val &= (dst.bytes << 3) - 1; /* only subword offset */
954 emulate_2op_SrcV_nobyte("bts", src, dst, _regs.eflags);
955 break;
956 case 0xbb: btc: /* btc */
957 src.val &= (dst.bytes << 3) - 1; /* only subword offset */
958 emulate_2op_SrcV_nobyte("btc", src, dst, _regs.eflags);
959 break;
960 case 0xba: /* Grp8 */
961 switch ( modrm_reg & 3 )
962 {
963 case 0: goto bt;
964 case 1: goto bts;
965 case 2: goto btr;
966 case 3: goto btc;
967 }
968 break;
969 }
970 goto writeback;
972 twobyte_special_insn:
973 /* Disable writeback. */
974 dst.orig_val = dst.val;
975 switch ( b )
976 {
977 case 0x0d: /* GrpP (prefetch) */
978 case 0x18: /* Grp16 (prefetch/nop) */
979 break;
980 case 0xc7: /* Grp9 (cmpxchg8b) */
981 #if defined(__i386__)
982 {
983 unsigned long old_lo, old_hi;
984 if ( ((rc = ops->read_emulated(cr2+0, &old_lo, 4)) != 0) ||
985 ((rc = ops->read_emulated(cr2+4, &old_hi, 4)) != 0) )
986 goto done;
987 if ( (old_lo != _regs.eax) || (old_hi != _regs.edx) )
988 {
989 _regs.eax = old_lo;
990 _regs.edx = old_hi;
991 _regs.eflags &= ~EFLG_ZF;
992 }
993 else if ( ops->cmpxchg8b_emulated == NULL )
994 {
995 rc = X86EMUL_UNHANDLEABLE;
996 goto done;
997 }
998 else
999 {
1000 if ( (rc = ops->cmpxchg8b_emulated(cr2, old_lo, old_hi,
1001 _regs.ebx, _regs.ecx)) != 0 )
1002 goto done;
1003 _regs.eflags |= EFLG_ZF;
1005 break;
1007 #elif defined(__x86_64__)
1009 unsigned long old, new;
1010 if ( (rc = ops->read_emulated(cr2, &old, 8)) != 0 )
1011 goto done;
1012 if ( ((u32)(old>>0) != (u32)_regs.eax) ||
1013 ((u32)(old>>32) != (u32)_regs.edx) )
1015 _regs.eax = (u32)(old>>0);
1016 _regs.edx = (u32)(old>>32);
1017 _regs.eflags &= ~EFLG_ZF;
1019 else
1021 new = (_regs.ecx<<32)|(u32)_regs.ebx;
1022 if ( (rc = ops->cmpxchg_emulated(cr2, old, new, 8)) != 0 )
1023 goto done;
1024 _regs.eflags |= EFLG_ZF;
1026 break;
1028 #endif
1030 goto writeback;
1032 cannot_emulate:
1033 DPRINTF("Cannot emulate %02x\n", b);
1034 return -1;
1037 #ifndef __TEST_HARNESS__
1039 #include <asm/mm.h>
1040 #include <asm/uaccess.h>
1042 int
1043 x86_emulate_read_std(
1044 unsigned long addr,
1045 unsigned long *val,
1046 unsigned int bytes)
1048 *val = 0;
1049 if ( copy_from_user((void *)val, (void *)addr, bytes) )
1051 propagate_page_fault(addr, 4); /* user mode, read fault */
1052 return X86EMUL_PROPAGATE_FAULT;
1054 return X86EMUL_CONTINUE;
1057 int
1058 x86_emulate_write_std(
1059 unsigned long addr,
1060 unsigned long val,
1061 unsigned int bytes)
1063 if ( copy_to_user((void *)addr, (void *)&val, bytes) )
1065 propagate_page_fault(addr, 6); /* user mode, write fault */
1066 return X86EMUL_PROPAGATE_FAULT;
1068 return X86EMUL_CONTINUE;
1071 #endif