xen/arch/x86/x86_emulate.c @ 16566:6d879bb3f6f0 (debuggers.hg)

x86_emulate: EFLAGS.PF only reflects least-significant byte of result,
so even_parity() can return to its original prototype.

Signed-off-by: Keir Fraser <keir.fraser@citrix.com>
Author:   Keir Fraser <keir.fraser@citrix.com>
Date:     Wed Dec 05 10:34:15 2007 +0000 (2007-12-05)
Parents:  6706934cdf9d
Children: 4c1a0d2a318d
/******************************************************************************
 * x86_emulate.c
 *
 * Generic x86 (32-bit and 64-bit) instruction decoder and emulator.
 *
 * Copyright (c) 2005-2007 Keir Fraser
 * Copyright (c) 2005-2007 XenSource Inc.
 *
 * This program is free software; you can redistribute it and/or modify
 * it under the terms of the GNU General Public License as published by
 * the Free Software Foundation; either version 2 of the License, or
 * (at your option) any later version.
 *
 * This program is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
 * GNU General Public License for more details.
 *
 * You should have received a copy of the GNU General Public License
 * along with this program; if not, write to the Free Software
 * Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA  02111-1307  USA
 */
#ifndef __XEN__
#include <stddef.h>
#include <stdint.h>
#include <public/xen.h>
#else
#include <xen/config.h>
#include <xen/types.h>
#include <xen/lib.h>
#include <asm/regs.h>
#undef cmpxchg
#endif
#include <asm-x86/x86_emulate.h>

/* Operand sizes: 8-bit operands or specified/overridden size. */
#define ByteOp      (1<<0) /* 8-bit operands. */
/* Destination operand type. */
#define DstBitBase  (0<<1) /* Memory operand, bit string. */
#define ImplicitOps (1<<1) /* Implicit in opcode. No generic decode. */
#define DstReg      (2<<1) /* Register operand. */
#define DstMem      (3<<1) /* Memory operand. */
#define DstMask     (3<<1)
/* Source operand type. */
#define SrcNone     (0<<3) /* No source operand. */
#define SrcImplicit (0<<3) /* Source operand is implicit in the opcode. */
#define SrcReg      (1<<3) /* Register operand. */
#define SrcMem      (2<<3) /* Memory operand. */
#define SrcMem16    (3<<3) /* Memory operand (16-bit). */
#define SrcImm      (4<<3) /* Immediate operand. */
#define SrcImmByte  (5<<3) /* 8-bit sign-extended immediate operand. */
#define SrcMask     (7<<3)
/* Generic ModRM decode. */
#define ModRM       (1<<6)
/* Destination is only written; never read. */
#define Mov         (1<<7)

static uint8_t opcode_table[256] = {
    /* 0x00 - 0x07 */
    ByteOp|DstMem|SrcReg|ModRM, DstMem|SrcReg|ModRM,
    ByteOp|DstReg|SrcMem|ModRM, DstReg|SrcMem|ModRM,
    ByteOp|DstReg|SrcImm, DstReg|SrcImm, ImplicitOps, ImplicitOps,
    /* 0x08 - 0x0F */
    ByteOp|DstMem|SrcReg|ModRM, DstMem|SrcReg|ModRM,
    ByteOp|DstReg|SrcMem|ModRM, DstReg|SrcMem|ModRM,
    ByteOp|DstReg|SrcImm, DstReg|SrcImm, ImplicitOps, 0,
    /* 0x10 - 0x17 */
    ByteOp|DstMem|SrcReg|ModRM, DstMem|SrcReg|ModRM,
    ByteOp|DstReg|SrcMem|ModRM, DstReg|SrcMem|ModRM,
    ByteOp|DstReg|SrcImm, DstReg|SrcImm, ImplicitOps, ImplicitOps,
    /* 0x18 - 0x1F */
    ByteOp|DstMem|SrcReg|ModRM, DstMem|SrcReg|ModRM,
    ByteOp|DstReg|SrcMem|ModRM, DstReg|SrcMem|ModRM,
    ByteOp|DstReg|SrcImm, DstReg|SrcImm, ImplicitOps, ImplicitOps,
    /* 0x20 - 0x27 */
    ByteOp|DstMem|SrcReg|ModRM, DstMem|SrcReg|ModRM,
    ByteOp|DstReg|SrcMem|ModRM, DstReg|SrcMem|ModRM,
    ByteOp|DstReg|SrcImm, DstReg|SrcImm, 0, ImplicitOps,
    /* 0x28 - 0x2F */
    ByteOp|DstMem|SrcReg|ModRM, DstMem|SrcReg|ModRM,
    ByteOp|DstReg|SrcMem|ModRM, DstReg|SrcMem|ModRM,
    ByteOp|DstReg|SrcImm, DstReg|SrcImm, 0, ImplicitOps,
    /* 0x30 - 0x37 */
    ByteOp|DstMem|SrcReg|ModRM, DstMem|SrcReg|ModRM,
    ByteOp|DstReg|SrcMem|ModRM, DstReg|SrcMem|ModRM,
    ByteOp|DstReg|SrcImm, DstReg|SrcImm, 0, ImplicitOps,
    /* 0x38 - 0x3F */
    ByteOp|DstMem|SrcReg|ModRM, DstMem|SrcReg|ModRM,
    ByteOp|DstReg|SrcMem|ModRM, DstReg|SrcMem|ModRM,
    ByteOp|DstReg|SrcImm, DstReg|SrcImm, 0, ImplicitOps,
    /* 0x40 - 0x4F */
    ImplicitOps, ImplicitOps, ImplicitOps, ImplicitOps,
    ImplicitOps, ImplicitOps, ImplicitOps, ImplicitOps,
    ImplicitOps, ImplicitOps, ImplicitOps, ImplicitOps,
    ImplicitOps, ImplicitOps, ImplicitOps, ImplicitOps,
    /* 0x50 - 0x5F */
    ImplicitOps|Mov, ImplicitOps|Mov, ImplicitOps|Mov, ImplicitOps|Mov,
    ImplicitOps|Mov, ImplicitOps|Mov, ImplicitOps|Mov, ImplicitOps|Mov,
    ImplicitOps|Mov, ImplicitOps|Mov, ImplicitOps|Mov, ImplicitOps|Mov,
    ImplicitOps|Mov, ImplicitOps|Mov, ImplicitOps|Mov, ImplicitOps|Mov,
    /* 0x60 - 0x67 */
    ImplicitOps, ImplicitOps, DstReg|SrcMem|ModRM, DstReg|SrcMem16|ModRM|Mov,
    0, 0, 0, 0,
    /* 0x68 - 0x6F */
    ImplicitOps|Mov, DstMem|SrcImm|ModRM|Mov,
    ImplicitOps|Mov, DstMem|SrcImmByte|ModRM|Mov,
    ImplicitOps, ImplicitOps, ImplicitOps, ImplicitOps,
    /* 0x70 - 0x77 */
    ImplicitOps, ImplicitOps, ImplicitOps, ImplicitOps,
    ImplicitOps, ImplicitOps, ImplicitOps, ImplicitOps,
    /* 0x78 - 0x7F */
    ImplicitOps, ImplicitOps, ImplicitOps, ImplicitOps,
    ImplicitOps, ImplicitOps, ImplicitOps, ImplicitOps,
    /* 0x80 - 0x87 */
    ByteOp|DstMem|SrcImm|ModRM, DstMem|SrcImm|ModRM,
    ByteOp|DstMem|SrcImm|ModRM, DstMem|SrcImmByte|ModRM,
    ByteOp|DstMem|SrcReg|ModRM, DstMem|SrcReg|ModRM,
    ByteOp|DstMem|SrcReg|ModRM, DstMem|SrcReg|ModRM,
    /* 0x88 - 0x8F */
    ByteOp|DstMem|SrcReg|ModRM|Mov, DstMem|SrcReg|ModRM|Mov,
    ByteOp|DstReg|SrcMem|ModRM|Mov, DstReg|SrcMem|ModRM|Mov,
    DstMem|SrcReg|ModRM|Mov, DstReg|SrcNone|ModRM,
    DstReg|SrcMem|ModRM|Mov, DstMem|SrcNone|ModRM|Mov,
    /* 0x90 - 0x97 */
    ImplicitOps, ImplicitOps, ImplicitOps, ImplicitOps,
    ImplicitOps, ImplicitOps, ImplicitOps, ImplicitOps,
    /* 0x98 - 0x9F */
    ImplicitOps, ImplicitOps, ImplicitOps, 0,
    ImplicitOps, ImplicitOps, ImplicitOps, ImplicitOps,
    /* 0xA0 - 0xA7 */
    ByteOp|ImplicitOps|Mov, ImplicitOps|Mov,
    ByteOp|ImplicitOps|Mov, ImplicitOps|Mov,
    ByteOp|ImplicitOps|Mov, ImplicitOps|Mov,
    ByteOp|ImplicitOps, ImplicitOps,
    /* 0xA8 - 0xAF */
    ByteOp|DstReg|SrcImm, DstReg|SrcImm,
    ByteOp|ImplicitOps|Mov, ImplicitOps|Mov,
    ByteOp|ImplicitOps|Mov, ImplicitOps|Mov,
    ByteOp|ImplicitOps, ImplicitOps,
    /* 0xB0 - 0xB7 */
    ByteOp|DstReg|SrcImm|Mov, ByteOp|DstReg|SrcImm|Mov,
    ByteOp|DstReg|SrcImm|Mov, ByteOp|DstReg|SrcImm|Mov,
    ByteOp|DstReg|SrcImm|Mov, ByteOp|DstReg|SrcImm|Mov,
    ByteOp|DstReg|SrcImm|Mov, ByteOp|DstReg|SrcImm|Mov,
    /* 0xB8 - 0xBF */
    DstReg|SrcImm|Mov, DstReg|SrcImm|Mov, DstReg|SrcImm|Mov, DstReg|SrcImm|Mov,
    DstReg|SrcImm|Mov, DstReg|SrcImm|Mov, DstReg|SrcImm|Mov, DstReg|SrcImm|Mov,
    /* 0xC0 - 0xC7 */
    ByteOp|DstMem|SrcImm|ModRM, DstMem|SrcImmByte|ModRM,
    ImplicitOps, ImplicitOps,
    DstReg|SrcMem|ModRM|Mov, DstReg|SrcMem|ModRM|Mov,
    ByteOp|DstMem|SrcImm|ModRM|Mov, DstMem|SrcImm|ModRM|Mov,
    /* 0xC8 - 0xCF */
    ImplicitOps, ImplicitOps, ImplicitOps, ImplicitOps,
    ImplicitOps, ImplicitOps, ImplicitOps, ImplicitOps,
    /* 0xD0 - 0xD7 */
    ByteOp|DstMem|SrcImplicit|ModRM, DstMem|SrcImplicit|ModRM,
    ByteOp|DstMem|SrcImplicit|ModRM, DstMem|SrcImplicit|ModRM,
    ImplicitOps, ImplicitOps, ImplicitOps, ImplicitOps,
    /* 0xD8 - 0xDF */
    0, 0, 0, 0, 0, 0, 0, 0,
    /* 0xE0 - 0xE7 */
    ImplicitOps, ImplicitOps, ImplicitOps, ImplicitOps,
    ImplicitOps, ImplicitOps, ImplicitOps, ImplicitOps,
    /* 0xE8 - 0xEF */
    ImplicitOps, ImplicitOps, ImplicitOps, ImplicitOps,
    ImplicitOps, ImplicitOps, ImplicitOps, ImplicitOps,
    /* 0xF0 - 0xF7 */
    0, ImplicitOps, 0, 0,
    ImplicitOps, ImplicitOps,
    ByteOp|DstMem|SrcNone|ModRM, DstMem|SrcNone|ModRM,
    /* 0xF8 - 0xFF */
    ImplicitOps, ImplicitOps, ImplicitOps, ImplicitOps,
    ImplicitOps, ImplicitOps, ByteOp|DstMem|SrcNone|ModRM, DstMem|SrcNone|ModRM
};

static uint8_t twobyte_table[256] = {
    /* 0x00 - 0x07 */
    0, ImplicitOps|ModRM, 0, 0, 0, ImplicitOps, 0, 0,
    /* 0x08 - 0x0F */
    ImplicitOps, ImplicitOps, 0, 0, 0, ImplicitOps|ModRM, 0, 0,
    /* 0x10 - 0x17 */
    0, 0, 0, 0, 0, 0, 0, 0,
    /* 0x18 - 0x1F */
    ImplicitOps|ModRM, ImplicitOps|ModRM, ImplicitOps|ModRM, ImplicitOps|ModRM,
    ImplicitOps|ModRM, ImplicitOps|ModRM, ImplicitOps|ModRM, ImplicitOps|ModRM,
    /* 0x20 - 0x27 */
    ImplicitOps|ModRM, ImplicitOps|ModRM, ImplicitOps|ModRM, ImplicitOps|ModRM,
    0, 0, 0, 0,
    /* 0x28 - 0x2F */
    0, 0, 0, 0, 0, 0, 0, 0,
    /* 0x30 - 0x37 */
    ImplicitOps, ImplicitOps, ImplicitOps, 0, 0, 0, 0, 0,
    /* 0x38 - 0x3F */
    0, 0, 0, 0, 0, 0, 0, 0,
    /* 0x40 - 0x47 */
    DstReg|SrcMem|ModRM|Mov, DstReg|SrcMem|ModRM|Mov,
    DstReg|SrcMem|ModRM|Mov, DstReg|SrcMem|ModRM|Mov,
    DstReg|SrcMem|ModRM|Mov, DstReg|SrcMem|ModRM|Mov,
    DstReg|SrcMem|ModRM|Mov, DstReg|SrcMem|ModRM|Mov,
    /* 0x48 - 0x4F */
    DstReg|SrcMem|ModRM|Mov, DstReg|SrcMem|ModRM|Mov,
    DstReg|SrcMem|ModRM|Mov, DstReg|SrcMem|ModRM|Mov,
    DstReg|SrcMem|ModRM|Mov, DstReg|SrcMem|ModRM|Mov,
    DstReg|SrcMem|ModRM|Mov, DstReg|SrcMem|ModRM|Mov,
    /* 0x50 - 0x5F */
    0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
    /* 0x60 - 0x6F */
    0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
    /* 0x70 - 0x7F */
    0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
    /* 0x80 - 0x87 */
    ImplicitOps, ImplicitOps, ImplicitOps, ImplicitOps,
    ImplicitOps, ImplicitOps, ImplicitOps, ImplicitOps,
    /* 0x88 - 0x8F */
    ImplicitOps, ImplicitOps, ImplicitOps, ImplicitOps,
    ImplicitOps, ImplicitOps, ImplicitOps, ImplicitOps,
    /* 0x90 - 0x97 */
    ByteOp|DstMem|SrcNone|ModRM|Mov, ByteOp|DstMem|SrcNone|ModRM|Mov,
    ByteOp|DstMem|SrcNone|ModRM|Mov, ByteOp|DstMem|SrcNone|ModRM|Mov,
    ByteOp|DstMem|SrcNone|ModRM|Mov, ByteOp|DstMem|SrcNone|ModRM|Mov,
    ByteOp|DstMem|SrcNone|ModRM|Mov, ByteOp|DstMem|SrcNone|ModRM|Mov,
    /* 0x98 - 0x9F */
    ByteOp|DstMem|SrcNone|ModRM|Mov, ByteOp|DstMem|SrcNone|ModRM|Mov,
    ByteOp|DstMem|SrcNone|ModRM|Mov, ByteOp|DstMem|SrcNone|ModRM|Mov,
    ByteOp|DstMem|SrcNone|ModRM|Mov, ByteOp|DstMem|SrcNone|ModRM|Mov,
    ByteOp|DstMem|SrcNone|ModRM|Mov, ByteOp|DstMem|SrcNone|ModRM|Mov,
    /* 0xA0 - 0xA7 */
    ImplicitOps, ImplicitOps, ImplicitOps, DstBitBase|SrcReg|ModRM,
    DstMem|SrcReg|ModRM, DstMem|SrcReg|ModRM, 0, 0,
    /* 0xA8 - 0xAF */
    ImplicitOps, ImplicitOps, 0, DstBitBase|SrcReg|ModRM,
    DstMem|SrcReg|ModRM, DstMem|SrcReg|ModRM, 0, DstReg|SrcMem|ModRM,
    /* 0xB0 - 0xB7 */
    ByteOp|DstMem|SrcReg|ModRM, DstMem|SrcReg|ModRM,
    DstReg|SrcMem|ModRM|Mov, DstBitBase|SrcReg|ModRM,
    DstReg|SrcMem|ModRM|Mov, DstReg|SrcMem|ModRM|Mov,
    ByteOp|DstReg|SrcMem|ModRM|Mov, DstReg|SrcMem16|ModRM|Mov,
    /* 0xB8 - 0xBF */
    0, 0, DstBitBase|SrcImmByte|ModRM, DstBitBase|SrcReg|ModRM,
    DstReg|SrcMem|ModRM, DstReg|SrcMem|ModRM,
    ByteOp|DstReg|SrcMem|ModRM|Mov, DstReg|SrcMem16|ModRM|Mov,
    /* 0xC0 - 0xC7 */
    ByteOp|DstMem|SrcReg|ModRM, DstMem|SrcReg|ModRM, 0, 0,
    0, 0, 0, ImplicitOps|ModRM,
    /* 0xC8 - 0xCF */
    ImplicitOps, ImplicitOps, ImplicitOps, ImplicitOps,
    ImplicitOps, ImplicitOps, ImplicitOps, ImplicitOps,
    /* 0xD0 - 0xDF */
    0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
    /* 0xE0 - 0xEF */
    0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
    /* 0xF0 - 0xFF */
    0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0
};

/* Type, address-of, and value of an instruction's operand. */
struct operand {
    enum { OP_REG, OP_MEM, OP_IMM, OP_NONE } type;
    unsigned int  bytes;
    unsigned long val, orig_val;
    union {
        /* OP_REG: Pointer to register field. */
        unsigned long *reg;
        /* OP_MEM: Segment and offset. */
        struct {
            enum x86_segment seg;
            unsigned long    off;
        } mem;
    };
};

/* MSRs. */
#define MSR_TSC   0x10

/* Control register flags. */
#define CR0_PE    (1<<0)
#define CR4_TSD   (1<<2)

/* EFLAGS bit definitions. */
#define EFLG_VIP  (1<<20)
#define EFLG_VIF  (1<<19)
#define EFLG_AC   (1<<18)
#define EFLG_VM   (1<<17)
#define EFLG_RF   (1<<16)
#define EFLG_NT   (1<<14)
#define EFLG_IOPL (3<<12)
#define EFLG_OF   (1<<11)
#define EFLG_DF   (1<<10)
#define EFLG_IF   (1<<9)
#define EFLG_TF   (1<<8)
#define EFLG_SF   (1<<7)
#define EFLG_ZF   (1<<6)
#define EFLG_AF   (1<<4)
#define EFLG_PF   (1<<2)
#define EFLG_CF   (1<<0)

/* Exception definitions. */
#define EXC_DE  0
#define EXC_DB  1
#define EXC_BP  3
#define EXC_OF  4
#define EXC_BR  5
#define EXC_UD  6
#define EXC_GP 13

/*
 * Instruction emulation:
 * Most instructions are emulated directly via a fragment of inline assembly
 * code. This allows us to save/restore EFLAGS and thus very easily pick up
 * any modified flags.
 */

#if defined(__x86_64__)
#define _LO32 "k"          /* force 32-bit operand */
#define _STK  "%%rsp"      /* stack pointer */
#define _BYTES_PER_LONG "8"
#elif defined(__i386__)
#define _LO32 ""           /* force 32-bit operand */
#define _STK  "%%esp"      /* stack pointer */
#define _BYTES_PER_LONG "4"
#endif

/*
 * These EFLAGS bits are restored from saved value during emulation, and
 * any changes are written back to the saved value after emulation.
 */
#define EFLAGS_MASK (EFLG_OF|EFLG_SF|EFLG_ZF|EFLG_AF|EFLG_PF|EFLG_CF)

/* Before executing instruction: restore necessary bits in EFLAGS. */
#define _PRE_EFLAGS(_sav, _msk, _tmp)                           \
/* EFLAGS = (_sav & _msk) | (EFLAGS & ~_msk); _sav &= ~_msk; */ \
"movl %"_sav",%"_LO32 _tmp"; "                                  \
"push %"_tmp"; "                                                \
"push %"_tmp"; "                                                \
"movl %"_msk",%"_LO32 _tmp"; "                                  \
"andl %"_LO32 _tmp",("_STK"); "                                 \
"pushf; "                                                       \
"notl %"_LO32 _tmp"; "                                          \
"andl %"_LO32 _tmp",("_STK"); "                                 \
"andl %"_LO32 _tmp",2*"_BYTES_PER_LONG"("_STK"); "              \
"pop %"_tmp"; "                                                 \
"orl %"_LO32 _tmp",("_STK"); "                                  \
"popf; "                                                        \
"pop %"_sav"; "

/* After executing instruction: write-back necessary bits in EFLAGS. */
#define _POST_EFLAGS(_sav, _msk, _tmp) \
/* _sav |= EFLAGS & _msk; */           \
"pushf; "                              \
"pop %"_tmp"; "                        \
"andl %"_msk",%"_LO32 _tmp"; "         \
"orl %"_LO32 _tmp",%"_sav"; "

/* Raw emulation: instruction has two explicit operands. */
#define __emulate_2op_nobyte(_op,_src,_dst,_eflags,_wx,_wy,_lx,_ly,_qx,_qy)\
do{ unsigned long _tmp;                                                    \
    switch ( (_dst).bytes )                                                \
    {                                                                      \
    case 2:                                                                \
        asm volatile (                                                     \
            _PRE_EFLAGS("0","4","2")                                       \
            _op"w %"_wx"3,%1; "                                            \
            _POST_EFLAGS("0","4","2")                                      \
            : "=m" (_eflags), "=m" ((_dst).val), "=&r" (_tmp)              \
            : _wy ((_src).val), "i" (EFLAGS_MASK),                         \
              "m" (_eflags), "m" ((_dst).val) );                           \
        break;                                                             \
    case 4:                                                                \
        asm volatile (                                                     \
            _PRE_EFLAGS("0","4","2")                                       \
            _op"l %"_lx"3,%1; "                                            \
            _POST_EFLAGS("0","4","2")                                      \
            : "=m" (_eflags), "=m" ((_dst).val), "=&r" (_tmp)              \
            : _ly ((_src).val), "i" (EFLAGS_MASK),                         \
              "m" (_eflags), "m" ((_dst).val) );                           \
        break;                                                             \
    case 8:                                                                \
        __emulate_2op_8byte(_op, _src, _dst, _eflags, _qx, _qy);           \
        break;                                                             \
    }                                                                      \
} while (0)
#define __emulate_2op(_op,_src,_dst,_eflags,_bx,_by,_wx,_wy,_lx,_ly,_qx,_qy)\
do{ unsigned long _tmp;                                                    \
    switch ( (_dst).bytes )                                                \
    {                                                                      \
    case 1:                                                                \
        asm volatile (                                                     \
            _PRE_EFLAGS("0","4","2")                                       \
            _op"b %"_bx"3,%1; "                                            \
            _POST_EFLAGS("0","4","2")                                      \
            : "=m" (_eflags), "=m" ((_dst).val), "=&r" (_tmp)              \
            : _by ((_src).val), "i" (EFLAGS_MASK),                         \
              "m" (_eflags), "m" ((_dst).val) );                           \
        break;                                                             \
    default:                                                               \
        __emulate_2op_nobyte(_op,_src,_dst,_eflags,_wx,_wy,_lx,_ly,_qx,_qy);\
        break;                                                             \
    }                                                                      \
} while (0)
/* Source operand is byte-sized and may be restricted to just %cl. */
#define emulate_2op_SrcB(_op, _src, _dst, _eflags)        \
    __emulate_2op(_op, _src, _dst, _eflags,               \
                  "b", "c", "b", "c", "b", "c", "b", "c")
/* Source operand is byte, word, long or quad sized. */
#define emulate_2op_SrcV(_op, _src, _dst, _eflags)           \
    __emulate_2op(_op, _src, _dst, _eflags,                  \
                  "b", "q", "w", "r", _LO32, "r", "", "r")
/* Source operand is word, long or quad sized. */
#define emulate_2op_SrcV_nobyte(_op, _src, _dst, _eflags) \
    __emulate_2op_nobyte(_op, _src, _dst, _eflags,        \
                         "w", "r", _LO32, "r", "", "r")

/* Instruction has only one explicit operand (no source operand). */
#define emulate_1op(_op,_dst,_eflags)                                      \
do{ unsigned long _tmp;                                                    \
    switch ( (_dst).bytes )                                                \
    {                                                                      \
    case 1:                                                                \
        asm volatile (                                                     \
            _PRE_EFLAGS("0","3","2")                                       \
            _op"b %1; "                                                    \
            _POST_EFLAGS("0","3","2")                                      \
            : "=m" (_eflags), "=m" ((_dst).val), "=&r" (_tmp)              \
            : "i" (EFLAGS_MASK), "m" (_eflags), "m" ((_dst).val) );        \
        break;                                                             \
    case 2:                                                                \
        asm volatile (                                                     \
            _PRE_EFLAGS("0","3","2")                                       \
            _op"w %1; "                                                    \
            _POST_EFLAGS("0","3","2")                                      \
            : "=m" (_eflags), "=m" ((_dst).val), "=&r" (_tmp)              \
            : "i" (EFLAGS_MASK), "m" (_eflags), "m" ((_dst).val) );        \
        break;                                                             \
    case 4:                                                                \
        asm volatile (                                                     \
            _PRE_EFLAGS("0","3","2")                                       \
            _op"l %1; "                                                    \
            _POST_EFLAGS("0","3","2")                                      \
            : "=m" (_eflags), "=m" ((_dst).val), "=&r" (_tmp)              \
            : "i" (EFLAGS_MASK), "m" (_eflags), "m" ((_dst).val) );        \
        break;                                                             \
    case 8:                                                                \
        __emulate_1op_8byte(_op, _dst, _eflags);                           \
        break;                                                             \
    }                                                                      \
} while (0)

/* Emulate an instruction with quadword operands (x86/64 only). */
#if defined(__x86_64__)
#define __emulate_2op_8byte(_op, _src, _dst, _eflags, _qx, _qy)  \
do{ asm volatile (                                               \
        _PRE_EFLAGS("0","4","2")                                 \
        _op"q %"_qx"3,%1; "                                      \
        _POST_EFLAGS("0","4","2")                                \
        : "=m" (_eflags), "=m" ((_dst).val), "=&r" (_tmp)        \
        : _qy ((_src).val), "i" (EFLAGS_MASK),                   \
          "m" (_eflags), "m" ((_dst).val) );                     \
} while (0)
#define __emulate_1op_8byte(_op, _dst, _eflags)                  \
do{ asm volatile (                                               \
        _PRE_EFLAGS("0","3","2")                                 \
        _op"q %1; "                                              \
        _POST_EFLAGS("0","3","2")                                \
        : "=m" (_eflags), "=m" ((_dst).val), "=&r" (_tmp)        \
        : "i" (EFLAGS_MASK), "m" (_eflags), "m" ((_dst).val) );  \
} while (0)
#elif defined(__i386__)
#define __emulate_2op_8byte(_op, _src, _dst, _eflags, _qx, _qy)
#define __emulate_1op_8byte(_op, _dst, _eflags)
#endif /* __i386__ */

/* Fetch next part of the instruction being emulated. */
#define insn_fetch_bytes(_size)                                         \
({ unsigned long _x, _eip = _regs.eip;                                  \
   if ( !mode_64bit() ) _eip = (uint32_t)_eip; /* ignore upper dword */ \
   _regs.eip += (_size); /* real hardware doesn't truncate */           \
   generate_exception_if((uint8_t)(_regs.eip - ctxt->regs->eip) > 15,   \
                         EXC_GP);                                       \
   rc = ops->insn_fetch(x86_seg_cs, _eip, &_x, (_size), ctxt);          \
   if ( rc ) goto done;                                                 \
   _x;                                                                  \
})
#define insn_fetch_type(_type) ((_type)insn_fetch_bytes(sizeof(_type)))

#define truncate_word(ea, byte_width)           \
({  unsigned long __ea = (ea);                  \
    unsigned int _width = (byte_width);         \
    ((_width == sizeof(unsigned long)) ? __ea : \
     (__ea & ((1UL << (_width << 3)) - 1)));    \
})
#define truncate_ea(ea) truncate_word((ea), ad_bytes)
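
/*
 * Example (assuming a 64-bit build): truncate_word(0x123456789abc, 2)
 * masks with (1UL << 16) - 1 and yields 0x9abc, while a byte_width equal
 * to sizeof(unsigned long) returns the value unchanged. truncate_ea()
 * applies this at the current address size, so a 16-bit addressing mode
 * wraps effective addresses at 64KiB.
 */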

#define mode_64bit() (def_ad_bytes == 8)

#define fail_if(p)                                      \
do {                                                    \
    rc = (p) ? X86EMUL_UNHANDLEABLE : X86EMUL_OKAY;     \
    if ( rc ) goto done;                                \
} while (0)

#define generate_exception_if(p, e)                                   \
({  if ( (p) ) {                                                      \
        fail_if(ops->inject_hw_exception == NULL);                    \
        rc = ops->inject_hw_exception(e, ctxt) ? : X86EMUL_EXCEPTION; \
        goto done;                                                    \
    }                                                                 \
})

/*
 * Given byte has even parity (even number of 1s)? SDM Vol. 1 Sec. 3.4.3.1,
 * "Status Flags": EFLAGS.PF reflects parity of least-sig. byte of result only.
 */
static int even_parity(uint8_t v)
{
    asm ( "test %b0,%b0; setp %b0" : "=a" (v) : "0" (v) );
    return v;
}
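
/*
 * For example, even_parity(0x03) is 1 (two set bits) and even_parity(0x07)
 * is 0 (three set bits). Callers below pass the full accumulator, e.g.
 * even_parity(_regs.eax): the uint8_t parameter truncates it to AL, which
 * is exactly the byte EFLAGS.PF is defined over.
 */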

/* Update address held in a register, based on addressing mode. */
#define _register_address_increment(reg, inc, byte_width)               \
do {                                                                    \
    int _inc = (inc); /* signed type ensures sign extension to long */  \
    unsigned int _width = (byte_width);                                 \
    if ( _width == sizeof(unsigned long) )                              \
        (reg) += _inc;                                                  \
    else if ( mode_64bit() )                                            \
        (reg) = ((reg) + _inc) & ((1UL << (_width << 3)) - 1);          \
    else                                                                \
        (reg) = ((reg) & ~((1UL << (_width << 3)) - 1)) |               \
                (((reg) + _inc) & ((1UL << (_width << 3)) - 1));        \
} while (0)
#define register_address_increment(reg, inc) \
    _register_address_increment((reg), (inc), ad_bytes)
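
/*
 * Sketch of the three cases, assuming a 64-bit build and inc == 1: at full
 * width, reg 0xffff simply becomes 0x10000. With a 2-byte width in 64-bit
 * mode the sum is masked, so 0xffff wraps to 0x0. With a 2-byte width
 * outside 64-bit mode only the low word wraps: reg 0x1234ffff becomes
 * 0x12340000, preserving the untouched upper register bits.
 */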

#define sp_pre_dec(dec) ({                                           \
    _register_address_increment(_regs.esp, -(dec), ctxt->sp_size/8); \
    truncate_word(_regs.esp, ctxt->sp_size/8);                       \
})
#define sp_post_inc(inc) ({                                          \
    unsigned long __esp = truncate_word(_regs.esp, ctxt->sp_size/8); \
    _register_address_increment(_regs.esp, (inc), ctxt->sp_size/8);  \
    __esp;                                                           \
})

#define jmp_rel(rel)                                               \
do {                                                               \
    _regs.eip += (int)(rel);                                       \
    if ( !mode_64bit() )                                           \
        _regs.eip = ((op_bytes == 2)                               \
                     ? (uint16_t)_regs.eip : (uint32_t)_regs.eip); \
} while (0)
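
/*
 * E.g. a PUSH of op_bytes == 4 uses sp_pre_dec(4): ESP drops by 4 first
 * and the decremented, width-truncated value is the store address. A POP
 * uses sp_post_inc(4), which yields the current (truncated) ESP as the
 * load address and bumps ESP afterwards. For jmp_rel(), a 16-bit operand
 * size truncates the resulting IP to a uint16_t, mirroring how near jumps
 * wrap within a 64KiB code segment.
 */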

static int __handle_rep_prefix(
    struct cpu_user_regs *int_regs,
    struct cpu_user_regs *ext_regs,
    int ad_bytes)
{
    unsigned long ecx = ((ad_bytes == 2) ? (uint16_t)int_regs->ecx :
                         (ad_bytes == 4) ? (uint32_t)int_regs->ecx :
                         int_regs->ecx);

    if ( ecx-- == 0 )
    {
        ext_regs->eip = int_regs->eip;
        return 1;
    }

    if ( ad_bytes == 2 )
        *(uint16_t *)&int_regs->ecx = ecx;
    else if ( ad_bytes == 4 )
        int_regs->ecx = (uint32_t)ecx;
    else
        int_regs->ecx = ecx;
    int_regs->eip = ext_regs->eip;
    return 0;
}

#define handle_rep_prefix()                                                \
do {                                                                       \
    if ( rep_prefix && __handle_rep_prefix(&_regs, ctxt->regs, ad_bytes) ) \
        goto done;                                                         \
} while (0)
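
/*
 * Note the single-iteration structure: each call emulates at most one
 * string-op iteration. With ECX == 0 the instruction is a no-op and the
 * committed EIP is advanced past it; otherwise ECX is decremented (at the
 * current address width) and the shadow EIP is rewound to the start of
 * the instruction, so a REP sequence re-enters the emulator once per
 * iteration rather than looping internally.
 */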

/*
 * Unsigned multiplication with double-word result.
 * IN:  Multiplicand=m[0], Multiplier=m[1]
 * OUT: Return CF/OF (overflow status); Result=m[1]:m[0]
 */
static int mul_dbl(unsigned long m[2])
{
    int rc;
    asm ( "mul %4; seto %b2"
          : "=a" (m[0]), "=d" (m[1]), "=q" (rc)
          : "0" (m[0]), "1" (m[1]), "2" (0) );
    return rc;
}

/*
 * Signed multiplication with double-word result.
 * IN:  Multiplicand=m[0], Multiplier=m[1]
 * OUT: Return CF/OF (overflow status); Result=m[1]:m[0]
 */
static int imul_dbl(unsigned long m[2])
{
    int rc;
    asm ( "imul %4; seto %b2"
          : "=a" (m[0]), "=d" (m[1]), "=q" (rc)
          : "0" (m[0]), "1" (m[1]), "2" (0) );
    return rc;
}

/*
 * Unsigned division of double-word dividend.
 * IN:  Dividend=u[1]:u[0], Divisor=v
 * OUT: Return 1: #DE
 *      Return 0: Quotient=u[0], Remainder=u[1]
 */
static int div_dbl(unsigned long u[2], unsigned long v)
{
    if ( (v == 0) || (u[1] >= v) )
        return 1;
    asm ( "div %4"
          : "=a" (u[0]), "=d" (u[1])
          : "0" (u[0]), "1" (u[1]), "r" (v) );
    return 0;
}
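
/*
 * The u[1] >= v pre-check is what makes DIV safe to execute natively: the
 * hardware DIV instruction raises #DE whenever the quotient does not fit
 * in one word, and u[1] >= v is exactly that condition (e.g. dividing the
 * double-word 0x1:0x0 by 1 would need a quotient of 2^BITS_PER_LONG).
 * Checking up front lets the emulator report the fault instead of taking
 * a real #DE in hypervisor context.
 */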

/*
 * Signed division of double-word dividend.
 * IN:  Dividend=u[1]:u[0], Divisor=v
 * OUT: Return 1: #DE
 *      Return 0: Quotient=u[0], Remainder=u[1]
 * NB. We don't use idiv directly as it's moderately hard to work out
 *     ahead of time whether it will #DE, which we cannot allow to happen.
 */
static int idiv_dbl(unsigned long u[2], unsigned long v)
{
    int negu = (long)u[1] < 0, negv = (long)v < 0;

    /* u = abs(u) */
    if ( negu )
    {
        u[1] = ~u[1];
        if ( (u[0] = -u[0]) == 0 )
            u[1]++;
    }

    /* abs(u) / abs(v) */
    if ( div_dbl(u, negv ? -v : v) )
        return 1;

    /* Remainder has same sign as dividend. It cannot overflow. */
    if ( negu )
        u[1] = -u[1];

    /* Quotient is overflowed if sign bit is set. */
    if ( negu ^ negv )
    {
        if ( (long)u[0] >= 0 )
            u[0] = -u[0];
        else if ( (u[0] << 1) != 0 ) /* == 0x80...0 is okay */
            return 1;
    }
    else if ( (long)u[0] < 0 )
        return 1;

    return 0;
}
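
/*
 * Worked example, using small numbers for brevity: -7 / 2 first becomes
 * abs(-7) / abs(2) = 3 rem 1 via div_dbl(); the remainder is negated to -1
 * (same sign as the dividend) and, because exactly one operand was
 * negative, the quotient is negated to -3. The (u[0] << 1) != 0 test
 * admits the one legal negative-looking quotient, 0x80...0, which arises
 * e.g. from dividing the most negative representable value by 1.
 */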

static int
test_cc(
    unsigned int condition, unsigned int flags)
{
    int rc = 0;

    switch ( (condition & 15) >> 1 )
    {
    case 0: /* o */
        rc |= (flags & EFLG_OF);
        break;
    case 1: /* b/c/nae */
        rc |= (flags & EFLG_CF);
        break;
    case 2: /* z/e */
        rc |= (flags & EFLG_ZF);
        break;
    case 3: /* be/na */
        rc |= (flags & (EFLG_CF|EFLG_ZF));
        break;
    case 4: /* s */
        rc |= (flags & EFLG_SF);
        break;
    case 5: /* p/pe */
        rc |= (flags & EFLG_PF);
        break;
    case 7: /* le/ng */
        rc |= (flags & EFLG_ZF);
        /* fall through */
    case 6: /* l/nge */
        rc |= (!(flags & EFLG_SF) != !(flags & EFLG_OF));
        break;
    }

    /* Odd condition identifiers (lsb == 1) have inverted sense. */
    return (!!rc ^ (condition & 1));
}
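
/*
 * The condition argument is the low nibble of the opcode, as in Jcc
 * (0x70-0x7f / 0x0f 0x80-0x8f) and SETcc. For instance, JNE encodes
 * condition 0x5: (0x5 & 15) >> 1 selects the z/e test, and the odd lsb
 * inverts it, so test_cc(0x5, flags) is true exactly when ZF is clear.
 */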

static int
get_cpl(
    struct x86_emulate_ctxt *ctxt,
    struct x86_emulate_ops  *ops)
{
    struct segment_register reg;

    if ( ctxt->regs->eflags & EFLG_VM )
        return 3;

    if ( (ops->read_segment == NULL) ||
         ops->read_segment(x86_seg_ss, &reg, ctxt) )
        return -1;

    return reg.attr.fields.dpl;
}

static int
_mode_iopl(
    struct x86_emulate_ctxt *ctxt,
    struct x86_emulate_ops  *ops)
{
    int cpl = get_cpl(ctxt, ops);
    return ((cpl >= 0) && (cpl <= ((ctxt->regs->eflags >> 12) & 3)));
}

#define mode_ring0() (get_cpl(ctxt, ops) == 0)
#define mode_iopl()  _mode_iopl(ctxt, ops)

static int
in_realmode(
    struct x86_emulate_ctxt *ctxt,
    struct x86_emulate_ops  *ops)
{
    unsigned long cr0;
    int rc;

    if ( ops->read_cr == NULL )
        return 0;

    rc = ops->read_cr(0, &cr0, ctxt);
    return (!rc && !(cr0 & CR0_PE));
}

static int
load_seg(
    enum x86_segment seg,
    uint16_t sel,
    struct x86_emulate_ctxt *ctxt,
    struct x86_emulate_ops *ops)
{
    struct segment_register reg;
    int rc;

    if ( !in_realmode(ctxt, ops) ||
         (ops->read_segment == NULL) ||
         (ops->write_segment == NULL) )
        return X86EMUL_UNHANDLEABLE;

    if ( (rc = ops->read_segment(seg, &reg, ctxt)) != 0 )
        return rc;

    reg.sel  = sel;
    reg.base = (uint32_t)sel << 4;

    return ops->write_segment(seg, &reg, ctxt);
}

void *
decode_register(
    uint8_t modrm_reg, struct cpu_user_regs *regs, int highbyte_regs)
{
    void *p;

    switch ( modrm_reg )
    {
    case  0: p = &regs->eax; break;
    case  1: p = &regs->ecx; break;
    case  2: p = &regs->edx; break;
    case  3: p = &regs->ebx; break;
    case  4: p = (highbyte_regs ?
                  ((unsigned char *)&regs->eax + 1) :
                  (unsigned char *)&regs->esp); break;
    case  5: p = (highbyte_regs ?
                  ((unsigned char *)&regs->ecx + 1) :
                  (unsigned char *)&regs->ebp); break;
    case  6: p = (highbyte_regs ?
                  ((unsigned char *)&regs->edx + 1) :
                  (unsigned char *)&regs->esi); break;
    case  7: p = (highbyte_regs ?
                  ((unsigned char *)&regs->ebx + 1) :
                  (unsigned char *)&regs->edi); break;
#if defined(__x86_64__)
    case  8: p = &regs->r8;  break;
    case  9: p = &regs->r9;  break;
    case 10: p = &regs->r10; break;
    case 11: p = &regs->r11; break;
    case 12: p = &regs->r12; break;
    case 13: p = &regs->r13; break;
    case 14: p = &regs->r14; break;
    case 15: p = &regs->r15; break;
#endif
    default: p = NULL; break;
    }

    return p;
}
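
/*
 * With highbyte_regs set (byte operations without a REX prefix), encodings
 * 4-7 name AH/CH/DH/BH rather than SP/BP/SI/DI, hence the "+ 1" into the
 * containing GPR: e.g. decode_register(4, regs, 1) returns a pointer to
 * the second-lowest byte of regs->eax, i.e. AH on little-endian x86.
 */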

#define decode_segment_failed x86_seg_tr
enum x86_segment
decode_segment(
    uint8_t modrm_reg)
{
    switch ( modrm_reg )
    {
    case 0: return x86_seg_es;
    case 1: return x86_seg_cs;
    case 2: return x86_seg_ss;
    case 3: return x86_seg_ds;
    case 4: return x86_seg_fs;
    case 5: return x86_seg_gs;
    default: break;
    }
    return decode_segment_failed;
}

int
x86_emulate(
    struct x86_emulate_ctxt *ctxt,
    struct x86_emulate_ops  *ops)
{
    /* Shadow copy of register state. Committed on successful emulation. */
    struct cpu_user_regs _regs = *ctxt->regs;

    uint8_t b, d, sib, sib_index, sib_base, twobyte = 0, rex_prefix = 0;
    uint8_t modrm, modrm_mod = 0, modrm_reg = 0, modrm_rm = 0;
    unsigned int op_bytes, def_op_bytes, ad_bytes, def_ad_bytes;
#define REPE_PREFIX  1
#define REPNE_PREFIX 2
    unsigned int lock_prefix = 0, rep_prefix = 0;
    int override_seg = -1, rc = X86EMUL_OKAY;
    struct operand src, dst;

    /* Data operand effective address (usually computed from ModRM). */
    struct operand ea;

    /* Default is a memory operand relative to segment DS. */
    ea.type    = OP_MEM;
    ea.mem.seg = x86_seg_ds;
    ea.mem.off = 0;

    op_bytes = def_op_bytes = ad_bytes = def_ad_bytes = ctxt->addr_size/8;
    if ( op_bytes == 8 )
    {
        op_bytes = def_op_bytes = 4;
#ifndef __x86_64__
        return X86EMUL_UNHANDLEABLE;
#endif
    }

    /* Prefix bytes. */
    for ( ; ; )
    {
        switch ( b = insn_fetch_type(uint8_t) )
        {
        case 0x66: /* operand-size override */
            op_bytes = def_op_bytes ^ 6;
            break;
        case 0x67: /* address-size override */
            ad_bytes = def_ad_bytes ^ (mode_64bit() ? 12 : 6);
            break;
        case 0x2e: /* CS override */
            override_seg = x86_seg_cs;
            break;
        case 0x3e: /* DS override */
            override_seg = x86_seg_ds;
            break;
        case 0x26: /* ES override */
            override_seg = x86_seg_es;
            break;
        case 0x64: /* FS override */
            override_seg = x86_seg_fs;
            break;
        case 0x65: /* GS override */
            override_seg = x86_seg_gs;
            break;
        case 0x36: /* SS override */
            override_seg = x86_seg_ss;
            break;
        case 0xf0: /* LOCK */
            lock_prefix = 1;
            break;
        case 0xf2: /* REPNE/REPNZ */
            rep_prefix = REPNE_PREFIX;
            break;
        case 0xf3: /* REP/REPE/REPZ */
            rep_prefix = REPE_PREFIX;
            break;
        case 0x40 ... 0x4f: /* REX */
            if ( !mode_64bit() )
                goto done_prefixes;
            rex_prefix = b;
            continue;
        default:
            goto done_prefixes;
        }

        /* Any legacy prefix after a REX prefix nullifies its effect. */
        rex_prefix = 0;
    }
 done_prefixes:

    if ( rex_prefix & 8 ) /* REX.W */
        op_bytes = 8;

    /* Opcode byte(s). */
    d = opcode_table[b];
    if ( d == 0 )
    {
        /* Two-byte opcode? */
        if ( b == 0x0f )
        {
            twobyte = 1;
            b = insn_fetch_type(uint8_t);
            d = twobyte_table[b];
        }

        /* Unrecognised? */
        if ( d == 0 )
            goto cannot_emulate;
    }

    /* Lock prefix is allowed only on RMW instructions. */
    generate_exception_if((d & Mov) && lock_prefix, EXC_GP);

    /* ModRM and SIB bytes. */
    if ( d & ModRM )
    {
        modrm = insn_fetch_type(uint8_t);
        modrm_mod = (modrm & 0xc0) >> 6;
        modrm_reg = ((rex_prefix & 4) << 1) | ((modrm & 0x38) >> 3);
        modrm_rm  = modrm & 0x07;

        if ( modrm_mod == 3 )
        {
            modrm_rm |= (rex_prefix & 1) << 3;
            ea.type = OP_REG;
            ea.reg  = decode_register(
                modrm_rm, &_regs, (d & ByteOp) && (rex_prefix == 0));
        }
        else if ( ad_bytes == 2 )
        {
            /* 16-bit ModR/M decode. */
            switch ( modrm_rm )
            {
            case 0:
                ea.mem.off = _regs.ebx + _regs.esi;
                break;
            case 1:
                ea.mem.off = _regs.ebx + _regs.edi;
                break;
            case 2:
                ea.mem.seg = x86_seg_ss;
                ea.mem.off = _regs.ebp + _regs.esi;
                break;
            case 3:
                ea.mem.seg = x86_seg_ss;
                ea.mem.off = _regs.ebp + _regs.edi;
                break;
            case 4:
                ea.mem.off = _regs.esi;
                break;
            case 5:
                ea.mem.off = _regs.edi;
                break;
            case 6:
                if ( modrm_mod == 0 )
                    break;
                ea.mem.seg = x86_seg_ss;
                ea.mem.off = _regs.ebp;
                break;
            case 7:
                ea.mem.off = _regs.ebx;
                break;
            }
            switch ( modrm_mod )
            {
            case 0:
                if ( modrm_rm == 6 )
                    ea.mem.off = insn_fetch_type(int16_t);
                break;
            case 1:
                ea.mem.off += insn_fetch_type(int8_t);
                break;
            case 2:
                ea.mem.off += insn_fetch_type(int16_t);
                break;
            }
            ea.mem.off = truncate_ea(ea.mem.off);
        }
        else
        {
            /* 32/64-bit ModR/M decode. */
            if ( modrm_rm == 4 )
            {
                sib = insn_fetch_type(uint8_t);
                sib_index = ((sib >> 3) & 7) | ((rex_prefix << 2) & 8);
                sib_base  = (sib & 7) | ((rex_prefix << 3) & 8);
                if ( sib_index != 4 )
                    ea.mem.off = *(long*)decode_register(sib_index, &_regs, 0);
                ea.mem.off <<= (sib >> 6) & 3;
                if ( (modrm_mod == 0) && ((sib_base & 7) == 5) )
                    ea.mem.off += insn_fetch_type(int32_t);
                else if ( sib_base == 4 )
                {
                    ea.mem.seg  = x86_seg_ss;
                    ea.mem.off += _regs.esp;
                    if ( !twobyte && (b == 0x8f) )
                        /* POP <rm> computes its EA post increment. */
                        ea.mem.off += ((mode_64bit() && (op_bytes == 4))
                                       ? 8 : op_bytes);
                }
                else if ( sib_base == 5 )
                {
                    ea.mem.seg  = x86_seg_ss;
                    ea.mem.off += _regs.ebp;
                }
                else
                    ea.mem.off += *(long*)decode_register(sib_base, &_regs, 0);
            }
            else
            {
                modrm_rm |= (rex_prefix & 1) << 3;
                ea.mem.off = *(long *)decode_register(modrm_rm, &_regs, 0);
                if ( (modrm_rm == 5) && (modrm_mod != 0) )
                    ea.mem.seg = x86_seg_ss;
            }
            switch ( modrm_mod )
            {
            case 0:
                if ( (modrm_rm & 7) != 5 )
                    break;
                ea.mem.off = insn_fetch_type(int32_t);
                if ( !mode_64bit() )
                    break;
                /* Relative to RIP of next instruction. Argh! */
                ea.mem.off += _regs.eip;
                if ( (d & SrcMask) == SrcImm )
                    ea.mem.off += (d & ByteOp) ? 1 :
                        ((op_bytes == 8) ? 4 : op_bytes);
                else if ( (d & SrcMask) == SrcImmByte )
                    ea.mem.off += 1;
                else if ( ((b == 0xf6) || (b == 0xf7)) &&
                          ((modrm_reg & 7) <= 1) )
                    /* Special case in Grp3: test has immediate operand. */
                    ea.mem.off += (d & ByteOp) ? 1
                        : ((op_bytes == 8) ? 4 : op_bytes);
                break;
            case 1:
                ea.mem.off += insn_fetch_type(int8_t);
                break;
            case 2:
                ea.mem.off += insn_fetch_type(int32_t);
                break;
            }
            ea.mem.off = truncate_ea(ea.mem.off);
        }
    }

    if ( override_seg != -1 )
        ea.mem.seg = override_seg;

    /* Special instructions do their own operand decoding. */
    if ( (d & DstMask) == ImplicitOps )
        goto special_insn;

    /* Decode and fetch the source operand: register, memory or immediate. */
    switch ( d & SrcMask )
    {
    case SrcNone:
        break;
    case SrcReg:
        src.type = OP_REG;
        if ( d & ByteOp )
        {
            src.reg = decode_register(modrm_reg, &_regs, (rex_prefix == 0));
            src.val = *(uint8_t *)src.reg;
            src.bytes = 1;
        }
        else
        {
            src.reg = decode_register(modrm_reg, &_regs, 0);
            switch ( (src.bytes = op_bytes) )
            {
            case 2: src.val = *(uint16_t *)src.reg; break;
            case 4: src.val = *(uint32_t *)src.reg; break;
            case 8: src.val = *(uint64_t *)src.reg; break;
            }
        }
        break;
    case SrcMem16:
        ea.bytes = 2;
        goto srcmem_common;
    case SrcMem:
        ea.bytes = (d & ByteOp) ? 1 : op_bytes;
    srcmem_common:
        src = ea;
        if ( src.type == OP_REG )
        {
            switch ( src.bytes )
            {
            case 1: src.val = *(uint8_t  *)src.reg; break;
            case 2: src.val = *(uint16_t *)src.reg; break;
            case 4: src.val = *(uint32_t *)src.reg; break;
            case 8: src.val = *(uint64_t *)src.reg; break;
            }
        }
        else if ( (rc = ops->read(src.mem.seg, src.mem.off,
                                  &src.val, src.bytes, ctxt)) )
            goto done;
        break;
    case SrcImm:
        src.type  = OP_IMM;
        src.bytes = (d & ByteOp) ? 1 : op_bytes;
        if ( src.bytes == 8 ) src.bytes = 4;
        /* NB. Immediates are sign-extended as necessary. */
        switch ( src.bytes )
        {
        case 1: src.val = insn_fetch_type(int8_t);  break;
        case 2: src.val = insn_fetch_type(int16_t); break;
        case 4: src.val = insn_fetch_type(int32_t); break;
        }
        break;
    case SrcImmByte:
        src.type  = OP_IMM;
        src.bytes = 1;
        src.val   = insn_fetch_type(int8_t);
        break;
    }

    /* Decode and fetch the destination operand: register or memory. */
    switch ( d & DstMask )
    {
    case DstReg:
        dst.type = OP_REG;
        if ( d & ByteOp )
        {
            dst.reg = decode_register(modrm_reg, &_regs, (rex_prefix == 0));
            dst.val = *(uint8_t *)dst.reg;
            dst.bytes = 1;
        }
        else
        {
            dst.reg = decode_register(modrm_reg, &_regs, 0);
            switch ( (dst.bytes = op_bytes) )
            {
            case 2: dst.val = *(uint16_t *)dst.reg; break;
            case 4: dst.val = *(uint32_t *)dst.reg; break;
            case 8: dst.val = *(uint64_t *)dst.reg; break;
            }
        }
        break;
    case DstBitBase:
        if ( ((d & SrcMask) == SrcImmByte) || (ea.type == OP_REG) )
        {
            src.val &= (op_bytes << 3) - 1;
        }
        else
        {
            /*
             * EA       += BitOffset DIV op_bytes*8
             * BitOffset = BitOffset MOD op_bytes*8
             * DIV truncates towards negative infinity.
             * MOD always produces a positive result.
             */
            if ( op_bytes == 2 )
                src.val = (int16_t)src.val;
            else if ( op_bytes == 4 )
                src.val = (int32_t)src.val;
            if ( (long)src.val < 0 )
            {
                unsigned long byte_offset;
                byte_offset = op_bytes + (((-src.val-1) >> 3) & ~(op_bytes-1));
                ea.mem.off -= byte_offset;
                src.val = (byte_offset << 3) + src.val;
            }
            else
            {
                ea.mem.off += (src.val >> 3) & ~(op_bytes - 1);
                src.val &= (op_bytes << 3) - 1;
            }
        }
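        /*
         * Example of the negative-offset arithmetic above, for BT with
         * op_bytes == 2 and a register bit offset of -1: sign extension
         * makes src.val == -1, byte_offset becomes 2 + ((0 >> 3) & ~1) = 2,
         * so the EA moves back one word and src.val becomes 16 - 1 = 15,
         * i.e. the top bit of the word immediately below the base address.
         */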
        /* Becomes a normal DstMem operation from here on. */
        d = (d & ~DstMask) | DstMem;
    case DstMem:
        ea.bytes = (d & ByteOp) ? 1 : op_bytes;
        dst = ea;
        if ( dst.type == OP_REG )
        {
            switch ( dst.bytes )
            {
            case 1: dst.val = *(uint8_t  *)dst.reg; break;
            case 2: dst.val = *(uint16_t *)dst.reg; break;
            case 4: dst.val = *(uint32_t *)dst.reg; break;
            case 8: dst.val = *(uint64_t *)dst.reg; break;
            }
        }
        else if ( !(d & Mov) ) /* optimisation - avoid slow emulated read */
        {
            if ( (rc = ops->read(dst.mem.seg, dst.mem.off,
                                 &dst.val, dst.bytes, ctxt)) )
                goto done;
            dst.orig_val = dst.val;
        }
        break;
    }

    /* LOCK prefix allowed only on instructions with memory destination. */
    generate_exception_if(lock_prefix && (dst.type != OP_MEM), EXC_GP);

    if ( twobyte )
        goto twobyte_insn;

    switch ( b )
    {
    case 0x04 ... 0x05: /* add imm,%%eax */
        dst.reg = (unsigned long *)&_regs.eax;
        dst.val = _regs.eax;
    case 0x00 ... 0x03: add: /* add */
        emulate_2op_SrcV("add", src, dst, _regs.eflags);
        break;

    case 0x0c ... 0x0d: /* or imm,%%eax */
        dst.reg = (unsigned long *)&_regs.eax;
        dst.val = _regs.eax;
    case 0x08 ... 0x0b: or:  /* or */
        emulate_2op_SrcV("or", src, dst, _regs.eflags);
        break;

    case 0x14 ... 0x15: /* adc imm,%%eax */
        dst.reg = (unsigned long *)&_regs.eax;
        dst.val = _regs.eax;
    case 0x10 ... 0x13: adc: /* adc */
        emulate_2op_SrcV("adc", src, dst, _regs.eflags);
        break;

    case 0x1c ... 0x1d: /* sbb imm,%%eax */
        dst.reg = (unsigned long *)&_regs.eax;
        dst.val = _regs.eax;
    case 0x18 ... 0x1b: sbb: /* sbb */
        emulate_2op_SrcV("sbb", src, dst, _regs.eflags);
        break;

    case 0x24 ... 0x25: /* and imm,%%eax */
        dst.reg = (unsigned long *)&_regs.eax;
        dst.val = _regs.eax;
    case 0x20 ... 0x23: and: /* and */
        emulate_2op_SrcV("and", src, dst, _regs.eflags);
        break;

    case 0x2c ... 0x2d: /* sub imm,%%eax */
        dst.reg = (unsigned long *)&_regs.eax;
        dst.val = _regs.eax;
    case 0x28 ... 0x2b: sub: /* sub */
        emulate_2op_SrcV("sub", src, dst, _regs.eflags);
        break;

    case 0x34 ... 0x35: /* xor imm,%%eax */
        dst.reg = (unsigned long *)&_regs.eax;
        dst.val = _regs.eax;
    case 0x30 ... 0x33: xor: /* xor */
        emulate_2op_SrcV("xor", src, dst, _regs.eflags);
        break;

    case 0x3c ... 0x3d: /* cmp imm,%%eax */
        dst.reg = (unsigned long *)&_regs.eax;
        dst.val = _regs.eax;
    case 0x38 ... 0x3b: cmp: /* cmp */
        emulate_2op_SrcV("cmp", src, dst, _regs.eflags);
        break;

    case 0x62: /* bound */ {
        unsigned long src_val2;
        int lb, ub, idx;
        generate_exception_if(mode_64bit() || (src.type != OP_MEM), EXC_UD);
        if ( (rc = ops->read(src.mem.seg, src.mem.off + op_bytes,
                             &src_val2, op_bytes, ctxt)) )
            goto done;
        ub  = (op_bytes == 2) ? (int16_t)src_val2 : (int32_t)src_val2;
        lb  = (op_bytes == 2) ? (int16_t)src.val  : (int32_t)src.val;
        idx = (op_bytes == 2) ? (int16_t)dst.val  : (int32_t)dst.val;
        generate_exception_if((idx < lb) || (idx > ub), EXC_BR);
        dst.type = OP_NONE;
        break;
    }

    case 0x63: /* movsxd (x86/64) / arpl (x86/32) */
        if ( mode_64bit() )
        {
            /* movsxd */
            if ( src.type == OP_REG )
                src.val = *(int32_t *)src.reg;
            else if ( (rc = ops->read(src.mem.seg, src.mem.off,
                                      &src.val, 4, ctxt)) )
                goto done;
            dst.val = (int32_t)src.val;
        }
        else
        {
            /* arpl */
            uint16_t src_val = dst.val;
            dst = src;
            _regs.eflags &= ~EFLG_ZF;
            _regs.eflags |= ((src_val & 3) > (dst.val & 3)) ? EFLG_ZF : 0;
            if ( _regs.eflags & EFLG_ZF )
                dst.val  = (dst.val & ~3) | (src_val & 3);
            else
                dst.type = OP_NONE;
            generate_exception_if(in_realmode(ctxt, ops), EXC_UD);
        }
        break;

    case 0x69: /* imul imm16/32 */
    case 0x6b: /* imul imm8 */ {
        unsigned long reg = *(long *)decode_register(modrm_reg, &_regs, 0);
        _regs.eflags &= ~(EFLG_OF|EFLG_CF);
        switch ( dst.bytes )
        {
        case 2:
            dst.val = ((uint32_t)(int16_t)src.val *
                       (uint32_t)(int16_t)reg);
            if ( (int16_t)dst.val != (uint32_t)dst.val )
                _regs.eflags |= EFLG_OF|EFLG_CF;
            break;
#ifdef __x86_64__
        case 4:
            dst.val = ((uint64_t)(int32_t)src.val *
                       (uint64_t)(int32_t)reg);
            if ( (int32_t)dst.val != dst.val )
                _regs.eflags |= EFLG_OF|EFLG_CF;
            break;
#endif
        default: {
            unsigned long m[2] = { src.val, reg };
            if ( imul_dbl(m) )
                _regs.eflags |= EFLG_OF|EFLG_CF;
            dst.val = m[0];
            break;
        }
        }
        dst.type = OP_REG;
        dst.reg  = decode_register(modrm_reg, &_regs, 0);
        break;
    }

    case 0x82: /* Grp1 (x86/32 only) */
        generate_exception_if(mode_64bit(), EXC_UD);
    case 0x80: case 0x81: case 0x83: /* Grp1 */
        switch ( modrm_reg & 7 )
        {
        case 0: goto add;
        case 1: goto or;
        case 2: goto adc;
        case 3: goto sbb;
        case 4: goto and;
        case 5: goto sub;
        case 6: goto xor;
        case 7: goto cmp;
        }
        break;

    case 0xa8 ... 0xa9: /* test imm,%%eax */
        dst.reg = (unsigned long *)&_regs.eax;
        dst.val = _regs.eax;
    case 0x84 ... 0x85: test: /* test */
        emulate_2op_SrcV("test", src, dst, _regs.eflags);
        break;

    case 0x86 ... 0x87: xchg: /* xchg */
        /* Write back the register source. */
        switch ( dst.bytes )
        {
        case 1: *(uint8_t  *)src.reg = (uint8_t)dst.val; break;
        case 2: *(uint16_t *)src.reg = (uint16_t)dst.val; break;
        case 4: *src.reg = (uint32_t)dst.val; break; /* 64b reg: zero-extend */
        case 8: *src.reg = dst.val; break;
        }
        /* Write back the memory destination with implicit LOCK prefix. */
        dst.val = src.val;
        lock_prefix = 1;
        break;

    case 0xc6 ... 0xc7: /* mov (sole member of Grp11) */
        generate_exception_if((modrm_reg & 7) != 0, EXC_UD);
    case 0x88 ... 0x8b: /* mov */
        dst.val = src.val;
        break;

    case 0x8c: /* mov Sreg,r/m */ {
        struct segment_register reg;
        enum x86_segment seg = decode_segment(modrm_reg);
        generate_exception_if(seg == decode_segment_failed, EXC_UD);
        fail_if(ops->read_segment == NULL);
        if ( (rc = ops->read_segment(seg, &reg, ctxt)) != 0 )
            goto done;
        dst.val = reg.sel;
        if ( dst.type == OP_MEM )
            dst.bytes = 2;
        break;
    }

    case 0x8e: /* mov r/m,Sreg */ {
        enum x86_segment seg = decode_segment(modrm_reg);
        generate_exception_if(seg == decode_segment_failed, EXC_UD);
        if ( (rc = load_seg(seg, (uint16_t)src.val, ctxt, ops)) != 0 )
            goto done;
        dst.type = OP_NONE;
        break;
    }

    case 0x8d: /* lea */
        dst.val = ea.mem.off;
        break;

    case 0x8f: /* pop (sole member of Grp1a) */
        generate_exception_if((modrm_reg & 7) != 0, EXC_UD);
        /* 64-bit mode: POP defaults to a 64-bit operand. */
        if ( mode_64bit() && (dst.bytes == 4) )
            dst.bytes = 8;
        if ( (rc = ops->read(x86_seg_ss, sp_post_inc(dst.bytes),
                             &dst.val, dst.bytes, ctxt)) != 0 )
            goto done;
        break;

    case 0xb0 ... 0xb7: /* mov imm8,r8 */
        dst.reg = decode_register(
            (b & 7) | ((rex_prefix & 1) << 3), &_regs, (rex_prefix == 0));
        dst.val = src.val;
        break;

    case 0xb8 ... 0xbf: /* mov imm{16,32,64},r{16,32,64} */
        if ( dst.bytes == 8 ) /* Fetch more bytes to obtain imm64 */
            src.val = ((uint32_t)src.val |
                       ((uint64_t)insn_fetch_type(uint32_t) << 32));
        dst.reg = decode_register(
            (b & 7) | ((rex_prefix & 1) << 3), &_regs, 0);
        dst.val = src.val;
        break;

    case 0xc0 ... 0xc1: grp2: /* Grp2 */
        switch ( modrm_reg & 7 )
        {
        case 0: /* rol */
            emulate_2op_SrcB("rol", src, dst, _regs.eflags);
            break;
        case 1: /* ror */
            emulate_2op_SrcB("ror", src, dst, _regs.eflags);
            break;
        case 2: /* rcl */
            emulate_2op_SrcB("rcl", src, dst, _regs.eflags);
            break;
        case 3: /* rcr */
            emulate_2op_SrcB("rcr", src, dst, _regs.eflags);
            break;
        case 4: /* sal/shl */
        case 6: /* sal/shl */
            emulate_2op_SrcB("sal", src, dst, _regs.eflags);
            break;
        case 5: /* shr */
            emulate_2op_SrcB("shr", src, dst, _regs.eflags);
            break;
        case 7: /* sar */
            emulate_2op_SrcB("sar", src, dst, _regs.eflags);
            break;
        }
        break;

    case 0xc4: /* les */ {
        unsigned long sel;
        dst.val = x86_seg_es;
    les: /* dst.val identifies the segment */
        generate_exception_if(src.type != OP_MEM, EXC_UD);
        if ( (rc = ops->read(src.mem.seg, src.mem.off + src.bytes,
                             &sel, 2, ctxt)) != 0 )
            goto done;
        if ( (rc = load_seg(dst.val, (uint16_t)sel, ctxt, ops)) != 0 )
            goto done;
        dst.val = src.val;
        break;
    }

    case 0xc5: /* lds */
        dst.val = x86_seg_ds;
        goto les;

    case 0xd0 ... 0xd1: /* Grp2 */
        src.val = 1;
        goto grp2;

    case 0xd2 ... 0xd3: /* Grp2 */
        src.val = _regs.ecx;
        goto grp2;

    case 0xf6 ... 0xf7: /* Grp3 */
        switch ( modrm_reg & 7 )
        {
        case 0 ... 1: /* test */
            /* Special case in Grp3: test has an immediate source operand. */
            src.type = OP_IMM;
            src.bytes = (d & ByteOp) ? 1 : op_bytes;
            if ( src.bytes == 8 ) src.bytes = 4;
            switch ( src.bytes )
            {
            case 1: src.val = insn_fetch_type(int8_t);  break;
            case 2: src.val = insn_fetch_type(int16_t); break;
            case 4: src.val = insn_fetch_type(int32_t); break;
            }
            goto test;
        case 2: /* not */
            dst.val = ~dst.val;
            break;
        case 3: /* neg */
            emulate_1op("neg", dst, _regs.eflags);
            break;
        case 4: /* mul */
            src = dst;
            dst.type = OP_REG;
            dst.reg  = (unsigned long *)&_regs.eax;
            dst.val  = *dst.reg;
            _regs.eflags &= ~(EFLG_OF|EFLG_CF);
            switch ( src.bytes )
            {
            case 1:
                dst.val *= src.val;
                if ( (uint8_t)dst.val != (uint16_t)dst.val )
                    _regs.eflags |= EFLG_OF|EFLG_CF;
                break;
            case 2:
                dst.val *= src.val;
                if ( (uint16_t)dst.val != (uint32_t)dst.val )
                    _regs.eflags |= EFLG_OF|EFLG_CF;
                *(uint16_t *)&_regs.edx = dst.val >> 16;
                break;
#ifdef __x86_64__
            case 4:
                dst.val *= src.val;
                if ( (uint32_t)dst.val != dst.val )
                    _regs.eflags |= EFLG_OF|EFLG_CF;
                _regs.edx = (uint32_t)(dst.val >> 32);
                break;
#endif
            default: {
                unsigned long m[2] = { src.val, dst.val };
                if ( mul_dbl(m) )
                    _regs.eflags |= EFLG_OF|EFLG_CF;
                _regs.edx = m[1];
                dst.val = m[0];
                break;
            }
            }
            break;
        case 5: /* imul */
            src = dst;
            dst.type = OP_REG;
            dst.reg  = (unsigned long *)&_regs.eax;
            dst.val  = *dst.reg;
            _regs.eflags &= ~(EFLG_OF|EFLG_CF);
            switch ( src.bytes )
            {
            case 1:
                dst.val = ((uint16_t)(int8_t)src.val *
                           (uint16_t)(int8_t)dst.val);
                if ( (int8_t)dst.val != (uint16_t)dst.val )
                    _regs.eflags |= EFLG_OF|EFLG_CF;
                break;
            case 2:
                dst.val = ((uint32_t)(int16_t)src.val *
                           (uint32_t)(int16_t)dst.val);
                if ( (int16_t)dst.val != (uint32_t)dst.val )
                    _regs.eflags |= EFLG_OF|EFLG_CF;
                *(uint16_t *)&_regs.edx = dst.val >> 16;
                break;
#ifdef __x86_64__
            case 4:
                dst.val = ((uint64_t)(int32_t)src.val *
                           (uint64_t)(int32_t)dst.val);
                if ( (int32_t)dst.val != dst.val )
                    _regs.eflags |= EFLG_OF|EFLG_CF;
                _regs.edx = (uint32_t)(dst.val >> 32);
                break;
#endif
            default: {
                unsigned long m[2] = { src.val, dst.val };
                if ( imul_dbl(m) )
                    _regs.eflags |= EFLG_OF|EFLG_CF;
                _regs.edx = m[1];
                dst.val = m[0];
                break;
            }
            }
            break;
        case 6: /* div */ {
            unsigned long u[2], v;
            src = dst;
            dst.type = OP_REG;
            dst.reg  = (unsigned long *)&_regs.eax;
            switch ( src.bytes )
            {
            case 1:
                u[0] = (uint16_t)_regs.eax;
                u[1] = 0;
                v    = (uint8_t)src.val;
                generate_exception_if(
                    div_dbl(u, v) || ((uint8_t)u[0] != (uint16_t)u[0]),
                    EXC_DE);
                dst.val = (uint8_t)u[0];
                ((uint8_t *)&_regs.eax)[1] = u[1];
                break;
            case 2:
                u[0] = ((uint32_t)_regs.edx << 16) | (uint16_t)_regs.eax;
                u[1] = 0;
                v    = (uint16_t)src.val;
                generate_exception_if(
                    div_dbl(u, v) || ((uint16_t)u[0] != (uint32_t)u[0]),
                    EXC_DE);
                dst.val = (uint16_t)u[0];
                *(uint16_t *)&_regs.edx = u[1];
                break;
#ifdef __x86_64__
            case 4:
                u[0] = (_regs.edx << 32) | (uint32_t)_regs.eax;
                u[1] = 0;
                v    = (uint32_t)src.val;
                generate_exception_if(
                    div_dbl(u, v) || ((uint32_t)u[0] != u[0]),
                    EXC_DE);
                dst.val   = (uint32_t)u[0];
                _regs.edx = (uint32_t)u[1];
                break;
#endif
            default:
                u[0] = _regs.eax;
                u[1] = _regs.edx;
                v    = src.val;
                generate_exception_if(div_dbl(u, v), EXC_DE);
                dst.val   = u[0];
                _regs.edx = u[1];
                break;
            }
            break;
        }
        case 7: /* idiv */ {
            unsigned long u[2], v;
            src = dst;
            dst.type = OP_REG;
            dst.reg  = (unsigned long *)&_regs.eax;
            switch ( src.bytes )
            {
            case 1:
                u[0] = (int16_t)_regs.eax;
                u[1] = ((long)u[0] < 0) ? ~0UL : 0UL;
                v    = (int8_t)src.val;
                generate_exception_if(
                    idiv_dbl(u, v) || ((int8_t)u[0] != (int16_t)u[0]),
                    EXC_DE);
                dst.val = (int8_t)u[0];
                ((int8_t *)&_regs.eax)[1] = u[1];
                break;
            case 2:
                u[0] = (int32_t)((_regs.edx << 16) | (uint16_t)_regs.eax);
                u[1] = ((long)u[0] < 0) ? ~0UL : 0UL;
                v    = (int16_t)src.val;
                generate_exception_if(
                    idiv_dbl(u, v) || ((int16_t)u[0] != (int32_t)u[0]),
                    EXC_DE);
                dst.val = (int16_t)u[0];
                *(int16_t *)&_regs.edx = u[1];
                break;
#ifdef __x86_64__
            case 4:
                u[0] = (_regs.edx << 32) | (uint32_t)_regs.eax;
                u[1] = ((long)u[0] < 0) ? ~0UL : 0UL;
                v    = (int32_t)src.val;
                generate_exception_if(
                    idiv_dbl(u, v) || ((int32_t)u[0] != u[0]),
                    EXC_DE);
                dst.val   = (int32_t)u[0];
                _regs.edx = (uint32_t)u[1];
                break;
#endif
            default:
                u[0] = _regs.eax;
                u[1] = _regs.edx;
                v    = src.val;
                generate_exception_if(idiv_dbl(u, v), EXC_DE);
                dst.val   = u[0];
                _regs.edx = u[1];
                break;
            }
            break;
        }
        default:
            goto cannot_emulate;
        }
        break;

    case 0xfe: /* Grp4 */
        generate_exception_if((modrm_reg & 7) >= 2, EXC_UD);
    case 0xff: /* Grp5 */
        switch ( modrm_reg & 7 )
        {
        case 0: /* inc */
            emulate_1op("inc", dst, _regs.eflags);
            break;
        case 1: /* dec */
            emulate_1op("dec", dst, _regs.eflags);
            break;
        case 2: /* call (near) */
        case 4: /* jmp (near) */
            dst.type = OP_NONE;
            if ( (dst.bytes != 8) && mode_64bit() )
            {
                dst.bytes = op_bytes = 8;
                if ( dst.type == OP_REG )
                    dst.val = *dst.reg;
                else if ( (rc = ops->read(dst.mem.seg, dst.mem.off,
                                          &dst.val, 8, ctxt)) != 0 )
                    goto done;
            }
            src.val = _regs.eip;
            _regs.eip = dst.val;
            if ( (modrm_reg & 7) == 2 )
                goto push; /* call */
            break;
        case 3: /* call (far, absolute indirect) */
        case 5: /* jmp (far, absolute indirect) */ {
            unsigned long sel;

            if ( (rc = ops->read(dst.mem.seg, dst.mem.off+dst.bytes,
                                 &sel, 2, ctxt)) )
                goto done;

            if ( (modrm_reg & 7) == 3 ) /* call */
            {
                struct segment_register reg;
                fail_if(ops->read_segment == NULL);
                if ( (rc = ops->read_segment(x86_seg_cs, &reg, ctxt)) ||
                     (rc = ops->write(x86_seg_ss, sp_pre_dec(op_bytes),
                                      reg.sel, op_bytes, ctxt)) ||
                     (rc = ops->write(x86_seg_ss, sp_pre_dec(op_bytes),
                                      _regs.eip, op_bytes, ctxt)) )
                    goto done;
            }

            if ( (rc = load_seg(x86_seg_cs, sel, ctxt, ops)) != 0 )
                goto done;
            _regs.eip = dst.val;

            dst.type = OP_NONE;
            break;
        }
        case 6: /* push */
            /* 64-bit mode: PUSH defaults to a 64-bit operand. */
            if ( mode_64bit() && (dst.bytes == 4) )
            {
                dst.bytes = 8;
                if ( dst.type == OP_REG )
                    dst.val = *dst.reg;
                else if ( (rc = ops->read(dst.mem.seg, dst.mem.off,
                                          &dst.val, 8, ctxt)) != 0 )
                    goto done;
            }
            if ( (rc = ops->write(x86_seg_ss, sp_pre_dec(dst.bytes),
                                  dst.val, dst.bytes, ctxt)) != 0 )
                goto done;
            dst.type = OP_NONE;
            break;
        case 7:
            generate_exception_if(1, EXC_UD);
        default:
            goto cannot_emulate;
        }
        break;

 writeback:
    switch ( dst.type )
    {
    case OP_REG:
        /* The 4-byte case *is* correct: in 64-bit mode we zero-extend. */
        switch ( dst.bytes )
        {
        case 1: *(uint8_t  *)dst.reg = (uint8_t)dst.val; break;
        case 2: *(uint16_t *)dst.reg = (uint16_t)dst.val; break;
        case 4: *dst.reg = (uint32_t)dst.val; break; /* 64b: zero-ext */
        case 8: *dst.reg = dst.val; break;
        }
        break;
    case OP_MEM:
        if ( !(d & Mov) && (dst.orig_val == dst.val) )
            /* nothing to do */;
        else if ( lock_prefix )
            rc = ops->cmpxchg(
                dst.mem.seg, dst.mem.off, dst.orig_val,
                dst.val, dst.bytes, ctxt);
        else
            rc = ops->write(
                dst.mem.seg, dst.mem.off, dst.val, dst.bytes, ctxt);
        if ( rc != 0 )
            goto done;
    default:
        break;
    }

    /* Commit shadow register state. */
    _regs.eflags &= ~EFLG_RF;
    *ctxt->regs = _regs;

    if ( (_regs.eflags & EFLG_TF) &&
         (rc == X86EMUL_OKAY) &&
         (ops->inject_hw_exception != NULL) )
        rc = ops->inject_hw_exception(EXC_DB, ctxt) ? : X86EMUL_EXCEPTION;

 done:
    return rc;

 special_insn:
    dst.type = OP_NONE;

    /*
     * The only implicit-operands instructions allowed a LOCK prefix are
     * CMPXCHG{8,16}B, MOV CRn, MOV DRn.
     */
    generate_exception_if(lock_prefix &&
                          ((b < 0x20) || (b > 0x23)) && /* MOV CRn/DRn */
                          (b != 0xc7),                  /* CMPXCHG{8,16}B */
                          EXC_GP);

    if ( twobyte )
        goto twobyte_special_insn;
1851 switch ( b )
1853 case 0x06: /* push %%es */ {
1854 struct segment_register reg;
1855 src.val = x86_seg_es;
1856 push_seg:
1857 fail_if(ops->read_segment == NULL);
1858 if ( (rc = ops->read_segment(src.val, &reg, ctxt)) != 0 )
1859 return rc;
1860 /* 64-bit mode: PUSH defaults to a 64-bit operand. */
1861 if ( mode_64bit() && (op_bytes == 4) )
1862 op_bytes = 8;
1863 if ( (rc = ops->write(x86_seg_ss, sp_pre_dec(op_bytes),
1864 reg.sel, op_bytes, ctxt)) != 0 )
1865 goto done;
1866 break;
1869 case 0x07: /* pop %%es */
1870 src.val = x86_seg_es;
1871 pop_seg:
1872 fail_if(ops->write_segment == NULL);
1873 /* 64-bit mode: POP defaults to a 64-bit operand. */
1874 if ( mode_64bit() && (op_bytes == 4) )
1875 op_bytes = 8;
1876 if ( (rc = ops->read(x86_seg_ss, sp_post_inc(op_bytes),
1877 &dst.val, op_bytes, ctxt)) != 0 )
1878 goto done;
1879 if ( (rc = load_seg(src.val, (uint16_t)dst.val, ctxt, ops)) != 0 )
1880 return rc;
1881 break;
1883 case 0x0e: /* push %%cs */
1884 src.val = x86_seg_cs;
1885 goto push_seg;
1887 case 0x16: /* push %%ss */
1888 src.val = x86_seg_ss;
1889 goto push_seg;
1891 case 0x17: /* pop %%ss */
1892 src.val = x86_seg_ss;
1893 goto pop_seg;
1895 case 0x1e: /* push %%ds */
1896 src.val = x86_seg_ds;
1897 goto push_seg;
1899 case 0x1f: /* pop %%ds */
1900 src.val = x86_seg_ds;
1901 goto pop_seg;
1903 case 0x27: /* daa */ {
1904 uint8_t al = _regs.eax;
1905 unsigned long eflags = _regs.eflags;
1906 generate_exception_if(mode_64bit(), EXC_UD);
1907 _regs.eflags &= ~(EFLG_CF|EFLG_AF);
1908 if ( ((al & 0x0f) > 9) || (eflags & EFLG_AF) )
1910 *(uint8_t *)&_regs.eax += 6;
1911 _regs.eflags |= EFLG_AF;
1913 if ( (al > 0x99) || (eflags & EFLG_CF) )
1915 *(uint8_t *)&_regs.eax += 0x60;
1916 _regs.eflags |= EFLG_CF;
1918 _regs.eflags &= ~(EFLG_SF|EFLG_ZF|EFLG_PF);
1919 _regs.eflags |= ((uint8_t)_regs.eax == 0) ? EFLG_ZF : 0;
1920 _regs.eflags |= (( int8_t)_regs.eax < 0) ? EFLG_SF : 0;
1921 _regs.eflags |= even_parity(_regs.eax) ? EFLG_PF : 0;
1922 break;
1925 case 0x2f: /* das */ {
1926 uint8_t al = _regs.eax;
1927 unsigned long eflags = _regs.eflags;
1928 generate_exception_if(mode_64bit(), EXC_UD);
1929 _regs.eflags &= ~(EFLG_CF|EFLG_AF);
1930 if ( ((al & 0x0f) > 9) || (eflags & EFLG_AF) )
1931 {
1932 _regs.eflags |= EFLG_AF;
1933 if ( (al < 6) || (eflags & EFLG_CF) )
1934 _regs.eflags |= EFLG_CF;
1935 *(uint8_t *)&_regs.eax -= 6;
1936 }
1937 if ( (al > 0x99) || (eflags & EFLG_CF) )
1938 {
1939 *(uint8_t *)&_regs.eax -= 0x60;
1940 _regs.eflags |= EFLG_CF;
1941 }
1942 _regs.eflags &= ~(EFLG_SF|EFLG_ZF|EFLG_PF);
1943 _regs.eflags |= ((uint8_t)_regs.eax == 0) ? EFLG_ZF : 0;
1944 _regs.eflags |= (( int8_t)_regs.eax < 0) ? EFLG_SF : 0;
1945 _regs.eflags |= even_parity(_regs.eax) ? EFLG_PF : 0;
1946 break;
1947 }
1949 case 0x37: /* aaa */
1950 case 0x3f: /* aas */
1951 generate_exception_if(mode_64bit(), EXC_UD);
1952 _regs.eflags &= ~EFLG_CF;
1953 if ( ((uint8_t)_regs.eax > 9) || (_regs.eflags & EFLG_AF) )
1954 {
1955 ((uint8_t *)&_regs.eax)[0] += (b == 0x37) ? 6 : -6;
1956 ((uint8_t *)&_regs.eax)[1] += (b == 0x37) ? 1 : -1;
1957 _regs.eflags |= EFLG_CF | EFLG_AF;
1958 }
1959 ((uint8_t *)&_regs.eax)[0] &= 0x0f;
1960 break;
1962 case 0x40 ... 0x4f: /* inc/dec reg */
1963 dst.type = OP_REG;
1964 dst.reg = decode_register(b & 7, &_regs, 0);
1965 dst.bytes = op_bytes;
1966 dst.val = *dst.reg;
1967 if ( b & 8 )
1968 emulate_1op("dec", dst, _regs.eflags);
1969 else
1970 emulate_1op("inc", dst, _regs.eflags);
1971 break;
1973 case 0x50 ... 0x57: /* push reg */
1974 src.val = *(unsigned long *)decode_register(
1975 (b & 7) | ((rex_prefix & 1) << 3), &_regs, 0);
1976 goto push;
1978 case 0x58 ... 0x5f: /* pop reg */
1979 dst.type = OP_REG;
1980 dst.reg = decode_register(
1981 (b & 7) | ((rex_prefix & 1) << 3), &_regs, 0);
1982 dst.bytes = op_bytes;
1983 if ( mode_64bit() && (dst.bytes == 4) )
1984 dst.bytes = 8;
1985 if ( (rc = ops->read(x86_seg_ss, sp_post_inc(dst.bytes),
1986 &dst.val, dst.bytes, ctxt)) != 0 )
1987 goto done;
1988 break;
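/*
 * PUSHA snapshots all eight GPRs (the ESP slot holds the pre-PUSHA
 * value) and pushes them in EAX..EDI order; POPA below pops them in
 * reverse, discarding the stacked ESP into dummy_esp.
 */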
1990 case 0x60: /* pusha */ {
1991 int i;
1992 unsigned long regs[] = {
1993 _regs.eax, _regs.ecx, _regs.edx, _regs.ebx,
1994 _regs.esp, _regs.ebp, _regs.esi, _regs.edi };
1995 generate_exception_if(mode_64bit(), EXC_UD);
1996 for ( i = 0; i < 8; i++ )
1997 if ( (rc = ops->write(x86_seg_ss, sp_pre_dec(op_bytes),
1998 regs[i], op_bytes, ctxt)) != 0 )
1999 goto done;
2000 break;
2001 }
2003 case 0x61: /* popa */ {
2004 int i;
2005 unsigned long dummy_esp, *regs[] = {
2006 (unsigned long *)&_regs.edi, (unsigned long *)&_regs.esi,
2007 (unsigned long *)&_regs.ebp, (unsigned long *)&dummy_esp,
2008 (unsigned long *)&_regs.ebx, (unsigned long *)&_regs.edx,
2009 (unsigned long *)&_regs.ecx, (unsigned long *)&_regs.eax };
2010 generate_exception_if(mode_64bit(), EXC_UD);
2011 for ( i = 0; i < 8; i++ )
2012 {
2013 if ( (rc = ops->read(x86_seg_ss, sp_post_inc(op_bytes),
2014 &dst.val, op_bytes, ctxt)) != 0 )
2015 goto done;
2016 switch ( op_bytes )
2017 {
2018 case 1: *(uint8_t *)regs[i] = (uint8_t)dst.val; break;
2019 case 2: *(uint16_t *)regs[i] = (uint16_t)dst.val; break;
2020 case 4: *regs[i] = (uint32_t)dst.val; break; /* 64b: zero-ext */
2021 case 8: *regs[i] = dst.val; break;
2022 }
2023 }
2024 break;
2025 }
2027 case 0x68: /* push imm{16,32,64} */
2028 src.val = ((op_bytes == 2)
2029 ? (int32_t)insn_fetch_type(int16_t)
2030 : insn_fetch_type(int32_t));
2031 goto push;
2033 case 0x6a: /* push imm8 */
2034 src.val = insn_fetch_type(int8_t);
2035 push:
2036 d |= Mov; /* force writeback */
2037 dst.type = OP_MEM;
2038 dst.bytes = op_bytes;
2039 if ( mode_64bit() && (dst.bytes == 4) )
2040 dst.bytes = 8;
2041 dst.val = src.val;
2042 dst.mem.seg = x86_seg_ss;
2043 dst.mem.off = sp_pre_dec(dst.bytes);
2044 break;
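/*
 * String I/O. REP emulation proceeds one iteration per pass through
 * the emulator: handle_rep_prefix() consumes one eCX count and winds
 * eIP back so the instruction re-executes until eCX is exhausted. The
 * IOPL test is a simplified stand-in for the full I/O-permission check.
 */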
2046 case 0x6c ... 0x6d: /* ins %dx,%es:%edi */
2047 handle_rep_prefix();
2048 generate_exception_if(!mode_iopl(), EXC_GP);
2049 dst.type = OP_MEM;
2050 dst.bytes = !(b & 1) ? 1 : (op_bytes == 8) ? 4 : op_bytes;
2051 dst.mem.seg = x86_seg_es;
2052 dst.mem.off = truncate_ea(_regs.edi);
2053 fail_if(ops->read_io == NULL);
2054 if ( (rc = ops->read_io((uint16_t)_regs.edx, dst.bytes,
2055 &dst.val, ctxt)) != 0 )
2056 goto done;
2057 register_address_increment(
2058 _regs.edi, (_regs.eflags & EFLG_DF) ? -dst.bytes : dst.bytes);
2059 break;
2061 case 0x6e ... 0x6f: /* outs %esi,%dx */
2062 handle_rep_prefix();
2063 generate_exception_if(!mode_iopl(), EXC_GP);
2064 dst.bytes = !(b & 1) ? 1 : (op_bytes == 8) ? 4 : op_bytes;
2065 if ( (rc = ops->read(ea.mem.seg, truncate_ea(_regs.esi),
2066 &dst.val, dst.bytes, ctxt)) != 0 )
2067 goto done;
2068 fail_if(ops->write_io == NULL);
2069 if ( (rc = ops->write_io((uint16_t)_regs.edx, dst.bytes,
2070 dst.val, ctxt)) != 0 )
2071 goto done;
2072 register_address_increment(
2073 _regs.esi, (_regs.eflags & EFLG_DF) ? -dst.bytes : dst.bytes);
2074 break;
2076 case 0x70 ... 0x7f: /* jcc (short) */ {
2077 int rel = insn_fetch_type(int8_t);
2078 if ( test_cc(b, _regs.eflags) )
2079 jmp_rel(rel);
2080 break;
2081 }
2083 case 0x90: /* nop / xchg %%r8,%%rax */
2084 if ( !(rex_prefix & 1) )
2085 break; /* nop */
2087 case 0x91 ... 0x97: /* xchg reg,%%rax */
2088 src.type = dst.type = OP_REG;
2089 src.bytes = dst.bytes = op_bytes;
2090 src.reg = (unsigned long *)&_regs.eax;
2091 src.val = *src.reg;
2092 dst.reg = decode_register(
2093 (b & 7) | ((rex_prefix & 1) << 3), &_regs, 0);
2094 dst.val = *dst.reg;
2095 goto xchg;
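/* CBW/CWDE/CDQE sign-extend the low half of eAX in place; CWD/CDQ/CQO
 * below fill eDX with the sign of eAX. */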
2097 case 0x98: /* cbw/cwde/cdqe */
2098 switch ( op_bytes )
2099 {
2100 case 2: *(int16_t *)&_regs.eax = (int8_t)_regs.eax; break; /* cbw */
2101 case 4: _regs.eax = (uint32_t)(int16_t)_regs.eax; break; /* cwde */
2102 case 8: _regs.eax = (int32_t)_regs.eax; break; /* cdqe */
2103 }
2104 break;
2106 case 0x99: /* cwd/cdq/cqo */
2107 switch ( op_bytes )
2108 {
2109 case 2:
2110 *(int16_t *)&_regs.edx = ((int16_t)_regs.eax < 0) ? -1 : 0;
2111 break;
2112 case 4:
2113 _regs.edx = (uint32_t)(((int32_t)_regs.eax < 0) ? -1 : 0);
2114 break;
2115 case 8:
2116 _regs.edx = ((int64_t)_regs.eax < 0) ? -1 : 0;
2117 break;
2118 }
2119 break;
2121 case 0x9a: /* call (far, absolute) */ {
2122 struct segment_register reg;
2123 uint16_t sel;
2124 uint32_t eip;
2126 fail_if(ops->read_segment == NULL);
2127 generate_exception_if(mode_64bit(), EXC_UD);
2129 eip = insn_fetch_bytes(op_bytes);
2130 sel = insn_fetch_type(uint16_t);
2132 if ( (rc = ops->read_segment(x86_seg_cs, &reg, ctxt)) ||
2133 (rc = ops->write(x86_seg_ss, sp_pre_dec(op_bytes),
2134 reg.sel, op_bytes, ctxt)) ||
2135 (rc = ops->write(x86_seg_ss, sp_pre_dec(op_bytes),
2136 _regs.eip, op_bytes, ctxt)) )
2137 goto done;
2139 if ( (rc = load_seg(x86_seg_cs, sel, ctxt, ops)) != 0 )
2140 goto done;
2141 _regs.eip = eip;
2142 break;
2143 }
2145 case 0x9c: /* pushf */
2146 src.val = _regs.eflags;
2147 goto push;
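/*
 * POPF: `mask` collects the bits that must be preserved from the old
 * EFLAGS (VIP/VIF/VM always, IOPL too without I/O privilege); the
 * popped value is filtered to architecturally defined bits (0x257fd5)
 * and the always-set reserved bit 1 is forced on.
 */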
2149 case 0x9d: /* popf */ {
2150 uint32_t mask = EFLG_VIP | EFLG_VIF | EFLG_VM;
2151 if ( !mode_iopl() )
2152 mask |= EFLG_IOPL;
2153 fail_if(ops->write_rflags == NULL);
2154 /* 64-bit mode: POP defaults to a 64-bit operand. */
2155 if ( mode_64bit() && (op_bytes == 4) )
2156 op_bytes = 8;
2157 if ( (rc = ops->read(x86_seg_ss, sp_post_inc(op_bytes),
2158 &dst.val, op_bytes, ctxt)) != 0 )
2159 goto done;
2160 if ( op_bytes == 2 )
2161 dst.val = (uint16_t)dst.val | (_regs.eflags & 0xffff0000u);
2162 dst.val &= 0x257fd5;
2163 _regs.eflags &= mask;
2164 _regs.eflags |= (uint32_t)(dst.val & ~mask) | 0x02;
2165 if ( (rc = ops->write_rflags(_regs.eflags, ctxt)) != 0 )
2166 goto done;
2167 break;
2168 }
2170 case 0x9e: /* sahf */
2171 *(uint8_t *)&_regs.eflags = (((uint8_t *)&_regs.eax)[1] & 0xd7) | 0x02;
2172 break;
2174 case 0x9f: /* lahf */
2175 ((uint8_t *)&_regs.eax)[1] = (_regs.eflags & 0xd7) | 0x02;
2176 break;
2178 case 0xa0 ... 0xa1: /* mov mem.offs,{%al,%ax,%eax,%rax} */
2179 /* Source EA is not encoded via ModRM. */
2180 dst.type = OP_REG;
2181 dst.reg = (unsigned long *)&_regs.eax;
2182 dst.bytes = (d & ByteOp) ? 1 : op_bytes;
2183 if ( (rc = ops->read(ea.mem.seg, insn_fetch_bytes(ad_bytes),
2184 &dst.val, dst.bytes, ctxt)) != 0 )
2185 goto done;
2186 break;
2188 case 0xa2 ... 0xa3: /* mov {%al,%ax,%eax,%rax},mem.offs */
2189 /* Destination EA is not encoded via ModRM. */
2190 dst.type = OP_MEM;
2191 dst.mem.seg = ea.mem.seg;
2192 dst.mem.off = insn_fetch_bytes(ad_bytes);
2193 dst.bytes = (d & ByteOp) ? 1 : op_bytes;
2194 dst.val = (unsigned long)_regs.eax;
2195 break;
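/* The string moves/compares below step eSI/eDI by the element size,
 * downwards when EFLAGS.DF is set. */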
2197 case 0xa4 ... 0xa5: /* movs */
2198 handle_rep_prefix();
2199 dst.type = OP_MEM;
2200 dst.bytes = (d & ByteOp) ? 1 : op_bytes;
2201 dst.mem.seg = x86_seg_es;
2202 dst.mem.off = truncate_ea(_regs.edi);
2203 if ( (rc = ops->read(ea.mem.seg, truncate_ea(_regs.esi),
2204 &dst.val, dst.bytes, ctxt)) != 0 )
2205 goto done;
2206 register_address_increment(
2207 _regs.esi, (_regs.eflags & EFLG_DF) ? -dst.bytes : dst.bytes);
2208 register_address_increment(
2209 _regs.edi, (_regs.eflags & EFLG_DF) ? -dst.bytes : dst.bytes);
2210 break;
2212 case 0xa6 ... 0xa7: /* cmps */ {
2213 unsigned long next_eip = _regs.eip;
2214 handle_rep_prefix();
2215 src.bytes = dst.bytes = (d & ByteOp) ? 1 : op_bytes;
2216 if ( (rc = ops->read(ea.mem.seg, truncate_ea(_regs.esi),
2217 &dst.val, dst.bytes, ctxt)) ||
2218 (rc = ops->read(x86_seg_es, truncate_ea(_regs.edi),
2219 &src.val, src.bytes, ctxt)) )
2220 goto done;
2221 register_address_increment(
2222 _regs.esi, (_regs.eflags & EFLG_DF) ? -dst.bytes : dst.bytes);
2223 register_address_increment(
2224 _regs.edi, (_regs.eflags & EFLG_DF) ? -src.bytes : src.bytes);
2225 /* cmp: dst - src ==> src=*%%edi,dst=*%%esi ==> *%%esi - *%%edi */
2226 emulate_2op_SrcV("cmp", src, dst, _regs.eflags);
2227 if ( ((rep_prefix == REPE_PREFIX) && !(_regs.eflags & EFLG_ZF)) ||
2228 ((rep_prefix == REPNE_PREFIX) && (_regs.eflags & EFLG_ZF)) )
2229 _regs.eip = next_eip;
2230 break;
2231 }
2233 case 0xaa ... 0xab: /* stos */
2234 handle_rep_prefix();
2235 dst.type = OP_MEM;
2236 dst.bytes = (d & ByteOp) ? 1 : op_bytes;
2237 dst.mem.seg = x86_seg_es;
2238 dst.mem.off = truncate_ea(_regs.edi);
2239 dst.val = _regs.eax;
2240 register_address_increment(
2241 _regs.edi, (_regs.eflags & EFLG_DF) ? -dst.bytes : dst.bytes);
2242 break;
2244 case 0xac ... 0xad: /* lods */
2245 handle_rep_prefix();
2246 dst.type = OP_REG;
2247 dst.bytes = (d & ByteOp) ? 1 : op_bytes;
2248 dst.reg = (unsigned long *)&_regs.eax;
2249 if ( (rc = ops->read(ea.mem.seg, truncate_ea(_regs.esi),
2250 &dst.val, dst.bytes, ctxt)) != 0 )
2251 goto done;
2252 register_address_increment(
2253 _regs.esi, (_regs.eflags & EFLG_DF) ? -dst.bytes : dst.bytes);
2254 break;
2256 case 0xae ... 0xaf: /* scas */ {
2257 unsigned long next_eip = _regs.eip;
2258 handle_rep_prefix();
2259 src.bytes = dst.bytes = (d & ByteOp) ? 1 : op_bytes;
2260 dst.val = _regs.eax;
2261 if ( (rc = ops->read(x86_seg_es, truncate_ea(_regs.edi),
2262 &src.val, src.bytes, ctxt)) != 0 )
2263 goto done;
2264 register_address_increment(
2265 _regs.edi, (_regs.eflags & EFLG_DF) ? -src.bytes : src.bytes);
2266 /* cmp: dst - src ==> src=*%%edi,dst=%%eax ==> %%eax - *%%edi */
2267 emulate_2op_SrcV("cmp", src, dst, _regs.eflags);
2268 if ( ((rep_prefix == REPE_PREFIX) && !(_regs.eflags & EFLG_ZF)) ||
2269 ((rep_prefix == REPNE_PREFIX) && (_regs.eflags & EFLG_ZF)) )
2270 _regs.eip = next_eip;
2271 break;
2272 }
2274 case 0xc2: /* ret imm16 (near) */
2275 case 0xc3: /* ret (near) */ {
2276 int offset = (b == 0xc2) ? insn_fetch_type(uint16_t) : 0;
2277 op_bytes = mode_64bit() ? 8 : op_bytes;
2278 if ( (rc = ops->read(x86_seg_ss, sp_post_inc(op_bytes + offset),
2279 &dst.val, op_bytes, ctxt)) != 0 )
2280 goto done;
2281 _regs.eip = dst.val;
2282 break;
2283 }
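/*
 * ENTER size,depth: push the old frame pointer, copy depth-1 enclosing
 * frame pointers for nested procedures, push the new frame pointer
 * (written back to eBP via dst), then reserve `size` bytes of locals.
 */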
2285 case 0xc8: /* enter imm16,imm8 */ {
2286 uint16_t size = insn_fetch_type(uint16_t);
2287 uint8_t depth = insn_fetch_type(uint8_t) & 31;
2288 int i;
2290 dst.type = OP_REG;
2291 dst.bytes = (mode_64bit() && (op_bytes == 4)) ? 8 : op_bytes;
2292 dst.reg = (unsigned long *)&_regs.ebp;
2293 if ( (rc = ops->write(x86_seg_ss, sp_pre_dec(dst.bytes),
2294 _regs.ebp, dst.bytes, ctxt)) )
2295 goto done;
2296 dst.val = _regs.esp;
2298 if ( depth > 0 )
2299 {
2300 for ( i = 1; i < depth; i++ )
2301 {
2302 unsigned long ebp, temp_data;
2303 ebp = truncate_word(_regs.ebp - i*dst.bytes, ctxt->sp_size/8);
2304 if ( (rc = ops->read(x86_seg_ss, ebp,
2305 &temp_data, dst.bytes, ctxt)) ||
2306 (rc = ops->write(x86_seg_ss, sp_pre_dec(dst.bytes),
2307 temp_data, dst.bytes, ctxt)) )
2308 goto done;
2309 }
2310 if ( (rc = ops->write(x86_seg_ss, sp_pre_dec(dst.bytes),
2311 dst.val, dst.bytes, ctxt)) )
2312 goto done;
2313 }
2315 sp_pre_dec(size);
2316 break;
2317 }
2319 case 0xc9: /* leave */
2320 /* First writeback, to %%esp. */
2321 dst.type = OP_REG;
2322 dst.bytes = (mode_64bit() && (op_bytes == 4)) ? 8 : op_bytes;
2323 dst.reg = (unsigned long *)&_regs.esp;
2324 dst.val = _regs.ebp;
2326 /* Flush first writeback, since there is a second. */
2327 switch ( dst.bytes )
2328 {
2329 case 1: *(uint8_t *)dst.reg = (uint8_t)dst.val; break;
2330 case 2: *(uint16_t *)dst.reg = (uint16_t)dst.val; break;
2331 case 4: *dst.reg = (uint32_t)dst.val; break; /* 64b: zero-ext */
2332 case 8: *dst.reg = dst.val; break;
2333 }
2335 /* Second writeback, to %%ebp. */
2336 dst.reg = (unsigned long *)&_regs.ebp;
2337 if ( (rc = ops->read(x86_seg_ss, sp_post_inc(dst.bytes),
2338 &dst.val, dst.bytes, ctxt)) )
2339 goto done;
2340 break;
2342 case 0xca: /* ret imm16 (far) */
2343 case 0xcb: /* ret (far) */ {
2344 int offset = (b == 0xca) ? insn_fetch_type(uint16_t) : 0;
2345 op_bytes = mode_64bit() ? 8 : op_bytes;
2346 if ( (rc = ops->read(x86_seg_ss, sp_post_inc(op_bytes),
2347 &dst.val, op_bytes, ctxt)) ||
2348 (rc = ops->read(x86_seg_ss, sp_post_inc(op_bytes + offset),
2349 &src.val, op_bytes, ctxt)) ||
2350 (rc = load_seg(x86_seg_cs, (uint16_t)src.val, ctxt, ops)) )
2351 goto done;
2352 _regs.eip = dst.val;
2353 break;
2354 }
2356 case 0xcc: /* int3 */
2357 src.val = EXC_BP;
2358 goto swint;
2360 case 0xcd: /* int imm8 */
2361 src.val = insn_fetch_type(uint8_t);
2362 swint:
2363 fail_if(ops->inject_sw_interrupt == NULL);
2364 rc = ops->inject_sw_interrupt(src.val, _regs.eip - ctxt->regs->eip,
2365 ctxt) ? : X86EMUL_EXCEPTION;
2366 goto done;
2368 case 0xce: /* into */
2369 generate_exception_if(mode_64bit(), EXC_UD);
2370 if ( !(_regs.eflags & EFLG_OF) )
2371 break;
2372 src.val = EXC_OF;
2373 goto swint;
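/*
 * IRET is emulated only for real mode (note the in_realmode() check):
 * pop eIP, CS and EFLAGS, applying the same IOPL-sensitive flag mask
 * as POPF.
 */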
2375 case 0xcf: /* iret */ {
2376 unsigned long cs, eip, eflags;
2377 uint32_t mask = EFLG_VIP | EFLG_VIF | EFLG_VM;
2378 if ( !mode_iopl() )
2379 mask |= EFLG_IOPL;
2380 fail_if(!in_realmode(ctxt, ops));
2381 fail_if(ops->write_rflags == NULL);
2382 if ( (rc = ops->read(x86_seg_ss, sp_post_inc(op_bytes),
2383 &eip, op_bytes, ctxt)) ||
2384 (rc = ops->read(x86_seg_ss, sp_post_inc(op_bytes),
2385 &cs, op_bytes, ctxt)) ||
2386 (rc = ops->read(x86_seg_ss, sp_post_inc(op_bytes),
2387 &eflags, op_bytes, ctxt)) )
2388 goto done;
2389 if ( op_bytes == 2 )
2390 eflags = (uint16_t)eflags | (_regs.eflags & 0xffff0000u);
2391 eflags &= 0x257fd5;
2392 _regs.eflags &= mask;
2393 _regs.eflags |= (uint32_t)(eflags & ~mask) | 0x02;
2394 if ( (rc = ops->write_rflags(_regs.eflags, ctxt)) != 0 )
2395 goto done;
2396 _regs.eip = eip;
2397 if ( (rc = load_seg(x86_seg_cs, (uint16_t)cs, ctxt, ops)) != 0 )
2398 goto done;
2399 break;
2400 }
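/*
 * AAM/AAD take an immediate divisor (10 in the canonical encoding):
 * AAM splits AL into AH = AL/base, AL = AL%base; AAD recombines as
 * AL = AL + AH*base and clears AH.
 */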
2402 case 0xd4: /* aam */ {
2403 unsigned int base = insn_fetch_type(uint8_t);
2404 uint8_t al = _regs.eax;
2405 generate_exception_if(mode_64bit(), EXC_UD);
2406 generate_exception_if(base == 0, EXC_DE);
2407 *(uint16_t *)&_regs.eax = ((al / base) << 8) | (al % base);
2408 _regs.eflags &= ~(EFLG_SF|EFLG_ZF|EFLG_PF);
2409 _regs.eflags |= ((uint8_t)_regs.eax == 0) ? EFLG_ZF : 0;
2410 _regs.eflags |= (( int8_t)_regs.eax < 0) ? EFLG_SF : 0;
2411 _regs.eflags |= even_parity(_regs.eax) ? EFLG_PF : 0;
2412 break;
2413 }
2415 case 0xd5: /* aad */ {
2416 unsigned int base = insn_fetch_type(uint8_t);
2417 uint16_t ax = _regs.eax;
2418 generate_exception_if(mode_64bit(), EXC_UD);
2419 *(uint16_t *)&_regs.eax = (uint8_t)(ax + ((ax >> 8) * base));
2420 _regs.eflags &= ~(EFLG_SF|EFLG_ZF|EFLG_PF);
2421 _regs.eflags |= ((uint8_t)_regs.eax == 0) ? EFLG_ZF : 0;
2422 _regs.eflags |= (( int8_t)_regs.eax < 0) ? EFLG_SF : 0;
2423 _regs.eflags |= even_parity(_regs.eax) ? EFLG_PF : 0;
2424 break;
2425 }
2427 case 0xd6: /* salc */
2428 generate_exception_if(mode_64bit(), EXC_UD);
2429 *(uint8_t *)&_regs.eax = (_regs.eflags & EFLG_CF) ? 0xff : 0x00;
2430 break;
2432 case 0xd7: /* xlat */ {
2433 unsigned long al = (uint8_t)_regs.eax;
2434 if ( (rc = ops->read(ea.mem.seg, truncate_ea(_regs.ebx + al),
2435 &al, 1, ctxt)) != 0 )
2436 goto done;
2437 *(uint8_t *)&_regs.eax = al;
2438 break;
2439 }
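/* LOOP/LOOPZ/LOOPNZ: decrement eCX at the current address size and
 * branch while it is non-zero, with LOOPZ/LOOPNZ additionally gated
 * on ZF. */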
2441 case 0xe0 ... 0xe2: /* loop{,z,nz} */ {
2442 int rel = insn_fetch_type(int8_t);
2443 int do_jmp = !(_regs.eflags & EFLG_ZF); /* loopnz */
2444 if ( b == 0xe1 )
2445 do_jmp = !do_jmp; /* loopz */
2446 else if ( b == 0xe2 )
2447 do_jmp = 1; /* loop */
2448 switch ( ad_bytes )
2449 {
2450 case 2:
2451 do_jmp &= --(*(uint16_t *)&_regs.ecx) != 0;
2452 break;
2453 case 4:
2454 do_jmp &= --(*(uint32_t *)&_regs.ecx) != 0;
2455 _regs.ecx = (uint32_t)_regs.ecx; /* zero extend in x86/64 mode */
2456 break;
2457 default: /* case 8: */
2458 do_jmp &= --_regs.ecx != 0;
2459 break;
2460 }
2461 if ( do_jmp )
2462 jmp_rel(rel);
2463 break;
2464 }
2466 case 0xe3: /* jcxz/jecxz (short) */ {
2467 int rel = insn_fetch_type(int8_t);
2468 if ( (ad_bytes == 2) ? !(uint16_t)_regs.ecx :
2469 (ad_bytes == 4) ? !(uint32_t)_regs.ecx : !_regs.ecx )
2470 jmp_rel(rel);
2471 break;
2472 }
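/*
 * Explicit port I/O: the forms below 0xe8 take an immediate port
 * number, the 0xec-0xef forms use %dx; opcode bit 1 selects OUT vs IN
 * and bit 0 selects byte vs word/dword width.
 */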
2474 case 0xe4: /* in imm8,%al */
2475 case 0xe5: /* in imm8,%eax */
2476 case 0xe6: /* out %al,imm8 */
2477 case 0xe7: /* out %eax,imm8 */
2478 case 0xec: /* in %dx,%al */
2479 case 0xed: /* in %dx,%eax */
2480 case 0xee: /* out %al,%dx */
2481 case 0xef: /* out %eax,%dx */ {
2482 unsigned int port = ((b < 0xe8)
2483 ? insn_fetch_type(uint8_t)
2484 : (uint16_t)_regs.edx);
2485 generate_exception_if(!mode_iopl(), EXC_GP);
2486 op_bytes = !(b & 1) ? 1 : (op_bytes == 8) ? 4 : op_bytes;
2487 if ( b & 2 )
2488 {
2489 /* out */
2490 fail_if(ops->write_io == NULL);
2491 rc = ops->write_io(port, op_bytes, _regs.eax, ctxt);
2492 }
2494 else
2495 {
2496 /* in */
2497 dst.type = OP_REG;
2498 dst.bytes = op_bytes;
2499 dst.reg = (unsigned long *)&_regs.eax;
2500 fail_if(ops->read_io == NULL);
2501 rc = ops->read_io(port, dst.bytes, &dst.val, ctxt);
2502 }
2503 if ( rc != 0 )
2504 goto done;
2505 break;
2506 }
2508 case 0xe8: /* call (near) */ {
2509 int rel = (((op_bytes == 2) && !mode_64bit())
2510 ? (int32_t)insn_fetch_type(int16_t)
2511 : insn_fetch_type(int32_t));
2512 op_bytes = mode_64bit() ? 8 : op_bytes;
2513 src.val = _regs.eip;
2514 jmp_rel(rel);
2515 goto push;
2516 }
2518 case 0xe9: /* jmp (near) */ {
2519 int rel = (((op_bytes == 2) && !mode_64bit())
2520 ? (int32_t)insn_fetch_type(int16_t)
2521 : insn_fetch_type(int32_t));
2522 jmp_rel(rel);
2523 break;
2524 }
2526 case 0xea: /* jmp (far, absolute) */ {
2527 uint16_t sel;
2528 uint32_t eip;
2529 generate_exception_if(mode_64bit(), EXC_UD);
2530 eip = insn_fetch_bytes(op_bytes);
2531 sel = insn_fetch_type(uint16_t);
2532 if ( (rc = load_seg(x86_seg_cs, sel, ctxt, ops)) != 0 )
2533 goto done;
2534 _regs.eip = eip;
2535 break;
2536 }
2538 case 0xeb: /* jmp (short) */
2539 jmp_rel(insn_fetch_type(int8_t));
2540 break;
2542 case 0xf1: /* int1 (icebp) */
2543 src.val = EXC_DB;
2544 goto swint;
2546 case 0xf4: /* hlt */
2547 fail_if(ops->hlt == NULL);
2548 if ( (rc = ops->hlt(ctxt)) != 0 )
2549 goto done;
2550 break;
2552 case 0xf5: /* cmc */
2553 _regs.eflags ^= EFLG_CF;
2554 break;
2556 case 0xf8: /* clc */
2557 _regs.eflags &= ~EFLG_CF;
2558 break;
2560 case 0xf9: /* stc */
2561 _regs.eflags |= EFLG_CF;
2562 break;
2564 case 0xfa: /* cli */
2565 case 0xfb: /* sti */
2566 generate_exception_if(!mode_iopl(), EXC_GP);
2567 fail_if(ops->write_rflags == NULL);
2568 _regs.eflags &= ~EFLG_IF;
2569 if ( b == 0xfb ) /* sti */
2570 _regs.eflags |= EFLG_IF;
2571 if ( (rc = ops->write_rflags(_regs.eflags, ctxt)) != 0 )
2572 goto done;
2573 break;
2575 case 0xfc: /* cld */
2576 _regs.eflags &= ~EFLG_DF;
2577 break;
2579 case 0xfd: /* std */
2580 _regs.eflags |= EFLG_DF;
2581 break;
2582 }
2583 goto writeback;
2585 twobyte_insn:
2586 switch ( b )
2587 {
2588 case 0x40 ... 0x4f: /* cmovcc */
2589 dst.val = src.val;
2590 if ( !test_cc(b, _regs.eflags) )
2591 dst.type = OP_NONE;
2592 break;
2594 case 0x90 ... 0x9f: /* setcc */
2595 dst.val = test_cc(b, _regs.eflags);
2596 break;
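/*
 * CMPXCHG: compare eAX with the destination; if equal, the source is
 * stored there (ZF set), otherwise the old value is loaded into eAX
 * (ZF clear). A writeback happens either way; only its target varies.
 */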
2598 case 0xb0 ... 0xb1: /* cmpxchg */
2599 /* Save real source value, then compare EAX against destination. */
2600 src.orig_val = src.val;
2601 src.val = _regs.eax;
2602 emulate_2op_SrcV("cmp", src, dst, _regs.eflags);
2603 /* Always write back. The question is: where to? */
2604 d |= Mov;
2605 if ( _regs.eflags & EFLG_ZF )
2606 {
2607 /* Success: write back to memory. */
2608 dst.val = src.orig_val;
2609 }
2610 else
2611 {
2612 /* Failure: write the value we saw to EAX. */
2613 dst.type = OP_REG;
2614 dst.reg = (unsigned long *)&_regs.eax;
2615 }
2616 break;
2618 case 0xa3: bt: /* bt */
2619 emulate_2op_SrcV_nobyte("bt", src, dst, _regs.eflags);
2620 break;
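/*
 * SHLD/SHRD shift the destination, filling the vacated bit positions
 * from the source operand; the count comes from %cl or an immediate,
 * is masked to the operand width, and the arithmetic flags are
 * recomputed from the result.
 */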
2622 case 0xa4: /* shld imm8,r,r/m */
2623 case 0xa5: /* shld %%cl,r,r/m */
2624 case 0xac: /* shrd imm8,r,r/m */
2625 case 0xad: /* shrd %%cl,r,r/m */ {
2626 uint8_t shift, width = dst.bytes << 3;
2627 shift = (b & 1) ? (uint8_t)_regs.ecx : insn_fetch_type(uint8_t);
2628 if ( (shift &= width - 1) == 0 )
2629 break;
2630 dst.orig_val = truncate_word(dst.val, dst.bytes);
2631 dst.val = ((shift == width) ? src.val :
2632 (b & 8) ?
2633 /* shrd */
2634 ((dst.orig_val >> shift) |
2635 truncate_word(src.val << (width - shift), dst.bytes)) :
2636 /* shld */
2637 ((dst.orig_val << shift) |
2638 ((src.val >> (width - shift)) & ((1ull << shift) - 1))));
2639 dst.val = truncate_word(dst.val, dst.bytes);
2640 _regs.eflags &= ~(EFLG_OF|EFLG_SF|EFLG_ZF|EFLG_PF|EFLG_CF);
2641 if ( (dst.val >> ((b & 8) ? (shift - 1) : (width - shift))) & 1 )
2642 _regs.eflags |= EFLG_CF;
2643 if ( ((dst.val ^ dst.orig_val) >> (width - 1)) & 1 )
2644 _regs.eflags |= EFLG_OF;
2645 _regs.eflags |= ((dst.val >> (width - 1)) & 1) ? EFLG_SF : 0;
2646 _regs.eflags |= (dst.val == 0) ? EFLG_ZF : 0;
2647 _regs.eflags |= even_parity(dst.val) ? EFLG_PF : 0;
2648 break;
2649 }
2651 case 0xb3: btr: /* btr */
2652 emulate_2op_SrcV_nobyte("btr", src, dst, _regs.eflags);
2653 break;
2655 case 0xab: bts: /* bts */
2656 emulate_2op_SrcV_nobyte("bts", src, dst, _regs.eflags);
2657 break;
2659 case 0xaf: /* imul */
2660 _regs.eflags &= ~(EFLG_OF|EFLG_CF);
2661 switch ( dst.bytes )
2662 {
2663 case 2:
2664 dst.val = ((uint32_t)(int16_t)src.val *
2665 (uint32_t)(int16_t)dst.val);
2666 if ( (int16_t)dst.val != (uint32_t)dst.val )
2667 _regs.eflags |= EFLG_OF|EFLG_CF;
2668 break;
2669 #ifdef __x86_64__
2670 case 4:
2671 dst.val = ((uint64_t)(int32_t)src.val *
2672 (uint64_t)(int32_t)dst.val);
2673 if ( (int32_t)dst.val != dst.val )
2674 _regs.eflags |= EFLG_OF|EFLG_CF;
2675 break;
2676 #endif
2677 default: {
2678 unsigned long m[2] = { src.val, dst.val };
2679 if ( imul_dbl(m) )
2680 _regs.eflags |= EFLG_OF|EFLG_CF;
2681 dst.val = m[0];
2682 break;
2683 }
2684 }
2685 break;
2687 case 0xb2: /* lss */
2688 dst.val = x86_seg_ss;
2689 goto les;
2691 case 0xb4: /* lfs */
2692 dst.val = x86_seg_fs;
2693 goto les;
2695 case 0xb5: /* lgs */
2696 dst.val = x86_seg_gs;
2697 goto les;
2699 case 0xb6: /* movzx rm8,r{16,32,64} */
2700 /* Recompute DstReg as we may have decoded AH/BH/CH/DH. */
2701 dst.reg = decode_register(modrm_reg, &_regs, 0);
2702 dst.bytes = op_bytes;
2703 dst.val = (uint8_t)src.val;
2704 break;
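/* BSF/BSR execute natively via inline asm; ZF is the only flag with an
 * architecturally defined result, so it alone is propagated back. */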
2706 case 0xbc: /* bsf */ {
2707 int zf;
2708 asm ( "bsf %2,%0; setz %b1"
2709 : "=r" (dst.val), "=q" (zf)
2710 : "r" (src.val), "1" (0) );
2711 _regs.eflags &= ~EFLG_ZF;
2712 _regs.eflags |= zf ? EFLG_ZF : 0;
2713 break;
2714 }
2716 case 0xbd: /* bsr */ {
2717 int zf;
2718 asm ( "bsr %2,%0; setz %b1"
2719 : "=r" (dst.val), "=q" (zf)
2720 : "r" (src.val), "1" (0) );
2721 _regs.eflags &= ~EFLG_ZF;
2722 _regs.eflags |= zf ? EFLG_ZF : 0;
2723 break;
2724 }
2726 case 0xb7: /* movzx rm16,r{16,32,64} */
2727 dst.val = (uint16_t)src.val;
2728 break;
2730 case 0xbb: btc: /* btc */
2731 emulate_2op_SrcV_nobyte("btc", src, dst, _regs.eflags);
2732 break;
2734 case 0xba: /* Grp8 */
2735 switch ( modrm_reg & 7 )
2736 {
2737 case 4: goto bt;
2738 case 5: goto bts;
2739 case 6: goto btr;
2740 case 7: goto btc;
2741 default: generate_exception_if(1, EXC_UD);
2742 }
2743 break;
2745 case 0xbe: /* movsx rm8,r{16,32,64} */
2746 /* Recompute DstReg as we may have decoded AH/BH/CH/DH. */
2747 dst.reg = decode_register(modrm_reg, &_regs, 0);
2748 dst.bytes = op_bytes;
2749 dst.val = (int8_t)src.val;
2750 break;
2752 case 0xbf: /* movsx rm16,r{16,32,64} */
2753 dst.val = (int16_t)src.val;
2754 break;
2756 case 0xc0 ... 0xc1: /* xadd */
2757 /* Write back the register source. */
2758 switch ( dst.bytes )
2759 {
2760 case 1: *(uint8_t *)src.reg = (uint8_t)dst.val; break;
2761 case 2: *(uint16_t *)src.reg = (uint16_t)dst.val; break;
2762 case 4: *src.reg = (uint32_t)dst.val; break; /* 64b reg: zero-extend */
2763 case 8: *src.reg = dst.val; break;
2764 }
2765 goto add;
2767 goto writeback;
2769 twobyte_special_insn:
2770 switch ( b )
2771 {
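/*
 * Grp7: SGDT/SIDT store a 16-bit limit then the linear base (of which
 * only 24 bits are significant with a 16-bit operand size); LGDT/LIDT
 * load them through the write_segment hook; SMSW/LMSW access the low
 * 16 bits of CR0.
 */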
2772 case 0x01: /* Grp7 */ {
2773 struct segment_register reg;
2774 unsigned long base, limit, cr0, cr0w;
2776 switch ( modrm_reg & 7 )
2777 {
2778 case 0: /* sgdt */
2779 case 1: /* sidt */
2780 generate_exception_if(ea.type != OP_MEM, EXC_UD);
2781 fail_if(ops->read_segment == NULL);
2782 if ( (rc = ops->read_segment((modrm_reg & 1) ?
2783 x86_seg_idtr : x86_seg_gdtr,
2784 &reg, ctxt)) )
2785 goto done;
2786 if ( op_bytes == 2 )
2787 reg.base &= 0xffffff;
2788 if ( (rc = ops->write(ea.mem.seg, ea.mem.off+0,
2789 reg.limit, 2, ctxt)) ||
2790 (rc = ops->write(ea.mem.seg, ea.mem.off+2,
2791 reg.base, mode_64bit() ? 8 : 4, ctxt)) )
2792 goto done;
2793 break;
2794 case 2: /* lgdt */
2795 case 3: /* lidt */
2796 generate_exception_if(ea.type != OP_MEM, EXC_UD);
2797 fail_if(ops->write_segment == NULL);
2798 memset(&reg, 0, sizeof(reg));
2799 if ( (rc = ops->read(ea.mem.seg, ea.mem.off+0,
2800 &limit, 2, ctxt)) ||
2801 (rc = ops->read(ea.mem.seg, ea.mem.off+2,
2802 &base, mode_64bit() ? 8 : 4, ctxt)) )
2803 goto done;
2804 reg.base = base;
2805 reg.limit = limit;
2806 if ( op_bytes == 2 )
2807 reg.base &= 0xffffff;
2808 if ( (rc = ops->write_segment((modrm_reg & 1) ?
2809 x86_seg_idtr : x86_seg_gdtr,
2810 &reg, ctxt)) )
2811 goto done;
2812 break;
2813 case 4: /* smsw */
2814 ea.bytes = 2;
2815 dst = ea;
2816 fail_if(ops->read_cr == NULL);
2817 if ( (rc = ops->read_cr(0, &dst.val, ctxt)) )
2818 goto done;
2819 d |= Mov; /* force writeback */
2820 break;
2821 case 6: /* lmsw */
2822 fail_if(ops->read_cr == NULL);
2823 fail_if(ops->write_cr == NULL);
2824 if ( (rc = ops->read_cr(0, &cr0, ctxt)) )
2825 goto done;
2826 if ( ea.type == OP_REG )
2827 cr0w = *ea.reg;
2828 else if ( (rc = ops->read(ea.mem.seg, ea.mem.off,
2829 &cr0w, 2, ctxt)) )
2830 goto done;
2831 cr0 &= 0xffff0000;
2832 cr0 |= (uint16_t)cr0w;
2833 if ( (rc = ops->write_cr(0, cr0, ctxt)) )
2834 goto done;
2835 break;
2836 default:
2837 goto cannot_emulate;
2838 }
2839 break;
2840 }
2842 case 0x06: /* clts */
2843 generate_exception_if(!mode_ring0(), EXC_GP);
2844 fail_if((ops->read_cr == NULL) || (ops->write_cr == NULL));
2845 if ( (rc = ops->read_cr(0, &dst.val, ctxt)) ||
2846 (rc = ops->write_cr(0, dst.val&~8, ctxt)) )
2847 goto done;
2848 break;
2850 case 0x08: /* invd */
2851 case 0x09: /* wbinvd */
2852 generate_exception_if(!mode_ring0(), EXC_GP);
2853 fail_if(ops->wbinvd == NULL);
2854 if ( (rc = ops->wbinvd(ctxt)) != 0 )
2855 goto done;
2856 break;
2858 case 0x0d: /* GrpP (prefetch) */
2859 case 0x18: /* Grp16 (prefetch/nop) */
2860 case 0x19 ... 0x1f: /* nop (amd-defined) */
2861 break;
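/*
 * MOV to/from CRn/DRn. A LOCK prefix supplies an extra register-number
 * bit (the AMD-defined encoding that makes CR8 reachable from 32-bit
 * code); opcode bit 0 selects DRn over CRn, bit 1 the write direction.
 */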
2863 case 0x20: /* mov cr,reg */
2864 case 0x21: /* mov dr,reg */
2865 case 0x22: /* mov reg,cr */
2866 case 0x23: /* mov reg,dr */
2867 generate_exception_if(!mode_ring0(), EXC_GP);
2868 modrm_rm |= (rex_prefix & 1) << 3;
2869 modrm_reg |= lock_prefix << 3;
2870 if ( b & 2 )
2871 {
2872 /* Write to CR/DR. */
2873 src.val = *(unsigned long *)decode_register(modrm_rm, &_regs, 0);
2874 if ( !mode_64bit() )
2875 src.val = (uint32_t)src.val;
2876 rc = ((b & 1)
2877 ? (ops->write_dr
2878 ? ops->write_dr(modrm_reg, src.val, ctxt)
2879 : X86EMUL_UNHANDLEABLE)
2880 : (ops->write_cr
2881 ? ops->write_cr(modrm_reg, src.val, ctxt)
2882 : X86EMUL_UNHANDLEABLE));
2883 }
2884 else
2885 {
2886 /* Read from CR/DR. */
2887 dst.type = OP_REG;
2888 dst.bytes = mode_64bit() ? 8 : 4;
2889 dst.reg = decode_register(modrm_rm, &_regs, 0);
2890 rc = ((b & 1)
2891 ? (ops->read_dr
2892 ? ops->read_dr(modrm_reg, &dst.val, ctxt)
2893 : X86EMUL_UNHANDLEABLE)
2894 : (ops->read_cr
2895 ? ops->read_cr(modrm_reg, &dst.val, ctxt)
2896 : X86EMUL_UNHANDLEABLE));
2897 }
2898 if ( rc != 0 )
2899 goto done;
2900 break;
2902 case 0x30: /* wrmsr */ {
2903 uint64_t val = ((uint64_t)_regs.edx << 32) | (uint32_t)_regs.eax;
2904 generate_exception_if(!mode_ring0(), EXC_GP);
2905 fail_if(ops->write_msr == NULL);
2906 if ( (rc = ops->write_msr((uint32_t)_regs.ecx, val, ctxt)) != 0 )
2907 goto done;
2908 break;
2909 }
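/* RDTSC is permitted outside ring 0 only while CR4.TSD is clear; the
 * counter itself is fetched via the MSR read hook. */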
2911 case 0x31: /* rdtsc */ {
2912 unsigned long cr4;
2913 uint64_t val;
2914 fail_if(ops->read_cr == NULL);
2915 if ( (rc = ops->read_cr(4, &cr4, ctxt)) )
2916 goto done;
2917 generate_exception_if((cr4 & CR4_TSD) && !mode_ring0(), EXC_GP);
2918 fail_if(ops->read_msr == NULL);
2919 if ( (rc = ops->read_msr(MSR_TSC, &val, ctxt)) != 0 )
2920 goto done;
2921 _regs.edx = (uint32_t)(val >> 32);
2922 _regs.eax = (uint32_t)(val >> 0);
2923 break;
2924 }
2926 case 0x32: /* rdmsr */ {
2927 uint64_t val;
2928 generate_exception_if(!mode_ring0(), EXC_GP);
2929 fail_if(ops->read_msr == NULL);
2930 if ( (rc = ops->read_msr((uint32_t)_regs.ecx, &val, ctxt)) != 0 )
2931 goto done;
2932 _regs.edx = (uint32_t)(val >> 32);
2933 _regs.eax = (uint32_t)(val >> 0);
2934 break;
2935 }
2937 case 0x80 ... 0x8f: /* jcc (near) */ {
2938 int rel = (((op_bytes == 2) && !mode_64bit())
2939 ? (int32_t)insn_fetch_type(int16_t)
2940 : insn_fetch_type(int32_t));
2941 if ( test_cc(b, _regs.eflags) )
2942 jmp_rel(rel);
2943 break;
2944 }
2946 case 0xa0: /* push %%fs */
2947 src.val = x86_seg_fs;
2948 goto push_seg;
2950 case 0xa1: /* pop %%fs */
2951 src.val = x86_seg_fs;
2952 goto pop_seg;
2954 case 0xa2: /* cpuid */ {
2955 unsigned int eax = _regs.eax, ebx = _regs.ebx;
2956 unsigned int ecx = _regs.ecx, edx = _regs.edx;
2957 fail_if(ops->cpuid == NULL);
2958 if ( (rc = ops->cpuid(&eax, &ebx, &ecx, &edx, ctxt)) != 0 )
2959 goto done;
2960 _regs.eax = eax; _regs.ebx = ebx;
2961 _regs.ecx = ecx; _regs.edx = edx;
2962 break;
2963 }
2965 case 0xa8: /* push %%gs */
2966 src.val = x86_seg_gs;
2967 goto push_seg;
2969 case 0xa9: /* pop %%gs */
2970 src.val = x86_seg_gs;
2971 goto pop_seg;
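/*
 * CMPXCHG8B mem64: if eDX:eAX equals the memory operand, store eCX:eBX
 * and set ZF; otherwise load the operand into eDX:eAX and clear ZF.
 * The 32-bit build needs a dedicated cmpxchg8b hook for the atomic
 * store; the 64-bit build reuses the 8-byte cmpxchg hook.
 */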
2973 case 0xc7: /* Grp9 (cmpxchg8b) */
2974 #if defined(__i386__)
2975 {
2976 unsigned long old_lo, old_hi;
2977 generate_exception_if((modrm_reg & 7) != 1, EXC_UD);
2978 if ( (rc = ops->read(ea.mem.seg, ea.mem.off+0, &old_lo, 4, ctxt)) ||
2979 (rc = ops->read(ea.mem.seg, ea.mem.off+4, &old_hi, 4, ctxt)) )
2980 goto done;
2981 if ( (old_lo != _regs.eax) || (old_hi != _regs.edx) )
2982 {
2983 _regs.eax = old_lo;
2984 _regs.edx = old_hi;
2985 _regs.eflags &= ~EFLG_ZF;
2986 }
2987 else if ( ops->cmpxchg8b == NULL )
2988 {
2989 rc = X86EMUL_UNHANDLEABLE;
2990 goto done;
2991 }
2992 else
2993 {
2994 if ( (rc = ops->cmpxchg8b(ea.mem.seg, ea.mem.off, old_lo, old_hi,
2995 _regs.ebx, _regs.ecx, ctxt)) != 0 )
2996 goto done;
2997 _regs.eflags |= EFLG_ZF;
2998 }
2999 break;
3000 }
3001 #elif defined(__x86_64__)
3002 {
3003 unsigned long old, new;
3004 generate_exception_if((modrm_reg & 7) != 1, EXC_UD);
3005 if ( (rc = ops->read(ea.mem.seg, ea.mem.off, &old, 8, ctxt)) != 0 )
3006 goto done;
3007 if ( ((uint32_t)(old>>0) != (uint32_t)_regs.eax) ||
3008 ((uint32_t)(old>>32) != (uint32_t)_regs.edx) )
3009 {
3010 _regs.eax = (uint32_t)(old>>0);
3011 _regs.edx = (uint32_t)(old>>32);
3012 _regs.eflags &= ~EFLG_ZF;
3013 }
3014 else
3015 {
3016 new = (_regs.ecx<<32)|(uint32_t)_regs.ebx;
3017 if ( (rc = ops->cmpxchg(ea.mem.seg, ea.mem.off, old,
3018 new, 8, ctxt)) != 0 )
3019 goto done;
3020 _regs.eflags |= EFLG_ZF;
3021 }
3022 break;
3023 }
3024 #endif
3026 case 0xc8 ... 0xcf: /* bswap */
3027 dst.type = OP_REG;
3028 dst.reg = decode_register(
3029 (b & 7) | ((rex_prefix & 1) << 3), &_regs, 0);
3030 switch ( dst.bytes = op_bytes )
3031 {
3032 default: /* case 2: */
3033 /* Undefined behaviour. Writes zero on all tested CPUs. */
3034 dst.val = 0;
3035 break;
3036 case 4:
3037 #ifdef __x86_64__
3038 asm ( "bswap %k0" : "=r" (dst.val) : "0" (*dst.reg) );
3039 break;
3040 case 8:
3041 #endif
3042 asm ( "bswap %0" : "=r" (dst.val) : "0" (*dst.reg) );
3043 break;
3044 }
3045 break;
3047 goto writeback;
3049 cannot_emulate:
3050 #if 0
3051 gdprintk(XENLOG_DEBUG, "Instr:");
3052 for ( ea.mem.off = ctxt->regs->eip; ea.mem.off < _regs.eip; ea.mem.off++ )
3053 {
3054 unsigned long x;
3055 ops->insn_fetch(x86_seg_cs, ea.mem.off, &x, 1, ctxt);
3056 printk(" %02x", (uint8_t)x);
3057 }
3058 printk("\n");
3059 #endif
3060 return X86EMUL_UNHANDLEABLE;
3061 }