xen/arch/x86/x86_emulate/x86_emulate.c @ 17945:a9fff28d4f06

x86: Emulation of LMSW must only affect CR0 bits 0-3.
Emulation of SMSW is restricted to 16-bit operation only for memory
operands.

Signed-off-by: Keir Fraser <keir.fraser@citrix.com>
Author: Keir Fraser <keir.fraser@citrix.com>
Date:   Fri Jun 27 17:24:54 2008 +0100
/******************************************************************************
 * x86_emulate.c
 *
 * Generic x86 (32-bit and 64-bit) instruction decoder and emulator.
 *
 * Copyright (c) 2005-2007 Keir Fraser
 * Copyright (c) 2005-2007 XenSource Inc.
 *
 * This program is free software; you can redistribute it and/or modify
 * it under the terms of the GNU General Public License as published by
 * the Free Software Foundation; either version 2 of the License, or
 * (at your option) any later version.
 *
 * This program is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
 * GNU General Public License for more details.
 *
 * You should have received a copy of the GNU General Public License
 * along with this program; if not, write to the Free Software
 * Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
 */

/* Operand sizes: 8-bit operands or specified/overridden size. */
#define ByteOp      (1<<0) /* 8-bit operands. */
/* Destination operand type. */
#define DstBitBase  (0<<1) /* Memory operand, bit string. */
#define ImplicitOps (1<<1) /* Implicit in opcode. No generic decode. */
#define DstReg      (2<<1) /* Register operand. */
#define DstMem      (3<<1) /* Memory operand. */
#define DstMask     (3<<1)
/* Source operand type. */
#define SrcNone     (0<<3) /* No source operand. */
#define SrcImplicit (0<<3) /* Source operand is implicit in the opcode. */
#define SrcReg      (1<<3) /* Register operand. */
#define SrcMem      (2<<3) /* Memory operand. */
#define SrcMem16    (3<<3) /* Memory operand (16-bit). */
#define SrcImm      (4<<3) /* Immediate operand. */
#define SrcImmByte  (5<<3) /* 8-bit sign-extended immediate operand. */
#define SrcMask     (7<<3)
/* Generic ModRM decode. */
#define ModRM       (1<<6)
/* Destination is only written; never read. */
#define Mov         (1<<7)

static uint8_t opcode_table[256] = {
    /* 0x00 - 0x07 */
    ByteOp|DstMem|SrcReg|ModRM, DstMem|SrcReg|ModRM,
    ByteOp|DstReg|SrcMem|ModRM, DstReg|SrcMem|ModRM,
    ByteOp|DstReg|SrcImm, DstReg|SrcImm, ImplicitOps, ImplicitOps,
    /* 0x08 - 0x0F */
    ByteOp|DstMem|SrcReg|ModRM, DstMem|SrcReg|ModRM,
    ByteOp|DstReg|SrcMem|ModRM, DstReg|SrcMem|ModRM,
    ByteOp|DstReg|SrcImm, DstReg|SrcImm, ImplicitOps, 0,
    /* 0x10 - 0x17 */
    ByteOp|DstMem|SrcReg|ModRM, DstMem|SrcReg|ModRM,
    ByteOp|DstReg|SrcMem|ModRM, DstReg|SrcMem|ModRM,
    ByteOp|DstReg|SrcImm, DstReg|SrcImm, ImplicitOps, ImplicitOps,
    /* 0x18 - 0x1F */
    ByteOp|DstMem|SrcReg|ModRM, DstMem|SrcReg|ModRM,
    ByteOp|DstReg|SrcMem|ModRM, DstReg|SrcMem|ModRM,
    ByteOp|DstReg|SrcImm, DstReg|SrcImm, ImplicitOps, ImplicitOps,
    /* 0x20 - 0x27 */
    ByteOp|DstMem|SrcReg|ModRM, DstMem|SrcReg|ModRM,
    ByteOp|DstReg|SrcMem|ModRM, DstReg|SrcMem|ModRM,
    ByteOp|DstReg|SrcImm, DstReg|SrcImm, 0, ImplicitOps,
    /* 0x28 - 0x2F */
    ByteOp|DstMem|SrcReg|ModRM, DstMem|SrcReg|ModRM,
    ByteOp|DstReg|SrcMem|ModRM, DstReg|SrcMem|ModRM,
    ByteOp|DstReg|SrcImm, DstReg|SrcImm, 0, ImplicitOps,
    /* 0x30 - 0x37 */
    ByteOp|DstMem|SrcReg|ModRM, DstMem|SrcReg|ModRM,
    ByteOp|DstReg|SrcMem|ModRM, DstReg|SrcMem|ModRM,
    ByteOp|DstReg|SrcImm, DstReg|SrcImm, 0, ImplicitOps,
    /* 0x38 - 0x3F */
    ByteOp|DstMem|SrcReg|ModRM, DstMem|SrcReg|ModRM,
    ByteOp|DstReg|SrcMem|ModRM, DstReg|SrcMem|ModRM,
    ByteOp|DstReg|SrcImm, DstReg|SrcImm, 0, ImplicitOps,
    /* 0x40 - 0x4F */
    ImplicitOps, ImplicitOps, ImplicitOps, ImplicitOps,
    ImplicitOps, ImplicitOps, ImplicitOps, ImplicitOps,
    ImplicitOps, ImplicitOps, ImplicitOps, ImplicitOps,
    ImplicitOps, ImplicitOps, ImplicitOps, ImplicitOps,
    /* 0x50 - 0x5F */
    ImplicitOps|Mov, ImplicitOps|Mov, ImplicitOps|Mov, ImplicitOps|Mov,
    ImplicitOps|Mov, ImplicitOps|Mov, ImplicitOps|Mov, ImplicitOps|Mov,
    ImplicitOps|Mov, ImplicitOps|Mov, ImplicitOps|Mov, ImplicitOps|Mov,
    ImplicitOps|Mov, ImplicitOps|Mov, ImplicitOps|Mov, ImplicitOps|Mov,
    /* 0x60 - 0x67 */
    ImplicitOps, ImplicitOps, DstReg|SrcMem|ModRM, DstReg|SrcMem16|ModRM|Mov,
    0, 0, 0, 0,
    /* 0x68 - 0x6F */
    ImplicitOps|Mov, DstReg|SrcImm|ModRM|Mov,
    ImplicitOps|Mov, DstReg|SrcImmByte|ModRM|Mov,
    ImplicitOps|Mov, ImplicitOps|Mov, ImplicitOps|Mov, ImplicitOps|Mov,
    /* 0x70 - 0x77 */
    ImplicitOps, ImplicitOps, ImplicitOps, ImplicitOps,
    ImplicitOps, ImplicitOps, ImplicitOps, ImplicitOps,
    /* 0x78 - 0x7F */
    ImplicitOps, ImplicitOps, ImplicitOps, ImplicitOps,
    ImplicitOps, ImplicitOps, ImplicitOps, ImplicitOps,
    /* 0x80 - 0x87 */
    ByteOp|DstMem|SrcImm|ModRM, DstMem|SrcImm|ModRM,
    ByteOp|DstMem|SrcImm|ModRM, DstMem|SrcImmByte|ModRM,
    ByteOp|DstMem|SrcReg|ModRM, DstMem|SrcReg|ModRM,
    ByteOp|DstMem|SrcReg|ModRM, DstMem|SrcReg|ModRM,
    /* 0x88 - 0x8F */
    ByteOp|DstMem|SrcReg|ModRM|Mov, DstMem|SrcReg|ModRM|Mov,
    ByteOp|DstReg|SrcMem|ModRM|Mov, DstReg|SrcMem|ModRM|Mov,
    DstMem|SrcReg|ModRM|Mov, DstReg|SrcNone|ModRM,
    DstReg|SrcMem|ModRM|Mov, DstMem|SrcNone|ModRM|Mov,
    /* 0x90 - 0x97 */
    ImplicitOps, ImplicitOps, ImplicitOps, ImplicitOps,
    ImplicitOps, ImplicitOps, ImplicitOps, ImplicitOps,
    /* 0x98 - 0x9F */
    ImplicitOps, ImplicitOps, ImplicitOps, ImplicitOps,
    ImplicitOps, ImplicitOps, ImplicitOps, ImplicitOps,
    /* 0xA0 - 0xA7 */
    ByteOp|ImplicitOps|Mov, ImplicitOps|Mov,
    ByteOp|ImplicitOps|Mov, ImplicitOps|Mov,
    ByteOp|ImplicitOps|Mov, ImplicitOps|Mov,
    ByteOp|ImplicitOps, ImplicitOps,
    /* 0xA8 - 0xAF */
    ByteOp|DstReg|SrcImm, DstReg|SrcImm,
    ByteOp|ImplicitOps|Mov, ImplicitOps|Mov,
    ByteOp|ImplicitOps|Mov, ImplicitOps|Mov,
    ByteOp|ImplicitOps, ImplicitOps,
    /* 0xB0 - 0xB7 */
    ByteOp|DstReg|SrcImm|Mov, ByteOp|DstReg|SrcImm|Mov,
    ByteOp|DstReg|SrcImm|Mov, ByteOp|DstReg|SrcImm|Mov,
    ByteOp|DstReg|SrcImm|Mov, ByteOp|DstReg|SrcImm|Mov,
    ByteOp|DstReg|SrcImm|Mov, ByteOp|DstReg|SrcImm|Mov,
    /* 0xB8 - 0xBF */
    DstReg|SrcImm|Mov, DstReg|SrcImm|Mov, DstReg|SrcImm|Mov, DstReg|SrcImm|Mov,
    DstReg|SrcImm|Mov, DstReg|SrcImm|Mov, DstReg|SrcImm|Mov, DstReg|SrcImm|Mov,
    /* 0xC0 - 0xC7 */
    ByteOp|DstMem|SrcImm|ModRM, DstMem|SrcImmByte|ModRM,
    ImplicitOps, ImplicitOps,
    DstReg|SrcMem|ModRM|Mov, DstReg|SrcMem|ModRM|Mov,
    ByteOp|DstMem|SrcImm|ModRM|Mov, DstMem|SrcImm|ModRM|Mov,
    /* 0xC8 - 0xCF */
    ImplicitOps, ImplicitOps, ImplicitOps, ImplicitOps,
    ImplicitOps, ImplicitOps, ImplicitOps, ImplicitOps,
    /* 0xD0 - 0xD7 */
    ByteOp|DstMem|SrcImplicit|ModRM, DstMem|SrcImplicit|ModRM,
    ByteOp|DstMem|SrcImplicit|ModRM, DstMem|SrcImplicit|ModRM,
    ImplicitOps, ImplicitOps, ImplicitOps, ImplicitOps,
    /* 0xD8 - 0xDF */
    0, ImplicitOps|ModRM|Mov, 0, ImplicitOps|ModRM|Mov,
    0, ImplicitOps|ModRM|Mov, ImplicitOps|ModRM|Mov, ImplicitOps|ModRM|Mov,
    /* 0xE0 - 0xE7 */
    ImplicitOps, ImplicitOps, ImplicitOps, ImplicitOps,
    ImplicitOps, ImplicitOps, ImplicitOps, ImplicitOps,
    /* 0xE8 - 0xEF */
    ImplicitOps, ImplicitOps, ImplicitOps, ImplicitOps,
    ImplicitOps, ImplicitOps, ImplicitOps, ImplicitOps,
    /* 0xF0 - 0xF7 */
    0, ImplicitOps, 0, 0,
    ImplicitOps, ImplicitOps,
    ByteOp|DstMem|SrcNone|ModRM, DstMem|SrcNone|ModRM,
    /* 0xF8 - 0xFF */
    ImplicitOps, ImplicitOps, ImplicitOps, ImplicitOps,
    ImplicitOps, ImplicitOps, ByteOp|DstMem|SrcNone|ModRM, DstMem|SrcNone|ModRM
};

static uint8_t twobyte_table[256] = {
    /* 0x00 - 0x07 */
    0, ImplicitOps|ModRM, 0, 0, 0, 0, ImplicitOps, 0,
    /* 0x08 - 0x0F */
    ImplicitOps, ImplicitOps, 0, 0, 0, ImplicitOps|ModRM, 0, 0,
    /* 0x10 - 0x17 */
    0, 0, 0, 0, 0, 0, 0, 0,
    /* 0x18 - 0x1F */
    ImplicitOps|ModRM, ImplicitOps|ModRM, ImplicitOps|ModRM, ImplicitOps|ModRM,
    ImplicitOps|ModRM, ImplicitOps|ModRM, ImplicitOps|ModRM, ImplicitOps|ModRM,
    /* 0x20 - 0x27 */
    ImplicitOps|ModRM, ImplicitOps|ModRM, ImplicitOps|ModRM, ImplicitOps|ModRM,
    0, 0, 0, 0,
    /* 0x28 - 0x2F */
    0, 0, 0, 0, 0, 0, 0, 0,
    /* 0x30 - 0x37 */
    ImplicitOps, ImplicitOps, ImplicitOps, 0, 0, 0, 0, 0,
    /* 0x38 - 0x3F */
    0, 0, 0, 0, 0, 0, 0, 0,
    /* 0x40 - 0x47 */
    DstReg|SrcMem|ModRM|Mov, DstReg|SrcMem|ModRM|Mov,
    DstReg|SrcMem|ModRM|Mov, DstReg|SrcMem|ModRM|Mov,
    DstReg|SrcMem|ModRM|Mov, DstReg|SrcMem|ModRM|Mov,
    DstReg|SrcMem|ModRM|Mov, DstReg|SrcMem|ModRM|Mov,
    /* 0x48 - 0x4F */
    DstReg|SrcMem|ModRM|Mov, DstReg|SrcMem|ModRM|Mov,
    DstReg|SrcMem|ModRM|Mov, DstReg|SrcMem|ModRM|Mov,
    DstReg|SrcMem|ModRM|Mov, DstReg|SrcMem|ModRM|Mov,
    DstReg|SrcMem|ModRM|Mov, DstReg|SrcMem|ModRM|Mov,
    /* 0x50 - 0x5F */
    0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
    /* 0x60 - 0x6F */
    0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, ImplicitOps|ModRM,
    /* 0x70 - 0x7F */
    0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, ImplicitOps|ModRM,
    /* 0x80 - 0x87 */
    ImplicitOps, ImplicitOps, ImplicitOps, ImplicitOps,
    ImplicitOps, ImplicitOps, ImplicitOps, ImplicitOps,
    /* 0x88 - 0x8F */
    ImplicitOps, ImplicitOps, ImplicitOps, ImplicitOps,
    ImplicitOps, ImplicitOps, ImplicitOps, ImplicitOps,
    /* 0x90 - 0x97 */
    ByteOp|DstMem|SrcNone|ModRM|Mov, ByteOp|DstMem|SrcNone|ModRM|Mov,
    ByteOp|DstMem|SrcNone|ModRM|Mov, ByteOp|DstMem|SrcNone|ModRM|Mov,
    ByteOp|DstMem|SrcNone|ModRM|Mov, ByteOp|DstMem|SrcNone|ModRM|Mov,
    ByteOp|DstMem|SrcNone|ModRM|Mov, ByteOp|DstMem|SrcNone|ModRM|Mov,
    /* 0x98 - 0x9F */
    ByteOp|DstMem|SrcNone|ModRM|Mov, ByteOp|DstMem|SrcNone|ModRM|Mov,
    ByteOp|DstMem|SrcNone|ModRM|Mov, ByteOp|DstMem|SrcNone|ModRM|Mov,
    ByteOp|DstMem|SrcNone|ModRM|Mov, ByteOp|DstMem|SrcNone|ModRM|Mov,
    ByteOp|DstMem|SrcNone|ModRM|Mov, ByteOp|DstMem|SrcNone|ModRM|Mov,
    /* 0xA0 - 0xA7 */
    ImplicitOps, ImplicitOps, ImplicitOps, DstBitBase|SrcReg|ModRM,
    DstMem|SrcReg|ModRM, DstMem|SrcReg|ModRM, 0, 0,
    /* 0xA8 - 0xAF */
    ImplicitOps, ImplicitOps, 0, DstBitBase|SrcReg|ModRM,
    DstMem|SrcReg|ModRM, DstMem|SrcReg|ModRM, 0, DstReg|SrcMem|ModRM,
    /* 0xB0 - 0xB7 */
    ByteOp|DstMem|SrcReg|ModRM, DstMem|SrcReg|ModRM,
    DstReg|SrcMem|ModRM|Mov, DstBitBase|SrcReg|ModRM,
    DstReg|SrcMem|ModRM|Mov, DstReg|SrcMem|ModRM|Mov,
    ByteOp|DstReg|SrcMem|ModRM|Mov, DstReg|SrcMem16|ModRM|Mov,
    /* 0xB8 - 0xBF */
    0, 0, DstBitBase|SrcImmByte|ModRM, DstBitBase|SrcReg|ModRM,
    DstReg|SrcMem|ModRM, DstReg|SrcMem|ModRM,
    ByteOp|DstReg|SrcMem|ModRM|Mov, DstReg|SrcMem16|ModRM|Mov,
    /* 0xC0 - 0xC7 */
    ByteOp|DstMem|SrcReg|ModRM, DstMem|SrcReg|ModRM, 0, 0,
    0, 0, 0, ImplicitOps|ModRM,
    /* 0xC8 - 0xCF */
    ImplicitOps, ImplicitOps, ImplicitOps, ImplicitOps,
    ImplicitOps, ImplicitOps, ImplicitOps, ImplicitOps,
    /* 0xD0 - 0xDF */
    0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
    /* 0xE0 - 0xEF */
    0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
    /* 0xF0 - 0xFF */
    0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0
};

/* Type, address-of, and value of an instruction's operand. */
struct operand {
    enum { OP_REG, OP_MEM, OP_IMM, OP_NONE } type;
    unsigned int bytes;
    unsigned long val, orig_val;
    union {
        /* OP_REG: Pointer to register field. */
        unsigned long *reg;
        /* OP_MEM: Segment and offset. */
        struct {
            enum x86_segment seg;
            unsigned long off;
        } mem;
    };
};

/* MSRs. */
#define MSR_TSC 0x10

/* Control register flags. */
#define CR0_PE  (1<<0)
#define CR4_TSD (1<<2)

/* EFLAGS bit definitions. */
#define EFLG_VIP  (1<<20)
#define EFLG_VIF  (1<<19)
#define EFLG_AC   (1<<18)
#define EFLG_VM   (1<<17)
#define EFLG_RF   (1<<16)
#define EFLG_NT   (1<<14)
#define EFLG_IOPL (3<<12)
#define EFLG_OF   (1<<11)
#define EFLG_DF   (1<<10)
#define EFLG_IF   (1<<9)
#define EFLG_TF   (1<<8)
#define EFLG_SF   (1<<7)
#define EFLG_ZF   (1<<6)
#define EFLG_AF   (1<<4)
#define EFLG_PF   (1<<2)
#define EFLG_CF   (1<<0)

/* Exception definitions. */
#define EXC_DE  0
#define EXC_DB  1
#define EXC_BP  3
#define EXC_OF  4
#define EXC_BR  5
#define EXC_UD  6
#define EXC_TS 10
#define EXC_NP 11
#define EXC_SS 12
#define EXC_GP 13
#define EXC_PF 14
#define EXC_MF 16

/*
 * Instruction emulation:
 * Most instructions are emulated directly via a fragment of inline assembly
 * code. This allows us to save/restore EFLAGS and thus very easily pick up
 * any modified flags.
 */

#if defined(__x86_64__)
#define _LO32 "k"          /* force 32-bit operand */
#define _STK  "%%rsp"      /* stack pointer */
#define _BYTES_PER_LONG "8"
#elif defined(__i386__)
#define _LO32 ""           /* force 32-bit operand */
#define _STK  "%%esp"      /* stack pointer */
#define _BYTES_PER_LONG "4"
#endif

/*
 * These EFLAGS bits are restored from saved value during emulation, and
 * any changes are written back to the saved value after emulation.
 */
#define EFLAGS_MASK (EFLG_OF|EFLG_SF|EFLG_ZF|EFLG_AF|EFLG_PF|EFLG_CF)

/* Before executing instruction: restore necessary bits in EFLAGS. */
#define _PRE_EFLAGS(_sav, _msk, _tmp) \
    /* EFLAGS = (_sav & _msk) | (EFLAGS & ~_msk); _sav &= ~_msk; */ \
    "movl %"_sav",%"_LO32 _tmp"; " \
    "push %"_tmp"; " \
    "push %"_tmp"; " \
    "movl %"_msk",%"_LO32 _tmp"; " \
    "andl %"_LO32 _tmp",("_STK"); " \
    "pushf; " \
    "notl %"_LO32 _tmp"; " \
    "andl %"_LO32 _tmp",("_STK"); " \
    "andl %"_LO32 _tmp",2*"_BYTES_PER_LONG"("_STK"); " \
    "pop %"_tmp"; " \
    "orl %"_LO32 _tmp",("_STK"); " \
    "popf; " \
    "pop %"_sav"; "

/* After executing instruction: write-back necessary bits in EFLAGS. */
#define _POST_EFLAGS(_sav, _msk, _tmp) \
    /* _sav |= EFLAGS & _msk; */ \
    "pushf; " \
    "pop %"_tmp"; " \
    "andl %"_msk",%"_LO32 _tmp"; " \
    "orl %"_LO32 _tmp",%"_sav"; "

/* Raw emulation: instruction has two explicit operands. */
#define __emulate_2op_nobyte(_op,_src,_dst,_eflags,_wx,_wy,_lx,_ly,_qx,_qy) \
do{ unsigned long _tmp; \
    switch ( (_dst).bytes ) \
    { \
    case 2: \
        asm volatile ( \
            _PRE_EFLAGS("0","4","2") \
            _op"w %"_wx"3,%1; " \
            _POST_EFLAGS("0","4","2") \
            : "=m" (_eflags), "=m" ((_dst).val), "=&r" (_tmp) \
            : _wy ((_src).val), "i" (EFLAGS_MASK), \
              "m" (_eflags), "m" ((_dst).val) ); \
        break; \
    case 4: \
        asm volatile ( \
            _PRE_EFLAGS("0","4","2") \
            _op"l %"_lx"3,%1; " \
            _POST_EFLAGS("0","4","2") \
            : "=m" (_eflags), "=m" ((_dst).val), "=&r" (_tmp) \
            : _ly ((_src).val), "i" (EFLAGS_MASK), \
              "m" (_eflags), "m" ((_dst).val) ); \
        break; \
    case 8: \
        __emulate_2op_8byte(_op, _src, _dst, _eflags, _qx, _qy); \
        break; \
    } \
} while (0)

#define __emulate_2op(_op,_src,_dst,_eflags,_bx,_by,_wx,_wy,_lx,_ly,_qx,_qy) \
do{ unsigned long _tmp; \
    switch ( (_dst).bytes ) \
    { \
    case 1: \
        asm volatile ( \
            _PRE_EFLAGS("0","4","2") \
            _op"b %"_bx"3,%1; " \
            _POST_EFLAGS("0","4","2") \
            : "=m" (_eflags), "=m" ((_dst).val), "=&r" (_tmp) \
            : _by ((_src).val), "i" (EFLAGS_MASK), \
              "m" (_eflags), "m" ((_dst).val) ); \
        break; \
    default: \
        __emulate_2op_nobyte(_op,_src,_dst,_eflags,_wx,_wy,_lx,_ly,_qx,_qy); \
        break; \
    } \
} while (0)

/* Source operand is byte-sized and may be restricted to just %cl. */
#define emulate_2op_SrcB(_op, _src, _dst, _eflags) \
    __emulate_2op(_op, _src, _dst, _eflags, \
                  "b", "c", "b", "c", "b", "c", "b", "c")

/* Source operand is byte, word, long or quad sized. */
#define emulate_2op_SrcV(_op, _src, _dst, _eflags) \
    __emulate_2op(_op, _src, _dst, _eflags, \
                  "b", "q", "w", "r", _LO32, "r", "", "r")

/* Source operand is word, long or quad sized. */
#define emulate_2op_SrcV_nobyte(_op, _src, _dst, _eflags) \
    __emulate_2op_nobyte(_op, _src, _dst, _eflags, \
                         "w", "r", _LO32, "r", "", "r")

/* Instruction has only one explicit operand (no source operand). */
#define emulate_1op(_op,_dst,_eflags) \
do{ unsigned long _tmp; \
    switch ( (_dst).bytes ) \
    { \
    case 1: \
        asm volatile ( \
            _PRE_EFLAGS("0","3","2") \
            _op"b %1; " \
            _POST_EFLAGS("0","3","2") \
            : "=m" (_eflags), "=m" ((_dst).val), "=&r" (_tmp) \
            : "i" (EFLAGS_MASK), "m" (_eflags), "m" ((_dst).val) ); \
        break; \
    case 2: \
        asm volatile ( \
            _PRE_EFLAGS("0","3","2") \
            _op"w %1; " \
            _POST_EFLAGS("0","3","2") \
            : "=m" (_eflags), "=m" ((_dst).val), "=&r" (_tmp) \
            : "i" (EFLAGS_MASK), "m" (_eflags), "m" ((_dst).val) ); \
        break; \
    case 4: \
        asm volatile ( \
            _PRE_EFLAGS("0","3","2") \
            _op"l %1; " \
            _POST_EFLAGS("0","3","2") \
            : "=m" (_eflags), "=m" ((_dst).val), "=&r" (_tmp) \
            : "i" (EFLAGS_MASK), "m" (_eflags), "m" ((_dst).val) ); \
        break; \
    case 8: \
        __emulate_1op_8byte(_op, _dst, _eflags); \
        break; \
    } \
} while (0)

/* Emulate an instruction with quadword operands (x86/64 only). */
#if defined(__x86_64__)
#define __emulate_2op_8byte(_op, _src, _dst, _eflags, _qx, _qy) \
do{ asm volatile ( \
        _PRE_EFLAGS("0","4","2") \
        _op"q %"_qx"3,%1; " \
        _POST_EFLAGS("0","4","2") \
        : "=m" (_eflags), "=m" ((_dst).val), "=&r" (_tmp) \
        : _qy ((_src).val), "i" (EFLAGS_MASK), \
          "m" (_eflags), "m" ((_dst).val) ); \
} while (0)
#define __emulate_1op_8byte(_op, _dst, _eflags) \
do{ asm volatile ( \
        _PRE_EFLAGS("0","3","2") \
        _op"q %1; " \
        _POST_EFLAGS("0","3","2") \
        : "=m" (_eflags), "=m" ((_dst).val), "=&r" (_tmp) \
        : "i" (EFLAGS_MASK), "m" (_eflags), "m" ((_dst).val) ); \
} while (0)
#elif defined(__i386__)
#define __emulate_2op_8byte(_op, _src, _dst, _eflags, _qx, _qy)
#define __emulate_1op_8byte(_op, _dst, _eflags)
#endif /* __i386__ */

/* Fetch next part of the instruction being emulated. */
#define insn_fetch_bytes(_size) \
({ unsigned long _x, _eip = _regs.eip; \
   if ( !mode_64bit() ) _eip = (uint32_t)_eip; /* ignore upper dword */ \
   _regs.eip += (_size); /* real hardware doesn't truncate */ \
   generate_exception_if((uint8_t)(_regs.eip - ctxt->regs->eip) > 15, \
                         EXC_GP, 0); \
   rc = ops->insn_fetch(x86_seg_cs, _eip, &_x, (_size), ctxt); \
   if ( rc ) goto done; \
   _x; \
})
#define insn_fetch_type(_type) ((_type)insn_fetch_bytes(sizeof(_type)))

#define truncate_word(ea, byte_width) \
({ unsigned long __ea = (ea); \
   unsigned int _width = (byte_width); \
   ((_width == sizeof(unsigned long)) ? __ea : \
    (__ea & ((1UL << (_width << 3)) - 1))); \
})
#define truncate_ea(ea) truncate_word((ea), ad_bytes)

#define mode_64bit() (def_ad_bytes == 8)

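/*
 * Illustrative only; not part of the original source. On a 64-bit build,
 * truncate_word(0x123456789abcUL, 4) masks to the low 32 bits and yields
 * 0x56789abcUL, while a byte_width equal to sizeof(unsigned long) returns
 * the value unchanged. truncate_ea() applies the same mask using the
 * decoded address size, which is how 16- and 32-bit effective addresses
 * wrap inside the emulator. Note also that insn_fetch_bytes() raises #GP
 * once more than 15 bytes of the instruction have been consumed, matching
 * the architectural instruction-length limit.
 */
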
#define fail_if(p) \
do { \
    rc = (p) ? X86EMUL_UNHANDLEABLE : X86EMUL_OKAY; \
    if ( rc ) goto done; \
} while (0)

#define generate_exception_if(p, e, ec) \
({ if ( (p) ) { \
       fail_if(ops->inject_hw_exception == NULL); \
       rc = ops->inject_hw_exception(e, ec, ctxt) ? : X86EMUL_EXCEPTION; \
       goto done; \
   } \
})

/*
 * Given byte has even parity (even number of 1s)? SDM Vol. 1 Sec. 3.4.3.1,
 * "Status Flags": EFLAGS.PF reflects parity of least-sig. byte of result only.
 */
static int even_parity(uint8_t v)
{
    asm ( "test %b0,%b0; setp %b0" : "=a" (v) : "0" (v) );
    return v;
}

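/*
 * Worked example (illustrative, not part of the original source):
 * even_parity(0x03) returns 1 (two set bits, so PF would be set), whereas
 * even_parity(0x07) returns 0 (three set bits). Only the low byte of a
 * result ever feeds this helper, matching the architectural definition
 * of PF quoted above.
 */
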
/* Update address held in a register, based on addressing mode. */
#define _register_address_increment(reg, inc, byte_width) \
do { \
    int _inc = (inc); /* signed type ensures sign extension to long */ \
    unsigned int _width = (byte_width); \
    if ( _width == sizeof(unsigned long) ) \
        (reg) += _inc; \
    else if ( mode_64bit() ) \
        (reg) = ((reg) + _inc) & ((1UL << (_width << 3)) - 1); \
    else \
        (reg) = ((reg) & ~((1UL << (_width << 3)) - 1)) | \
                (((reg) + _inc) & ((1UL << (_width << 3)) - 1)); \
} while (0)
#define register_address_increment(reg, inc) \
    _register_address_increment((reg), (inc), ad_bytes)

#define sp_pre_dec(dec) ({ \
    _register_address_increment(_regs.esp, -(dec), ctxt->sp_size/8); \
    truncate_word(_regs.esp, ctxt->sp_size/8); \
})
#define sp_post_inc(inc) ({ \
    unsigned long __esp = truncate_word(_regs.esp, ctxt->sp_size/8); \
    _register_address_increment(_regs.esp, (inc), ctxt->sp_size/8); \
    __esp; \
})

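/*
 * Worked example (illustrative, not part of the original source): with a
 * 2-byte width outside 64-bit mode, a register holding 0x1234ffff
 * incremented by 1 becomes 0x12340000 - the low word wraps while the
 * untouched upper bits are preserved. In 64-bit mode a 4-byte width
 * clears the upper dword instead. sp_pre_dec(n) decrements %esp and
 * returns the new (truncated) top-of-stack address, as needed for a push;
 * sp_post_inc(n) returns the current address and then increments, as
 * needed for a pop.
 */
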
#define jmp_rel(rel) \
do { \
    int _rel = (int)(rel); \
    _regs.eip += _rel; \
    if ( !mode_64bit() ) \
        _regs.eip = ((op_bytes == 2) \
                     ? (uint16_t)_regs.eip : (uint32_t)_regs.eip); \
} while (0)

struct fpu_insn_ctxt {
    uint8_t insn_bytes;
    uint8_t exn_raised;
};

static void fpu_handle_exception(void *_fic, struct cpu_user_regs *regs)
{
    struct fpu_insn_ctxt *fic = _fic;
    fic->exn_raised = 1;
    regs->eip += fic->insn_bytes;
}

#define get_fpu(_type, _fic) \
do{ (_fic)->exn_raised = 0; \
    fail_if(ops->get_fpu == NULL); \
    rc = ops->get_fpu(fpu_handle_exception, _fic, _type, ctxt); \
    if ( rc ) goto done; \
} while (0)
#define put_fpu(_fic) \
do{ \
    if ( ops->put_fpu != NULL ) \
        ops->put_fpu(ctxt); \
    generate_exception_if((_fic)->exn_raised, EXC_MF, -1); \
} while (0)

#define emulate_fpu_insn(_op) \
do{ struct fpu_insn_ctxt fic; \
    get_fpu(X86EMUL_FPU_fpu, &fic); \
    asm volatile ( \
        "movb $2f-1f,%0 \n" \
        "1: " _op " \n" \
        "2: \n" \
        : "=m" (fic.insn_bytes) : : "memory" ); \
    put_fpu(&fic); \
} while (0)

#define emulate_fpu_insn_memdst(_op, _arg) \
do{ struct fpu_insn_ctxt fic; \
    get_fpu(X86EMUL_FPU_fpu, &fic); \
    asm volatile ( \
        "movb $2f-1f,%0 \n" \
        "1: " _op " %1 \n" \
        "2: \n" \
        : "=m" (fic.insn_bytes), "=m" (_arg) \
        : : "memory" ); \
    put_fpu(&fic); \
} while (0)

#define emulate_fpu_insn_stub(_bytes...) \
do{ uint8_t stub[] = { _bytes, 0xc3 }; \
    struct fpu_insn_ctxt fic = { .insn_bytes = sizeof(stub)-1 }; \
    get_fpu(X86EMUL_FPU_fpu, &fic); \
    (*(void(*)(void))stub)(); \
    put_fpu(&fic); \
} while (0)

static unsigned long __get_rep_prefix(
    struct cpu_user_regs *int_regs,
    struct cpu_user_regs *ext_regs,
    int ad_bytes)
{
    unsigned long ecx = ((ad_bytes == 2) ? (uint16_t)int_regs->ecx :
                         (ad_bytes == 4) ? (uint32_t)int_regs->ecx :
                         int_regs->ecx);

    /* Skip the instruction if no repetitions are required. */
    if ( ecx == 0 )
        ext_regs->eip = int_regs->eip;

    return ecx;
}

#define get_rep_prefix() ({ \
    unsigned long max_reps = 1; \
    if ( rep_prefix ) \
        max_reps = __get_rep_prefix(&_regs, ctxt->regs, ad_bytes); \
    if ( max_reps == 0 ) \
        goto done; \
    max_reps; \
})

static void __put_rep_prefix(
    struct cpu_user_regs *int_regs,
    struct cpu_user_regs *ext_regs,
    int ad_bytes,
    unsigned long reps_completed)
{
    unsigned long ecx = ((ad_bytes == 2) ? (uint16_t)int_regs->ecx :
                         (ad_bytes == 4) ? (uint32_t)int_regs->ecx :
                         int_regs->ecx);

    /* Reduce counter appropriately, and repeat instruction if non-zero. */
    ecx -= reps_completed;
    if ( ecx != 0 )
        int_regs->eip = ext_regs->eip;

    if ( ad_bytes == 2 )
        *(uint16_t *)&int_regs->ecx = ecx;
    else if ( ad_bytes == 4 )
        int_regs->ecx = (uint32_t)ecx;
    else
        int_regs->ecx = ecx;
}

#define put_rep_prefix(reps_completed) ({ \
    if ( rep_prefix ) \
        __put_rep_prefix(&_regs, ctxt->regs, ad_bytes, reps_completed); \
})

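/*
 * Illustrative flow, not part of the original source: for a REP-prefixed
 * string instruction with %ecx == 0, get_rep_prefix() commits the
 * post-instruction %eip via __get_rep_prefix() and bails out without
 * performing any iteration. If, say, 2 of 5 repetitions are emulated in
 * one pass, put_rep_prefix(2) leaves %ecx == 3 and resets the shadow
 * %eip back to the instruction start so it is re-entered to complete the
 * remaining iterations.
 */
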
/*
 * Unsigned multiplication with double-word result.
 * IN:  Multiplicand=m[0], Multiplier=m[1]
 * OUT: Return CF/OF (overflow status); Result=m[1]:m[0]
 */
static int mul_dbl(unsigned long m[2])
{
    int rc;
    asm ( "mul %4; seto %b2"
          : "=a" (m[0]), "=d" (m[1]), "=q" (rc)
          : "0" (m[0]), "1" (m[1]), "2" (0) );
    return rc;
}

/*
 * Signed multiplication with double-word result.
 * IN:  Multiplicand=m[0], Multiplier=m[1]
 * OUT: Return CF/OF (overflow status); Result=m[1]:m[0]
 */
static int imul_dbl(unsigned long m[2])
{
    int rc;
    asm ( "imul %4; seto %b2"
          : "=a" (m[0]), "=d" (m[1]), "=q" (rc)
          : "0" (m[0]), "1" (m[1]), "2" (0) );
    return rc;
}

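/*
 * Worked example (illustrative, not part of the original source): on a
 * 64-bit build, m[0] = 3 and m[1] = 5 give m[1]:m[0] = 0:15 and a return
 * value of 0. With m[0] = (1UL << 63) and m[1] = 2 the product needs 65
 * bits, so mul_dbl() returns 1 and the caller sets EFLG_OF|EFLG_CF, just
 * as hardware mul would.
 */
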
/*
 * Unsigned division of double-word dividend.
 * IN:  Dividend=u[1]:u[0], Divisor=v
 * OUT: Return 1: #DE
 *      Return 0: Quotient=u[0], Remainder=u[1]
 */
static int div_dbl(unsigned long u[2], unsigned long v)
{
    if ( (v == 0) || (u[1] >= v) )
        return 1;
    asm ( "div %4"
          : "=a" (u[0]), "=d" (u[1])
          : "0" (u[0]), "1" (u[1]), "r" (v) );
    return 0;
}

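/*
 * Worked example (illustrative, not part of the original source):
 * u = { 7, 0 } and v = 3 yield quotient u[0] = 2 and remainder u[1] = 1.
 * The up-front u[1] >= v check is exactly the condition under which a
 * hardware div would fault: the quotient could not fit in a single word.
 */
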
/*
 * Signed division of double-word dividend.
 * IN:  Dividend=u[1]:u[0], Divisor=v
 * OUT: Return 1: #DE
 *      Return 0: Quotient=u[0], Remainder=u[1]
 * NB. We don't use idiv directly as it's moderately hard to work out
 *     ahead of time whether it will #DE, which we cannot allow to happen.
 */
static int idiv_dbl(unsigned long u[2], unsigned long v)
{
    int negu = (long)u[1] < 0, negv = (long)v < 0;

    /* u = abs(u) */
    if ( negu )
    {
        u[1] = ~u[1];
        if ( (u[0] = -u[0]) == 0 )
            u[1]++;
    }

    /* abs(u) / abs(v) */
    if ( div_dbl(u, negv ? -v : v) )
        return 1;

    /* Remainder has same sign as dividend. It cannot overflow. */
    if ( negu )
        u[1] = -u[1];

    /* Quotient is overflowed if sign bit is set. */
    if ( negu ^ negv )
    {
        if ( (long)u[0] >= 0 )
            u[0] = -u[0];
        else if ( (u[0] << 1) != 0 ) /* == 0x80...0 is okay */
            return 1;
    }
    else if ( (long)u[0] < 0 )
        return 1;

    return 0;
}

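/*
 * Worked example (illustrative, not part of the original source): dividing
 * -7 by 3, u = { -7, ~0UL } is first negated to { 7, 0 }; div_dbl()
 * produces quotient 2, remainder 1; the remainder takes the dividend's
 * sign and the quotient is negated because the operand signs differ,
 * giving quotient -2, remainder -1 - the truncation-towards-zero result
 * that x86 idiv defines.
 */
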
static int
test_cc(
    unsigned int condition, unsigned int flags)
{
    int rc = 0;

    switch ( (condition & 15) >> 1 )
    {
    case 0: /* o */
        rc |= (flags & EFLG_OF);
        break;
    case 1: /* b/c/nae */
        rc |= (flags & EFLG_CF);
        break;
    case 2: /* z/e */
        rc |= (flags & EFLG_ZF);
        break;
    case 3: /* be/na */
        rc |= (flags & (EFLG_CF|EFLG_ZF));
        break;
    case 4: /* s */
        rc |= (flags & EFLG_SF);
        break;
    case 5: /* p/pe */
        rc |= (flags & EFLG_PF);
        break;
    case 7: /* le/ng */
        rc |= (flags & EFLG_ZF);
        /* fall through */
    case 6: /* l/nge */
        rc |= (!(flags & EFLG_SF) != !(flags & EFLG_OF));
        break;
    }

    /* Odd condition identifiers (lsb == 1) have inverted sense. */
    return (!!rc ^ (condition & 1));
}

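/*
 * Illustrative only; not part of the original source. Condition code 0x4
 * (jz/je) selects case 2 above and returns 1 exactly when EFLG_ZF is set;
 * its odd twin 0x5 (jnz/jne) inverts that result. Code 0xe (jle/jng)
 * reaches case 7 and so reports ZF set *or* SF != OF, with code 0xf (jg)
 * as the inverse.
 */
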
static int
get_cpl(
    struct x86_emulate_ctxt *ctxt,
    struct x86_emulate_ops  *ops)
{
    struct segment_register reg;

    if ( ctxt->regs->eflags & EFLG_VM )
        return 3;

    if ( (ops->read_segment == NULL) ||
         ops->read_segment(x86_seg_ss, &reg, ctxt) )
        return -1;

    return reg.attr.fields.dpl;
}

static int
_mode_iopl(
    struct x86_emulate_ctxt *ctxt,
    struct x86_emulate_ops  *ops)
{
    int cpl = get_cpl(ctxt, ops);
    if ( cpl == -1 )
        return -1;
    return (cpl <= ((ctxt->regs->eflags >> 12) & 3));
}

#define mode_ring0() ({ \
    int _cpl = get_cpl(ctxt, ops); \
    fail_if(_cpl < 0); \
    (_cpl == 0); \
})
#define mode_iopl() ({ \
    int _iopl = _mode_iopl(ctxt, ops); \
    fail_if(_iopl < 0); \
    _iopl; \
})

static int ioport_access_check(
    unsigned int first_port,
    unsigned int bytes,
    struct x86_emulate_ctxt *ctxt,
    struct x86_emulate_ops *ops)
{
    unsigned long iobmp;
    struct segment_register tr;
    int rc = X86EMUL_OKAY;

    if ( !(ctxt->regs->eflags & EFLG_VM) && mode_iopl() )
        return X86EMUL_OKAY;

    fail_if(ops->read_segment == NULL);
    if ( (rc = ops->read_segment(x86_seg_tr, &tr, ctxt)) != 0 )
        return rc;

    /* Ensure that the TSS is valid and has an io-bitmap-offset field. */
    if ( !tr.attr.fields.p ||
         ((tr.attr.fields.type & 0xd) != 0x9) ||
         (tr.limit < 0x67) )
        goto raise_exception;

    if ( (rc = ops->read(x86_seg_none, tr.base + 0x66, &iobmp, 2, ctxt)) )
        return rc;

    /* Ensure TSS includes two bytes including byte containing first port. */
    iobmp += first_port / 8;
    if ( tr.limit <= iobmp )
        goto raise_exception;

    if ( (rc = ops->read(x86_seg_none, tr.base + iobmp, &iobmp, 2, ctxt)) )
        return rc;
    if ( (iobmp & (((1<<bytes)-1) << (first_port&7))) != 0 )
        goto raise_exception;

 done:
    return rc;

 raise_exception:
    fail_if(ops->inject_hw_exception == NULL);
    return ops->inject_hw_exception(EXC_GP, 0, ctxt) ? : X86EMUL_EXCEPTION;
}

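/*
 * Worked example (illustrative, not part of the original source): for a
 * one-byte access to port 0x3f8, the bitmap offset is read from TSS+0x66,
 * 0x3f8/8 = 0x7f is added to index the right bitmap byte, and the mask
 * ((1<<1)-1) << (0x3f8 & 7) == 0x01 tests the port's permission bit; a
 * set bit - or a bitmap byte beyond tr.limit - raises #GP(0).
 */
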
static int
in_realmode(
    struct x86_emulate_ctxt *ctxt,
    struct x86_emulate_ops  *ops)
{
    unsigned long cr0;
    int rc;

    if ( ops->read_cr == NULL )
        return 0;

    rc = ops->read_cr(0, &cr0, ctxt);
    return (!rc && !(cr0 & CR0_PE));
}

static int
in_protmode(
    struct x86_emulate_ctxt *ctxt,
    struct x86_emulate_ops  *ops)
{
    return !(in_realmode(ctxt, ops) || (ctxt->regs->eflags & EFLG_VM));
}

static int
realmode_load_seg(
    enum x86_segment seg,
    uint16_t sel,
    struct x86_emulate_ctxt *ctxt,
    struct x86_emulate_ops *ops)
{
    struct segment_register reg;
    int rc;

    if ( (rc = ops->read_segment(seg, &reg, ctxt)) != 0 )
        return rc;

    reg.sel  = sel;
    reg.base = (uint32_t)sel << 4;

    return ops->write_segment(seg, &reg, ctxt);
}

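/*
 * Worked example (illustrative, not part of the original source): in real
 * mode, loading %ds with selector 0x1234 simply sets its base to
 * 0x1234 << 4 = 0x12340; no descriptor table is consulted and no fault
 * can be generated.
 */
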
static int
protmode_load_seg(
    enum x86_segment seg,
    uint16_t sel,
    struct x86_emulate_ctxt *ctxt,
    struct x86_emulate_ops *ops)
{
    struct segment_register desctab, ss, segr;
    struct { uint32_t a, b; } desc;
    unsigned long val;
    uint8_t dpl, rpl, cpl;
    uint32_t new_desc_b;
    int rc, fault_type = EXC_TS;

    /* NULL selector? */
    if ( (sel & 0xfffc) == 0 )
    {
        if ( (seg == x86_seg_cs) || (seg == x86_seg_ss) )
            goto raise_exn;
        memset(&segr, 0, sizeof(segr));
        return ops->write_segment(seg, &segr, ctxt);
    }

    /* LDT descriptor must be in the GDT. */
    if ( (seg == x86_seg_ldtr) && (sel & 4) )
        goto raise_exn;

    if ( (rc = ops->read_segment(x86_seg_ss, &ss, ctxt)) ||
         (rc = ops->read_segment((sel & 4) ? x86_seg_ldtr : x86_seg_gdtr,
                                 &desctab, ctxt)) )
        return rc;

    /* Check against descriptor table limit. */
    if ( ((sel & 0xfff8) + 7) > desctab.limit )
        goto raise_exn;

    do {
        if ( (rc = ops->read(x86_seg_none, desctab.base + (sel & 0xfff8),
                             &val, 4, ctxt)) )
            return rc;
        desc.a = val;
        if ( (rc = ops->read(x86_seg_none, desctab.base + (sel & 0xfff8) + 4,
                             &val, 4, ctxt)) )
            return rc;
        desc.b = val;

        /* Segment present in memory? */
        if ( !(desc.b & (1u<<15)) )
        {
            fault_type = EXC_NP;
            goto raise_exn;
        }

        /* LDT descriptor is a system segment. All others are code/data. */
        if ( (desc.b & (1u<<12)) == ((seg == x86_seg_ldtr) << 12) )
            goto raise_exn;

        dpl = (desc.b >> 13) & 3;
        rpl = sel & 3;
        cpl = ss.attr.fields.dpl;

        switch ( seg )
        {
        case x86_seg_cs:
            /* Code segment? */
            if ( !(desc.b & (1u<<11)) )
                goto raise_exn;
            /* Non-conforming segment: check DPL against RPL. */
            if ( ((desc.b & (6u<<9)) != (6u<<9)) && (dpl != rpl) )
                goto raise_exn;
            break;
        case x86_seg_ss:
            /* Writable data segment? */
            if ( (desc.b & (5u<<9)) != (1u<<9) )
                goto raise_exn;
            if ( (dpl != cpl) || (dpl != rpl) )
                goto raise_exn;
            break;
        case x86_seg_ldtr:
            /* LDT system segment? */
            if ( (desc.b & (15u<<8)) != (2u<<8) )
                goto raise_exn;
            goto skip_accessed_flag;
        default:
            /* Readable code or data segment? */
            if ( (desc.b & (5u<<9)) == (4u<<9) )
                goto raise_exn;
            /* Non-conforming segment: check DPL against RPL and CPL. */
            if ( ((desc.b & (6u<<9)) != (6u<<9)) && ((dpl < cpl) || (dpl < rpl)) )
                goto raise_exn;
            break;
        }

        /* Ensure Accessed flag is set. */
        new_desc_b = desc.b | 0x100;
        rc = ((desc.b & 0x100) ? X86EMUL_OKAY :
              ops->cmpxchg(
                  x86_seg_none, desctab.base + (sel & 0xfff8) + 4,
                  &desc.b, &new_desc_b, 4, ctxt));
    } while ( rc == X86EMUL_CMPXCHG_FAILED );

    if ( rc )
        return rc;

    /* Force the Accessed flag in our local copy. */
    desc.b |= 0x100;

 skip_accessed_flag:
    segr.base = (((desc.b <<  0) & 0xff000000u) |
                 ((desc.b << 16) & 0x00ff0000u) |
                 ((desc.a >> 16) & 0x0000ffffu));
    segr.attr.bytes = (((desc.b >>  8) & 0x00ffu) |
                       ((desc.b >> 12) & 0x0f00u));
    segr.limit = (desc.b & 0x000f0000u) | (desc.a & 0x0000ffffu);
    if ( segr.attr.fields.g )
        segr.limit = (segr.limit << 12) | 0xfffu;
    segr.sel = sel;
    return ops->write_segment(seg, &segr, ctxt);

 raise_exn:
    if ( ops->inject_hw_exception == NULL )
        return X86EMUL_UNHANDLEABLE;
    if ( (rc = ops->inject_hw_exception(fault_type, sel & 0xfffc, ctxt)) )
        return rc;
    return X86EMUL_EXCEPTION;
}

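/*
 * Worked example (illustrative, not part of the original source): for a
 * flat 32-bit data descriptor with desc.a = 0x0000ffff and
 * desc.b = 0x00cf9300, the reconstruction above gives base = 0,
 * attr.bytes = 0xc93 (present, DPL 0, writable data, G and D/B set) and
 * limit = 0xfffff, which the granularity test then scales to 0xffffffff.
 */
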
static int
load_seg(
    enum x86_segment seg,
    uint16_t sel,
    struct x86_emulate_ctxt *ctxt,
    struct x86_emulate_ops *ops)
{
    if ( (ops->read_segment == NULL) ||
         (ops->write_segment == NULL) )
        return X86EMUL_UNHANDLEABLE;

    if ( in_protmode(ctxt, ops) )
        return protmode_load_seg(seg, sel, ctxt, ops);

    return realmode_load_seg(seg, sel, ctxt, ops);
}

void *
decode_register(
    uint8_t modrm_reg, struct cpu_user_regs *regs, int highbyte_regs)
{
    void *p;

    switch ( modrm_reg )
    {
    case  0: p = &regs->eax; break;
    case  1: p = &regs->ecx; break;
    case  2: p = &regs->edx; break;
    case  3: p = &regs->ebx; break;
    case  4: p = (highbyte_regs ?
                  ((unsigned char *)&regs->eax + 1) :
                  (unsigned char *)&regs->esp); break;
    case  5: p = (highbyte_regs ?
                  ((unsigned char *)&regs->ecx + 1) :
                  (unsigned char *)&regs->ebp); break;
    case  6: p = (highbyte_regs ?
                  ((unsigned char *)&regs->edx + 1) :
                  (unsigned char *)&regs->esi); break;
    case  7: p = (highbyte_regs ?
                  ((unsigned char *)&regs->ebx + 1) :
                  (unsigned char *)&regs->edi); break;
#if defined(__x86_64__)
    case  8: p = &regs->r8;  break;
    case  9: p = &regs->r9;  break;
    case 10: p = &regs->r10; break;
    case 11: p = &regs->r11; break;
    case 12: p = &regs->r12; break;
    case 13: p = &regs->r13; break;
    case 14: p = &regs->r14; break;
    case 15: p = &regs->r15; break;
#endif
    default: p = NULL; break;
    }

    return p;
}

#define decode_segment_failed x86_seg_tr
enum x86_segment
decode_segment(
    uint8_t modrm_reg)
{
    switch ( modrm_reg )
    {
    case 0: return x86_seg_es;
    case 1: return x86_seg_cs;
    case 2: return x86_seg_ss;
    case 3: return x86_seg_ds;
    case 4: return x86_seg_fs;
    case 5: return x86_seg_gs;
    default: break;
    }
    return decode_segment_failed;
}

int
x86_emulate(
    struct x86_emulate_ctxt *ctxt,
    struct x86_emulate_ops  *ops)
{
    /* Shadow copy of register state. Committed on successful emulation. */
    struct cpu_user_regs _regs = *ctxt->regs;

    uint8_t b, d, sib, sib_index, sib_base, twobyte = 0, rex_prefix = 0;
    uint8_t modrm = 0, modrm_mod = 0, modrm_reg = 0, modrm_rm = 0;
    unsigned int op_bytes, def_op_bytes, ad_bytes, def_ad_bytes;
#define REPE_PREFIX  1
#define REPNE_PREFIX 2
    unsigned int lock_prefix = 0, rep_prefix = 0;
    int override_seg = -1, rc = X86EMUL_OKAY;
    struct operand src, dst;

    /* Data operand effective address (usually computed from ModRM). */
    struct operand ea;

    /* Default is a memory operand relative to segment DS. */
    ea.type    = OP_MEM;
    ea.mem.seg = x86_seg_ds;
    ea.mem.off = 0;

    ctxt->retire.byte = 0;

    op_bytes = def_op_bytes = ad_bytes = def_ad_bytes = ctxt->addr_size/8;
    if ( op_bytes == 8 )
    {
        op_bytes = def_op_bytes = 4;
#ifndef __x86_64__
        return X86EMUL_UNHANDLEABLE;
#endif
    }

    /* Prefix bytes. */
    for ( ; ; )
    {
        switch ( b = insn_fetch_type(uint8_t) )
        {
        case 0x66: /* operand-size override */
            op_bytes = def_op_bytes ^ 6;
            break;
        case 0x67: /* address-size override */
            ad_bytes = def_ad_bytes ^ (mode_64bit() ? 12 : 6);
            break;
        case 0x2e: /* CS override */
            override_seg = x86_seg_cs;
            break;
        case 0x3e: /* DS override */
            override_seg = x86_seg_ds;
            break;
        case 0x26: /* ES override */
            override_seg = x86_seg_es;
            break;
        case 0x64: /* FS override */
            override_seg = x86_seg_fs;
            break;
        case 0x65: /* GS override */
            override_seg = x86_seg_gs;
            break;
        case 0x36: /* SS override */
            override_seg = x86_seg_ss;
            break;
        case 0xf0: /* LOCK */
            lock_prefix = 1;
            break;
        case 0xf2: /* REPNE/REPNZ */
            rep_prefix = REPNE_PREFIX;
            break;
        case 0xf3: /* REP/REPE/REPZ */
            rep_prefix = REPE_PREFIX;
            break;
        case 0x40 ... 0x4f: /* REX */
            if ( !mode_64bit() )
                goto done_prefixes;
            rex_prefix = b;
            continue;
        default:
            goto done_prefixes;
        }

        /* Any legacy prefix after a REX prefix nullifies its effect. */
        rex_prefix = 0;
    }

 done_prefixes:

    if ( rex_prefix & 8 ) /* REX.W */
        op_bytes = 8;

    /* Opcode byte(s). */
    d = opcode_table[b];
    if ( d == 0 )
    {
        /* Two-byte opcode? */
        if ( b == 0x0f )
        {
            twobyte = 1;
            b = insn_fetch_type(uint8_t);
            d = twobyte_table[b];
        }

        /* Unrecognised? */
        if ( d == 0 )
            goto cannot_emulate;
    }

    /* Lock prefix is allowed only on RMW instructions. */
    generate_exception_if((d & Mov) && lock_prefix, EXC_GP, 0);

    /* ModRM and SIB bytes. */
    if ( d & ModRM )
    {
        modrm = insn_fetch_type(uint8_t);
        modrm_mod = (modrm & 0xc0) >> 6;
        modrm_reg = ((rex_prefix & 4) << 1) | ((modrm & 0x38) >> 3);
        modrm_rm  = modrm & 0x07;

        if ( modrm_mod == 3 )
        {
            modrm_rm |= (rex_prefix & 1) << 3;
            ea.type = OP_REG;
            ea.reg  = decode_register(
                modrm_rm, &_regs, (d & ByteOp) && (rex_prefix == 0));
        }
        else if ( ad_bytes == 2 )
        {
            /* 16-bit ModR/M decode. */
            switch ( modrm_rm )
            {
            case 0:
                ea.mem.off = _regs.ebx + _regs.esi;
                break;
            case 1:
                ea.mem.off = _regs.ebx + _regs.edi;
                break;
            case 2:
                ea.mem.seg = x86_seg_ss;
                ea.mem.off = _regs.ebp + _regs.esi;
                break;
            case 3:
                ea.mem.seg = x86_seg_ss;
                ea.mem.off = _regs.ebp + _regs.edi;
                break;
            case 4:
                ea.mem.off = _regs.esi;
                break;
            case 5:
                ea.mem.off = _regs.edi;
                break;
            case 6:
                if ( modrm_mod == 0 )
                    break;
                ea.mem.seg = x86_seg_ss;
                ea.mem.off = _regs.ebp;
                break;
            case 7:
                ea.mem.off = _regs.ebx;
                break;
            }
            switch ( modrm_mod )
            {
            case 0:
                if ( modrm_rm == 6 )
                    ea.mem.off = insn_fetch_type(int16_t);
                break;
            case 1:
                ea.mem.off += insn_fetch_type(int8_t);
                break;
            case 2:
                ea.mem.off += insn_fetch_type(int16_t);
                break;
            }
            ea.mem.off = truncate_ea(ea.mem.off);
        }
        else
        {
            /* 32/64-bit ModR/M decode. */
            if ( modrm_rm == 4 )
            {
                sib = insn_fetch_type(uint8_t);
                sib_index = ((sib >> 3) & 7) | ((rex_prefix << 2) & 8);
                sib_base  = (sib & 7) | ((rex_prefix << 3) & 8);
                if ( sib_index != 4 )
                    ea.mem.off = *(long*)decode_register(sib_index, &_regs, 0);
                ea.mem.off <<= (sib >> 6) & 3;
                if ( (modrm_mod == 0) && ((sib_base & 7) == 5) )
                    ea.mem.off += insn_fetch_type(int32_t);
                else if ( sib_base == 4 )
                {
                    ea.mem.seg  = x86_seg_ss;
                    ea.mem.off += _regs.esp;
                    if ( !twobyte && (b == 0x8f) )
                        /* POP <rm> computes its EA post increment. */
                        ea.mem.off += ((mode_64bit() && (op_bytes == 4))
                                       ? 8 : op_bytes);
                }
                else if ( sib_base == 5 )
                {
                    ea.mem.seg  = x86_seg_ss;
                    ea.mem.off += _regs.ebp;
                }
                else
                    ea.mem.off += *(long*)decode_register(sib_base, &_regs, 0);
            }
            else
            {
                modrm_rm |= (rex_prefix & 1) << 3;
                ea.mem.off = *(long *)decode_register(modrm_rm, &_regs, 0);
                if ( (modrm_rm == 5) && (modrm_mod != 0) )
                    ea.mem.seg = x86_seg_ss;
            }
            switch ( modrm_mod )
            {
            case 0:
                if ( (modrm_rm & 7) != 5 )
                    break;
                ea.mem.off = insn_fetch_type(int32_t);
                if ( !mode_64bit() )
                    break;
                /* Relative to RIP of next instruction. Argh! */
                ea.mem.off += _regs.eip;
                if ( (d & SrcMask) == SrcImm )
                    ea.mem.off += (d & ByteOp) ? 1 :
                        ((op_bytes == 8) ? 4 : op_bytes);
                else if ( (d & SrcMask) == SrcImmByte )
                    ea.mem.off += 1;
                else if ( !twobyte && ((b & 0xfe) == 0xf6) &&
                          ((modrm_reg & 7) <= 1) )
                    /* Special case in Grp3: test has immediate operand. */
                    ea.mem.off += (d & ByteOp) ? 1
                        : ((op_bytes == 8) ? 4 : op_bytes);
                else if ( twobyte && ((b & 0xf7) == 0xa4) )
                    /* SHLD/SHRD with immediate byte third operand. */
                    ea.mem.off++;
                break;
            case 1:
                ea.mem.off += insn_fetch_type(int8_t);
                break;
            case 2:
                ea.mem.off += insn_fetch_type(int32_t);
                break;
            }
            ea.mem.off = truncate_ea(ea.mem.off);
        }
    }

    if ( override_seg != -1 )
        ea.mem.seg = override_seg;

    /* Special instructions do their own operand decoding. */
    if ( (d & DstMask) == ImplicitOps )
        goto special_insn;

    /* Decode and fetch the source operand: register, memory or immediate. */
    switch ( d & SrcMask )
    {
    case SrcNone:
        break;
    case SrcReg:
        src.type = OP_REG;
        if ( d & ByteOp )
        {
            src.reg = decode_register(modrm_reg, &_regs, (rex_prefix == 0));
            src.val = *(uint8_t *)src.reg;
            src.bytes = 1;
        }
        else
        {
            src.reg = decode_register(modrm_reg, &_regs, 0);
            switch ( (src.bytes = op_bytes) )
            {
            case 2: src.val = *(uint16_t *)src.reg; break;
            case 4: src.val = *(uint32_t *)src.reg; break;
            case 8: src.val = *(uint64_t *)src.reg; break;
            }
        }
        break;
    case SrcMem16:
        ea.bytes = 2;
        goto srcmem_common;
    case SrcMem:
        ea.bytes = (d & ByteOp) ? 1 : op_bytes;
    srcmem_common:
        src = ea;
        if ( src.type == OP_REG )
        {
            switch ( src.bytes )
            {
            case 1: src.val = *(uint8_t  *)src.reg; break;
            case 2: src.val = *(uint16_t *)src.reg; break;
            case 4: src.val = *(uint32_t *)src.reg; break;
            case 8: src.val = *(uint64_t *)src.reg; break;
            }
        }
        else if ( (rc = ops->read(src.mem.seg, src.mem.off,
                                  &src.val, src.bytes, ctxt)) )
            goto done;
        break;
    case SrcImm:
        src.type  = OP_IMM;
        src.bytes = (d & ByteOp) ? 1 : op_bytes;
        if ( src.bytes == 8 ) src.bytes = 4;
        /* NB. Immediates are sign-extended as necessary. */
        switch ( src.bytes )
        {
        case 1: src.val = insn_fetch_type(int8_t);  break;
        case 2: src.val = insn_fetch_type(int16_t); break;
        case 4: src.val = insn_fetch_type(int32_t); break;
        }
        break;
    case SrcImmByte:
        src.type  = OP_IMM;
        src.bytes = 1;
        src.val   = insn_fetch_type(int8_t);
        break;
    }

    /* Decode and fetch the destination operand: register or memory. */
    switch ( d & DstMask )
    {
    case DstReg:
        dst.type = OP_REG;
        if ( d & ByteOp )
        {
            dst.reg = decode_register(modrm_reg, &_regs, (rex_prefix == 0));
            dst.val = *(uint8_t *)dst.reg;
            dst.bytes = 1;
        }
        else
        {
            dst.reg = decode_register(modrm_reg, &_regs, 0);
            switch ( (dst.bytes = op_bytes) )
            {
            case 2: dst.val = *(uint16_t *)dst.reg; break;
            case 4: dst.val = *(uint32_t *)dst.reg; break;
            case 8: dst.val = *(uint64_t *)dst.reg; break;
            }
        }
        break;
    case DstBitBase:
        if ( ((d & SrcMask) == SrcImmByte) || (ea.type == OP_REG) )
        {
            src.val &= (op_bytes << 3) - 1;
        }
        else
        {
            /*
             * EA       += BitOffset DIV op_bytes*8
             * BitOffset = BitOffset MOD op_bytes*8
             * DIV truncates towards negative infinity.
             * MOD always produces a positive result.
             */
            if ( op_bytes == 2 )
                src.val = (int16_t)src.val;
            else if ( op_bytes == 4 )
                src.val = (int32_t)src.val;
            if ( (long)src.val < 0 )
            {
                unsigned long byte_offset;
                byte_offset = op_bytes + (((-src.val-1) >> 3) & ~(op_bytes-1));
                ea.mem.off -= byte_offset;
                src.val = (byte_offset << 3) + src.val;
            }
            else
            {
                ea.mem.off += (src.val >> 3) & ~(op_bytes - 1);
                src.val &= (op_bytes << 3) - 1;
            }
        }
        /* Becomes a normal DstMem operation from here on. */
        d = (d & ~DstMask) | DstMem;
    case DstMem:
        ea.bytes = (d & ByteOp) ? 1 : op_bytes;
        dst = ea;
        if ( dst.type == OP_REG )
        {
            switch ( dst.bytes )
            {
            case 1: dst.val = *(uint8_t  *)dst.reg; break;
            case 2: dst.val = *(uint16_t *)dst.reg; break;
            case 4: dst.val = *(uint32_t *)dst.reg; break;
            case 8: dst.val = *(uint64_t *)dst.reg; break;
            }
        }
        else if ( !(d & Mov) ) /* optimisation - avoid slow emulated read */
        {
            if ( (rc = ops->read(dst.mem.seg, dst.mem.off,
                                 &dst.val, dst.bytes, ctxt)) )
                goto done;
            dst.orig_val = dst.val;
        }
        break;
    }

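    /*
     * Worked example for the DstBitBase normalisation above (illustrative,
     * not part of the original source): for a 32-bit "bt %reg,mem" with bit
     * offset 37, the EA gains (37 >> 3) & ~3 = 4 bytes and the offset
     * becomes 37 & 31 = 5, i.e. bit 5 of the next dword. A negative offset
     * of -1 gives byte_offset = 4, so the EA moves back one dword and the
     * offset becomes 31.
     */
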
    /* LOCK prefix allowed only on instructions with memory destination. */
    generate_exception_if(lock_prefix && (dst.type != OP_MEM), EXC_GP, 0);

    if ( twobyte )
        goto twobyte_insn;

    switch ( b )
    {
    case 0x04 ... 0x05: /* add imm,%%eax */
        dst.reg = (unsigned long *)&_regs.eax;
        dst.val = _regs.eax;
    case 0x00 ... 0x03: add: /* add */
        emulate_2op_SrcV("add", src, dst, _regs.eflags);
        break;

    case 0x0c ... 0x0d: /* or imm,%%eax */
        dst.reg = (unsigned long *)&_regs.eax;
        dst.val = _regs.eax;
    case 0x08 ... 0x0b: or: /* or */
        emulate_2op_SrcV("or", src, dst, _regs.eflags);
        break;

    case 0x14 ... 0x15: /* adc imm,%%eax */
        dst.reg = (unsigned long *)&_regs.eax;
        dst.val = _regs.eax;
    case 0x10 ... 0x13: adc: /* adc */
        emulate_2op_SrcV("adc", src, dst, _regs.eflags);
        break;

    case 0x1c ... 0x1d: /* sbb imm,%%eax */
        dst.reg = (unsigned long *)&_regs.eax;
        dst.val = _regs.eax;
    case 0x18 ... 0x1b: sbb: /* sbb */
        emulate_2op_SrcV("sbb", src, dst, _regs.eflags);
        break;

    case 0x24 ... 0x25: /* and imm,%%eax */
        dst.reg = (unsigned long *)&_regs.eax;
        dst.val = _regs.eax;
    case 0x20 ... 0x23: and: /* and */
        emulate_2op_SrcV("and", src, dst, _regs.eflags);
        break;

    case 0x2c ... 0x2d: /* sub imm,%%eax */
        dst.reg = (unsigned long *)&_regs.eax;
        dst.val = _regs.eax;
    case 0x28 ... 0x2b: sub: /* sub */
        emulate_2op_SrcV("sub", src, dst, _regs.eflags);
        break;

    case 0x34 ... 0x35: /* xor imm,%%eax */
        dst.reg = (unsigned long *)&_regs.eax;
        dst.val = _regs.eax;
    case 0x30 ... 0x33: xor: /* xor */
        emulate_2op_SrcV("xor", src, dst, _regs.eflags);
        break;

    case 0x3c ... 0x3d: /* cmp imm,%%eax */
        dst.reg = (unsigned long *)&_regs.eax;
        dst.val = _regs.eax;
    case 0x38 ... 0x3b: cmp: /* cmp */
        emulate_2op_SrcV("cmp", src, dst, _regs.eflags);
        break;

    case 0x62: /* bound */ {
        unsigned long src_val2;
        int lb, ub, idx;
        generate_exception_if(mode_64bit() || (src.type != OP_MEM),
                              EXC_UD, -1);
        if ( (rc = ops->read(src.mem.seg, src.mem.off + op_bytes,
                             &src_val2, op_bytes, ctxt)) )
            goto done;
        ub  = (op_bytes == 2) ? (int16_t)src_val2 : (int32_t)src_val2;
        lb  = (op_bytes == 2) ? (int16_t)src.val  : (int32_t)src.val;
        idx = (op_bytes == 2) ? (int16_t)dst.val  : (int32_t)dst.val;
        generate_exception_if((idx < lb) || (idx > ub), EXC_BR, -1);
        dst.type = OP_NONE;
        break;
    }

    case 0x63: /* movsxd (x86/64) / arpl (x86/32) */
        if ( mode_64bit() )
        {
            /* movsxd */
            if ( src.type == OP_REG )
                src.val = *(int32_t *)src.reg;
            else if ( (rc = ops->read(src.mem.seg, src.mem.off,
                                      &src.val, 4, ctxt)) )
                goto done;
            dst.val = (int32_t)src.val;
        }
        else
        {
            /* arpl */
            uint16_t src_val = dst.val;
            dst = src;
            _regs.eflags &= ~EFLG_ZF;
            _regs.eflags |= ((src_val & 3) > (dst.val & 3)) ? EFLG_ZF : 0;
            if ( _regs.eflags & EFLG_ZF )
                dst.val  = (dst.val & ~3) | (src_val & 3);
            else
                dst.type = OP_NONE;
            generate_exception_if(!in_protmode(ctxt, ops), EXC_UD, -1);
        }
        break;

    case 0x69: /* imul imm16/32 */
    case 0x6b: /* imul imm8 */ {
        unsigned long src1; /* ModR/M source operand */
        if ( ea.type == OP_REG )
            src1 = *ea.reg;
        else if ( (rc = ops->read(ea.mem.seg, ea.mem.off,
                                  &src1, op_bytes, ctxt)) )
            goto done;
        _regs.eflags &= ~(EFLG_OF|EFLG_CF);
        switch ( dst.bytes )
        {
        case 2:
            dst.val = ((uint32_t)(int16_t)src.val *
                       (uint32_t)(int16_t)src1);
            if ( (int16_t)dst.val != (uint32_t)dst.val )
                _regs.eflags |= EFLG_OF|EFLG_CF;
            break;
#ifdef __x86_64__
        case 4:
            dst.val = ((uint64_t)(int32_t)src.val *
                       (uint64_t)(int32_t)src1);
            if ( (int32_t)dst.val != dst.val )
                _regs.eflags |= EFLG_OF|EFLG_CF;
            break;
#endif
        default: {
            unsigned long m[2] = { src.val, src1 };
            if ( imul_dbl(m) )
                _regs.eflags |= EFLG_OF|EFLG_CF;
            dst.val = m[0];
            break;
        }
        }
        break;
    }

    case 0x82: /* Grp1 (x86/32 only) */
        generate_exception_if(mode_64bit(), EXC_UD, -1);
    case 0x80: case 0x81: case 0x83: /* Grp1 */
        switch ( modrm_reg & 7 )
        {
        case 0: goto add;
        case 1: goto or;
        case 2: goto adc;
        case 3: goto sbb;
        case 4: goto and;
        case 5: goto sub;
        case 6: goto xor;
        case 7: goto cmp;
        }
        break;

    case 0xa8 ... 0xa9: /* test imm,%%eax */
        dst.reg = (unsigned long *)&_regs.eax;
        dst.val = _regs.eax;
    case 0x84 ... 0x85: test: /* test */
        emulate_2op_SrcV("test", src, dst, _regs.eflags);
        break;

    case 0x86 ... 0x87: xchg: /* xchg */
        /* Write back the register source. */
        switch ( dst.bytes )
        {
        case 1: *(uint8_t  *)src.reg = (uint8_t)dst.val; break;
        case 2: *(uint16_t *)src.reg = (uint16_t)dst.val; break;
        case 4: *src.reg = (uint32_t)dst.val; break; /* 64b reg: zero-extend */
        case 8: *src.reg = dst.val; break;
        }
        /* Write back the memory destination with implicit LOCK prefix. */
        dst.val = src.val;
        lock_prefix = 1;
        break;

    case 0xc6 ... 0xc7: /* mov (sole member of Grp11) */
        generate_exception_if((modrm_reg & 7) != 0, EXC_UD, -1);
    case 0x88 ... 0x8b: /* mov */
        dst.val = src.val;
        break;

    case 0x8c: /* mov Sreg,r/m */ {
        struct segment_register reg;
        enum x86_segment seg = decode_segment(modrm_reg);
        generate_exception_if(seg == decode_segment_failed, EXC_UD, -1);
        fail_if(ops->read_segment == NULL);
        if ( (rc = ops->read_segment(seg, &reg, ctxt)) != 0 )
            goto done;
        dst.val = reg.sel;
        if ( dst.type == OP_MEM )
            dst.bytes = 2;
        break;
    }

    case 0x8e: /* mov r/m,Sreg */ {
        enum x86_segment seg = decode_segment(modrm_reg);
        generate_exception_if(seg == decode_segment_failed, EXC_UD, -1);
        if ( (rc = load_seg(seg, (uint16_t)src.val, ctxt, ops)) != 0 )
            goto done;
        if ( seg == x86_seg_ss )
            ctxt->retire.flags.mov_ss = 1;
        dst.type = OP_NONE;
        break;
    }

    case 0x8d: /* lea */
        dst.val = ea.mem.off;
        break;

    case 0x8f: /* pop (sole member of Grp1a) */
        generate_exception_if((modrm_reg & 7) != 0, EXC_UD, -1);
        /* 64-bit mode: POP defaults to a 64-bit operand. */
        if ( mode_64bit() && (dst.bytes == 4) )
            dst.bytes = 8;
        if ( (rc = ops->read(x86_seg_ss, sp_post_inc(dst.bytes),
                             &dst.val, dst.bytes, ctxt)) != 0 )
            goto done;
        break;

    case 0xb0 ... 0xb7: /* mov imm8,r8 */
        dst.reg = decode_register(
            (b & 7) | ((rex_prefix & 1) << 3), &_regs, (rex_prefix == 0));
        dst.val = src.val;
        break;

    case 0xb8 ... 0xbf: /* mov imm{16,32,64},r{16,32,64} */
        if ( dst.bytes == 8 ) /* Fetch more bytes to obtain imm64 */
            src.val = ((uint32_t)src.val |
                       ((uint64_t)insn_fetch_type(uint32_t) << 32));
        dst.reg = decode_register(
            (b & 7) | ((rex_prefix & 1) << 3), &_regs, 0);
        dst.val = src.val;
        break;

    case 0xc0 ... 0xc1: grp2: /* Grp2 */
        switch ( modrm_reg & 7 )
        {
        case 0: /* rol */
            emulate_2op_SrcB("rol", src, dst, _regs.eflags);
            break;
        case 1: /* ror */
            emulate_2op_SrcB("ror", src, dst, _regs.eflags);
            break;
        case 2: /* rcl */
            emulate_2op_SrcB("rcl", src, dst, _regs.eflags);
            break;
        case 3: /* rcr */
            emulate_2op_SrcB("rcr", src, dst, _regs.eflags);
            break;
        case 4: /* sal/shl */
        case 6: /* sal/shl */
            emulate_2op_SrcB("sal", src, dst, _regs.eflags);
            break;
        case 5: /* shr */
            emulate_2op_SrcB("shr", src, dst, _regs.eflags);
            break;
        case 7: /* sar */
            emulate_2op_SrcB("sar", src, dst, _regs.eflags);
            break;
        }
        break;

    case 0xc4: /* les */ {
        unsigned long sel;
        dst.val = x86_seg_es;
    les: /* dst.val identifies the segment */
        generate_exception_if(src.type != OP_MEM, EXC_UD, -1);
        if ( (rc = ops->read(src.mem.seg, src.mem.off + src.bytes,
                             &sel, 2, ctxt)) != 0 )
            goto done;
        if ( (rc = load_seg(dst.val, (uint16_t)sel, ctxt, ops)) != 0 )
            goto done;
        dst.val = src.val;
        break;
    }

    case 0xc5: /* lds */
        dst.val = x86_seg_ds;
        goto les;

    case 0xd0 ... 0xd1: /* Grp2 */
        src.val = 1;
        goto grp2;

    case 0xd2 ... 0xd3: /* Grp2 */
        src.val = _regs.ecx;
        goto grp2;

1797 case 0xf6 ... 0xf7: /* Grp3 */
1798 switch ( modrm_reg & 7 )
1800 case 0 ... 1: /* test */
1801 /* Special case in Grp3: test has an immediate source operand. */
1802 src.type = OP_IMM;
1803 src.bytes = (d & ByteOp) ? 1 : op_bytes;
1804 if ( src.bytes == 8 ) src.bytes = 4;
1805 switch ( src.bytes )
1807 case 1: src.val = insn_fetch_type(int8_t); break;
1808 case 2: src.val = insn_fetch_type(int16_t); break;
1809 case 4: src.val = insn_fetch_type(int32_t); break;
1811 goto test;
1812 case 2: /* not */
1813 dst.val = ~dst.val;
1814 break;
1815 case 3: /* neg */
1816 emulate_1op("neg", dst, _regs.eflags);
1817 break;
1818 case 4: /* mul */
1819 src = dst;
1820 dst.type = OP_REG;
1821 dst.reg = (unsigned long *)&_regs.eax;
1822 dst.val = *dst.reg;
1823 _regs.eflags &= ~(EFLG_OF|EFLG_CF);
1824 switch ( src.bytes )
1826 case 1:
1827 dst.val = (uint8_t)dst.val;
1828 dst.val *= src.val;
1829 if ( (uint8_t)dst.val != (uint16_t)dst.val )
1830 _regs.eflags |= EFLG_OF|EFLG_CF;
1831 dst.bytes = 2;
1832 break;
1833 case 2:
1834 dst.val = (uint16_t)dst.val;
1835 dst.val *= src.val;
1836 if ( (uint16_t)dst.val != (uint32_t)dst.val )
1837 _regs.eflags |= EFLG_OF|EFLG_CF;
1838 *(uint16_t *)&_regs.edx = dst.val >> 16;
1839 break;
1840 #ifdef __x86_64__
1841 case 4:
1842 dst.val = (uint32_t)dst.val;
1843 dst.val *= src.val;
1844 if ( (uint32_t)dst.val != dst.val )
1845 _regs.eflags |= EFLG_OF|EFLG_CF;
1846 _regs.edx = (uint32_t)(dst.val >> 32);
1847 break;
1848 #endif
1849 default: {
1850 unsigned long m[2] = { src.val, dst.val };
1851 if ( mul_dbl(m) )
1852 _regs.eflags |= EFLG_OF|EFLG_CF;
1853 _regs.edx = m[1];
1854 dst.val = m[0];
1855 break;
1858 break;
1859 case 5: /* imul */
1860 src = dst;
1861 dst.type = OP_REG;
1862 dst.reg = (unsigned long *)&_regs.eax;
1863 dst.val = *dst.reg;
1864 _regs.eflags &= ~(EFLG_OF|EFLG_CF);
1865 switch ( src.bytes )
1867 case 1:
1868 dst.val = ((uint16_t)(int8_t)src.val *
1869 (uint16_t)(int8_t)dst.val);
1870 if ( (int8_t)dst.val != (uint16_t)dst.val )
1871 _regs.eflags |= EFLG_OF|EFLG_CF;
1872 dst.bytes = 2;
1873 break;
1874 case 2:
1875 dst.val = ((uint32_t)(int16_t)src.val *
1876 (uint32_t)(int16_t)dst.val);
1877 if ( (int16_t)dst.val != (uint32_t)dst.val )
1878 _regs.eflags |= EFLG_OF|EFLG_CF;
1879 *(uint16_t *)&_regs.edx = dst.val >> 16;
1880 break;
1881 #ifdef __x86_64__
1882 case 4:
1883 dst.val = ((uint64_t)(int32_t)src.val *
1884 (uint64_t)(int32_t)dst.val);
1885 if ( (int32_t)dst.val != dst.val )
1886 _regs.eflags |= EFLG_OF|EFLG_CF;
1887 _regs.edx = (uint32_t)(dst.val >> 32);
1888 break;
1889 #endif
1890 default: {
1891 unsigned long m[2] = { src.val, dst.val };
1892 if ( imul_dbl(m) )
1893 _regs.eflags |= EFLG_OF|EFLG_CF;
1894 _regs.edx = m[1];
1895 dst.val = m[0];
1896 break;
1899 break;
1900 case 6: /* div */ {
1901 unsigned long u[2], v;
1902 src = dst;
1903 dst.type = OP_REG;
1904 dst.reg = (unsigned long *)&_regs.eax;
1905 switch ( src.bytes )
1907 case 1:
1908 u[0] = (uint16_t)_regs.eax;
1909 u[1] = 0;
1910 v = (uint8_t)src.val;
1911 generate_exception_if(
1912 div_dbl(u, v) || ((uint8_t)u[0] != (uint16_t)u[0]),
1913 EXC_DE, -1);
1914 dst.val = (uint8_t)u[0];
1915 ((uint8_t *)&_regs.eax)[1] = u[1];
1916 break;
1917 case 2:
1918 u[0] = ((uint32_t)_regs.edx << 16) | (uint16_t)_regs.eax;
1919 u[1] = 0;
1920 v = (uint16_t)src.val;
1921 generate_exception_if(
1922 div_dbl(u, v) || ((uint16_t)u[0] != (uint32_t)u[0]),
1923 EXC_DE, -1);
1924 dst.val = (uint16_t)u[0];
1925 *(uint16_t *)&_regs.edx = u[1];
1926 break;
1927 #ifdef __x86_64__
1928 case 4:
1929 u[0] = (_regs.edx << 32) | (uint32_t)_regs.eax;
1930 u[1] = 0;
1931 v = (uint32_t)src.val;
1932 generate_exception_if(
1933 div_dbl(u, v) || ((uint32_t)u[0] != u[0]),
1934 EXC_DE, -1);
1935 dst.val = (uint32_t)u[0];
1936 _regs.edx = (uint32_t)u[1];
1937 break;
1938 #endif
1939 default:
1940 u[0] = _regs.eax;
1941 u[1] = _regs.edx;
1942 v = src.val;
1943 generate_exception_if(div_dbl(u, v), EXC_DE, -1);
1944 dst.val = u[0];
1945 _regs.edx = u[1];
1946 break;
1947 }
1948 break;
1949 }
1950 case 7: /* idiv */ {
1951 unsigned long u[2], v;
1952 src = dst;
1953 dst.type = OP_REG;
1954 dst.reg = (unsigned long *)&_regs.eax;
1955 switch ( src.bytes )
1956 {
1957 case 1:
1958 u[0] = (int16_t)_regs.eax;
1959 u[1] = ((long)u[0] < 0) ? ~0UL : 0UL;
1960 v = (int8_t)src.val;
1961 generate_exception_if(
1962 idiv_dbl(u, v) || ((int8_t)u[0] != (int16_t)u[0]),
1963 EXC_DE, -1);
1964 dst.val = (int8_t)u[0];
1965 ((int8_t *)&_regs.eax)[1] = u[1];
1966 break;
1967 case 2:
1968 u[0] = (int32_t)((_regs.edx << 16) | (uint16_t)_regs.eax);
1969 u[1] = ((long)u[0] < 0) ? ~0UL : 0UL;
1970 v = (int16_t)src.val;
1971 generate_exception_if(
1972 idiv_dbl(u, v) || ((int16_t)u[0] != (int32_t)u[0]),
1973 EXC_DE, -1);
1974 dst.val = (int16_t)u[0];
1975 *(int16_t *)&_regs.edx = u[1];
1976 break;
1977 #ifdef __x86_64__
1978 case 4:
1979 u[0] = (_regs.edx << 32) | (uint32_t)_regs.eax;
1980 u[1] = ((long)u[0] < 0) ? ~0UL : 0UL;
1981 v = (int32_t)src.val;
1982 generate_exception_if(
1983 idiv_dbl(u, v) || ((int32_t)u[0] != u[0]),
1984 EXC_DE, -1);
1985 dst.val = (int32_t)u[0];
1986 _regs.edx = (uint32_t)u[1];
1987 break;
1988 #endif
1989 default:
1990 u[0] = _regs.eax;
1991 u[1] = _regs.edx;
1992 v = src.val;
1993 generate_exception_if(idiv_dbl(u, v), EXC_DE, -1);
1994 dst.val = u[0];
1995 _regs.edx = u[1];
1996 break;
1997 }
1998 break;
1999 }
2000 default:
2001 goto cannot_emulate;
2002 }
2003 break;
2005 case 0xfe: /* Grp4 */
2006 generate_exception_if((modrm_reg & 7) >= 2, EXC_UD, -1);
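/* Deliberate fall-through: Grp4 shares the inc/dec decode with Grp5. */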
2007 case 0xff: /* Grp5 */
2008 switch ( modrm_reg & 7 )
2009 {
2010 case 0: /* inc */
2011 emulate_1op("inc", dst, _regs.eflags);
2012 break;
2013 case 1: /* dec */
2014 emulate_1op("dec", dst, _regs.eflags);
2015 break;
2016 case 2: /* call (near) */
2017 case 4: /* jmp (near) */
2018 if ( (dst.bytes != 8) && mode_64bit() )
2019 {
2020 dst.bytes = op_bytes = 8;
2021 if ( dst.type == OP_REG )
2022 dst.val = *dst.reg;
2023 else if ( (rc = ops->read(dst.mem.seg, dst.mem.off,
2024 &dst.val, 8, ctxt)) != 0 )
2025 goto done;
2026 }
2027 src.val = _regs.eip;
2028 _regs.eip = dst.val;
2029 if ( (modrm_reg & 7) == 2 )
2030 goto push; /* call */
2031 dst.type = OP_NONE;
2032 break;
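/*
 * A near call stacks the old %eip via the shared "push" path; a near
 * jmp only updates %eip and suppresses operand writeback.
 */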
2033 case 3: /* call (far, absolute indirect) */
2034 case 5: /* jmp (far, absolute indirect) */ {
2035 unsigned long sel;
2037 generate_exception_if(dst.type != OP_MEM, EXC_UD, -1);
2039 if ( (rc = ops->read(dst.mem.seg, dst.mem.off+dst.bytes,
2040 &sel, 2, ctxt)) )
2041 goto done;
2043 if ( (modrm_reg & 7) == 3 ) /* call */
2044 {
2045 struct segment_register reg;
2046 fail_if(ops->read_segment == NULL);
2047 if ( (rc = ops->read_segment(x86_seg_cs, &reg, ctxt)) ||
2048 (rc = ops->write(x86_seg_ss, sp_pre_dec(op_bytes),
2049 reg.sel, op_bytes, ctxt)) ||
2050 (rc = ops->write(x86_seg_ss, sp_pre_dec(op_bytes),
2051 _regs.eip, op_bytes, ctxt)) )
2052 goto done;
2053 }
2055 if ( (rc = load_seg(x86_seg_cs, sel, ctxt, ops)) != 0 )
2056 goto done;
2057 _regs.eip = dst.val;
2059 dst.type = OP_NONE;
2060 break;
2061 }
2062 case 6: /* push */
2063 /* 64-bit mode: PUSH defaults to a 64-bit operand. */
2064 if ( mode_64bit() && (dst.bytes == 4) )
2065 {
2066 dst.bytes = 8;
2067 if ( dst.type == OP_REG )
2068 dst.val = *dst.reg;
2069 else if ( (rc = ops->read(dst.mem.seg, dst.mem.off,
2070 &dst.val, 8, ctxt)) != 0 )
2071 goto done;
2072 }
2073 if ( (rc = ops->write(x86_seg_ss, sp_pre_dec(dst.bytes),
2074 dst.val, dst.bytes, ctxt)) != 0 )
2075 goto done;
2076 dst.type = OP_NONE;
2077 break;
2078 case 7:
2079 generate_exception_if(1, EXC_UD, -1);
2080 default:
2081 goto cannot_emulate;
2082 }
2083 break;
2086 writeback:
2087 switch ( dst.type )
2088 {
2089 case OP_REG:
2090 /* The 4-byte case *is* correct: in 64-bit mode we zero-extend. */
2091 switch ( dst.bytes )
2092 {
2093 case 1: *(uint8_t *)dst.reg = (uint8_t)dst.val; break;
2094 case 2: *(uint16_t *)dst.reg = (uint16_t)dst.val; break;
2095 case 4: *dst.reg = (uint32_t)dst.val; break; /* 64b: zero-ext */
2096 case 8: *dst.reg = dst.val; break;
2097 }
2098 break;
2099 case OP_MEM:
2100 if ( !(d & Mov) && (dst.orig_val == dst.val) &&
2101 !ctxt->force_writeback )
2102 /* nothing to do */;
2103 else if ( lock_prefix )
2104 rc = ops->cmpxchg(
2105 dst.mem.seg, dst.mem.off, &dst.orig_val,
2106 &dst.val, dst.bytes, ctxt);
2107 else
2108 rc = ops->write(
2109 dst.mem.seg, dst.mem.off, dst.val, dst.bytes, ctxt);
2110 if ( rc != 0 )
2111 goto done;
2112 default:
2113 break;
2114 }
2116 /* Inject #DB if single-step tracing was enabled at instruction start. */
2117 if ( (ctxt->regs->eflags & EFLG_TF) && (rc == X86EMUL_OKAY) &&
2118 (ops->inject_hw_exception != NULL) )
2119 rc = ops->inject_hw_exception(EXC_DB, -1, ctxt) ? : X86EMUL_EXCEPTION;
2121 /* Commit shadow register state. */
2122 _regs.eflags &= ~EFLG_RF;
2123 *ctxt->regs = _regs;
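/*
 * Everything above operated on the local shadow copy (_regs), so a
 * fault part-way through emulation leaves the guest-visible registers
 * untouched; the copy here is what commits the instruction.
 */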
2125 done:
2126 return rc;
2128 special_insn:
2129 dst.type = OP_NONE;
2131 /*
2132 * The only instructions with implicit operands that allow a LOCK prefix
2133 * are CMPXCHG{8,16}B, MOV CRn and MOV DRn.
2134 */
2135 generate_exception_if(lock_prefix &&
2136 ((b < 0x20) || (b > 0x23)) && /* MOV CRn/DRn */
2137 (b != 0xc7), /* CMPXCHG{8,16}B */
2138 EXC_GP, 0);
2140 if ( twobyte )
2141 goto twobyte_special_insn;
2143 switch ( b )
2144 {
2145 case 0x06: /* push %%es */ {
2146 struct segment_register reg;
2147 src.val = x86_seg_es;
2148 push_seg:
2149 fail_if(ops->read_segment == NULL);
2150 if ( (rc = ops->read_segment(src.val, &reg, ctxt)) != 0 )
2151 return rc;
2152 /* 64-bit mode: PUSH defaults to a 64-bit operand. */
2153 if ( mode_64bit() && (op_bytes == 4) )
2154 op_bytes = 8;
2155 if ( (rc = ops->write(x86_seg_ss, sp_pre_dec(op_bytes),
2156 reg.sel, op_bytes, ctxt)) != 0 )
2157 goto done;
2158 break;
2159 }
2161 case 0x07: /* pop %%es */
2162 src.val = x86_seg_es;
2163 pop_seg:
2164 fail_if(ops->write_segment == NULL);
2165 /* 64-bit mode: POP defaults to a 64-bit operand. */
2166 if ( mode_64bit() && (op_bytes == 4) )
2167 op_bytes = 8;
2168 if ( (rc = ops->read(x86_seg_ss, sp_post_inc(op_bytes),
2169 &dst.val, op_bytes, ctxt)) != 0 )
2170 goto done;
2171 if ( (rc = load_seg(src.val, (uint16_t)dst.val, ctxt, ops)) != 0 )
2172 return rc;
2173 break;
2175 case 0x0e: /* push %%cs */
2176 src.val = x86_seg_cs;
2177 goto push_seg;
2179 case 0x16: /* push %%ss */
2180 src.val = x86_seg_ss;
2181 goto push_seg;
2183 case 0x17: /* pop %%ss */
2184 src.val = x86_seg_ss;
2185 ctxt->retire.flags.mov_ss = 1;
2186 goto pop_seg;
2188 case 0x1e: /* push %%ds */
2189 src.val = x86_seg_ds;
2190 goto push_seg;
2192 case 0x1f: /* pop %%ds */
2193 src.val = x86_seg_ds;
2194 goto pop_seg;
2196 case 0x27: /* daa */ {
2197 uint8_t al = _regs.eax;
2198 unsigned long eflags = _regs.eflags;
2199 generate_exception_if(mode_64bit(), EXC_UD, -1);
2200 _regs.eflags &= ~(EFLG_CF|EFLG_AF);
2201 if ( ((al & 0x0f) > 9) || (eflags & EFLG_AF) )
2202 {
2203 *(uint8_t *)&_regs.eax += 6;
2204 _regs.eflags |= EFLG_AF;
2205 }
2206 if ( (al > 0x99) || (eflags & EFLG_CF) )
2207 {
2208 *(uint8_t *)&_regs.eax += 0x60;
2209 _regs.eflags |= EFLG_CF;
2210 }
2211 _regs.eflags &= ~(EFLG_SF|EFLG_ZF|EFLG_PF);
2212 _regs.eflags |= ((uint8_t)_regs.eax == 0) ? EFLG_ZF : 0;
2213 _regs.eflags |= (( int8_t)_regs.eax < 0) ? EFLG_SF : 0;
2214 _regs.eflags |= even_parity(_regs.eax) ? EFLG_PF : 0;
2215 break;
2216 }
2218 case 0x2f: /* das */ {
2219 uint8_t al = _regs.eax;
2220 unsigned long eflags = _regs.eflags;
2221 generate_exception_if(mode_64bit(), EXC_UD, -1);
2222 _regs.eflags &= ~(EFLG_CF|EFLG_AF);
2223 if ( ((al & 0x0f) > 9) || (eflags & EFLG_AF) )
2224 {
2225 _regs.eflags |= EFLG_AF;
2226 if ( (al < 6) || (eflags & EFLG_CF) )
2227 _regs.eflags |= EFLG_CF;
2228 *(uint8_t *)&_regs.eax -= 6;
2229 }
2230 if ( (al > 0x99) || (eflags & EFLG_CF) )
2231 {
2232 *(uint8_t *)&_regs.eax -= 0x60;
2233 _regs.eflags |= EFLG_CF;
2234 }
2235 _regs.eflags &= ~(EFLG_SF|EFLG_ZF|EFLG_PF);
2236 _regs.eflags |= ((uint8_t)_regs.eax == 0) ? EFLG_ZF : 0;
2237 _regs.eflags |= (( int8_t)_regs.eax < 0) ? EFLG_SF : 0;
2238 _regs.eflags |= even_parity(_regs.eax) ? EFLG_PF : 0;
2239 break;
2240 }
2242 case 0x37: /* aaa */
2243 case 0x3f: /* aas */
2244 generate_exception_if(mode_64bit(), EXC_UD, -1);
2245 _regs.eflags &= ~EFLG_CF;
2246 if ( ((uint8_t)_regs.eax > 9) || (_regs.eflags & EFLG_AF) )
2247 {
2248 ((uint8_t *)&_regs.eax)[0] += (b == 0x37) ? 6 : -6;
2249 ((uint8_t *)&_regs.eax)[1] += (b == 0x37) ? 1 : -1;
2250 _regs.eflags |= EFLG_CF | EFLG_AF;
2251 }
2252 ((uint8_t *)&_regs.eax)[0] &= 0x0f;
2253 break;
2255 case 0x40 ... 0x4f: /* inc/dec reg */
2256 dst.type = OP_REG;
2257 dst.reg = decode_register(b & 7, &_regs, 0);
2258 dst.bytes = op_bytes;
2259 dst.val = *dst.reg;
2260 if ( b & 8 )
2261 emulate_1op("dec", dst, _regs.eflags);
2262 else
2263 emulate_1op("inc", dst, _regs.eflags);
2264 break;
2266 case 0x50 ... 0x57: /* push reg */
2267 src.val = *(unsigned long *)decode_register(
2268 (b & 7) | ((rex_prefix & 1) << 3), &_regs, 0);
2269 goto push;
2271 case 0x58 ... 0x5f: /* pop reg */
2272 dst.type = OP_REG;
2273 dst.reg = decode_register(
2274 (b & 7) | ((rex_prefix & 1) << 3), &_regs, 0);
2275 dst.bytes = op_bytes;
2276 if ( mode_64bit() && (dst.bytes == 4) )
2277 dst.bytes = 8;
2278 if ( (rc = ops->read(x86_seg_ss, sp_post_inc(dst.bytes),
2279 &dst.val, dst.bytes, ctxt)) != 0 )
2280 goto done;
2281 break;
2283 case 0x60: /* pusha */ {
2284 int i;
2285 unsigned long regs[] = {
2286 _regs.eax, _regs.ecx, _regs.edx, _regs.ebx,
2287 _regs.esp, _regs.ebp, _regs.esi, _regs.edi };
2288 generate_exception_if(mode_64bit(), EXC_UD, -1);
2289 for ( i = 0; i < 8; i++ )
2290 if ( (rc = ops->write(x86_seg_ss, sp_pre_dec(op_bytes),
2291 regs[i], op_bytes, ctxt)) != 0 )
2292 goto done;
2293 break;
2294 }
2296 case 0x61: /* popa */ {
2297 int i;
2298 unsigned long dummy_esp, *regs[] = {
2299 (unsigned long *)&_regs.edi, (unsigned long *)&_regs.esi,
2300 (unsigned long *)&_regs.ebp, (unsigned long *)&dummy_esp,
2301 (unsigned long *)&_regs.ebx, (unsigned long *)&_regs.edx,
2302 (unsigned long *)&_regs.ecx, (unsigned long *)&_regs.eax };
2303 generate_exception_if(mode_64bit(), EXC_UD, -1);
2304 for ( i = 0; i < 8; i++ )
2305 {
2306 if ( (rc = ops->read(x86_seg_ss, sp_post_inc(op_bytes),
2307 &dst.val, op_bytes, ctxt)) != 0 )
2308 goto done;
2309 switch ( op_bytes )
2310 {
2311 case 1: *(uint8_t *)regs[i] = (uint8_t)dst.val; break;
2312 case 2: *(uint16_t *)regs[i] = (uint16_t)dst.val; break;
2313 case 4: *regs[i] = (uint32_t)dst.val; break; /* 64b: zero-ext */
2314 case 8: *regs[i] = dst.val; break;
2315 }
2316 }
2317 break;
2318 }
2320 case 0x68: /* push imm{16,32,64} */
2321 src.val = ((op_bytes == 2)
2322 ? (int32_t)insn_fetch_type(int16_t)
2323 : insn_fetch_type(int32_t));
2324 goto push;
2326 case 0x6a: /* push imm8 */
2327 src.val = insn_fetch_type(int8_t);
2328 push:
2329 d |= Mov; /* force writeback */
2330 dst.type = OP_MEM;
2331 dst.bytes = op_bytes;
2332 if ( mode_64bit() && (dst.bytes == 4) )
2333 dst.bytes = 8;
2334 dst.val = src.val;
2335 dst.mem.seg = x86_seg_ss;
2336 dst.mem.off = sp_pre_dec(dst.bytes);
2337 break;
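/*
 * Shared push path: the operand is widened to 8 bytes in 64-bit mode
 * and stored at the pre-decremented stack pointer by the normal memory
 * writeback (Mov is or'ed in so the store is never elided).
 */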
2339 case 0x6c ... 0x6d: /* ins %dx,%es:%edi */ {
2340 unsigned long nr_reps = get_rep_prefix();
2341 unsigned int port = (uint16_t)_regs.edx;
2342 dst.bytes = !(b & 1) ? 1 : (op_bytes == 8) ? 4 : op_bytes;
2343 dst.mem.seg = x86_seg_es;
2344 dst.mem.off = truncate_ea(_regs.edi);
2345 if ( (rc = ioport_access_check(port, dst.bytes, ctxt, ops)) != 0 )
2346 goto done;
2347 if ( (nr_reps > 1) && (ops->rep_ins != NULL) &&
2348 ((rc = ops->rep_ins(port, dst.mem.seg, dst.mem.off, dst.bytes,
2349 &nr_reps, ctxt)) != X86EMUL_UNHANDLEABLE) )
2350 {
2351 if ( rc != 0 )
2352 goto done;
2353 }
2354 else
2355 {
2356 fail_if(ops->read_io == NULL);
2357 if ( (rc = ops->read_io(port, dst.bytes, &dst.val, ctxt)) != 0 )
2358 goto done;
2359 dst.type = OP_MEM;
2360 nr_reps = 1;
2361 }
2362 register_address_increment(
2363 _regs.edi,
2364 nr_reps * ((_regs.eflags & EFLG_DF) ? -dst.bytes : dst.bytes));
2365 put_rep_prefix(nr_reps);
2366 break;
2367 }
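/*
 * INS above and OUTS/MOVS below prefer the caller's batched rep_* hook
 * when more than one iteration is pending, and fall back to a single
 * element per emulated iteration otherwise.
 */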
2369 case 0x6e ... 0x6f: /* outs %esi,%dx */ {
2370 unsigned long nr_reps = get_rep_prefix();
2371 unsigned int port = (uint16_t)_regs.edx;
2372 dst.bytes = !(b & 1) ? 1 : (op_bytes == 8) ? 4 : op_bytes;
2373 if ( (rc = ioport_access_check(port, dst.bytes, ctxt, ops)) != 0 )
2374 goto done;
2375 if ( (nr_reps > 1) && (ops->rep_outs != NULL) &&
2376 ((rc = ops->rep_outs(ea.mem.seg, truncate_ea(_regs.esi),
2377 port, dst.bytes,
2378 &nr_reps, ctxt)) != X86EMUL_UNHANDLEABLE) )
2379 {
2380 if ( rc != 0 )
2381 goto done;
2382 }
2383 else
2384 {
2385 if ( (rc = ops->read(ea.mem.seg, truncate_ea(_regs.esi),
2386 &dst.val, dst.bytes, ctxt)) != 0 )
2387 goto done;
2388 fail_if(ops->write_io == NULL);
2389 if ( (rc = ops->write_io(port, dst.bytes, dst.val, ctxt)) != 0 )
2390 goto done;
2391 nr_reps = 1;
2392 }
2393 register_address_increment(
2394 _regs.esi,
2395 nr_reps * ((_regs.eflags & EFLG_DF) ? -dst.bytes : dst.bytes));
2396 put_rep_prefix(nr_reps);
2397 break;
2398 }
2400 case 0x70 ... 0x7f: /* jcc (short) */ {
2401 int rel = insn_fetch_type(int8_t);
2402 if ( test_cc(b, _regs.eflags) )
2403 jmp_rel(rel);
2404 break;
2405 }
2407 case 0x90: /* nop / xchg %%r8,%%rax */
2408 if ( !(rex_prefix & 1) )
2409 break; /* nop */
2411 case 0x91 ... 0x97: /* xchg reg,%%rax */
2412 src.type = dst.type = OP_REG;
2413 src.bytes = dst.bytes = op_bytes;
2414 src.reg = (unsigned long *)&_regs.eax;
2415 src.val = *src.reg;
2416 dst.reg = decode_register(
2417 (b & 7) | ((rex_prefix & 1) << 3), &_regs, 0);
2418 dst.val = *dst.reg;
2419 goto xchg;
2421 case 0x98: /* cbw/cwde/cdqe */
2422 switch ( op_bytes )
2423 {
2424 case 2: *(int16_t *)&_regs.eax = (int8_t)_regs.eax; break; /* cbw */
2425 case 4: _regs.eax = (uint32_t)(int16_t)_regs.eax; break; /* cwde */
2426 case 8: _regs.eax = (int32_t)_regs.eax; break; /* cdqe */
2427 }
2428 break;
2430 case 0x99: /* cwd/cdq/cqo */
2431 switch ( op_bytes )
2432 {
2433 case 2:
2434 *(int16_t *)&_regs.edx = ((int16_t)_regs.eax < 0) ? -1 : 0;
2435 break;
2436 case 4:
2437 _regs.edx = (uint32_t)(((int32_t)_regs.eax < 0) ? -1 : 0);
2438 break;
2439 case 8:
2440 _regs.edx = ((long)_regs.eax < 0) ? -1 : 0; /* cqo: signed test (cast added; the register field is unsigned) */
2441 break;
2442 }
2443 break;
2445 case 0x9a: /* call (far, absolute) */ {
2446 struct segment_register reg;
2447 uint16_t sel;
2448 uint32_t eip;
2450 fail_if(ops->read_segment == NULL);
2451 generate_exception_if(mode_64bit(), EXC_UD, -1);
2453 eip = insn_fetch_bytes(op_bytes);
2454 sel = insn_fetch_type(uint16_t);
2456 if ( (rc = ops->read_segment(x86_seg_cs, &reg, ctxt)) ||
2457 (rc = ops->write(x86_seg_ss, sp_pre_dec(op_bytes),
2458 reg.sel, op_bytes, ctxt)) ||
2459 (rc = ops->write(x86_seg_ss, sp_pre_dec(op_bytes),
2460 _regs.eip, op_bytes, ctxt)) )
2461 goto done;
2463 if ( (rc = load_seg(x86_seg_cs, sel, ctxt, ops)) != 0 )
2464 goto done;
2465 _regs.eip = eip;
2466 break;
2467 }
2469 case 0x9b: /* wait/fwait */
2470 emulate_fpu_insn("fwait");
2471 break;
2473 case 0x9c: /* pushf */
2474 src.val = _regs.eflags;
2475 goto push;
2477 case 0x9d: /* popf */ {
2478 uint32_t mask = EFLG_VIP | EFLG_VIF | EFLG_VM;
2479 if ( !mode_ring0() )
2480 mask |= EFLG_IOPL;
2481 if ( !mode_iopl() )
2482 mask |= EFLG_IF;
2483 /* 64-bit mode: POP defaults to a 64-bit operand. */
2484 if ( mode_64bit() && (op_bytes == 4) )
2485 op_bytes = 8;
2486 if ( (rc = ops->read(x86_seg_ss, sp_post_inc(op_bytes),
2487 &dst.val, op_bytes, ctxt)) != 0 )
2488 goto done;
2489 if ( op_bytes == 2 )
2490 dst.val = (uint16_t)dst.val | (_regs.eflags & 0xffff0000u);
2491 dst.val &= 0x257fd5;
2492 _regs.eflags &= mask;
2493 _regs.eflags |= (uint32_t)(dst.val & ~mask) | 0x02;
2494 break;
2495 }
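/*
 * POPF may change IOPL only in ring 0, and IF only with sufficient
 * IOPL; the mask computed above preserves the protected bits from the
 * old _regs.eflags.
 */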
2497 case 0x9e: /* sahf */
2498 *(uint8_t *)&_regs.eflags = (((uint8_t *)&_regs.eax)[1] & 0xd7) | 0x02;
2499 break;
2501 case 0x9f: /* lahf */
2502 ((uint8_t *)&_regs.eax)[1] = (_regs.eflags & 0xd7) | 0x02;
2503 break;
2505 case 0xa0 ... 0xa1: /* mov mem.offs,{%al,%ax,%eax,%rax} */
2506 /* Source EA is not encoded via ModRM. */
2507 dst.type = OP_REG;
2508 dst.reg = (unsigned long *)&_regs.eax;
2509 dst.bytes = (d & ByteOp) ? 1 : op_bytes;
2510 if ( (rc = ops->read(ea.mem.seg, insn_fetch_bytes(ad_bytes),
2511 &dst.val, dst.bytes, ctxt)) != 0 )
2512 goto done;
2513 break;
2515 case 0xa2 ... 0xa3: /* mov {%al,%ax,%eax,%rax},mem.offs */
2516 /* Destination EA is not encoded via ModRM. */
2517 dst.type = OP_MEM;
2518 dst.mem.seg = ea.mem.seg;
2519 dst.mem.off = insn_fetch_bytes(ad_bytes);
2520 dst.bytes = (d & ByteOp) ? 1 : op_bytes;
2521 dst.val = (unsigned long)_regs.eax;
2522 break;
2524 case 0xa4 ... 0xa5: /* movs */ {
2525 unsigned long nr_reps = get_rep_prefix();
2526 dst.bytes = (d & ByteOp) ? 1 : op_bytes;
2527 dst.mem.seg = x86_seg_es;
2528 dst.mem.off = truncate_ea(_regs.edi);
2529 if ( (nr_reps > 1) && (ops->rep_movs != NULL) &&
2530 ((rc = ops->rep_movs(ea.mem.seg, truncate_ea(_regs.esi),
2531 dst.mem.seg, dst.mem.off, dst.bytes,
2532 &nr_reps, ctxt)) != X86EMUL_UNHANDLEABLE) )
2533 {
2534 if ( rc != 0 )
2535 goto done;
2536 }
2537 else
2538 {
2539 if ( (rc = ops->read(ea.mem.seg, truncate_ea(_regs.esi),
2540 &dst.val, dst.bytes, ctxt)) != 0 )
2541 goto done;
2542 dst.type = OP_MEM;
2543 nr_reps = 1;
2544 }
2545 register_address_increment(
2546 _regs.esi,
2547 nr_reps * ((_regs.eflags & EFLG_DF) ? -dst.bytes : dst.bytes));
2548 register_address_increment(
2549 _regs.edi,
2550 nr_reps * ((_regs.eflags & EFLG_DF) ? -dst.bytes : dst.bytes));
2551 put_rep_prefix(nr_reps);
2552 break;
2553 }
2555 case 0xa6 ... 0xa7: /* cmps */ {
2556 unsigned long next_eip = _regs.eip;
2557 get_rep_prefix();
2558 src.bytes = dst.bytes = (d & ByteOp) ? 1 : op_bytes;
2559 if ( (rc = ops->read(ea.mem.seg, truncate_ea(_regs.esi),
2560 &dst.val, dst.bytes, ctxt)) ||
2561 (rc = ops->read(x86_seg_es, truncate_ea(_regs.edi),
2562 &src.val, src.bytes, ctxt)) )
2563 goto done;
2564 register_address_increment(
2565 _regs.esi, (_regs.eflags & EFLG_DF) ? -dst.bytes : dst.bytes);
2566 register_address_increment(
2567 _regs.edi, (_regs.eflags & EFLG_DF) ? -src.bytes : src.bytes);
2568 put_rep_prefix(1);
2569 /* cmp: dst - src ==> src=*%%edi,dst=*%%esi ==> *%%esi - *%%edi */
2570 emulate_2op_SrcV("cmp", src, dst, _regs.eflags);
2571 if ( ((rep_prefix == REPE_PREFIX) && !(_regs.eflags & EFLG_ZF)) ||
2572 ((rep_prefix == REPNE_PREFIX) && (_regs.eflags & EFLG_ZF)) )
2573 _regs.eip = next_eip;
2574 break;
2575 }
2577 case 0xaa ... 0xab: /* stos */ {
2578 /* unsigned long max_reps = */get_rep_prefix();
2579 dst.type = OP_MEM;
2580 dst.bytes = (d & ByteOp) ? 1 : op_bytes;
2581 dst.mem.seg = x86_seg_es;
2582 dst.mem.off = truncate_ea(_regs.edi);
2583 dst.val = _regs.eax;
2584 register_address_increment(
2585 _regs.edi, (_regs.eflags & EFLG_DF) ? -dst.bytes : dst.bytes);
2586 put_rep_prefix(1);
2587 break;
2588 }
2590 case 0xac ... 0xad: /* lods */ {
2591 /* unsigned long max_reps = */get_rep_prefix();
2592 dst.type = OP_REG;
2593 dst.bytes = (d & ByteOp) ? 1 : op_bytes;
2594 dst.reg = (unsigned long *)&_regs.eax;
2595 if ( (rc = ops->read(ea.mem.seg, truncate_ea(_regs.esi),
2596 &dst.val, dst.bytes, ctxt)) != 0 )
2597 goto done;
2598 register_address_increment(
2599 _regs.esi, (_regs.eflags & EFLG_DF) ? -dst.bytes : dst.bytes);
2600 put_rep_prefix(1);
2601 break;
2602 }
2604 case 0xae ... 0xaf: /* scas */ {
2605 unsigned long next_eip = _regs.eip;
2606 get_rep_prefix();
2607 src.bytes = dst.bytes = (d & ByteOp) ? 1 : op_bytes;
2608 dst.val = _regs.eax;
2609 if ( (rc = ops->read(x86_seg_es, truncate_ea(_regs.edi),
2610 &src.val, src.bytes, ctxt)) != 0 )
2611 goto done;
2612 register_address_increment(
2613 _regs.edi, (_regs.eflags & EFLG_DF) ? -src.bytes : src.bytes);
2614 put_rep_prefix(1);
2615 /* cmp: dst - src ==> src=*%%edi,dst=%%eax ==> %%eax - *%%edi */
2616 emulate_2op_SrcV("cmp", src, dst, _regs.eflags);
2617 if ( ((rep_prefix == REPE_PREFIX) && !(_regs.eflags & EFLG_ZF)) ||
2618 ((rep_prefix == REPNE_PREFIX) && (_regs.eflags & EFLG_ZF)) )
2619 _regs.eip = next_eip;
2620 break;
2621 }
2623 case 0xc2: /* ret imm16 (near) */
2624 case 0xc3: /* ret (near) */ {
2625 int offset = (b == 0xc2) ? insn_fetch_type(uint16_t) : 0;
2626 op_bytes = mode_64bit() ? 8 : op_bytes;
2627 if ( (rc = ops->read(x86_seg_ss, sp_post_inc(op_bytes + offset),
2628 &dst.val, op_bytes, ctxt)) != 0 )
2629 goto done;
2630 _regs.eip = dst.val;
2631 break;
2632 }
2634 case 0xc8: /* enter imm16,imm8 */ {
2635 uint16_t size = insn_fetch_type(uint16_t);
2636 uint8_t depth = insn_fetch_type(uint8_t) & 31;
2637 int i;
2639 dst.type = OP_REG;
2640 dst.bytes = (mode_64bit() && (op_bytes == 4)) ? 8 : op_bytes;
2641 dst.reg = (unsigned long *)&_regs.ebp;
2642 if ( (rc = ops->write(x86_seg_ss, sp_pre_dec(dst.bytes),
2643 _regs.ebp, dst.bytes, ctxt)) )
2644 goto done;
2645 dst.val = _regs.esp;
2647 if ( depth > 0 )
2648 {
2649 for ( i = 1; i < depth; i++ )
2650 {
2651 unsigned long ebp, temp_data;
2652 ebp = truncate_word(_regs.ebp - i*dst.bytes, ctxt->sp_size/8);
2653 if ( (rc = ops->read(x86_seg_ss, ebp,
2654 &temp_data, dst.bytes, ctxt)) ||
2655 (rc = ops->write(x86_seg_ss, sp_pre_dec(dst.bytes),
2656 temp_data, dst.bytes, ctxt)) )
2657 goto done;
2658 }
2659 if ( (rc = ops->write(x86_seg_ss, sp_pre_dec(dst.bytes),
2660 dst.val, dst.bytes, ctxt)) )
2661 goto done;
2662 }
2664 sp_pre_dec(size);
2665 break;
2666 }
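/*
 * ENTER (above): the loop re-pushes depth-1 saved frame pointers from
 * the old frame (the nesting "display"), then the new frame pointer is
 * pushed and size bytes of locals are reserved via sp_pre_dec(size).
 */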
2668 case 0xc9: /* leave */
2669 /* First writeback, to %%esp. */
2670 dst.type = OP_REG;
2671 dst.bytes = (mode_64bit() && (op_bytes == 4)) ? 8 : op_bytes;
2672 dst.reg = (unsigned long *)&_regs.esp;
2673 dst.val = _regs.ebp;
2675 /* Flush first writeback, since there is a second. */
2676 switch ( dst.bytes )
2677 {
2678 case 1: *(uint8_t *)dst.reg = (uint8_t)dst.val; break;
2679 case 2: *(uint16_t *)dst.reg = (uint16_t)dst.val; break;
2680 case 4: *dst.reg = (uint32_t)dst.val; break; /* 64b: zero-ext */
2681 case 8: *dst.reg = dst.val; break;
2682 }
2684 /* Second writeback, to %%ebp. */
2685 dst.reg = (unsigned long *)&_regs.ebp;
2686 if ( (rc = ops->read(x86_seg_ss, sp_post_inc(dst.bytes),
2687 &dst.val, dst.bytes, ctxt)) )
2688 goto done;
2689 break;
2691 case 0xca: /* ret imm16 (far) */
2692 case 0xcb: /* ret (far) */ {
2693 int offset = (b == 0xca) ? insn_fetch_type(uint16_t) : 0;
2694 op_bytes = mode_64bit() ? 8 : op_bytes;
2695 if ( (rc = ops->read(x86_seg_ss, sp_post_inc(op_bytes),
2696 &dst.val, op_bytes, ctxt)) ||
2697 (rc = ops->read(x86_seg_ss, sp_post_inc(op_bytes + offset),
2698 &src.val, op_bytes, ctxt)) ||
2699 (rc = load_seg(x86_seg_cs, (uint16_t)src.val, ctxt, ops)) )
2700 goto done;
2701 _regs.eip = dst.val;
2702 break;
2703 }
2705 case 0xcc: /* int3 */
2706 src.val = EXC_BP;
2707 goto swint;
2709 case 0xcd: /* int imm8 */
2710 src.val = insn_fetch_type(uint8_t);
2711 swint:
2712 fail_if(ops->inject_sw_interrupt == NULL);
2713 rc = ops->inject_sw_interrupt(src.val, _regs.eip - ctxt->regs->eip,
2714 ctxt) ? : X86EMUL_EXCEPTION;
2715 goto done;
2717 case 0xce: /* into */
2718 generate_exception_if(mode_64bit(), EXC_UD, -1);
2719 if ( !(_regs.eflags & EFLG_OF) )
2720 break;
2721 src.val = EXC_OF;
2722 goto swint;
2724 case 0xcf: /* iret */ {
2725 unsigned long cs, eip, eflags;
2726 uint32_t mask = EFLG_VIP | EFLG_VIF | EFLG_VM;
2727 if ( !mode_ring0() )
2728 mask |= EFLG_IOPL;
2729 if ( !mode_iopl() )
2730 mask |= EFLG_IF;
2731 fail_if(!in_realmode(ctxt, ops));
2732 if ( (rc = ops->read(x86_seg_ss, sp_post_inc(op_bytes),
2733 &eip, op_bytes, ctxt)) ||
2734 (rc = ops->read(x86_seg_ss, sp_post_inc(op_bytes),
2735 &cs, op_bytes, ctxt)) ||
2736 (rc = ops->read(x86_seg_ss, sp_post_inc(op_bytes),
2737 &eflags, op_bytes, ctxt)) )
2738 goto done;
2739 if ( op_bytes == 2 )
2740 eflags = (uint16_t)eflags | (_regs.eflags & 0xffff0000u);
2741 eflags &= 0x257fd5;
2742 _regs.eflags &= mask;
2743 _regs.eflags |= (uint32_t)(eflags & ~mask) | 0x02;
2744 _regs.eip = eip;
2745 if ( (rc = load_seg(x86_seg_cs, (uint16_t)cs, ctxt, ops)) != 0 )
2746 goto done;
2747 break;
2748 }
2750 case 0xd4: /* aam */ {
2751 unsigned int base = insn_fetch_type(uint8_t);
2752 uint8_t al = _regs.eax;
2753 generate_exception_if(mode_64bit(), EXC_UD, -1);
2754 generate_exception_if(base == 0, EXC_DE, -1);
2755 *(uint16_t *)&_regs.eax = ((al / base) << 8) | (al % base);
2756 _regs.eflags &= ~(EFLG_SF|EFLG_ZF|EFLG_PF);
2757 _regs.eflags |= ((uint8_t)_regs.eax == 0) ? EFLG_ZF : 0;
2758 _regs.eflags |= (( int8_t)_regs.eax < 0) ? EFLG_SF : 0;
2759 _regs.eflags |= even_parity(_regs.eax) ? EFLG_PF : 0;
2760 break;
2761 }
2763 case 0xd5: /* aad */ {
2764 unsigned int base = insn_fetch_type(uint8_t);
2765 uint16_t ax = _regs.eax;
2766 generate_exception_if(mode_64bit(), EXC_UD, -1);
2767 *(uint16_t *)&_regs.eax = (uint8_t)(ax + ((ax >> 8) * base));
2768 _regs.eflags &= ~(EFLG_SF|EFLG_ZF|EFLG_PF);
2769 _regs.eflags |= ((uint8_t)_regs.eax == 0) ? EFLG_ZF : 0;
2770 _regs.eflags |= (( int8_t)_regs.eax < 0) ? EFLG_SF : 0;
2771 _regs.eflags |= even_parity(_regs.eax) ? EFLG_PF : 0;
2772 break;
2773 }
2775 case 0xd6: /* salc */
2776 generate_exception_if(mode_64bit(), EXC_UD, -1);
2777 *(uint8_t *)&_regs.eax = (_regs.eflags & EFLG_CF) ? 0xff : 0x00;
2778 break;
2780 case 0xd7: /* xlat */ {
2781 unsigned long al = (uint8_t)_regs.eax;
2782 if ( (rc = ops->read(ea.mem.seg, truncate_ea(_regs.ebx + al),
2783 &al, 1, ctxt)) != 0 )
2784 goto done;
2785 *(uint8_t *)&_regs.eax = al;
2786 break;
2787 }
2789 case 0xd9: /* FPU 0xd9 */
2790 switch ( modrm )
2791 {
2792 case 0xc0 ... 0xc7: /* fld %stN */
2793 case 0xc8 ... 0xcf: /* fxch %stN */
2794 case 0xd0: /* fnop */
2795 case 0xe0: /* fchs */
2796 case 0xe1: /* fabs */
2797 case 0xe4: /* ftst */
2798 case 0xe5: /* fxam */
2799 case 0xe8: /* fld1 */
2800 case 0xe9: /* fldl2t */
2801 case 0xea: /* fldl2e */
2802 case 0xeb: /* fldpi */
2803 case 0xec: /* fldlg2 */
2804 case 0xed: /* fldln2 */
2805 case 0xee: /* fldz */
2806 case 0xf0: /* f2xm1 */
2807 case 0xf1: /* fyl2x */
2808 case 0xf2: /* fptan */
2809 case 0xf3: /* fpatan */
2810 case 0xf4: /* fxtract */
2811 case 0xf5: /* fprem1 */
2812 case 0xf6: /* fdecstp */
2813 case 0xf7: /* fincstp */
2814 case 0xf8: /* fprem */
2815 case 0xf9: /* fyl2xp1 */
2816 case 0xfa: /* fsqrt */
2817 case 0xfb: /* fsincos */
2818 case 0xfc: /* frndint */
2819 case 0xfd: /* fscale */
2820 case 0xfe: /* fsin */
2821 case 0xff: /* fcos */
2822 emulate_fpu_insn_stub(0xd9, modrm);
2823 break;
2824 default:
2825 fail_if((modrm_reg & 7) != 7);
2826 fail_if(modrm >= 0xc0);
2827 /* fnstcw m2byte */
2828 ea.bytes = 2;
2829 dst = ea;
2830 emulate_fpu_insn_memdst("fnstcw", dst.val);
2831 }
2832 break;
2834 case 0xdb: /* FPU 0xdb */
2835 fail_if(modrm != 0xe3);
2836 /* fninit */
2837 emulate_fpu_insn("fninit");
2838 break;
2840 case 0xdd: /* FPU 0xdd */
2841 fail_if((modrm_reg & 7) != 7);
2842 fail_if(modrm >= 0xc0);
2843 /* fnstsw m2byte */
2844 ea.bytes = 2;
2845 dst = ea;
2846 emulate_fpu_insn_memdst("fnstsw", dst.val);
2847 break;
2849 case 0xde: /* FPU 0xde */
2850 switch ( modrm )
2851 {
2852 case 0xc0 ... 0xc7: /* faddp %stN */
2853 case 0xc8 ... 0xcf: /* fmulp %stN */
2854 case 0xd9: /* fcompp */
2855 case 0xe0 ... 0xe7: /* fsubrp %stN */
2856 case 0xe8 ... 0xef: /* fsubp %stN */
2857 case 0xf0 ... 0xf7: /* fdivrp %stN */
2858 case 0xf8 ... 0xff: /* fdivp %stN */
2859 emulate_fpu_insn_stub(0xde, modrm);
2860 break;
2861 default:
2862 goto cannot_emulate;
2863 }
2864 break;
2866 case 0xdf: /* FPU 0xdf */
2867 fail_if(modrm != 0xe0);
2868 /* fnstsw %ax */
2869 dst.bytes = 2;
2870 dst.type = OP_REG;
2871 dst.reg = (unsigned long *)&_regs.eax;
2872 emulate_fpu_insn_memdst("fnstsw", dst.val);
2873 break;
2875 case 0xe0 ... 0xe2: /* loop{,z,nz} */ {
2876 int rel = insn_fetch_type(int8_t);
2877 int do_jmp = !(_regs.eflags & EFLG_ZF); /* loopnz */
2878 if ( b == 0xe1 )
2879 do_jmp = !do_jmp; /* loopz */
2880 else if ( b == 0xe2 )
2881 do_jmp = 1; /* loop */
2882 switch ( ad_bytes )
2883 {
2884 case 2:
2885 do_jmp &= --(*(uint16_t *)&_regs.ecx) != 0;
2886 break;
2887 case 4:
2888 do_jmp &= --(*(uint32_t *)&_regs.ecx) != 0;
2889 _regs.ecx = (uint32_t)_regs.ecx; /* zero-extend in 64-bit mode */
2890 break;
2891 default: /* case 8: */
2892 do_jmp &= --_regs.ecx != 0;
2893 break;
2894 }
2895 if ( do_jmp )
2896 jmp_rel(rel);
2897 break;
2898 }
2900 case 0xe3: /* jcxz/jecxz (short) */ {
2901 int rel = insn_fetch_type(int8_t);
2902 if ( (ad_bytes == 2) ? !(uint16_t)_regs.ecx :
2903 (ad_bytes == 4) ? !(uint32_t)_regs.ecx : !_regs.ecx )
2904 jmp_rel(rel);
2905 break;
2906 }
2908 case 0xe4: /* in imm8,%al */
2909 case 0xe5: /* in imm8,%eax */
2910 case 0xe6: /* out %al,imm8 */
2911 case 0xe7: /* out %eax,imm8 */
2912 case 0xec: /* in %dx,%al */
2913 case 0xed: /* in %dx,%eax */
2914 case 0xee: /* out %al,%dx */
2915 case 0xef: /* out %eax,%dx */ {
2916 unsigned int port = ((b < 0xe8)
2917 ? insn_fetch_type(uint8_t)
2918 : (uint16_t)_regs.edx);
2919 op_bytes = !(b & 1) ? 1 : (op_bytes == 8) ? 4 : op_bytes;
2920 if ( (rc = ioport_access_check(port, op_bytes, ctxt, ops)) != 0 )
2921 goto done;
2922 if ( b & 2 )
2923 {
2924 /* out */
2925 fail_if(ops->write_io == NULL);
2926 rc = ops->write_io(port, op_bytes, _regs.eax, ctxt);
2927 }
2929 else
2930 {
2931 /* in */
2932 dst.type = OP_REG;
2933 dst.bytes = op_bytes;
2934 dst.reg = (unsigned long *)&_regs.eax;
2935 fail_if(ops->read_io == NULL);
2936 rc = ops->read_io(port, dst.bytes, &dst.val, ctxt);
2937 }
2938 if ( rc != 0 )
2939 goto done;
2940 break;
2941 }
2943 case 0xe8: /* call (near) */ {
2944 int rel = (((op_bytes == 2) && !mode_64bit())
2945 ? (int32_t)insn_fetch_type(int16_t)
2946 : insn_fetch_type(int32_t));
2947 op_bytes = mode_64bit() ? 8 : op_bytes;
2948 src.val = _regs.eip;
2949 jmp_rel(rel);
2950 goto push;
2951 }
2953 case 0xe9: /* jmp (near) */ {
2954 int rel = (((op_bytes == 2) && !mode_64bit())
2955 ? (int32_t)insn_fetch_type(int16_t)
2956 : insn_fetch_type(int32_t));
2957 jmp_rel(rel);
2958 break;
2959 }
2961 case 0xea: /* jmp (far, absolute) */ {
2962 uint16_t sel;
2963 uint32_t eip;
2964 generate_exception_if(mode_64bit(), EXC_UD, -1);
2965 eip = insn_fetch_bytes(op_bytes);
2966 sel = insn_fetch_type(uint16_t);
2967 if ( (rc = load_seg(x86_seg_cs, sel, ctxt, ops)) != 0 )
2968 goto done;
2969 _regs.eip = eip;
2970 break;
2971 }
2973 case 0xeb: /* jmp (short) */ {
2974 int rel = insn_fetch_type(int8_t);
2975 jmp_rel(rel);
2976 break;
2977 }
2979 case 0xf1: /* int1 (icebp) */
2980 src.val = EXC_DB;
2981 goto swint;
2983 case 0xf4: /* hlt */
2984 ctxt->retire.flags.hlt = 1;
2985 break;
2987 case 0xf5: /* cmc */
2988 _regs.eflags ^= EFLG_CF;
2989 break;
2991 case 0xf8: /* clc */
2992 _regs.eflags &= ~EFLG_CF;
2993 break;
2995 case 0xf9: /* stc */
2996 _regs.eflags |= EFLG_CF;
2997 break;
2999 case 0xfa: /* cli */
3000 generate_exception_if(!mode_iopl(), EXC_GP, 0);
3001 _regs.eflags &= ~EFLG_IF;
3002 break;
3004 case 0xfb: /* sti */
3005 generate_exception_if(!mode_iopl(), EXC_GP, 0);
3006 if ( !(_regs.eflags & EFLG_IF) )
3007 {
3008 _regs.eflags |= EFLG_IF;
3009 ctxt->retire.flags.sti = 1;
3010 }
3011 break;
3013 case 0xfc: /* cld */
3014 _regs.eflags &= ~EFLG_DF;
3015 break;
3017 case 0xfd: /* std */
3018 _regs.eflags |= EFLG_DF;
3019 break;
3020 }
3021 goto writeback;
3023 twobyte_insn:
3024 switch ( b )
3025 {
3026 case 0x40 ... 0x4f: /* cmovcc */
3027 dst.val = src.val;
3028 if ( !test_cc(b, _regs.eflags) )
3029 dst.type = OP_NONE;
3030 break;
3032 case 0x90 ... 0x9f: /* setcc */
3033 dst.val = test_cc(b, _regs.eflags);
3034 break;
3036 case 0xb0 ... 0xb1: /* cmpxchg */
3037 /* Save real source value, then compare EAX against destination. */
3038 src.orig_val = src.val;
3039 src.val = _regs.eax;
3040 emulate_2op_SrcV("cmp", src, dst, _regs.eflags);
3041 if ( _regs.eflags & EFLG_ZF )
3042 {
3043 /* Success: write back to memory. */
3044 dst.val = src.orig_val;
3045 }
3046 else
3047 {
3048 /* Failure: write the value we saw to EAX. */
3049 dst.type = OP_REG;
3050 dst.reg = (unsigned long *)&_regs.eax;
3051 }
3052 break;
3054 case 0xa3: bt: /* bt */
3055 emulate_2op_SrcV_nobyte("bt", src, dst, _regs.eflags);
3056 dst.type = OP_NONE;
3057 break;
3059 case 0xa4: /* shld imm8,r,r/m */
3060 case 0xa5: /* shld %%cl,r,r/m */
3061 case 0xac: /* shrd imm8,r,r/m */
3062 case 0xad: /* shrd %%cl,r,r/m */ {
3063 uint8_t shift, width = dst.bytes << 3;
3064 shift = (b & 1) ? (uint8_t)_regs.ecx : insn_fetch_type(uint8_t);
3065 if ( (shift &= width - 1) == 0 )
3066 break;
3067 dst.orig_val = truncate_word(dst.val, dst.bytes);
3068 dst.val = ((shift == width) ? src.val :
3069 (b & 8) ?
3070 /* shrd */
3071 ((dst.orig_val >> shift) |
3072 truncate_word(src.val << (width - shift), dst.bytes)) :
3073 /* shld */
3074 ((dst.orig_val << shift) |
3075 ((src.val >> (width - shift)) & ((1ull << shift) - 1))));
3076 dst.val = truncate_word(dst.val, dst.bytes);
3077 _regs.eflags &= ~(EFLG_OF|EFLG_SF|EFLG_ZF|EFLG_PF|EFLG_CF);
3078 if ( (dst.val >> ((b & 8) ? (shift - 1) : (width - shift))) & 1 )
3079 _regs.eflags |= EFLG_CF;
3080 if ( ((dst.val ^ dst.orig_val) >> (width - 1)) & 1 )
3081 _regs.eflags |= EFLG_OF;
3082 _regs.eflags |= ((dst.val >> (width - 1)) & 1) ? EFLG_SF : 0;
3083 _regs.eflags |= (dst.val == 0) ? EFLG_ZF : 0;
3084 _regs.eflags |= even_parity(dst.val) ? EFLG_PF : 0;
3085 break;
3086 }
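/*
 * SHLD/SHRD (above) treat destination and source as one double-width
 * quantity: CF is the last bit shifted out, and OF (architecturally
 * defined only for 1-bit shifts) is derived from a sign flip of the
 * result.
 */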
3088 case 0xb3: btr: /* btr */
3089 emulate_2op_SrcV_nobyte("btr", src, dst, _regs.eflags);
3090 break;
3092 case 0xab: bts: /* bts */
3093 emulate_2op_SrcV_nobyte("bts", src, dst, _regs.eflags);
3094 break;
3096 case 0xaf: /* imul */
3097 _regs.eflags &= ~(EFLG_OF|EFLG_CF);
3098 switch ( dst.bytes )
3099 {
3100 case 2:
3101 dst.val = ((uint32_t)(int16_t)src.val *
3102 (uint32_t)(int16_t)dst.val);
3103 if ( (int16_t)dst.val != (uint32_t)dst.val )
3104 _regs.eflags |= EFLG_OF|EFLG_CF;
3105 break;
3106 #ifdef __x86_64__
3107 case 4:
3108 dst.val = ((uint64_t)(int32_t)src.val *
3109 (uint64_t)(int32_t)dst.val);
3110 if ( (int32_t)dst.val != dst.val )
3111 _regs.eflags |= EFLG_OF|EFLG_CF;
3112 break;
3113 #endif
3114 default: {
3115 unsigned long m[2] = { src.val, dst.val };
3116 if ( imul_dbl(m) )
3117 _regs.eflags |= EFLG_OF|EFLG_CF;
3118 dst.val = m[0];
3119 break;
3120 }
3121 }
3122 break;
3124 case 0xb2: /* lss */
3125 dst.val = x86_seg_ss;
3126 goto les;
3128 case 0xb4: /* lfs */
3129 dst.val = x86_seg_fs;
3130 goto les;
3132 case 0xb5: /* lgs */
3133 dst.val = x86_seg_gs;
3134 goto les;
3136 case 0xb6: /* movzx rm8,r{16,32,64} */
3137 /* Recompute DstReg as we may have decoded AH/BH/CH/DH. */
3138 dst.reg = decode_register(modrm_reg, &_regs, 0);
3139 dst.bytes = op_bytes;
3140 dst.val = (uint8_t)src.val;
3141 break;
3143 case 0xbc: /* bsf */ {
3144 int zf;
3145 asm ( "bsf %2,%0; setz %b1"
3146 : "=r" (dst.val), "=q" (zf)
3147 : "r" (src.val), "1" (0) );
3148 _regs.eflags &= ~EFLG_ZF;
3149 if ( zf )
3150 {
3151 _regs.eflags |= EFLG_ZF;
3152 dst.type = OP_NONE;
3153 }
3154 break;
3155 }
3157 case 0xbd: /* bsr */ {
3158 int zf;
3159 asm ( "bsr %2,%0; setz %b1"
3160 : "=r" (dst.val), "=q" (zf)
3161 : "r" (src.val), "1" (0) );
3162 _regs.eflags &= ~EFLG_ZF;
3163 if ( zf )
3164 {
3165 _regs.eflags |= EFLG_ZF;
3166 dst.type = OP_NONE;
3167 }
3168 break;
3169 }
3171 case 0xb7: /* movzx rm16,r{16,32,64} */
3172 dst.val = (uint16_t)src.val;
3173 break;
3175 case 0xbb: btc: /* btc */
3176 emulate_2op_SrcV_nobyte("btc", src, dst, _regs.eflags);
3177 break;
3179 case 0xba: /* Grp8 */
3180 switch ( modrm_reg & 7 )
3181 {
3182 case 4: goto bt;
3183 case 5: goto bts;
3184 case 6: goto btr;
3185 case 7: goto btc;
3186 default: generate_exception_if(1, EXC_UD, -1);
3187 }
3188 break;
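/* Grp8 (0f ba /4../7) redirects to bt/bts/btr/btc with an immediate bit offset. */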
3190 case 0xbe: /* movsx rm8,r{16,32,64} */
3191 /* Recompute DstReg as we may have decoded AH/BH/CH/DH. */
3192 dst.reg = decode_register(modrm_reg, &_regs, 0);
3193 dst.bytes = op_bytes;
3194 dst.val = (int8_t)src.val;
3195 break;
3197 case 0xbf: /* movsx rm16,r{16,32,64} */
3198 dst.val = (int16_t)src.val;
3199 break;
3201 case 0xc0 ... 0xc1: /* xadd */
3202 /* Write back the register source. */
3203 switch ( dst.bytes )
3204 {
3205 case 1: *(uint8_t *)src.reg = (uint8_t)dst.val; break;
3206 case 2: *(uint16_t *)src.reg = (uint16_t)dst.val; break;
3207 case 4: *src.reg = (uint32_t)dst.val; break; /* 64b reg: zero-extend */
3208 case 8: *src.reg = dst.val; break;
3209 }
3210 goto add;
3211 }
3212 goto writeback;
3214 twobyte_special_insn:
3215 switch ( b )
3217 case 0x01: /* Grp7 */ {
3218 struct segment_register reg;
3219 unsigned long base, limit, cr0, cr0w;
3221 if ( modrm == 0xdf ) /* invlpga */
3222 {
3223 generate_exception_if(!in_protmode(ctxt, ops), EXC_UD, -1);
3224 generate_exception_if(!mode_ring0(), EXC_GP, 0);
3225 fail_if(ops->invlpg == NULL);
3226 if ( (rc = ops->invlpg(x86_seg_none, truncate_ea(_regs.eax),
3227 ctxt)) )
3228 goto done;
3229 break;
3230 }
3232 switch ( modrm_reg & 7 )
3233 {
3234 case 0: /* sgdt */
3235 case 1: /* sidt */
3236 generate_exception_if(ea.type != OP_MEM, EXC_UD, -1);
3237 fail_if(ops->read_segment == NULL);
3238 if ( (rc = ops->read_segment((modrm_reg & 1) ?
3239 x86_seg_idtr : x86_seg_gdtr,
3240 &reg, ctxt)) )
3241 goto done;
3242 if ( op_bytes == 2 )
3243 reg.base &= 0xffffff;
3244 if ( (rc = ops->write(ea.mem.seg, ea.mem.off+0,
3245 reg.limit, 2, ctxt)) ||
3246 (rc = ops->write(ea.mem.seg, ea.mem.off+2,
3247 reg.base, mode_64bit() ? 8 : 4, ctxt)) )
3248 goto done;
3249 break;
3250 case 2: /* lgdt */
3251 case 3: /* lidt */
3252 generate_exception_if(ea.type != OP_MEM, EXC_UD, -1);
3253 fail_if(ops->write_segment == NULL);
3254 memset(&reg, 0, sizeof(reg));
3255 if ( (rc = ops->read(ea.mem.seg, ea.mem.off+0,
3256 &limit, 2, ctxt)) ||
3257 (rc = ops->read(ea.mem.seg, ea.mem.off+2,
3258 &base, mode_64bit() ? 8 : 4, ctxt)) )
3259 goto done;
3260 reg.base = base;
3261 reg.limit = limit;
3262 if ( op_bytes == 2 )
3263 reg.base &= 0xffffff;
3264 if ( (rc = ops->write_segment((modrm_reg & 1) ?
3265 x86_seg_idtr : x86_seg_gdtr,
3266 &reg, ctxt)) )
3267 goto done;
3268 break;
3269 case 4: /* smsw */
3270 if ( ea.type == OP_MEM )
3271 ea.bytes = 2;
3272 dst = ea;
3273 fail_if(ops->read_cr == NULL);
3274 if ( (rc = ops->read_cr(0, &dst.val, ctxt)) )
3275 goto done;
3276 d |= Mov; /* force writeback */
3277 break;
3278 case 6: /* lmsw */
3279 fail_if(ops->read_cr == NULL);
3280 fail_if(ops->write_cr == NULL);
3281 if ( (rc = ops->read_cr(0, &cr0, ctxt)) )
3282 goto done;
3283 if ( ea.type == OP_REG )
3284 cr0w = *ea.reg;
3285 else if ( (rc = ops->read(ea.mem.seg, ea.mem.off,
3286 &cr0w, 2, ctxt)) )
3287 goto done;
3288 /* LMSW can: (1) set bits 0-3; (2) clear bits 1-3. */
3289 cr0 = (cr0 & ~0xe) | (cr0w & 0xf);
3290 if ( (rc = ops->write_cr(0, cr0, ctxt)) )
3291 goto done;
3292 break;
3293 case 7: /* invlpg */
3294 generate_exception_if(!mode_ring0(), EXC_GP, 0);
3295 generate_exception_if(ea.type != OP_MEM, EXC_UD, -1);
3296 fail_if(ops->invlpg == NULL);
3297 if ( (rc = ops->invlpg(ea.mem.seg, ea.mem.off, ctxt)) )
3298 goto done;
3299 break;
3300 default:
3301 goto cannot_emulate;
3302 }
3303 break;
3304 }
3306 case 0x06: /* clts */
3307 generate_exception_if(!mode_ring0(), EXC_GP, 0);
3308 fail_if((ops->read_cr == NULL) || (ops->write_cr == NULL));
3309 if ( (rc = ops->read_cr(0, &dst.val, ctxt)) ||
3310 (rc = ops->write_cr(0, dst.val&~8, ctxt)) )
3311 goto done;
3312 break;
3314 case 0x08: /* invd */
3315 case 0x09: /* wbinvd */
3316 generate_exception_if(!mode_ring0(), EXC_GP, 0);
3317 fail_if(ops->wbinvd == NULL);
3318 if ( (rc = ops->wbinvd(ctxt)) != 0 )
3319 goto done;
3320 break;
3322 case 0x0d: /* GrpP (prefetch) */
3323 case 0x18: /* Grp16 (prefetch/nop) */
3324 case 0x19 ... 0x1f: /* nop (amd-defined) */
3325 break;
3327 case 0x20: /* mov cr,reg */
3328 case 0x21: /* mov dr,reg */
3329 case 0x22: /* mov reg,cr */
3330 case 0x23: /* mov reg,dr */
3331 generate_exception_if(ea.type != OP_REG, EXC_UD, -1);
3332 generate_exception_if(!mode_ring0(), EXC_GP, 0);
3333 modrm_reg |= lock_prefix << 3;
3334 if ( b & 2 )
3335 {
3336 /* Write to CR/DR. */
3337 src.val = *(unsigned long *)decode_register(modrm_rm, &_regs, 0);
3338 if ( !mode_64bit() )
3339 src.val = (uint32_t)src.val;
3340 rc = ((b & 1)
3341 ? (ops->write_dr
3342 ? ops->write_dr(modrm_reg, src.val, ctxt)
3343 : X86EMUL_UNHANDLEABLE)
3344 : (ops->write_cr
3345 ? ops->write_cr(modrm_reg, src.val, ctxt)
3346 : X86EMUL_UNHANDLEABLE));
3347 }
3348 else
3349 {
3350 /* Read from CR/DR. */
3351 dst.type = OP_REG;
3352 dst.bytes = mode_64bit() ? 8 : 4;
3353 dst.reg = decode_register(modrm_rm, &_regs, 0);
3354 rc = ((b & 1)
3355 ? (ops->read_dr
3356 ? ops->read_dr(modrm_reg, &dst.val, ctxt)
3357 : X86EMUL_UNHANDLEABLE)
3358 : (ops->read_cr
3359 ? ops->read_cr(modrm_reg, &dst.val, ctxt)
3360 : X86EMUL_UNHANDLEABLE));
3361 }
3362 if ( rc != 0 )
3363 goto done;
3364 break;
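/*
 * Note: folding lock_prefix into bit 3 of modrm_reg above lets
 * LOCK MOV CRn address CR8, matching the AMD-defined encoding.
 */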
3366 case 0x30: /* wrmsr */ {
3367 uint64_t val = ((uint64_t)_regs.edx << 32) | (uint32_t)_regs.eax;
3368 generate_exception_if(!mode_ring0(), EXC_GP, 0);
3369 fail_if(ops->write_msr == NULL);
3370 if ( (rc = ops->write_msr((uint32_t)_regs.ecx, val, ctxt)) != 0 )
3371 goto done;
3372 break;
3373 }
3375 case 0x31: /* rdtsc */ {
3376 unsigned long cr4;
3377 uint64_t val;
3378 fail_if(ops->read_cr == NULL);
3379 if ( (rc = ops->read_cr(4, &cr4, ctxt)) )
3380 goto done;
3381 generate_exception_if((cr4 & CR4_TSD) && !mode_ring0(), EXC_GP, 0);
3382 fail_if(ops->read_msr == NULL);
3383 if ( (rc = ops->read_msr(MSR_TSC, &val, ctxt)) != 0 )
3384 goto done;
3385 _regs.edx = (uint32_t)(val >> 32);
3386 _regs.eax = (uint32_t)(val >> 0);
3387 break;
3388 }
3390 case 0x32: /* rdmsr */ {
3391 uint64_t val;
3392 generate_exception_if(!mode_ring0(), EXC_GP, 0);
3393 fail_if(ops->read_msr == NULL);
3394 if ( (rc = ops->read_msr((uint32_t)_regs.ecx, &val, ctxt)) != 0 )
3395 goto done;
3396 _regs.edx = (uint32_t)(val >> 32);
3397 _regs.eax = (uint32_t)(val >> 0);
3398 break;
3399 }
3401 case 0x6f: /* movq mm/m64,mm */ {
3402 uint8_t stub[] = { 0x0f, 0x6f, modrm, 0xc3 };
3403 struct fpu_insn_ctxt fic = { .insn_bytes = sizeof(stub)-1 };
3404 uint64_t val;
3405 if ( ea.type == OP_MEM )
3406 {
3407 unsigned long lval, hval;
3408 if ( (rc = ops->read(ea.mem.seg, ea.mem.off+0, &lval, 4, ctxt)) ||
3409 (rc = ops->read(ea.mem.seg, ea.mem.off+4, &hval, 4, ctxt)) )
3410 goto done;
3411 val = ((uint64_t)hval << 32) | (uint32_t)lval;
3412 stub[2] = modrm & 0x38; /* movq (%eax),%mmN */
3413 }
3414 get_fpu(X86EMUL_FPU_mmx, &fic);
3415 asm volatile ( "call *%0" : : "r" (stub), "a" (&val) : "memory" );
3416 put_fpu(&fic);
3417 break;
3418 }
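/*
 * The movq cases (0x6f above, 0x7f below) run the real MMX instruction
 * through a tiny stub ({0x0f, opcode, modrm, ret}) built on the stack;
 * for memory operands the ModRM byte is rewritten to address (%eax),
 * which is pointed at the local 64-bit buffer "val".
 */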
3420 case 0x7f: /* movq mm,mm/m64 */ {
3421 uint8_t stub[] = { 0x0f, 0x7f, modrm, 0xc3 };
3422 struct fpu_insn_ctxt fic = { .insn_bytes = sizeof(stub)-1 };
3423 uint64_t val;
3424 if ( ea.type == OP_MEM )
3425 stub[2] = modrm & 0x38; /* movq %mmN,(%eax) */
3426 get_fpu(X86EMUL_FPU_mmx, &fic);
3427 asm volatile ( "call *%0" : : "r" (stub), "a" (&val) : "memory" );
3428 put_fpu(&fic);
3429 if ( ea.type == OP_MEM )
3430 {
3431 unsigned long lval = (uint32_t)val, hval = (uint32_t)(val >> 32);
3432 if ( (rc = ops->write(ea.mem.seg, ea.mem.off+0, lval, 4, ctxt)) ||
3433 (rc = ops->write(ea.mem.seg, ea.mem.off+4, hval, 4, ctxt)) )
3434 goto done;
3435 }
3436 break;
3437 }
3439 case 0x80 ... 0x8f: /* jcc (near) */ {
3440 int rel = (((op_bytes == 2) && !mode_64bit())
3441 ? (int32_t)insn_fetch_type(int16_t)
3442 : insn_fetch_type(int32_t));
3443 if ( test_cc(b, _regs.eflags) )
3444 jmp_rel(rel);
3445 break;
3446 }
3448 case 0xa0: /* push %%fs */
3449 src.val = x86_seg_fs;
3450 goto push_seg;
3452 case 0xa1: /* pop %%fs */
3453 src.val = x86_seg_fs;
3454 goto pop_seg;
3456 case 0xa2: /* cpuid */ {
3457 unsigned int eax = _regs.eax, ebx = _regs.ebx;
3458 unsigned int ecx = _regs.ecx, edx = _regs.edx;
3459 fail_if(ops->cpuid == NULL);
3460 if ( (rc = ops->cpuid(&eax, &ebx, &ecx, &edx, ctxt)) != 0 )
3461 goto done;
3462 _regs.eax = eax; _regs.ebx = ebx;
3463 _regs.ecx = ecx; _regs.edx = edx;
3464 break;
3465 }
3467 case 0xa8: /* push %%gs */
3468 src.val = x86_seg_gs;
3469 goto push_seg;
3471 case 0xa9: /* pop %%gs */
3472 src.val = x86_seg_gs;
3473 goto pop_seg;
3475 case 0xc7: /* Grp9 (cmpxchg8b/cmpxchg16b) */ {
3476 unsigned long old[2], exp[2], new[2];
3477 unsigned int i;
3479 generate_exception_if((modrm_reg & 7) != 1, EXC_UD, -1);
3480 generate_exception_if(ea.type != OP_MEM, EXC_UD, -1);
3481 op_bytes *= 2;
3483 /* Get actual old value. */
3484 for ( i = 0; i < (op_bytes/sizeof(long)); i++ )
3485 if ( (rc = ops->read(ea.mem.seg, ea.mem.off + i*sizeof(long),
3486 &old[i], sizeof(long), ctxt)) != 0 )
3487 goto done;
3489 /* Get expected and proposed values. */
3490 if ( op_bytes == 8 )
3491 {
3492 ((uint32_t *)exp)[0] = _regs.eax; ((uint32_t *)exp)[1] = _regs.edx;
3493 ((uint32_t *)new)[0] = _regs.ebx; ((uint32_t *)new)[1] = _regs.ecx;
3494 }
3495 else
3496 {
3497 exp[0] = _regs.eax; exp[1] = _regs.edx;
3498 new[0] = _regs.ebx; new[1] = _regs.ecx;
3499 }
3501 if ( memcmp(old, exp, op_bytes) )
3502 {
3503 /* Expected != actual: store actual to rDX:rAX and clear ZF. */
3504 _regs.eax = (op_bytes == 8) ? ((uint32_t *)old)[0] : old[0];
3505 _regs.edx = (op_bytes == 8) ? ((uint32_t *)old)[1] : old[1];
3506 _regs.eflags &= ~EFLG_ZF;
3507 }
3508 else
3509 {
3510 /* Expected == actual: attempt atomic cmpxchg and set ZF. */
3511 if ( (rc = ops->cmpxchg(ea.mem.seg, ea.mem.off, old,
3512 new, op_bytes, ctxt)) != 0 )
3513 goto done;
3514 _regs.eflags |= EFLG_ZF;
3515 }
3516 break;
3517 }
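/*
 * The initial read and memcmp() above are not themselves atomic;
 * atomicity of the successful exchange is delegated to ops->cmpxchg().
 */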
3519 case 0xc8 ... 0xcf: /* bswap */
3520 dst.type = OP_REG;
3521 dst.reg = decode_register(
3522 (b & 7) | ((rex_prefix & 1) << 3), &_regs, 0);
3523 switch ( dst.bytes = op_bytes )
3524 {
3525 default: /* case 2: */
3526 /* Undefined behaviour. Writes zero on all tested CPUs. */
3527 dst.val = 0;
3528 break;
3529 case 4:
3530 #ifdef __x86_64__
3531 asm ( "bswap %k0" : "=r" (dst.val) : "0" (*dst.reg) );
3532 break;
3533 case 8:
3534 #endif
3535 asm ( "bswap %0" : "=r" (dst.val) : "0" (*dst.reg) );
3536 break;
3537 }
3538 break;
3539 }
3540 goto writeback;
3542 cannot_emulate:
3543 return X86EMUL_UNHANDLEABLE;