VirtualBox

source: vbox/trunk/src/VBox/VMM/VMMAll/IEMAllInstOneByte.cpp.h@ 101858

Last change on this file since 101858 was 101850, checked in by vboxsync, 15 months ago

VMM/IEM: Replaced all IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF use with IEM_MC_CLEAR_HIGH_GREG_U64 and removed the MC, as it forced argument variables to be used after IEM_MC_CALL_XXXX which made recompiling more complicated. bugref:10371

  • Property svn:eol-style set to native
  • Property svn:keywords set to Author Date Id Revision
File size: 490.8 KB
Line 
1/* $Id: IEMAllInstOneByte.cpp.h 101850 2023-11-06 10:13:31Z vboxsync $ */
2/** @file
3 * IEM - Instruction Decoding and Emulation.
4 */
5
6/*
7 * Copyright (C) 2011-2023 Oracle and/or its affiliates.
8 *
9 * This file is part of VirtualBox base platform packages, as
10 * available from https://www.virtualbox.org.
11 *
12 * This program is free software; you can redistribute it and/or
13 * modify it under the terms of the GNU General Public License
14 * as published by the Free Software Foundation, in version 3 of the
15 * License.
16 *
17 * This program is distributed in the hope that it will be useful, but
18 * WITHOUT ANY WARRANTY; without even the implied warranty of
19 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
20 * General Public License for more details.
21 *
22 * You should have received a copy of the GNU General Public License
23 * along with this program; if not, see <https://www.gnu.org/licenses>.
24 *
25 * SPDX-License-Identifier: GPL-3.0-only
26 */
27
28
29/*******************************************************************************
30* Global Variables *
31*******************************************************************************/
32extern const PFNIEMOP g_apfnOneByteMap[256]; /* not static since we need to forward declare it. */
33
34/* Instruction group definitions: */
35
36/** @defgroup og_gen General
37 * @{ */
38 /** @defgroup og_gen_arith Arithmetic
39 * @{ */
40 /** @defgroup og_gen_arith_bin Binary numbers */
41 /** @defgroup og_gen_arith_dec Decimal numbers */
42 /** @} */
43/** @} */
44
45/** @defgroup og_stack Stack
46 * @{ */
47 /** @defgroup og_stack_sreg Segment registers */
48/** @} */
49
50/** @defgroup og_prefix Prefixes */
51/** @defgroup og_escapes Escape bytes */
52
53
54
55/** @name One byte opcodes.
56 * @{
57 */
58
59/**
60 * Body for instructions like ADD, AND, OR, TEST, CMP, ++ with a byte
61 * memory/register as the destination.
62 *
63 * Used with IEMOP_BODY_BINARY_rm_r8_NO_LOCK or IEMOP_BODY_BINARY_rm_r8_LOCKED.
 *
 * Emits the complete register-destination path and the non-locked memory
 * path, then deliberately leaves an 'else' scope open (the trailing (void)0
 * swallows the invoker's semicolon); the companion NO_LOCK/LOCKED macro
 * supplies the lock-prefix handling and the closing braces.
 *
 * @param a_fnNormalU8 Worker taking (pu8Dst, u8Src, pEFlags), used for both
 *        the register and the non-locked memory forms.
64 */
65#define IEMOP_BODY_BINARY_rm_r8_RW(a_fnNormalU8) \
66 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm); \
67 \
68 /* \
69 * If rm is denoting a register, no more instruction bytes. \
70 */ \
71 if (IEM_IS_MODRM_REG_MODE(bRm)) \
72 { \
73 IEM_MC_BEGIN(3, 0, 0, 0); \
74 IEM_MC_ARG(uint8_t *, pu8Dst, 0); \
75 IEM_MC_ARG(uint8_t, u8Src, 1); \
76 IEM_MC_ARG(uint32_t *, pEFlags, 2); \
77 \
78 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
79 IEM_MC_FETCH_GREG_U8(u8Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
80 IEM_MC_REF_GREG_U8(pu8Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
81 IEM_MC_REF_EFLAGS(pEFlags); \
82 IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU8, pu8Dst, u8Src, pEFlags); \
83 \
84 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
85 IEM_MC_END(); \
86 } \
87 else \
88 { \
89 /* \
90 * We're accessing memory. \
91 * Note! We're putting the eflags on the stack here so we can commit them \
92 * after the memory. \
93 */ \
94 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK)) \
95 { \
96 IEM_MC_BEGIN(3, 3, 0, 0); \
97 IEM_MC_ARG(uint8_t *, pu8Dst, 0); \
98 IEM_MC_ARG(uint8_t, u8Src, 1); \
99 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2); \
100 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
101 IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
102 \
103 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
104 IEMOP_HLP_DONE_DECODING(); \
105 IEM_MC_MEM_MAP_U8_RW(pu8Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
106 IEM_MC_FETCH_GREG_U8(u8Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
107 IEM_MC_FETCH_EFLAGS(EFlags); \
108 IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU8, pu8Dst, u8Src, pEFlags); \
109 \
110 IEM_MC_MEM_COMMIT_AND_UNMAP_RW(pu8Dst, bUnmapInfo); \
111 IEM_MC_COMMIT_EFLAGS(EFlags); \
112 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
113 IEM_MC_END(); \
114 } \
115 else \
116 { \
 /* Scope intentionally left open - closed by the NO_LOCK/LOCKED companion macro. */ \
117 (void)0
118
119/**
120 * Body for instructions like TEST & CMP, ++ with a byte memory/registers as
121 * operands.
122 *
123 * Used with IEMOP_BODY_BINARY_rm_r8_NO_LOCK or IEMOP_BODY_BINARY_rm_r8_LOCKED.
 *
 * Read-only variant of IEMOP_BODY_BINARY_rm_r8_RW: the memory operand is
 * mapped RO and unmapped without write-back, as the worker only updates
 * EFLAGS.  Like the RW variant it leaves an 'else' scope open for the
 * companion NO_LOCK/LOCKED macro to close.
 *
 * @param a_fnNormalU8 Worker taking (pu8Dst, u8Src, pEFlags).
124 */
125#define IEMOP_BODY_BINARY_rm_r8_RO(a_fnNormalU8) \
126 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm); \
127 \
128 /* \
129 * If rm is denoting a register, no more instruction bytes. \
130 */ \
131 if (IEM_IS_MODRM_REG_MODE(bRm)) \
132 { \
133 IEM_MC_BEGIN(3, 0, 0, 0); \
134 IEM_MC_ARG(uint8_t *, pu8Dst, 0); \
135 IEM_MC_ARG(uint8_t, u8Src, 1); \
136 IEM_MC_ARG(uint32_t *, pEFlags, 2); \
137 \
138 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
139 IEM_MC_FETCH_GREG_U8(u8Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
140 IEM_MC_REF_GREG_U8(pu8Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
141 IEM_MC_REF_EFLAGS(pEFlags); \
142 IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU8, pu8Dst, u8Src, pEFlags); \
143 \
144 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
145 IEM_MC_END(); \
146 } \
147 else \
148 { \
149 /* \
150 * We're accessing memory. \
151 * Note! We're putting the eflags on the stack here so we can commit them \
152 * after the memory. \
153 */ \
154 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK)) \
155 { \
156 IEM_MC_BEGIN(3, 3, 0, 0); \
157 IEM_MC_ARG(uint8_t const *, pu8Dst, 0); \
158 IEM_MC_ARG(uint8_t, u8Src, 1); \
159 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2); \
160 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
161 IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
162 \
163 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
164 IEMOP_HLP_DONE_DECODING(); \
165 IEM_MC_MEM_MAP_U8_RO(pu8Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
166 IEM_MC_FETCH_GREG_U8(u8Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
167 IEM_MC_FETCH_EFLAGS(EFlags); \
168 IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU8, pu8Dst, u8Src, pEFlags); \
169 \
170 IEM_MC_MEM_COMMIT_AND_UNMAP_RO(pu8Dst, bUnmapInfo); \
171 IEM_MC_COMMIT_EFLAGS(EFlags); \
172 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
173 IEM_MC_END(); \
174 } \
175 else \
176 { \
 /* Scope intentionally left open - closed by the NO_LOCK/LOCKED companion macro. */ \
177 (void)0
178
/**
 * Closes IEMOP_BODY_BINARY_rm_r8_RW/RO for instructions that do not allow
 * the lock prefix: rejects lock-prefixed memory forms via
 * IEMOP_RAISE_INVALID_LOCK_PREFIX_RET and supplies the two closing braces
 * left open by the body macro.
 */
179#define IEMOP_BODY_BINARY_rm_r8_NO_LOCK() \
180 IEMOP_HLP_DONE_DECODING(); \
181 IEMOP_RAISE_INVALID_LOCK_PREFIX_RET(); \
182 } \
183 } \
184 (void)0
185
/**
 * Closes IEMOP_BODY_BINARY_rm_r8_RW for lockable instructions: emits the
 * lock-prefixed memory path using the atomic worker and supplies the two
 * closing braces left open by the body macro.
 *
 * @param a_fnLockedU8 Atomic worker taking (pu8Dst, u8Src, pEFlags), used
 *        when the lock prefix is present on a memory destination.
 */
186#define IEMOP_BODY_BINARY_rm_r8_LOCKED(a_fnLockedU8) \
187 IEM_MC_BEGIN(3, 3, 0, 0); \
188 IEM_MC_ARG(uint8_t *, pu8Dst, 0); \
189 IEM_MC_ARG(uint8_t, u8Src, 1); \
190 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2); \
191 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
192 IEM_MC_LOCAL(uint8_t, bUnmapInfo); /* renamed from bMapInfoDst for consistency with the other body macros */ \
193 \
194 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
195 IEMOP_HLP_DONE_DECODING(); \
196 IEM_MC_MEM_MAP_U8_RW(pu8Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
197 IEM_MC_FETCH_GREG_U8(u8Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
198 IEM_MC_FETCH_EFLAGS(EFlags); \
199 IEM_MC_CALL_VOID_AIMPL_3(a_fnLockedU8, pu8Dst, u8Src, pEFlags); \
200 \
201 IEM_MC_MEM_COMMIT_AND_UNMAP_RW(pu8Dst, bUnmapInfo); \
202 IEM_MC_COMMIT_EFLAGS(EFlags); \
203 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
204 IEM_MC_END(); \
205 } \
206 } \
207 (void)0
208
209/**
210 * Body for byte instructions like ADD, AND, OR, ++ with a register as the
211 * destination.
 *
 * Self-contained (no companion macro needed): the destination is always a
 * register, so the lock prefix is never legal and the memory operand is only
 * fetched, never written.
 *
 * @param a_fnNormalU8 Worker taking (pu8Dst, u8Src, pEFlags).
212 */
213#define IEMOP_BODY_BINARY_r8_rm(a_fnNormalU8) \
214 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm); \
215 \
216 /* \
217 * If rm is denoting a register, no more instruction bytes. \
218 */ \
219 if (IEM_IS_MODRM_REG_MODE(bRm)) \
220 { \
221 IEM_MC_BEGIN(3, 0, 0, 0); \
222 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
223 IEM_MC_ARG(uint8_t *, pu8Dst, 0); \
224 IEM_MC_ARG(uint8_t, u8Src, 1); \
225 IEM_MC_ARG(uint32_t *, pEFlags, 2); \
226 \
227 IEM_MC_FETCH_GREG_U8(u8Src, IEM_GET_MODRM_RM(pVCpu, bRm)); \
228 IEM_MC_REF_GREG_U8(pu8Dst, IEM_GET_MODRM_REG(pVCpu, bRm)); \
229 IEM_MC_REF_EFLAGS(pEFlags); \
230 IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU8, pu8Dst, u8Src, pEFlags); \
231 \
232 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
233 IEM_MC_END(); \
234 } \
235 else \
236 { \
237 /* \
238 * We're accessing memory. \
239 */ \
240 IEM_MC_BEGIN(3, 1, 0, 0); \
241 IEM_MC_ARG(uint8_t *, pu8Dst, 0); \
242 IEM_MC_ARG(uint8_t, u8Src, 1); \
243 IEM_MC_ARG(uint32_t *, pEFlags, 2); \
244 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
245 \
246 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
247 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
248 IEM_MC_FETCH_MEM_U8(u8Src, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
249 IEM_MC_REF_GREG_U8(pu8Dst, IEM_GET_MODRM_REG(pVCpu, bRm)); \
250 IEM_MC_REF_EFLAGS(pEFlags); \
251 IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU8, pu8Dst, u8Src, pEFlags); \
252 \
253 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
254 IEM_MC_END(); \
255 } \
256 (void)0
257
258
259/**
260 * Body for word/dword/qword instructions like ADD, AND, OR, ++ with
261 * memory/register as the destination.
 *
 * Switches on the effective operand size for both the register and the
 * non-locked memory paths; the 32-bit register case clears the high half of
 * the destination GPR as required in 64-bit mode.  Leaves an 'else' scope
 * open (trailing (void)0) that IEMOP_BODY_BINARY_rm_rv_LOCKED must close.
 *
 * @param a_fnNormalU16 Worker for 16-bit operands (pu16Dst, u16Src, pEFlags).
 * @param a_fnNormalU32 Worker for 32-bit operands.
 * @param a_fnNormalU64 Worker for 64-bit operands.
262 */
263#define IEMOP_BODY_BINARY_rm_rv_RW(a_fnNormalU16, a_fnNormalU32, a_fnNormalU64) \
264 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm); \
265 \
266 /* \
267 * If rm is denoting a register, no more instruction bytes. \
268 */ \
269 if (IEM_IS_MODRM_REG_MODE(bRm)) \
270 { \
271 switch (pVCpu->iem.s.enmEffOpSize) \
272 { \
273 case IEMMODE_16BIT: \
274 IEM_MC_BEGIN(3, 0, 0, 0); \
275 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
276 IEM_MC_ARG(uint16_t *, pu16Dst, 0); \
277 IEM_MC_ARG(uint16_t, u16Src, 1); \
278 IEM_MC_ARG(uint32_t *, pEFlags, 2); \
279 \
280 IEM_MC_FETCH_GREG_U16(u16Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
281 IEM_MC_REF_GREG_U16(pu16Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
282 IEM_MC_REF_EFLAGS(pEFlags); \
283 IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU16, pu16Dst, u16Src, pEFlags); \
284 \
285 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
286 IEM_MC_END(); \
287 break; \
288 \
289 case IEMMODE_32BIT: \
290 IEM_MC_BEGIN(3, 0, IEM_MC_F_MIN_386, 0); \
291 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
292 IEM_MC_ARG(uint32_t *, pu32Dst, 0); \
293 IEM_MC_ARG(uint32_t, u32Src, 1); \
294 IEM_MC_ARG(uint32_t *, pEFlags, 2); \
295 \
296 IEM_MC_FETCH_GREG_U32(u32Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
297 IEM_MC_REF_GREG_U32(pu32Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
298 IEM_MC_REF_EFLAGS(pEFlags); \
299 IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU32, pu32Dst, u32Src, pEFlags); \
300 \
 /* 32-bit writes zero bits 63:32 of the destination GPR. */ \
301 IEM_MC_CLEAR_HIGH_GREG_U64(IEM_GET_MODRM_RM(pVCpu, bRm)); \
302 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
303 IEM_MC_END(); \
304 break; \
305 \
306 case IEMMODE_64BIT: \
307 IEM_MC_BEGIN(3, 0, IEM_MC_F_64BIT, 0); \
308 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
309 IEM_MC_ARG(uint64_t *, pu64Dst, 0); \
310 IEM_MC_ARG(uint64_t, u64Src, 1); \
311 IEM_MC_ARG(uint32_t *, pEFlags, 2); \
312 \
313 IEM_MC_FETCH_GREG_U64(u64Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
314 IEM_MC_REF_GREG_U64(pu64Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
315 IEM_MC_REF_EFLAGS(pEFlags); \
316 IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU64, pu64Dst, u64Src, pEFlags); \
317 \
318 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
319 IEM_MC_END(); \
320 break; \
321 \
322 IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
323 } \
324 } \
325 else \
326 { \
327 /* \
328 * We're accessing memory. \
329 * Note! We're putting the eflags on the stack here so we can commit them \
330 * after the memory. \
331 */ \
332 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK)) \
333 { \
334 switch (pVCpu->iem.s.enmEffOpSize) \
335 { \
336 case IEMMODE_16BIT: \
337 IEM_MC_BEGIN(3, 3, 0, 0); \
338 IEM_MC_ARG(uint16_t *, pu16Dst, 0); \
339 IEM_MC_ARG(uint16_t, u16Src, 1); \
340 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2); \
341 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
342 IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
343 \
344 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
345 IEMOP_HLP_DONE_DECODING(); \
346 IEM_MC_MEM_MAP_U16_RW(pu16Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
347 IEM_MC_FETCH_GREG_U16(u16Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
348 IEM_MC_FETCH_EFLAGS(EFlags); \
349 IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU16, pu16Dst, u16Src, pEFlags); \
350 \
351 IEM_MC_MEM_COMMIT_AND_UNMAP_RW(pu16Dst, bUnmapInfo); \
352 IEM_MC_COMMIT_EFLAGS(EFlags); \
353 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
354 IEM_MC_END(); \
355 break; \
356 \
357 case IEMMODE_32BIT: \
358 IEM_MC_BEGIN(3, 3, IEM_MC_F_MIN_386, 0); \
359 IEM_MC_ARG(uint32_t *, pu32Dst, 0); \
360 IEM_MC_ARG(uint32_t, u32Src, 1); \
361 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2); \
362 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
363 IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
364 \
365 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
366 IEMOP_HLP_DONE_DECODING(); \
367 IEM_MC_MEM_MAP_U32_RW(pu32Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
368 IEM_MC_FETCH_GREG_U32(u32Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
369 IEM_MC_FETCH_EFLAGS(EFlags); \
370 IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU32, pu32Dst, u32Src, pEFlags); \
371 \
372 IEM_MC_MEM_COMMIT_AND_UNMAP_RW(pu32Dst, bUnmapInfo); \
373 IEM_MC_COMMIT_EFLAGS(EFlags); \
374 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
375 IEM_MC_END(); \
376 break; \
377 \
378 case IEMMODE_64BIT: \
379 IEM_MC_BEGIN(3, 3, IEM_MC_F_64BIT, 0); \
380 IEM_MC_ARG(uint64_t *, pu64Dst, 0); \
381 IEM_MC_ARG(uint64_t, u64Src, 1); \
382 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2); \
383 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
384 IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
385 \
386 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
387 IEMOP_HLP_DONE_DECODING(); \
388 IEM_MC_MEM_MAP_U64_RW(pu64Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
389 IEM_MC_FETCH_GREG_U64(u64Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
390 IEM_MC_FETCH_EFLAGS(EFlags); \
391 IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU64, pu64Dst, u64Src, pEFlags); \
392 \
393 IEM_MC_MEM_COMMIT_AND_UNMAP_RW(pu64Dst, bUnmapInfo); \
394 IEM_MC_COMMIT_EFLAGS(EFlags); \
395 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
396 IEM_MC_END(); \
397 break; \
398 \
399 IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
400 } \
401 } \
402 else \
403 { \
 /* Scope intentionally left open - closed by IEMOP_BODY_BINARY_rm_rv_LOCKED. */ \
404 (void)0
/**
 * Closes IEMOP_BODY_BINARY_rm_rv_RW for lockable instructions: emits the
 * lock-prefixed memory path using the atomic workers and supplies the
 * closing braces left open by the body macro.
 *
 * @param a_fnLockedU16 Atomic worker for 16-bit operands (pu16Dst, u16Src, pEFlags).
 * @param a_fnLockedU32 Atomic worker for 32-bit operands.
 * @param a_fnLockedU64 Atomic worker for 64-bit operands.
 */
405/* Separate macro to work around parsing issue in IEMAllInstPython.py */
406#define IEMOP_BODY_BINARY_rm_rv_LOCKED(a_fnLockedU16, a_fnLockedU32, a_fnLockedU64) \
407 switch (pVCpu->iem.s.enmEffOpSize) \
408 { \
409 case IEMMODE_16BIT: \
410 IEM_MC_BEGIN(3, 3, 0, 0); \
411 IEM_MC_ARG(uint16_t *, pu16Dst, 0); \
412 IEM_MC_ARG(uint16_t, u16Src, 1); \
413 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2); \
414 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
415 IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
416 \
417 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
418 IEMOP_HLP_DONE_DECODING(); \
419 IEM_MC_MEM_MAP_U16_RW(pu16Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
420 IEM_MC_FETCH_GREG_U16(u16Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
421 IEM_MC_FETCH_EFLAGS(EFlags); \
422 IEM_MC_CALL_VOID_AIMPL_3(a_fnLockedU16, pu16Dst, u16Src, pEFlags); \
423 \
424 IEM_MC_MEM_COMMIT_AND_UNMAP_RW(pu16Dst, bUnmapInfo); \
425 IEM_MC_COMMIT_EFLAGS(EFlags); \
426 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
427 IEM_MC_END(); \
428 break; \
429 \
430 case IEMMODE_32BIT: \
431 IEM_MC_BEGIN(3, 3, IEM_MC_F_MIN_386, 0); \
432 IEM_MC_ARG(uint32_t *, pu32Dst, 0); \
433 IEM_MC_ARG(uint32_t, u32Src, 1); \
434 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2); \
435 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
436 IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
437 \
438 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
439 IEMOP_HLP_DONE_DECODING(); \
440 IEM_MC_MEM_MAP_U32_RW(pu32Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
441 IEM_MC_FETCH_GREG_U32(u32Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
442 IEM_MC_FETCH_EFLAGS(EFlags); \
443 IEM_MC_CALL_VOID_AIMPL_3(a_fnLockedU32, pu32Dst, u32Src, pEFlags); \
444 \
445 IEM_MC_MEM_COMMIT_AND_UNMAP_RW(pu32Dst, bUnmapInfo); \
446 IEM_MC_COMMIT_EFLAGS(EFlags); \
447 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
448 IEM_MC_END(); \
449 break; \
450 \
451 case IEMMODE_64BIT: \
452 IEM_MC_BEGIN(3, 3, IEM_MC_F_64BIT, 0); \
453 IEM_MC_ARG(uint64_t *, pu64Dst, 0); \
454 IEM_MC_ARG(uint64_t, u64Src, 1); \
455 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2); \
456 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
457 IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
458 \
459 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
460 IEMOP_HLP_DONE_DECODING(); \
461 IEM_MC_MEM_MAP_U64_RW(pu64Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
462 IEM_MC_FETCH_GREG_U64(u64Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
463 IEM_MC_FETCH_EFLAGS(EFlags); \
464 IEM_MC_CALL_VOID_AIMPL_3(a_fnLockedU64, pu64Dst, u64Src, pEFlags); \
465 \
466 IEM_MC_MEM_COMMIT_AND_UNMAP_RW(pu64Dst, bUnmapInfo); \
467 IEM_MC_COMMIT_EFLAGS(EFlags); \
468 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
469 IEM_MC_END(); \
470 break; \
471 \
472 IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
473 } \
474 } \
475 } \
476 (void)0
477
478/**
479 * Body for read-only word/dword/qword instructions like TEST and CMP with
480 * memory/register as the destination.
 *
 * Self-contained, unlike the RW variant: memory operands are mapped RO and
 * a lock prefix is always rejected (IEMOP_RAISE_INVALID_LOCK_PREFIX_RET),
 * so no companion macro is required.
 *
 * @param a_fnNormalU16 Worker for 16-bit operands (pu16Dst, u16Src, pEFlags).
 * @param a_fnNormalU32 Worker for 32-bit operands.
 * @param a_fnNormalU64 Worker for 64-bit operands.
481 */
482#define IEMOP_BODY_BINARY_rm_rv_RO(a_fnNormalU16, a_fnNormalU32, a_fnNormalU64) \
483 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm); \
484 \
485 /* \
486 * If rm is denoting a register, no more instruction bytes. \
487 */ \
488 if (IEM_IS_MODRM_REG_MODE(bRm)) \
489 { \
490 switch (pVCpu->iem.s.enmEffOpSize) \
491 { \
492 case IEMMODE_16BIT: \
493 IEM_MC_BEGIN(3, 0, 0, 0); \
494 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
495 IEM_MC_ARG(uint16_t *, pu16Dst, 0); \
496 IEM_MC_ARG(uint16_t, u16Src, 1); \
497 IEM_MC_ARG(uint32_t *, pEFlags, 2); \
498 \
499 IEM_MC_FETCH_GREG_U16(u16Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
500 IEM_MC_REF_GREG_U16(pu16Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
501 IEM_MC_REF_EFLAGS(pEFlags); \
502 IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU16, pu16Dst, u16Src, pEFlags); \
503 \
504 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
505 IEM_MC_END(); \
506 break; \
507 \
508 case IEMMODE_32BIT: \
509 IEM_MC_BEGIN(3, 0, IEM_MC_F_MIN_386, 0); \
510 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
511 IEM_MC_ARG(uint32_t *, pu32Dst, 0); \
512 IEM_MC_ARG(uint32_t, u32Src, 1); \
513 IEM_MC_ARG(uint32_t *, pEFlags, 2); \
514 \
515 IEM_MC_FETCH_GREG_U32(u32Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
516 IEM_MC_REF_GREG_U32(pu32Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
517 IEM_MC_REF_EFLAGS(pEFlags); \
518 IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU32, pu32Dst, u32Src, pEFlags); \
519 \
520 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
521 IEM_MC_END(); \
522 break; \
523 \
524 case IEMMODE_64BIT: \
525 IEM_MC_BEGIN(3, 0, IEM_MC_F_64BIT, 0); \
526 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
527 IEM_MC_ARG(uint64_t *, pu64Dst, 0); \
528 IEM_MC_ARG(uint64_t, u64Src, 1); \
529 IEM_MC_ARG(uint32_t *, pEFlags, 2); \
530 \
531 IEM_MC_FETCH_GREG_U64(u64Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
532 IEM_MC_REF_GREG_U64(pu64Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
533 IEM_MC_REF_EFLAGS(pEFlags); \
534 IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU64, pu64Dst, u64Src, pEFlags); \
535 \
536 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
537 IEM_MC_END(); \
538 break; \
539 \
540 IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
541 } \
542 } \
543 else \
544 { \
545 /* \
546 * We're accessing memory. \
547 * Note! We're putting the eflags on the stack here so we can commit them \
548 * after the memory. \
549 */ \
550 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK)) \
551 { \
552 switch (pVCpu->iem.s.enmEffOpSize) \
553 { \
554 case IEMMODE_16BIT: \
555 IEM_MC_BEGIN(3, 3, 0, 0); \
556 IEM_MC_ARG(uint16_t const *, pu16Dst, 0); \
557 IEM_MC_ARG(uint16_t, u16Src, 1); \
558 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2); \
559 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
560 IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
561 \
562 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
563 IEMOP_HLP_DONE_DECODING(); \
564 IEM_MC_MEM_MAP_U16_RO(pu16Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
565 IEM_MC_FETCH_GREG_U16(u16Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
566 IEM_MC_FETCH_EFLAGS(EFlags); \
567 IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU16, pu16Dst, u16Src, pEFlags); \
568 \
569 IEM_MC_MEM_COMMIT_AND_UNMAP_RO(pu16Dst, bUnmapInfo); \
570 IEM_MC_COMMIT_EFLAGS(EFlags); \
571 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
572 IEM_MC_END(); \
573 break; \
574 \
575 case IEMMODE_32BIT: \
576 IEM_MC_BEGIN(3, 3, IEM_MC_F_MIN_386, 0); \
577 IEM_MC_ARG(uint32_t const *, pu32Dst, 0); \
578 IEM_MC_ARG(uint32_t, u32Src, 1); \
579 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2); \
580 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
581 IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
582 \
583 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
584 IEMOP_HLP_DONE_DECODING(); \
585 IEM_MC_MEM_MAP_U32_RO(pu32Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
586 IEM_MC_FETCH_GREG_U32(u32Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
587 IEM_MC_FETCH_EFLAGS(EFlags); \
588 IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU32, pu32Dst, u32Src, pEFlags); \
589 \
590 IEM_MC_MEM_COMMIT_AND_UNMAP_RO(pu32Dst, bUnmapInfo); \
591 IEM_MC_COMMIT_EFLAGS(EFlags); \
592 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
593 IEM_MC_END(); \
594 break; \
595 \
596 case IEMMODE_64BIT: \
597 IEM_MC_BEGIN(3, 3, IEM_MC_F_64BIT, 0); \
598 IEM_MC_ARG(uint64_t const *, pu64Dst, 0); \
599 IEM_MC_ARG(uint64_t, u64Src, 1); \
600 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2); \
601 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
602 IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
603 \
604 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
605 IEMOP_HLP_DONE_DECODING(); \
606 IEM_MC_MEM_MAP_U64_RO(pu64Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
607 IEM_MC_FETCH_GREG_U64(u64Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
608 IEM_MC_FETCH_EFLAGS(EFlags); \
609 IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU64, pu64Dst, u64Src, pEFlags); \
610 \
611 IEM_MC_MEM_COMMIT_AND_UNMAP_RO(pu64Dst, bUnmapInfo); \
612 IEM_MC_COMMIT_EFLAGS(EFlags); \
613 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
614 IEM_MC_END(); \
615 break; \
616 \
617 IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
618 } \
619 } \
620 else \
621 { \
 /* Read-only destinations never allow the lock prefix. */ \
622 IEMOP_HLP_DONE_DECODING(); \
623 IEMOP_RAISE_INVALID_LOCK_PREFIX_RET(); \
624 } \
625 } \
626 (void)0
627
628
629/**
630 * Body for instructions like ADD, AND, OR, ++ with working on AL with
631 * a byte immediate.
 *
 * Self-contained; the caller supplies the trailing semicolon (note the
 * final IEM_MC_END() has none).
 *
 * @param a_fnNormalU8 Worker taking (pu8Dst, u8Src, pEFlags); pu8Dst
 *        references AL.
632 */
633#define IEMOP_BODY_BINARY_AL_Ib(a_fnNormalU8) \
634 uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm); \
635 \
636 IEM_MC_BEGIN(3, 0, 0, 0); \
637 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
638 IEM_MC_ARG(uint8_t *, pu8Dst, 0); \
639 IEM_MC_ARG_CONST(uint8_t, u8Src,/*=*/ u8Imm, 1); \
640 IEM_MC_ARG(uint32_t *, pEFlags, 2); \
641 \
642 IEM_MC_REF_GREG_U8(pu8Dst, X86_GREG_xAX); \
643 IEM_MC_REF_EFLAGS(pEFlags); \
644 IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU8, pu8Dst, u8Src, pEFlags); \
645 \
646 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
647 IEM_MC_END()
648
649/**
650 * Body for instructions like ADD, AND, OR, ++ with working on
651 * AX/EAX/RAX with a word/dword immediate.
 *
 * The 64-bit form sign-extends a 32-bit immediate, matching the Iz
 * operand encoding.
 *
 * @param a_fnNormalU16      Worker for 16-bit operands (pu16Dst, u16Src, pEFlags).
 * @param a_fnNormalU32      Worker for 32-bit operands.
 * @param a_fnNormalU64      Worker for 64-bit operands.
 * @param a_fModifiesDstReg  Non-zero when the worker writes the destination,
 *                           so the 32-bit form must clear bits 63:32 of RAX.
 *
 * NOTE(review): unlike the rm_rv body macros, the 16/32-bit case blocks end
 * without a 'break' - presumably the FINISH/END MC statements leave the
 * switch; confirm no fallthrough is possible.
652 */
653#define IEMOP_BODY_BINARY_rAX_Iz(a_fnNormalU16, a_fnNormalU32, a_fnNormalU64, a_fModifiesDstReg) \
654 switch (pVCpu->iem.s.enmEffOpSize) \
655 { \
656 case IEMMODE_16BIT: \
657 { \
658 uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm); \
659 \
660 IEM_MC_BEGIN(3, 0, 0, 0); \
661 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
662 IEM_MC_ARG(uint16_t *, pu16Dst, 0); \
663 IEM_MC_ARG_CONST(uint16_t, u16Src,/*=*/ u16Imm, 1); \
664 IEM_MC_ARG(uint32_t *, pEFlags, 2); \
665 \
666 IEM_MC_REF_GREG_U16(pu16Dst, X86_GREG_xAX); \
667 IEM_MC_REF_EFLAGS(pEFlags); \
668 IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU16, pu16Dst, u16Src, pEFlags); \
669 \
670 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
671 IEM_MC_END(); \
672 } \
673 \
674 case IEMMODE_32BIT: \
675 { \
676 uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm); \
677 \
678 IEM_MC_BEGIN(3, 0, IEM_MC_F_MIN_386, 0); \
679 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
680 IEM_MC_ARG(uint32_t *, pu32Dst, 0); \
681 IEM_MC_ARG_CONST(uint32_t, u32Src,/*=*/ u32Imm, 1); \
682 IEM_MC_ARG(uint32_t *, pEFlags, 2); \
683 \
684 IEM_MC_REF_GREG_U32(pu32Dst, X86_GREG_xAX); \
685 IEM_MC_REF_EFLAGS(pEFlags); \
686 IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU32, pu32Dst, u32Src, pEFlags); \
687 \
688 if (a_fModifiesDstReg) \
689 IEM_MC_CLEAR_HIGH_GREG_U64(X86_GREG_xAX); \
690 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
691 IEM_MC_END(); \
692 } \
693 \
694 case IEMMODE_64BIT: \
695 { \
696 uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm); \
697 \
698 IEM_MC_BEGIN(3, 0, IEM_MC_F_64BIT, 0); \
699 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
700 IEM_MC_ARG(uint64_t *, pu64Dst, 0); \
701 IEM_MC_ARG_CONST(uint64_t, u64Src,/*=*/ u64Imm, 1); \
702 IEM_MC_ARG(uint32_t *, pEFlags, 2); \
703 \
704 IEM_MC_REF_GREG_U64(pu64Dst, X86_GREG_xAX); \
705 IEM_MC_REF_EFLAGS(pEFlags); \
706 IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU64, pu64Dst, u64Src, pEFlags); \
707 \
708 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
709 IEM_MC_END(); \
710 } \
711 \
712 IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
713 } \
714 (void)0
715
716
717
718/* Instruction specification format - work in progress: */
719
720/**
721 * @opcode 0x00
722 * @opmnemonic add
723 * @op1 rm:Eb
724 * @op2 reg:Gb
725 * @opmaps one
726 * @openc ModR/M
727 * @opflmodify cf,pf,af,zf,sf,of
728 * @ophints harmless ignores_op_sizes
729 * @opstats add_Eb_Gb
730 * @opgroup og_gen_arith_bin
731 * @optest op1=1 op2=1 -> op1=2 efl&|=nc,pe,na,nz,pl,nv
732 * @optest efl|=cf op1=1 op2=2 -> op1=3 efl&|=nc,po,na,nz,pl,nv
733 * @optest op1=254 op2=1 -> op1=255 efl&|=nc,po,na,nz,ng,nv
734 * @optest op1=128 op2=128 -> op1=0 efl&|=ov,pl,zf,na,po,cf
735 */
736FNIEMOP_DEF(iemOp_add_Eb_Gb)
737{
738 IEMOP_MNEMONIC2(MR, ADD, add, Eb, Gb, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_LOCK_ALLOWED);
 /* RW body covers register & unlocked memory forms; LOCKED closes it with the lock-prefixed path. */
739 IEMOP_BODY_BINARY_rm_r8_RW( iemAImpl_add_u8);
740 IEMOP_BODY_BINARY_rm_r8_LOCKED(iemAImpl_add_u8_locked);
741}
742
743
744/**
745 * @opcode 0x01
746 * @opgroup og_gen_arith_bin
747 * @opflmodify cf,pf,af,zf,sf,of
748 * @optest op1=1 op2=1 -> op1=2 efl&|=nc,pe,na,nz,pl,nv
749 * @optest efl|=cf op1=2 op2=2 -> op1=4 efl&|=nc,pe,na,nz,pl,nv
750 * @optest efl&~=cf op1=-1 op2=1 -> op1=0 efl&|=cf,po,af,zf,pl,nv
751 * @optest op1=-1 op2=-1 -> op1=-2 efl&|=cf,pe,af,nz,ng,nv
752 */
753FNIEMOP_DEF(iemOp_add_Ev_Gv)
754{
755 IEMOP_MNEMONIC2(MR, ADD, add, Ev, Gv, DISOPTYPE_HARMLESS, IEMOPHINT_LOCK_ALLOWED);
 /* RW body covers register & unlocked memory forms; LOCKED closes it with the lock-prefixed path. */
756 IEMOP_BODY_BINARY_rm_rv_RW( iemAImpl_add_u16, iemAImpl_add_u32, iemAImpl_add_u64);
757 IEMOP_BODY_BINARY_rm_rv_LOCKED(iemAImpl_add_u16_locked, iemAImpl_add_u32_locked, iemAImpl_add_u64_locked);
758}
759
760
761/**
762 * @opcode 0x02
763 * @opgroup og_gen_arith_bin
764 * @opflmodify cf,pf,af,zf,sf,of
765 * @opcopytests iemOp_add_Eb_Gb
766 */
767FNIEMOP_DEF(iemOp_add_Gb_Eb)
768{
769 IEMOP_MNEMONIC2(RM, ADD, add, Gb, Eb, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
 /* Register destination - no lock prefix variant needed. */
770 IEMOP_BODY_BINARY_r8_rm(iemAImpl_add_u8);
771}
772
773
774/**
775 * @opcode 0x03
776 * @opgroup og_gen_arith_bin
777 * @opflmodify cf,pf,af,zf,sf,of
778 * @opcopytests iemOp_add_Ev_Gv
779 */
780FNIEMOP_DEF(iemOp_add_Gv_Ev)
781{
782 IEMOP_MNEMONIC2(RM, ADD, add, Gv, Ev, DISOPTYPE_HARMLESS, 0);
 /* Register destination - no lock prefix variant needed. */
783 IEMOP_BODY_BINARY_rv_rm(iemAImpl_add_u16, iemAImpl_add_u32, iemAImpl_add_u64, 1, 0);
784}
785
786
787/**
788 * @opcode 0x04
789 * @opgroup og_gen_arith_bin
790 * @opflmodify cf,pf,af,zf,sf,of
791 * @opcopytests iemOp_add_Eb_Gb
792 */
793FNIEMOP_DEF(iemOp_add_Al_Ib)
794{
795 IEMOP_MNEMONIC2(FIXED, ADD, add, AL, Ib, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
 /* ADD AL, imm8 - fixed AL destination. */
796 IEMOP_BODY_BINARY_AL_Ib(iemAImpl_add_u8);
797}
798
799
800/**
801 * @opcode 0x05
802 * @opgroup og_gen_arith_bin
803 * @opflmodify cf,pf,af,zf,sf,of
804 * @optest op1=1 op2=1 -> op1=2 efl&|=nv,pl,nz,na,pe
805 * @optest efl|=cf op1=2 op2=2 -> op1=4 efl&|=nc,pe,na,nz,pl,nv
806 * @optest efl&~=cf op1=-1 op2=1 -> op1=0 efl&|=cf,po,af,zf,pl,nv
807 * @optest op1=-1 op2=-1 -> op1=-2 efl&|=cf,pe,af,nz,ng,nv
808 */
809FNIEMOP_DEF(iemOp_add_eAX_Iz)
810{
811 IEMOP_MNEMONIC2(FIXED, ADD, add, rAX, Iz, DISOPTYPE_HARMLESS, 0);
 /* Final '1' = a_fModifiesDstReg: ADD writes rAX, so the 32-bit form clears bits 63:32. */
812 IEMOP_BODY_BINARY_rAX_Iz(iemAImpl_add_u16, iemAImpl_add_u32, iemAImpl_add_u64, 1);
813}
814
815
816/**
817 * @opcode 0x06
818 * @opgroup og_stack_sreg
 *
 * PUSH ES - invalid in 64-bit mode (IEMOP_HLP_NO_64BIT), shared segment-push
 * helper does the actual work.
819 */
820FNIEMOP_DEF(iemOp_push_ES)
821{
822 IEMOP_MNEMONIC1(FIXED, PUSH, push, ES, DISOPTYPE_HARMLESS | DISOPTYPE_X86_INVALID_64, 0);
823 IEMOP_HLP_NO_64BIT();
824 return FNIEMOP_CALL_1(iemOpCommonPushSReg, X86_SREG_ES);
825}
826
827
828/**
829 * @opcode 0x07
830 * @opgroup og_stack_sreg
 *
 * POP ES - invalid in 64-bit mode; deferred to the C implementation since a
 * segment register load can change the execution mode (IEM_CIMPL_F_MODE).
831 */
832FNIEMOP_DEF(iemOp_pop_ES)
833{
834 IEMOP_MNEMONIC1(FIXED, POP, pop, ES, DISOPTYPE_HARMLESS | DISOPTYPE_X86_INVALID_64, 0);
835 IEMOP_HLP_NO_64BIT();
836 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
837 IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_MODE, iemCImpl_pop_Sreg, X86_SREG_ES, pVCpu->iem.s.enmEffOpSize);
838}
839
840
841/**
842 * @opcode 0x08
843 * @opgroup og_gen_arith_bin
844 * @opflmodify cf,pf,af,zf,sf,of
845 * @opflundef af
846 * @opflclear of,cf
847 * @optest op1=7 op2=12 -> op1=15 efl&|=nc,po,na,nz,pl,nv
848 * @optest efl|=of,cf op1=0 op2=0 -> op1=0 efl&|=nc,po,na,zf,pl,nv
849 * @optest op1=0xee op2=0x11 -> op1=0xff efl&|=nc,po,na,nz,ng,nv
850 * @optest op1=0xff op2=0xff -> op1=0xff efl&|=nc,po,na,nz,ng,nv
851 */
852FNIEMOP_DEF(iemOp_or_Eb_Gb)
853{
854 IEMOP_MNEMONIC2(MR, OR, or, Eb, Gb, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_LOCK_ALLOWED);
 /* AF is architecturally undefined after OR. */
855 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
856 IEMOP_BODY_BINARY_rm_r8_RW( iemAImpl_or_u8);
857 IEMOP_BODY_BINARY_rm_r8_LOCKED(iemAImpl_or_u8_locked);
858}
859
860
861/**
862 * @opcode 0x09
863 * @opgroup og_gen_arith_bin
864 * @opflmodify cf,pf,af,zf,sf,of
865 * @opflundef af
866 * @opflclear of,cf
867 * @optest efl|=of,cf op1=12 op2=7 -> op1=15 efl&|=nc,po,na,nz,pl,nv
868 * @optest efl|=of,cf op1=0 op2=0 -> op1=0 efl&|=nc,po,na,zf,pl,nv
869 * @optest op1=-2 op2=1 -> op1=-1 efl&|=nc,po,na,nz,ng,nv
870 * @optest o16 / op1=0x5a5a op2=0xa5a5 -> op1=-1 efl&|=nc,po,na,nz,ng,nv
871 * @optest o32 / op1=0x5a5a5a5a op2=0xa5a5a5a5 -> op1=-1 efl&|=nc,po,na,nz,ng,nv
872 * @optest o64 / op1=0x5a5a5a5a5a5a5a5a op2=0xa5a5a5a5a5a5a5a5 -> op1=-1 efl&|=nc,po,na,nz,ng,nv
873 */
874FNIEMOP_DEF(iemOp_or_Ev_Gv)
875{
876 IEMOP_MNEMONIC2(MR, OR, or, Ev, Gv, DISOPTYPE_HARMLESS, IEMOPHINT_LOCK_ALLOWED);
 /* AF is architecturally undefined after OR. */
877 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
878 IEMOP_BODY_BINARY_rm_rv_RW( iemAImpl_or_u16, iemAImpl_or_u32, iemAImpl_or_u64);
879 IEMOP_BODY_BINARY_rm_rv_LOCKED(iemAImpl_or_u16_locked, iemAImpl_or_u32_locked, iemAImpl_or_u64_locked);
880}
881
882
883/**
884 * @opcode 0x0a
885 * @opgroup og_gen_arith_bin
886 * @opflmodify cf,pf,af,zf,sf,of
887 * @opflundef af
888 * @opflclear of,cf
889 * @opcopytests iemOp_or_Eb_Gb
890 */
891FNIEMOP_DEF(iemOp_or_Gb_Eb)
892{
893 IEMOP_MNEMONIC2(RM, OR, or, Gb, Eb, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_LOCK_ALLOWED);
 /* AF is architecturally undefined after OR. */
894 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
895 IEMOP_BODY_BINARY_r8_rm(iemAImpl_or_u8);
896}
897
898
899/**
900 * @opcode 0x0b
901 * @opgroup og_gen_arith_bin
902 * @opflmodify cf,pf,af,zf,sf,of
903 * @opflundef af
904 * @opflclear of,cf
905 * @opcopytests iemOp_or_Ev_Gv
906 */
907FNIEMOP_DEF(iemOp_or_Gv_Ev)
908{
909 IEMOP_MNEMONIC2(RM, OR, or, Gv, Ev, DISOPTYPE_HARMLESS, 0);
 /* AF is architecturally undefined after OR. */
910 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
911 IEMOP_BODY_BINARY_rv_rm(iemAImpl_or_u16, iemAImpl_or_u32, iemAImpl_or_u64, 1, 0);
912}
913
914
915/**
916 * @opcode 0x0c
917 * @opgroup og_gen_arith_bin
918 * @opflmodify cf,pf,af,zf,sf,of
919 * @opflundef af
920 * @opflclear of,cf
921 * @opcopytests iemOp_or_Eb_Gb
922 */
923FNIEMOP_DEF(iemOp_or_Al_Ib)
924{
925 IEMOP_MNEMONIC2(FIXED, OR, or, AL, Ib, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
 /* AF is architecturally undefined after OR. */
926 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
927 IEMOP_BODY_BINARY_AL_Ib(iemAImpl_or_u8);
928}
929
930
931/**
932 * @opcode 0x0d
933 * @opgroup og_gen_arith_bin
934 * @opflmodify cf,pf,af,zf,sf,of
935 * @opflundef af
936 * @opflclear of,cf
937 * @optest efl|=of,cf op1=12 op2=7 -> op1=15 efl&|=nc,po,na,nz,pl,nv
938 * @optest efl|=of,cf op1=0 op2=0 -> op1=0 efl&|=nc,po,na,zf,pl,nv
939 * @optest op1=-2 op2=1 -> op1=-1 efl&|=nc,po,na,nz,ng,nv
940 * @optest o16 / op1=0x5a5a op2=0xa5a5 -> op1=-1 efl&|=nc,po,na,nz,ng,nv
941 * @optest o32 / op1=0x5a5a5a5a op2=0xa5a5a5a5 -> op1=-1 efl&|=nc,po,na,nz,ng,nv
942 * @optest o64 / op1=0x5a5a5a5a5a5a5a5a op2=0xa5a5a5a5 -> op1=-1 efl&|=nc,po,na,nz,ng,nv
943 * @optest o64 / op1=0x5a5a5a5aa5a5a5a5 op2=0x5a5a5a5a -> op1=0x5a5a5a5affffffff efl&|=nc,po,na,nz,pl,nv
944 */
FNIEMOP_DEF(iemOp_or_eAX_Iz)
{
    /* OR rAX,Iz - OR an immediate (word/dword, sign-extended for 64-bit) into rAX. */
    IEMOP_MNEMONIC2(FIXED, OR, or, rAX, Iz, DISOPTYPE_HARMLESS, 0);
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF); /* AF is undefined after a logical OR. */
    IEMOP_BODY_BINARY_rAX_Iz(iemAImpl_or_u16, iemAImpl_or_u32, iemAImpl_or_u64, 1);
}
951
952
953/**
954 * @opcode 0x0e
955 * @opgroup og_stack_sreg
956 */
FNIEMOP_DEF(iemOp_push_CS)
{
    /* PUSH CS - invalid in 64-bit mode (opcode reused by the REX prefixes' neighbourhood). */
    IEMOP_MNEMONIC1(FIXED, PUSH, push, CS, DISOPTYPE_HARMLESS | DISOPTYPE_POTENTIALLY_DANGEROUS | DISOPTYPE_X86_INVALID_64, 0);
    IEMOP_HLP_NO_64BIT();
    return FNIEMOP_CALL_1(iemOpCommonPushSReg, X86_SREG_CS);
}
963
964
965/**
966 * @opcode 0x0f
967 * @opmnemonic EscTwo0f
968 * @openc two0f
969 * @opdisenum OP_2B_ESC
970 * @ophints harmless
971 * @opgroup og_escapes
972 */
FNIEMOP_DEF(iemOp_2byteEscape)
{
    /* 0x0f escape byte: dispatch to the two-byte opcode map (286+); plain 8086 decodes it as POP CS. */
#if 0 /// @todo def VBOX_STRICT
    /* Sanity check the table the first time around. */
    static bool s_fTested = false;
    if (RT_LIKELY(s_fTested)) { /* likely */ }
    else
    {
        s_fTested = true;
        Assert(g_apfnTwoByteMap[0xbc * 4 + 0] == iemOp_bsf_Gv_Ev);
        Assert(g_apfnTwoByteMap[0xbc * 4 + 1] == iemOp_bsf_Gv_Ev);
        Assert(g_apfnTwoByteMap[0xbc * 4 + 2] == iemOp_tzcnt_Gv_Ev);
        Assert(g_apfnTwoByteMap[0xbc * 4 + 3] == iemOp_bsf_Gv_Ev);
    }
#endif

    if (RT_LIKELY(IEM_GET_TARGET_CPU(pVCpu) >= IEMTARGETCPU_286))
    {
        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        IEMOP_HLP_MIN_286();
        /* The map has 4 entries per opcode byte, selected by the operand-size/repeat prefix index. */
        return FNIEMOP_CALL(g_apfnTwoByteMap[(uintptr_t)b * 4 + pVCpu->iem.s.idxPrefix]);
    }
    /* @opdone */

    /*
     * On the 8086 this is a POP CS instruction.
     * For the time being we don't specify this.
     */
    IEMOP_MNEMONIC1(FIXED, POP, pop, CS, DISOPTYPE_HARMLESS | DISOPTYPE_POTENTIALLY_DANGEROUS | DISOPTYPE_X86_INVALID_64, IEMOPHINT_SKIP_PYTHON);
    IEMOP_HLP_NO_64BIT();
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    /** @todo eliminate END_TB here */
    IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_BRANCH_INDIRECT | IEM_CIMPL_F_BRANCH_FAR | IEM_CIMPL_F_END_TB,
                                iemCImpl_pop_Sreg, X86_SREG_CS, pVCpu->iem.s.enmEffOpSize);
}
1008
1009/**
1010 * @opcode 0x10
1011 * @opgroup og_gen_arith_bin
1012 * @opfltest cf
1013 * @opflmodify cf,pf,af,zf,sf,of
1014 * @optest op1=1 op2=1 efl&~=cf -> op1=2 efl&|=nc,pe,na,nz,pl,nv
1015 * @optest op1=1 op2=1 efl|=cf -> op1=3 efl&|=nc,po,na,nz,pl,nv
1016 * @optest op1=0xff op2=0 efl|=cf -> op1=0 efl&|=cf,po,af,zf,pl,nv
1017 * @optest op1=0 op2=0 efl|=cf -> op1=1 efl&|=nc,pe,na,nz,pl,nv
1018 * @optest op1=0 op2=0 efl&~=cf -> op1=0 efl&|=nc,po,na,zf,pl,nv
1019 */
FNIEMOP_DEF(iemOp_adc_Eb_Gb)
{
    /* ADC Eb,Gb - byte add-with-carry, r/m destination; LOCK allowed for the memory form. */
    IEMOP_MNEMONIC2(MR, ADC, adc, Eb, Gb, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_LOCK_ALLOWED);
    IEMOP_BODY_BINARY_rm_r8_RW(    iemAImpl_adc_u8);
    IEMOP_BODY_BINARY_rm_r8_LOCKED(iemAImpl_adc_u8_locked);
}
1026
1027
1028/**
1029 * @opcode 0x11
1030 * @opgroup og_gen_arith_bin
1031 * @opfltest cf
1032 * @opflmodify cf,pf,af,zf,sf,of
1033 * @optest op1=1 op2=1 efl&~=cf -> op1=2 efl&|=nc,pe,na,nz,pl,nv
1034 * @optest op1=1 op2=1 efl|=cf -> op1=3 efl&|=nc,po,na,nz,pl,nv
1035 * @optest op1=-1 op2=0 efl|=cf -> op1=0 efl&|=cf,po,af,zf,pl,nv
1036 * @optest op1=0 op2=0 efl|=cf -> op1=1 efl&|=nc,pe,na,nz,pl,nv
1037 * @optest op1=0 op2=0 efl&~=cf -> op1=0 efl&|=nc,po,na,zf,pl,nv
1038 */
FNIEMOP_DEF(iemOp_adc_Ev_Gv)
{
    /* ADC Ev,Gv - 16/32/64-bit add-with-carry, r/m destination; LOCK allowed for the memory form. */
    IEMOP_MNEMONIC2(MR, ADC, adc, Ev, Gv, DISOPTYPE_HARMLESS, IEMOPHINT_LOCK_ALLOWED);
    IEMOP_BODY_BINARY_rm_rv_RW(    iemAImpl_adc_u16,        iemAImpl_adc_u32,        iemAImpl_adc_u64);
    IEMOP_BODY_BINARY_rm_rv_LOCKED(iemAImpl_adc_u16_locked, iemAImpl_adc_u32_locked, iemAImpl_adc_u64_locked);
}
1045
1046
1047/**
1048 * @opcode 0x12
1049 * @opgroup og_gen_arith_bin
1050 * @opfltest cf
1051 * @opflmodify cf,pf,af,zf,sf,of
1052 * @opcopytests iemOp_adc_Eb_Gb
1053 */
FNIEMOP_DEF(iemOp_adc_Gb_Eb)
{
    /* ADC Gb,Eb - byte add-with-carry with the general register as destination. */
    IEMOP_MNEMONIC2(RM, ADC, adc, Gb, Eb, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    IEMOP_BODY_BINARY_r8_rm(iemAImpl_adc_u8);
}
1059
1060
1061/**
1062 * @opcode 0x13
1063 * @opgroup og_gen_arith_bin
1064 * @opfltest cf
1065 * @opflmodify cf,pf,af,zf,sf,of
1066 * @opcopytests iemOp_adc_Ev_Gv
1067 */
FNIEMOP_DEF(iemOp_adc_Gv_Ev)
{
    /* ADC Gv,Ev - 16/32/64-bit add-with-carry with the general register as destination. */
    IEMOP_MNEMONIC2(RM, ADC, adc, Gv, Ev, DISOPTYPE_HARMLESS, 0);
    IEMOP_BODY_BINARY_rv_rm(iemAImpl_adc_u16, iemAImpl_adc_u32, iemAImpl_adc_u64, 1, 0);
}
1073
1074
1075/**
1076 * @opcode 0x14
1077 * @opgroup og_gen_arith_bin
1078 * @opfltest cf
1079 * @opflmodify cf,pf,af,zf,sf,of
1080 * @opcopytests iemOp_adc_Eb_Gb
1081 */
FNIEMOP_DEF(iemOp_adc_Al_Ib)
{
    /* ADC AL,Ib - byte add-with-carry of an immediate into AL. */
    IEMOP_MNEMONIC2(FIXED, ADC, adc, AL, Ib, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    IEMOP_BODY_BINARY_AL_Ib(iemAImpl_adc_u8);
}
1087
1088
1089/**
1090 * @opcode 0x15
1091 * @opgroup og_gen_arith_bin
1092 * @opfltest cf
1093 * @opflmodify cf,pf,af,zf,sf,of
1094 * @opcopytests iemOp_adc_Ev_Gv
1095 */
FNIEMOP_DEF(iemOp_adc_eAX_Iz)
{
    /* ADC rAX,Iz - add-with-carry an immediate (word/dword, sign-extended for 64-bit) into rAX. */
    IEMOP_MNEMONIC2(FIXED, ADC, adc, rAX, Iz, DISOPTYPE_HARMLESS, 0);
    IEMOP_BODY_BINARY_rAX_Iz(iemAImpl_adc_u16, iemAImpl_adc_u32, iemAImpl_adc_u64, 1);
}
1101
1102
1103/**
1104 * @opcode 0x16
1105 */
FNIEMOP_DEF(iemOp_push_SS)
{
    /* PUSH SS - invalid in 64-bit mode. */
    IEMOP_MNEMONIC1(FIXED, PUSH, push, SS, DISOPTYPE_HARMLESS | DISOPTYPE_X86_INVALID_64 | DISOPTYPE_RRM_DANGEROUS, 0);
    IEMOP_HLP_NO_64BIT();
    return FNIEMOP_CALL_1(iemOpCommonPushSReg, X86_SREG_SS);
}
1112
1113
1114/**
1115 * @opcode 0x17
1116 * @opgroup og_gen_arith_bin
1117 * @opfltest cf
1118 * @opflmodify cf,pf,af,zf,sf,of
1119 */
FNIEMOP_DEF(iemOp_pop_SS)
{
    /* POP SS - invalid in 64-bit mode; inhibits interrupts for one instruction
       (DISOPTYPE_INHIBIT_IRQS / IEM_CIMPL_F_INHIBIT_SHADOW) so the following
       stack-pointer load can complete atomically w.r.t. interrupts. */
    IEMOP_MNEMONIC1(FIXED, POP, pop, SS, DISOPTYPE_HARMLESS | DISOPTYPE_INHIBIT_IRQS | DISOPTYPE_X86_INVALID_64 | DISOPTYPE_RRM_DANGEROUS , 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_HLP_NO_64BIT();
    IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_MODE | IEM_CIMPL_F_INHIBIT_SHADOW,
                                iemCImpl_pop_Sreg, X86_SREG_SS, pVCpu->iem.s.enmEffOpSize);
}
1128
1129
1130/**
1131 * @opcode 0x18
1132 * @opgroup og_gen_arith_bin
1133 * @opfltest cf
1134 * @opflmodify cf,pf,af,zf,sf,of
1135 */
FNIEMOP_DEF(iemOp_sbb_Eb_Gb)
{
    /* SBB Eb,Gb - byte subtract-with-borrow, r/m destination; LOCK allowed for the memory form. */
    IEMOP_MNEMONIC2(MR, SBB, sbb, Eb, Gb, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_LOCK_ALLOWED);
    IEMOP_BODY_BINARY_rm_r8_RW(    iemAImpl_sbb_u8);
    IEMOP_BODY_BINARY_rm_r8_LOCKED(iemAImpl_sbb_u8_locked);
}
1142
1143
1144/**
1145 * @opcode 0x19
1146 * @opgroup og_gen_arith_bin
1147 * @opfltest cf
1148 * @opflmodify cf,pf,af,zf,sf,of
1149 */
FNIEMOP_DEF(iemOp_sbb_Ev_Gv)
{
    /* SBB Ev,Gv - 16/32/64-bit subtract-with-borrow, r/m destination; LOCK allowed for the memory form. */
    IEMOP_MNEMONIC2(MR, SBB, sbb, Ev, Gv, DISOPTYPE_HARMLESS, IEMOPHINT_LOCK_ALLOWED);
    IEMOP_BODY_BINARY_rm_rv_RW(    iemAImpl_sbb_u16,        iemAImpl_sbb_u32,        iemAImpl_sbb_u64);
    IEMOP_BODY_BINARY_rm_rv_LOCKED(iemAImpl_sbb_u16_locked, iemAImpl_sbb_u32_locked, iemAImpl_sbb_u64_locked);
}
1156
1157
1158/**
1159 * @opcode 0x1a
1160 * @opgroup og_gen_arith_bin
1161 * @opfltest cf
1162 * @opflmodify cf,pf,af,zf,sf,of
1163 */
FNIEMOP_DEF(iemOp_sbb_Gb_Eb)
{
    /* SBB Gb,Eb - byte subtract-with-borrow with the general register as destination. */
    IEMOP_MNEMONIC2(RM, SBB, sbb, Gb, Eb, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    IEMOP_BODY_BINARY_r8_rm(iemAImpl_sbb_u8);
}
1169
1170
1171/**
1172 * @opcode 0x1b
1173 * @opgroup og_gen_arith_bin
1174 * @opfltest cf
1175 * @opflmodify cf,pf,af,zf,sf,of
1176 */
FNIEMOP_DEF(iemOp_sbb_Gv_Ev)
{
    /* SBB Gv,Ev - 16/32/64-bit subtract-with-borrow with the general register as destination. */
    IEMOP_MNEMONIC2(RM, SBB, sbb, Gv, Ev, DISOPTYPE_HARMLESS, 0);
    IEMOP_BODY_BINARY_rv_rm(iemAImpl_sbb_u16, iemAImpl_sbb_u32, iemAImpl_sbb_u64, 1, 0);
}
1182
1183
1184/**
1185 * @opcode 0x1c
1186 * @opgroup og_gen_arith_bin
1187 * @opfltest cf
1188 * @opflmodify cf,pf,af,zf,sf,of
1189 */
FNIEMOP_DEF(iemOp_sbb_Al_Ib)
{
    /* SBB AL,Ib - byte subtract-with-borrow of an immediate from AL. */
    IEMOP_MNEMONIC2(FIXED, SBB, sbb, AL, Ib, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    IEMOP_BODY_BINARY_AL_Ib(iemAImpl_sbb_u8);
}
1195
1196
1197/**
1198 * @opcode 0x1d
1199 * @opgroup og_gen_arith_bin
1200 * @opfltest cf
1201 * @opflmodify cf,pf,af,zf,sf,of
1202 */
FNIEMOP_DEF(iemOp_sbb_eAX_Iz)
{
    /* SBB rAX,Iz - subtract-with-borrow an immediate (word/dword, sign-extended for 64-bit) from rAX. */
    IEMOP_MNEMONIC2(FIXED, SBB, sbb, rAX, Iz, DISOPTYPE_HARMLESS, 0);
    IEMOP_BODY_BINARY_rAX_Iz(iemAImpl_sbb_u16, iemAImpl_sbb_u32, iemAImpl_sbb_u64, 1);
}
1208
1209
1210/**
1211 * @opcode 0x1e
1212 * @opgroup og_stack_sreg
1213 */
FNIEMOP_DEF(iemOp_push_DS)
{
    /* PUSH DS - invalid in 64-bit mode. */
    IEMOP_MNEMONIC1(FIXED, PUSH, push, DS, DISOPTYPE_HARMLESS | DISOPTYPE_X86_INVALID_64, 0);
    IEMOP_HLP_NO_64BIT();
    return FNIEMOP_CALL_1(iemOpCommonPushSReg, X86_SREG_DS);
}
1220
1221
1222/**
1223 * @opcode 0x1f
1224 * @opgroup og_stack_sreg
1225 */
FNIEMOP_DEF(iemOp_pop_DS)
{
    /* POP DS - invalid in 64-bit mode; may change addressing mode, hence IEM_CIMPL_F_MODE. */
    IEMOP_MNEMONIC1(FIXED, POP, pop, DS, DISOPTYPE_HARMLESS | DISOPTYPE_X86_INVALID_64 | DISOPTYPE_RRM_DANGEROUS, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_HLP_NO_64BIT();
    IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_MODE, iemCImpl_pop_Sreg, X86_SREG_DS, pVCpu->iem.s.enmEffOpSize);
}
1233
1234
1235/**
1236 * @opcode 0x20
1237 * @opgroup og_gen_arith_bin
1238 * @opflmodify cf,pf,af,zf,sf,of
1239 * @opflundef af
1240 * @opflclear of,cf
1241 */
FNIEMOP_DEF(iemOp_and_Eb_Gb)
{
    /* AND Eb,Gb - byte AND, r/m destination; LOCK allowed for the memory form. */
    IEMOP_MNEMONIC2(MR, AND, and, Eb, Gb, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_LOCK_ALLOWED);
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF); /* AF is undefined after a logical AND. */
    IEMOP_BODY_BINARY_rm_r8_RW(    iemAImpl_and_u8);
    IEMOP_BODY_BINARY_rm_r8_LOCKED(iemAImpl_and_u8_locked);
}
1249
1250
1251/**
1252 * @opcode 0x21
1253 * @opgroup og_gen_arith_bin
1254 * @opflmodify cf,pf,af,zf,sf,of
1255 * @opflundef af
1256 * @opflclear of,cf
1257 */
FNIEMOP_DEF(iemOp_and_Ev_Gv)
{
    /* AND Ev,Gv - 16/32/64-bit AND, r/m destination; LOCK allowed for the memory form. */
    IEMOP_MNEMONIC2(MR, AND, and, Ev, Gv, DISOPTYPE_HARMLESS, IEMOPHINT_LOCK_ALLOWED);
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF); /* AF is undefined after a logical AND. */
    IEMOP_BODY_BINARY_rm_rv_RW(    iemAImpl_and_u16,        iemAImpl_and_u32,        iemAImpl_and_u64);
    IEMOP_BODY_BINARY_rm_rv_LOCKED(iemAImpl_and_u16_locked, iemAImpl_and_u32_locked, iemAImpl_and_u64_locked);
}
1265
1266
1267/**
1268 * @opcode 0x22
1269 * @opgroup og_gen_arith_bin
1270 * @opflmodify cf,pf,af,zf,sf,of
1271 * @opflundef af
1272 * @opflclear of,cf
1273 */
FNIEMOP_DEF(iemOp_and_Gb_Eb)
{
    /* AND Gb,Eb - byte AND with the general register as destination. */
    IEMOP_MNEMONIC2(RM, AND, and, Gb, Eb, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF); /* AF is undefined after a logical AND. */
    IEMOP_BODY_BINARY_r8_rm(iemAImpl_and_u8);
}
1280
1281
1282/**
1283 * @opcode 0x23
1284 * @opgroup og_gen_arith_bin
1285 * @opflmodify cf,pf,af,zf,sf,of
1286 * @opflundef af
1287 * @opflclear of,cf
1288 */
FNIEMOP_DEF(iemOp_and_Gv_Ev)
{
    /* AND Gv,Ev - 16/32/64-bit AND with the general register as destination. */
    IEMOP_MNEMONIC2(RM, AND, and, Gv, Ev, DISOPTYPE_HARMLESS, 0);
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF); /* AF is undefined after a logical AND. */
    IEMOP_BODY_BINARY_rv_rm(iemAImpl_and_u16, iemAImpl_and_u32, iemAImpl_and_u64, 1, 0);
}
1295
1296
1297/**
1298 * @opcode 0x24
1299 * @opgroup og_gen_arith_bin
1300 * @opflmodify cf,pf,af,zf,sf,of
1301 * @opflundef af
1302 * @opflclear of,cf
1303 */
1304FNIEMOP_DEF(iemOp_and_Al_Ib)
1305{
1306 IEMOP_MNEMONIC2(FIXED, AND, and, AL, Ib, DISOPTYPE_HARMLESS, 0);
1307 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
1308 IEMOP_BODY_BINARY_AL_Ib(iemAImpl_and_u8);
1309}
1310
1311
1312/**
1313 * @opcode 0x25
1314 * @opgroup og_gen_arith_bin
1315 * @opflmodify cf,pf,af,zf,sf,of
1316 * @opflundef af
1317 * @opflclear of,cf
1318 */
FNIEMOP_DEF(iemOp_and_eAX_Iz)
{
    /* AND rAX,Iz - AND an immediate (word/dword, sign-extended for 64-bit) into rAX. */
    IEMOP_MNEMONIC2(FIXED, AND, and, rAX, Iz, DISOPTYPE_HARMLESS, 0);
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF); /* AF is undefined after a logical AND. */
    IEMOP_BODY_BINARY_rAX_Iz(iemAImpl_and_u16, iemAImpl_and_u32, iemAImpl_and_u64, 1);
}
1325
1326
1327/**
1328 * @opcode 0x26
1329 * @opmnemonic SEG
1330 * @op1 ES
1331 * @opgroup og_prefix
1332 * @openc prefix
1333 * @opdisenum OP_SEG
1334 * @ophints harmless
1335 */
FNIEMOP_DEF(iemOp_seg_ES)
{
    /* ES segment-override prefix: record it and decode the next opcode byte. */
    IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("seg es");
    pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_SEG_ES;
    pVCpu->iem.s.iEffSeg    = X86_SREG_ES;

    uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
    return FNIEMOP_CALL(g_apfnOneByteMap[b]);
}
1345
1346
1347/**
1348 * @opcode 0x27
1349 * @opfltest af,cf
1350 * @opflmodify cf,pf,af,zf,sf,of
1351 * @opflundef of
1352 */
FNIEMOP_DEF(iemOp_daa)
{
    /* DAA - decimal adjust AL after addition; invalid in 64-bit mode.
       Deferred to a C implementation since it is BCD fiddling, not hot code. */
    IEMOP_MNEMONIC0(FIXED, DAA, daa, DISOPTYPE_HARMLESS | DISOPTYPE_X86_INVALID_64, 0); /* express implicit AL register use */
    IEMOP_HLP_NO_64BIT();
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF); /* OF is undefined after DAA. */
    IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_STATUS_FLAGS, iemCImpl_daa);
}
1361
1362
1363/**
1364 * @opcode 0x28
1365 * @opgroup og_gen_arith_bin
1366 * @opflmodify cf,pf,af,zf,sf,of
1367 */
FNIEMOP_DEF(iemOp_sub_Eb_Gb)
{
    /* SUB Eb,Gb - byte subtraction, r/m destination; LOCK allowed for the memory form. */
    IEMOP_MNEMONIC2(MR, SUB, sub, Eb, Gb, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_LOCK_ALLOWED);
    IEMOP_BODY_BINARY_rm_r8_RW(    iemAImpl_sub_u8);
    IEMOP_BODY_BINARY_rm_r8_LOCKED(iemAImpl_sub_u8_locked);
}
1374
1375
1376/**
1377 * @opcode 0x29
1378 * @opgroup og_gen_arith_bin
1379 * @opflmodify cf,pf,af,zf,sf,of
1380 */
FNIEMOP_DEF(iemOp_sub_Ev_Gv)
{
    /* SUB Ev,Gv - 16/32/64-bit subtraction, r/m destination; LOCK allowed for the memory form. */
    IEMOP_MNEMONIC2(MR, SUB, sub, Ev, Gv, DISOPTYPE_HARMLESS, IEMOPHINT_LOCK_ALLOWED);
    IEMOP_BODY_BINARY_rm_rv_RW(    iemAImpl_sub_u16,        iemAImpl_sub_u32,        iemAImpl_sub_u64);
    IEMOP_BODY_BINARY_rm_rv_LOCKED(iemAImpl_sub_u16_locked, iemAImpl_sub_u32_locked, iemAImpl_sub_u64_locked);
}
1387
1388
1389/**
1390 * @opcode 0x2a
1391 * @opgroup og_gen_arith_bin
1392 * @opflmodify cf,pf,af,zf,sf,of
1393 */
FNIEMOP_DEF(iemOp_sub_Gb_Eb)
{
    /* SUB Gb,Eb - byte subtraction with the general register as destination. */
    IEMOP_MNEMONIC2(RM, SUB, sub, Gb, Eb, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    IEMOP_BODY_BINARY_r8_rm(iemAImpl_sub_u8);
}
1399
1400
1401/**
1402 * @opcode 0x2b
1403 * @opgroup og_gen_arith_bin
1404 * @opflmodify cf,pf,af,zf,sf,of
1405 */
FNIEMOP_DEF(iemOp_sub_Gv_Ev)
{
    /* SUB Gv,Ev - 16/32/64-bit subtraction with the general register as destination. */
    IEMOP_MNEMONIC2(RM, SUB, sub, Gv, Ev, DISOPTYPE_HARMLESS, 0);
    IEMOP_BODY_BINARY_rv_rm(iemAImpl_sub_u16, iemAImpl_sub_u32, iemAImpl_sub_u64, 1, 0);
}
1411
1412
1413/**
1414 * @opcode 0x2c
1415 * @opgroup og_gen_arith_bin
1416 * @opflmodify cf,pf,af,zf,sf,of
1417 */
FNIEMOP_DEF(iemOp_sub_Al_Ib)
{
    /* SUB AL,Ib - byte subtraction of an immediate from AL. */
    IEMOP_MNEMONIC2(FIXED, SUB, sub, AL, Ib, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    IEMOP_BODY_BINARY_AL_Ib(iemAImpl_sub_u8);
}
1423
1424
1425/**
1426 * @opcode 0x2d
1427 * @opgroup og_gen_arith_bin
1428 * @opflmodify cf,pf,af,zf,sf,of
1429 */
FNIEMOP_DEF(iemOp_sub_eAX_Iz)
{
    /* SUB rAX,Iz - subtract an immediate (word/dword, sign-extended for 64-bit) from rAX. */
    IEMOP_MNEMONIC2(FIXED, SUB, sub, rAX, Iz, DISOPTYPE_HARMLESS, 0);
    IEMOP_BODY_BINARY_rAX_Iz(iemAImpl_sub_u16, iemAImpl_sub_u32, iemAImpl_sub_u64, 1);
}
1435
1436
1437/**
1438 * @opcode 0x2e
1439 * @opmnemonic SEG
1440 * @op1 CS
1441 * @opgroup og_prefix
1442 * @openc prefix
1443 * @opdisenum OP_SEG
1444 * @ophints harmless
1445 */
FNIEMOP_DEF(iemOp_seg_CS)
{
    /* CS segment-override prefix: record it and decode the next opcode byte. */
    IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("seg cs");
    pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_SEG_CS;
    pVCpu->iem.s.iEffSeg    = X86_SREG_CS;

    uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
    return FNIEMOP_CALL(g_apfnOneByteMap[b]);
}
1455
1456
1457/**
1458 * @opcode 0x2f
1459 * @opfltest af,cf
1460 * @opflmodify cf,pf,af,zf,sf,of
1461 * @opflundef of
1462 */
FNIEMOP_DEF(iemOp_das)
{
    /* DAS - decimal adjust AL after subtraction; invalid in 64-bit mode.
       Deferred to a C implementation since it is BCD fiddling, not hot code. */
    IEMOP_MNEMONIC0(FIXED, DAS, das, DISOPTYPE_HARMLESS | DISOPTYPE_X86_INVALID_64, 0); /* express implicit AL register use */
    IEMOP_HLP_NO_64BIT();
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF); /* OF is undefined after DAS. */
    IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_STATUS_FLAGS, iemCImpl_das);
}
1471
1472
1473/**
1474 * @opcode 0x30
1475 * @opgroup og_gen_arith_bin
1476 * @opflmodify cf,pf,af,zf,sf,of
1477 * @opflundef af
1478 * @opflclear of,cf
1479 */
FNIEMOP_DEF(iemOp_xor_Eb_Gb)
{
    /* XOR Eb,Gb - byte XOR, r/m destination; LOCK allowed for the memory form. */
    IEMOP_MNEMONIC2(MR, XOR, xor, Eb, Gb, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_LOCK_ALLOWED);
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF); /* AF is undefined after a logical XOR. */
    IEMOP_BODY_BINARY_rm_r8_RW(    iemAImpl_xor_u8);
    IEMOP_BODY_BINARY_rm_r8_LOCKED(iemAImpl_xor_u8_locked);
}
1487
1488
1489/**
1490 * @opcode 0x31
1491 * @opgroup og_gen_arith_bin
1492 * @opflmodify cf,pf,af,zf,sf,of
1493 * @opflundef af
1494 * @opflclear of,cf
1495 */
FNIEMOP_DEF(iemOp_xor_Ev_Gv)
{
    /* XOR Ev,Gv - 16/32/64-bit XOR, r/m destination; LOCK allowed for the memory form. */
    IEMOP_MNEMONIC2(MR, XOR, xor, Ev, Gv, DISOPTYPE_HARMLESS, IEMOPHINT_LOCK_ALLOWED);
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF); /* AF is undefined after a logical XOR. */
    IEMOP_BODY_BINARY_rm_rv_RW(    iemAImpl_xor_u16,        iemAImpl_xor_u32,        iemAImpl_xor_u64);
    IEMOP_BODY_BINARY_rm_rv_LOCKED(iemAImpl_xor_u16_locked, iemAImpl_xor_u32_locked, iemAImpl_xor_u64_locked);
}
1503
1504
1505/**
1506 * @opcode 0x32
1507 * @opgroup og_gen_arith_bin
1508 * @opflmodify cf,pf,af,zf,sf,of
1509 * @opflundef af
1510 * @opflclear of,cf
1511 */
FNIEMOP_DEF(iemOp_xor_Gb_Eb)
{
    /* XOR Gb,Eb - byte XOR with the general register as destination. */
    IEMOP_MNEMONIC2(RM, XOR, xor, Gb, Eb, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF); /* AF is undefined after a logical XOR. */
    IEMOP_BODY_BINARY_r8_rm(iemAImpl_xor_u8);
}
1518
1519
1520/**
1521 * @opcode 0x33
1522 * @opgroup og_gen_arith_bin
1523 * @opflmodify cf,pf,af,zf,sf,of
1524 * @opflundef af
1525 * @opflclear of,cf
1526 */
FNIEMOP_DEF(iemOp_xor_Gv_Ev)
{
    /* XOR Gv,Ev - 16/32/64-bit XOR with the general register as destination. */
    IEMOP_MNEMONIC2(RM, XOR, xor, Gv, Ev, DISOPTYPE_HARMLESS, 0);
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF); /* AF is undefined after a logical XOR. */
    IEMOP_BODY_BINARY_rv_rm(iemAImpl_xor_u16, iemAImpl_xor_u32, iemAImpl_xor_u64, 1, 0);
}
1533
1534
1535/**
1536 * @opcode 0x34
1537 * @opgroup og_gen_arith_bin
1538 * @opflmodify cf,pf,af,zf,sf,of
1539 * @opflundef af
1540 * @opflclear of,cf
1541 */
FNIEMOP_DEF(iemOp_xor_Al_Ib)
{
    /* XOR AL,Ib - byte XOR of an immediate into AL. */
    IEMOP_MNEMONIC2(FIXED, XOR, xor, AL, Ib, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF); /* AF is undefined after a logical XOR. */
    IEMOP_BODY_BINARY_AL_Ib(iemAImpl_xor_u8);
}
1548
1549
1550/**
1551 * @opcode 0x35
1552 * @opgroup og_gen_arith_bin
1553 * @opflmodify cf,pf,af,zf,sf,of
1554 * @opflundef af
1555 * @opflclear of,cf
1556 */
1557FNIEMOP_DEF(iemOp_xor_eAX_Iz)
1558{
1559 IEMOP_MNEMONIC2(FIXED, XOR, xor, rAX, Iz, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
1560 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
1561 IEMOP_BODY_BINARY_rAX_Iz(iemAImpl_xor_u16, iemAImpl_xor_u32, iemAImpl_xor_u64, 1);
1562}
1563
1564
1565/**
1566 * @opcode 0x36
1567 * @opmnemonic SEG
1568 * @op1 SS
1569 * @opgroup og_prefix
1570 * @openc prefix
1571 * @opdisenum OP_SEG
1572 * @ophints harmless
1573 */
FNIEMOP_DEF(iemOp_seg_SS)
{
    /* SS segment-override prefix: record it and decode the next opcode byte. */
    IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("seg ss");
    pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_SEG_SS;
    pVCpu->iem.s.iEffSeg    = X86_SREG_SS;

    uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
    return FNIEMOP_CALL(g_apfnOneByteMap[b]);
}
1583
1584
1585/**
1586 * @opcode 0x37
1587 * @opfltest af,cf
1588 * @opflmodify cf,pf,af,zf,sf,of
1589 * @opflundef pf,zf,sf,of
1590 * @opgroup og_gen_arith_dec
1591 * @optest efl&~=af ax=9 -> efl&|=nc,po,na,nz,pl,nv
1592 * @optest efl&~=af ax=0 -> efl&|=nc,po,na,zf,pl,nv
1593 * @optest intel / efl&~=af ax=0x00f0 -> ax=0x0000 efl&|=nc,po,na,zf,pl,nv
1594 * @optest amd / efl&~=af ax=0x00f0 -> ax=0x0000 efl&|=nc,po,na,nz,pl,nv
1595 * @optest efl&~=af ax=0x00f9 -> ax=0x0009 efl&|=nc,po,na,nz,pl,nv
1596 * @optest efl|=af ax=0 -> ax=0x0106 efl&|=cf,po,af,nz,pl,nv
1597 * @optest efl|=af ax=0x0100 -> ax=0x0206 efl&|=cf,po,af,nz,pl,nv
1598 * @optest intel / efl|=af ax=0x000a -> ax=0x0100 efl&|=cf,po,af,zf,pl,nv
1599 * @optest amd / efl|=af ax=0x000a -> ax=0x0100 efl&|=cf,pe,af,nz,pl,nv
1600 * @optest intel / efl|=af ax=0x010a -> ax=0x0200 efl&|=cf,po,af,zf,pl,nv
1601 * @optest amd / efl|=af ax=0x010a -> ax=0x0200 efl&|=cf,pe,af,nz,pl,nv
1602 * @optest intel / efl|=af ax=0x0f0a -> ax=0x1000 efl&|=cf,po,af,zf,pl,nv
1603 * @optest amd / efl|=af ax=0x0f0a -> ax=0x1000 efl&|=cf,pe,af,nz,pl,nv
1604 * @optest intel / efl|=af ax=0x7f0a -> ax=0x8000 efl&|=cf,po,af,zf,pl,nv
1605 * @optest amd / efl|=af ax=0x7f0a -> ax=0x8000 efl&|=cf,pe,af,nz,ng,ov
1606 * @optest intel / efl|=af ax=0xff0a -> ax=0x0000 efl&|=cf,po,af,zf,pl,nv
1607 * @optest amd / efl|=af ax=0xff0a -> ax=0x0000 efl&|=cf,pe,af,nz,pl,nv
1608 * @optest intel / efl&~=af ax=0xff0a -> ax=0x0000 efl&|=cf,po,af,zf,pl,nv
1609 * @optest amd / efl&~=af ax=0xff0a -> ax=0x0000 efl&|=cf,pe,af,nz,pl,nv
1610 * @optest intel / efl&~=af ax=0x000b -> ax=0x0101 efl&|=cf,pe,af,nz,pl,nv
1611 * @optest amd / efl&~=af ax=0x000b -> ax=0x0101 efl&|=cf,po,af,nz,pl,nv
1612 * @optest intel / efl&~=af ax=0x000c -> ax=0x0102 efl&|=cf,pe,af,nz,pl,nv
1613 * @optest amd / efl&~=af ax=0x000c -> ax=0x0102 efl&|=cf,po,af,nz,pl,nv
1614 * @optest intel / efl&~=af ax=0x000d -> ax=0x0103 efl&|=cf,po,af,nz,pl,nv
1615 * @optest amd / efl&~=af ax=0x000d -> ax=0x0103 efl&|=cf,pe,af,nz,pl,nv
1616 * @optest intel / efl&~=af ax=0x000e -> ax=0x0104 efl&|=cf,pe,af,nz,pl,nv
1617 * @optest amd / efl&~=af ax=0x000e -> ax=0x0104 efl&|=cf,po,af,nz,pl,nv
1618 * @optest intel / efl&~=af ax=0x000f -> ax=0x0105 efl&|=cf,po,af,nz,pl,nv
1619 * @optest amd / efl&~=af ax=0x000f -> ax=0x0105 efl&|=cf,pe,af,nz,pl,nv
1620 * @optest intel / efl&~=af ax=0x020f -> ax=0x0305 efl&|=cf,po,af,nz,pl,nv
1621 * @optest amd / efl&~=af ax=0x020f -> ax=0x0305 efl&|=cf,pe,af,nz,pl,nv
1622 */
FNIEMOP_DEF(iemOp_aaa)
{
    /* AAA - ASCII adjust AL after addition; invalid in 64-bit mode. */
    IEMOP_MNEMONIC0(FIXED, AAA, aaa, DISOPTYPE_HARMLESS | DISOPTYPE_X86_INVALID_64, 0); /* express implicit AL/AX register use */
    IEMOP_HLP_NO_64BIT();
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    /* NOTE(review): the doxygen above lists pf,zf,sf,of as undefined but only
       OF is passed here - presumably the implementation models the others;
       confirm against iemCImpl_aaa before widening the mask. */
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF);

    IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_STATUS_FLAGS, iemCImpl_aaa);
}
1632
1633
1634/**
1635 * @opcode 0x38
1636 */
FNIEMOP_DEF(iemOp_cmp_Eb_Gb)
{
    /* CMP Eb,Gb - byte compare (read-only subtraction); LOCK prefix is not allowed. */
    IEMOP_MNEMONIC(cmp_Eb_Gb, "cmp Eb,Gb");
    IEMOP_BODY_BINARY_rm_r8_RO(iemAImpl_cmp_u8);
    IEMOP_BODY_BINARY_rm_r8_NO_LOCK();
}
1643
1644
1645/**
1646 * @opcode 0x39
1647 */
FNIEMOP_DEF(iemOp_cmp_Ev_Gv)
{
    /* CMP Ev,Gv - 16/32/64-bit compare (read-only subtraction). */
    IEMOP_MNEMONIC(cmp_Ev_Gv, "cmp Ev,Gv");
    IEMOP_BODY_BINARY_rm_rv_RO(iemAImpl_cmp_u16, iemAImpl_cmp_u32, iemAImpl_cmp_u64);
}
1653
1654
1655/**
1656 * @opcode 0x3a
1657 */
FNIEMOP_DEF(iemOp_cmp_Gb_Eb)
{
    /* CMP Gb,Eb - byte compare, register first operand. */
    IEMOP_MNEMONIC(cmp_Gb_Eb, "cmp Gb,Eb");
    IEMOP_BODY_BINARY_r8_rm(iemAImpl_cmp_u8);
}
1663
1664
1665/**
1666 * @opcode 0x3b
1667 */
FNIEMOP_DEF(iemOp_cmp_Gv_Ev)
{
    /* CMP Gv,Ev - 16/32/64-bit compare, register first operand (no result write-back, hence the 0). */
    IEMOP_MNEMONIC(cmp_Gv_Ev, "cmp Gv,Ev");
    IEMOP_BODY_BINARY_rv_rm(iemAImpl_cmp_u16, iemAImpl_cmp_u32, iemAImpl_cmp_u64, 0, 0);
}
1673
1674
1675/**
1676 * @opcode 0x3c
1677 */
FNIEMOP_DEF(iemOp_cmp_Al_Ib)
{
    /* CMP AL,Ib - compare AL against an immediate byte. */
    IEMOP_MNEMONIC(cmp_al_Ib, "cmp al,Ib");
    IEMOP_BODY_BINARY_AL_Ib(iemAImpl_cmp_u8);
}
1683
1684
1685/**
1686 * @opcode 0x3d
1687 */
FNIEMOP_DEF(iemOp_cmp_eAX_Iz)
{
    /* CMP rAX,Iz - compare rAX against an immediate (no result write-back, hence the 0). */
    IEMOP_MNEMONIC(cmp_rAX_Iz, "cmp rAX,Iz");
    IEMOP_BODY_BINARY_rAX_Iz(iemAImpl_cmp_u16, iemAImpl_cmp_u32, iemAImpl_cmp_u64, 0);
}
1693
1694
1695/**
1696 * @opcode 0x3e
1697 */
FNIEMOP_DEF(iemOp_seg_DS)
{
    /* DS segment-override prefix: record it and decode the next opcode byte. */
    IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("seg ds");
    pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_SEG_DS;
    pVCpu->iem.s.iEffSeg    = X86_SREG_DS;

    uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
    return FNIEMOP_CALL(g_apfnOneByteMap[b]);
}
1707
1708
1709/**
1710 * @opcode 0x3f
1711 * @opfltest af,cf
1712 * @opflmodify cf,pf,af,zf,sf,of
1713 * @opflundef pf,zf,sf,of
1714 * @opgroup og_gen_arith_dec
1715 * @optest / efl&~=af ax=0x0009 -> efl&|=nc,po,na,nz,pl,nv
1716 * @optest / efl&~=af ax=0x0000 -> efl&|=nc,po,na,zf,pl,nv
1717 * @optest intel / efl&~=af ax=0x00f0 -> ax=0x0000 efl&|=nc,po,na,zf,pl,nv
1718 * @optest amd / efl&~=af ax=0x00f0 -> ax=0x0000 efl&|=nc,po,na,nz,pl,nv
1719 * @optest / efl&~=af ax=0x00f9 -> ax=0x0009 efl&|=nc,po,na,nz,pl,nv
1720 * @optest intel / efl|=af ax=0x0000 -> ax=0xfe0a efl&|=cf,po,af,nz,pl,nv
1721 * @optest amd / efl|=af ax=0x0000 -> ax=0xfe0a efl&|=cf,po,af,nz,ng,nv
1722 * @optest intel / efl|=af ax=0x0100 -> ax=0xff0a efl&|=cf,po,af,nz,pl,nv
1723 * @optest amd / efl|=af ax=0x0100 -> ax=0xff0a efl&|=cf,po,af,nz,ng,nv
1724 * @optest intel / efl|=af ax=0x000a -> ax=0xff04 efl&|=cf,pe,af,nz,pl,nv
1725 * @optest amd / efl|=af ax=0x000a -> ax=0xff04 efl&|=cf,pe,af,nz,ng,nv
1726 * @optest / efl|=af ax=0x010a -> ax=0x0004 efl&|=cf,pe,af,nz,pl,nv
1727 * @optest / efl|=af ax=0x020a -> ax=0x0104 efl&|=cf,pe,af,nz,pl,nv
1728 * @optest / efl|=af ax=0x0f0a -> ax=0x0e04 efl&|=cf,pe,af,nz,pl,nv
1729 * @optest / efl|=af ax=0x7f0a -> ax=0x7e04 efl&|=cf,pe,af,nz,pl,nv
1730 * @optest intel / efl|=af ax=0xff0a -> ax=0xfe04 efl&|=cf,pe,af,nz,pl,nv
1731 * @optest amd / efl|=af ax=0xff0a -> ax=0xfe04 efl&|=cf,pe,af,nz,ng,nv
1732 * @optest intel / efl&~=af ax=0xff0a -> ax=0xfe04 efl&|=cf,pe,af,nz,pl,nv
1733 * @optest amd / efl&~=af ax=0xff0a -> ax=0xfe04 efl&|=cf,pe,af,nz,ng,nv
1734 * @optest intel / efl&~=af ax=0xff09 -> ax=0xff09 efl&|=nc,po,na,nz,pl,nv
1735 * @optest amd / efl&~=af ax=0xff09 -> ax=0xff09 efl&|=nc,po,na,nz,ng,nv
1736 * @optest intel / efl&~=af ax=0x000b -> ax=0xff05 efl&|=cf,po,af,nz,pl,nv
1737 * @optest amd / efl&~=af ax=0x000b -> ax=0xff05 efl&|=cf,po,af,nz,ng,nv
1738 * @optest intel / efl&~=af ax=0x000c -> ax=0xff06 efl&|=cf,po,af,nz,pl,nv
1739 * @optest amd / efl&~=af ax=0x000c -> ax=0xff06 efl&|=cf,po,af,nz,ng,nv
1740 * @optest intel / efl&~=af ax=0x000d -> ax=0xff07 efl&|=cf,pe,af,nz,pl,nv
1741 * @optest amd / efl&~=af ax=0x000d -> ax=0xff07 efl&|=cf,pe,af,nz,ng,nv
1742 * @optest intel / efl&~=af ax=0x000e -> ax=0xff08 efl&|=cf,pe,af,nz,pl,nv
1743 * @optest amd / efl&~=af ax=0x000e -> ax=0xff08 efl&|=cf,pe,af,nz,ng,nv
1744 * @optest intel / efl&~=af ax=0x000f -> ax=0xff09 efl&|=cf,po,af,nz,pl,nv
1745 * @optest amd / efl&~=af ax=0x000f -> ax=0xff09 efl&|=cf,po,af,nz,ng,nv
1746 * @optest intel / efl&~=af ax=0x00fa -> ax=0xff04 efl&|=cf,pe,af,nz,pl,nv
1747 * @optest amd / efl&~=af ax=0x00fa -> ax=0xff04 efl&|=cf,pe,af,nz,ng,nv
1748 * @optest intel / efl&~=af ax=0xfffa -> ax=0xfe04 efl&|=cf,pe,af,nz,pl,nv
1749 * @optest amd / efl&~=af ax=0xfffa -> ax=0xfe04 efl&|=cf,pe,af,nz,ng,nv
1750 */
1751FNIEMOP_DEF(iemOp_aas)
1752{
1753 IEMOP_MNEMONIC0(FIXED, AAS, aas, DISOPTYPE_HARMLESS | DISOPTYPE_X86_INVALID_64, 0); /* express implicit AL/AX register use */
1754 IEMOP_HLP_NO_64BIT();
1755 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1756 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_OF);
1757
1758 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_STATUS_FLAGS, iemCImpl_aas);
1759}
1760
1761
1762/**
1763 * Common 'inc/dec register' helper.
1764 *
1765 * Not for 64-bit code, only for what became the rex prefixes.
1766 */
/* Emits the 16/32-bit inc/dec body for a fixed general register.  No 64-bit
   case: in 64-bit mode these opcodes are REX prefixes, so the callers only
   reach this for 16/32-bit operand sizes (IEM_MC_F_NOT_64BIT). */
#define IEMOP_BODY_UNARY_GReg(a_fnNormalU16, a_fnNormalU32, a_iReg) \
    switch (pVCpu->iem.s.enmEffOpSize) \
    { \
        case IEMMODE_16BIT: \
            IEM_MC_BEGIN(2, 0, IEM_MC_F_NOT_64BIT, 0); \
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
            IEM_MC_ARG(uint16_t *,      pu16Dst, 0); \
            IEM_MC_ARG(uint32_t *,      pEFlags, 1); \
            IEM_MC_REF_GREG_U16(pu16Dst, a_iReg); \
            IEM_MC_REF_EFLAGS(pEFlags); \
            IEM_MC_CALL_VOID_AIMPL_2(a_fnNormalU16, pu16Dst, pEFlags); \
            IEM_MC_ADVANCE_RIP_AND_FINISH(); \
            IEM_MC_END(); \
            break; \
        \
        case IEMMODE_32BIT: /* 386+ only, hence IEM_MC_F_MIN_386. */ \
            IEM_MC_BEGIN(2, 0, IEM_MC_F_MIN_386 | IEM_MC_F_NOT_64BIT, 0); \
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
            IEM_MC_ARG(uint32_t *,      pu32Dst, 0); \
            IEM_MC_ARG(uint32_t *,      pEFlags, 1); \
            IEM_MC_REF_GREG_U32(pu32Dst, a_iReg); \
            IEM_MC_REF_EFLAGS(pEFlags); \
            IEM_MC_CALL_VOID_AIMPL_2(a_fnNormalU32, pu32Dst, pEFlags); \
            IEM_MC_CLEAR_HIGH_GREG_U64(a_iReg); \
            IEM_MC_ADVANCE_RIP_AND_FINISH(); \
            IEM_MC_END(); \
            break; \
        IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
    } \
    (void)0
1797
1798/**
1799 * @opcode 0x40
1800 */
FNIEMOP_DEF(iemOp_inc_eAX)
{
    /*
     * This is a REX prefix in 64-bit mode.
     */
    if (IEM_IS_64BIT_CODE(pVCpu))
    {
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex");
        pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX;

        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    /* Otherwise: INC eAX (16/32-bit only). */
    IEMOP_MNEMONIC(inc_eAX, "inc eAX");
    IEMOP_BODY_UNARY_GReg(iemAImpl_inc_u16, iemAImpl_inc_u32, X86_GREG_xAX);
}
1818
1819
1820/**
1821 * @opcode 0x41
1822 */
FNIEMOP_DEF(iemOp_inc_eCX)
{
    /*
     * This is a REX prefix in 64-bit mode.
     */
    if (IEM_IS_64BIT_CODE(pVCpu))
    {
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.b");
        pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_B;
        pVCpu->iem.s.uRexB     = 1 << 3; /* REX.B extends the ModR/M r/m (base) field. */

        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    /* Otherwise: INC eCX (16/32-bit only). */
    IEMOP_MNEMONIC(inc_eCX, "inc eCX");
    IEMOP_BODY_UNARY_GReg(iemAImpl_inc_u16, iemAImpl_inc_u32, X86_GREG_xCX);
}
1841
1842
1843/**
1844 * @opcode 0x42
1845 */
FNIEMOP_DEF(iemOp_inc_eDX)
{
    /*
     * This is a REX prefix in 64-bit mode.
     */
    if (IEM_IS_64BIT_CODE(pVCpu))
    {
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.x");
        pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_X;
        pVCpu->iem.s.uRexIndex = 1 << 3; /* REX.X extends the SIB index field. */

        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    /* Otherwise: INC eDX (16/32-bit only). */
    IEMOP_MNEMONIC(inc_eDX, "inc eDX");
    IEMOP_BODY_UNARY_GReg(iemAImpl_inc_u16, iemAImpl_inc_u32, X86_GREG_xDX);
}
1864
1865
1866
1867/**
1868 * @opcode 0x43
1869 */
FNIEMOP_DEF(iemOp_inc_eBX)
{
    /*
     * This is a REX prefix in 64-bit mode.
     */
    if (IEM_IS_64BIT_CODE(pVCpu))
    {
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.bx");
        pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_B | IEM_OP_PRF_REX_X;
        pVCpu->iem.s.uRexB     = 1 << 3;
        pVCpu->iem.s.uRexIndex = 1 << 3;

        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    /* Otherwise: INC eBX (16/32-bit only). */
    IEMOP_MNEMONIC(inc_eBX, "inc eBX");
    IEMOP_BODY_UNARY_GReg(iemAImpl_inc_u16, iemAImpl_inc_u32, X86_GREG_xBX);
}
1889
1890
1891/**
1892 * @opcode 0x44
1893 */
FNIEMOP_DEF(iemOp_inc_eSP)
{
    /*
     * This is a REX prefix in 64-bit mode.
     */
    if (IEM_IS_64BIT_CODE(pVCpu))
    {
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.r");
        pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_R;
        pVCpu->iem.s.uRexReg   = 1 << 3; /* REX.R extends the ModR/M reg field. */

        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    /* Otherwise: INC eSP (16/32-bit only). */
    IEMOP_MNEMONIC(inc_eSP, "inc eSP");
    IEMOP_BODY_UNARY_GReg(iemAImpl_inc_u16, iemAImpl_inc_u32, X86_GREG_xSP);
}
1912
1913
1914/**
1915 * @opcode 0x45
1916 */
FNIEMOP_DEF(iemOp_inc_eBP)
{
    /*
     * This is a REX prefix in 64-bit mode.
     */
    if (IEM_IS_64BIT_CODE(pVCpu))
    {
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.rb");
        pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_R | IEM_OP_PRF_REX_B;
        pVCpu->iem.s.uRexReg   = 1 << 3;
        pVCpu->iem.s.uRexB     = 1 << 3;

        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    /* Otherwise: INC eBP (16/32-bit only). */
    IEMOP_MNEMONIC(inc_eBP, "inc eBP");
    IEMOP_BODY_UNARY_GReg(iemAImpl_inc_u16, iemAImpl_inc_u32, X86_GREG_xBP);
}
1936
1937
1938/**
1939 * @opcode 0x46
1940 */
FNIEMOP_DEF(iemOp_inc_eSI)
{
    /*
     * This is a REX prefix in 64-bit mode.
     */
    if (IEM_IS_64BIT_CODE(pVCpu))
    {
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.rx");
        pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_R | IEM_OP_PRF_REX_X;
        pVCpu->iem.s.uRexReg   = 1 << 3;
        pVCpu->iem.s.uRexIndex = 1 << 3;

        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    /* Otherwise: INC eSI (16/32-bit only). */
    IEMOP_MNEMONIC(inc_eSI, "inc eSI");
    IEMOP_BODY_UNARY_GReg(iemAImpl_inc_u16, iemAImpl_inc_u32, X86_GREG_xSI);
}
1960
1961
1962/**
1963 * @opcode 0x47
1964 */
FNIEMOP_DEF(iemOp_inc_eDI)
{
    /*
     * This is a REX prefix in 64-bit mode.
     */
    if (IEM_IS_64BIT_CODE(pVCpu))
    {
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.rbx");
        pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_R | IEM_OP_PRF_REX_B | IEM_OP_PRF_REX_X;
        pVCpu->iem.s.uRexReg   = 1 << 3;
        pVCpu->iem.s.uRexB     = 1 << 3;
        pVCpu->iem.s.uRexIndex = 1 << 3;

        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    /* Otherwise: INC eDI (16/32-bit only). */
    IEMOP_MNEMONIC(inc_eDI, "inc eDI");
    IEMOP_BODY_UNARY_GReg(iemAImpl_inc_u16, iemAImpl_inc_u32, X86_GREG_xDI);
}
1985
1986
1987/**
1988 * @opcode 0x48
1989 */
1990FNIEMOP_DEF(iemOp_dec_eAX)
1991{
1992 /*
1993 * This is a REX prefix in 64-bit mode.
1994 */
1995 if (IEM_IS_64BIT_CODE(pVCpu))
1996 {
1997 IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.w");
1998 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_SIZE_REX_W;
1999 iemRecalEffOpSize(pVCpu);
2000
2001 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
2002 return FNIEMOP_CALL(g_apfnOneByteMap[b]);
2003 }
2004
2005 IEMOP_MNEMONIC(dec_eAX, "dec eAX");
2006 IEMOP_BODY_UNARY_GReg(iemAImpl_dec_u16, iemAImpl_dec_u32, X86_GREG_xAX);
2007}
2008
2009
2010/**
2011 * @opcode 0x49
2012 */
2013FNIEMOP_DEF(iemOp_dec_eCX)
2014{
2015 /*
2016 * This is a REX prefix in 64-bit mode.
2017 */
2018 if (IEM_IS_64BIT_CODE(pVCpu))
2019 {
2020 IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.bw");
2021 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_B | IEM_OP_PRF_SIZE_REX_W;
2022 pVCpu->iem.s.uRexB = 1 << 3;
2023 iemRecalEffOpSize(pVCpu);
2024
2025 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
2026 return FNIEMOP_CALL(g_apfnOneByteMap[b]);
2027 }
2028
2029 IEMOP_MNEMONIC(dec_eCX, "dec eCX");
2030 IEMOP_BODY_UNARY_GReg(iemAImpl_dec_u16, iemAImpl_dec_u32, X86_GREG_xCX);
2031}
2032
2033
2034/**
2035 * @opcode 0x4a
2036 */
2037FNIEMOP_DEF(iemOp_dec_eDX)
2038{
2039 /*
2040 * This is a REX prefix in 64-bit mode.
2041 */
2042 if (IEM_IS_64BIT_CODE(pVCpu))
2043 {
2044 IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.xw");
2045 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_X | IEM_OP_PRF_SIZE_REX_W;
2046 pVCpu->iem.s.uRexIndex = 1 << 3;
2047 iemRecalEffOpSize(pVCpu);
2048
2049 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
2050 return FNIEMOP_CALL(g_apfnOneByteMap[b]);
2051 }
2052
2053 IEMOP_MNEMONIC(dec_eDX, "dec eDX");
2054 IEMOP_BODY_UNARY_GReg(iemAImpl_dec_u16, iemAImpl_dec_u32, X86_GREG_xDX);
2055}
2056
2057
2058/**
2059 * @opcode 0x4b
2060 */
2061FNIEMOP_DEF(iemOp_dec_eBX)
2062{
2063 /*
2064 * This is a REX prefix in 64-bit mode.
2065 */
2066 if (IEM_IS_64BIT_CODE(pVCpu))
2067 {
2068 IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.bxw");
2069 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_B | IEM_OP_PRF_REX_X | IEM_OP_PRF_SIZE_REX_W;
2070 pVCpu->iem.s.uRexB = 1 << 3;
2071 pVCpu->iem.s.uRexIndex = 1 << 3;
2072 iemRecalEffOpSize(pVCpu);
2073
2074 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
2075 return FNIEMOP_CALL(g_apfnOneByteMap[b]);
2076 }
2077
2078 IEMOP_MNEMONIC(dec_eBX, "dec eBX");
2079 IEMOP_BODY_UNARY_GReg(iemAImpl_dec_u16, iemAImpl_dec_u32, X86_GREG_xBX);
2080}
2081
2082
2083/**
2084 * @opcode 0x4c
2085 */
2086FNIEMOP_DEF(iemOp_dec_eSP)
2087{
2088 /*
2089 * This is a REX prefix in 64-bit mode.
2090 */
2091 if (IEM_IS_64BIT_CODE(pVCpu))
2092 {
2093 IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.rw");
2094 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_R | IEM_OP_PRF_SIZE_REX_W;
2095 pVCpu->iem.s.uRexReg = 1 << 3;
2096 iemRecalEffOpSize(pVCpu);
2097
2098 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
2099 return FNIEMOP_CALL(g_apfnOneByteMap[b]);
2100 }
2101
2102 IEMOP_MNEMONIC(dec_eSP, "dec eSP");
2103 IEMOP_BODY_UNARY_GReg(iemAImpl_dec_u16, iemAImpl_dec_u32, X86_GREG_xSP);
2104}
2105
2106
2107/**
2108 * @opcode 0x4d
2109 */
2110FNIEMOP_DEF(iemOp_dec_eBP)
2111{
2112 /*
2113 * This is a REX prefix in 64-bit mode.
2114 */
2115 if (IEM_IS_64BIT_CODE(pVCpu))
2116 {
2117 IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.rbw");
2118 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_R | IEM_OP_PRF_REX_B | IEM_OP_PRF_SIZE_REX_W;
2119 pVCpu->iem.s.uRexReg = 1 << 3;
2120 pVCpu->iem.s.uRexB = 1 << 3;
2121 iemRecalEffOpSize(pVCpu);
2122
2123 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
2124 return FNIEMOP_CALL(g_apfnOneByteMap[b]);
2125 }
2126
2127 IEMOP_MNEMONIC(dec_eBP, "dec eBP");
2128 IEMOP_BODY_UNARY_GReg(iemAImpl_dec_u16, iemAImpl_dec_u32, X86_GREG_xBP);
2129}
2130
2131
2132/**
2133 * @opcode 0x4e
2134 */
2135FNIEMOP_DEF(iemOp_dec_eSI)
2136{
2137 /*
2138 * This is a REX prefix in 64-bit mode.
2139 */
2140 if (IEM_IS_64BIT_CODE(pVCpu))
2141 {
2142 IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.rxw");
2143 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_R | IEM_OP_PRF_REX_X | IEM_OP_PRF_SIZE_REX_W;
2144 pVCpu->iem.s.uRexReg = 1 << 3;
2145 pVCpu->iem.s.uRexIndex = 1 << 3;
2146 iemRecalEffOpSize(pVCpu);
2147
2148 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
2149 return FNIEMOP_CALL(g_apfnOneByteMap[b]);
2150 }
2151
2152 IEMOP_MNEMONIC(dec_eSI, "dec eSI");
2153 IEMOP_BODY_UNARY_GReg(iemAImpl_dec_u16, iemAImpl_dec_u32, X86_GREG_xSI);
2154}
2155
2156
2157/**
2158 * @opcode 0x4f
2159 */
2160FNIEMOP_DEF(iemOp_dec_eDI)
2161{
2162 /*
2163 * This is a REX prefix in 64-bit mode.
2164 */
2165 if (IEM_IS_64BIT_CODE(pVCpu))
2166 {
2167 IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.rbxw");
2168 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_R | IEM_OP_PRF_REX_B | IEM_OP_PRF_REX_X | IEM_OP_PRF_SIZE_REX_W;
2169 pVCpu->iem.s.uRexReg = 1 << 3;
2170 pVCpu->iem.s.uRexB = 1 << 3;
2171 pVCpu->iem.s.uRexIndex = 1 << 3;
2172 iemRecalEffOpSize(pVCpu);
2173
2174 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
2175 return FNIEMOP_CALL(g_apfnOneByteMap[b]);
2176 }
2177
2178 IEMOP_MNEMONIC(dec_eDI, "dec eDI");
2179 IEMOP_BODY_UNARY_GReg(iemAImpl_dec_u16, iemAImpl_dec_u32, X86_GREG_xDI);
2180}
2181
2182
2183/**
2184 * Common 'push register' helper.
2185 */
2186FNIEMOP_DEF_1(iemOpCommonPushGReg, uint8_t, iReg)
2187{
2188 if (IEM_IS_64BIT_CODE(pVCpu))
2189 {
2190 iReg |= pVCpu->iem.s.uRexB;
2191 pVCpu->iem.s.enmDefOpSize = IEMMODE_64BIT;
2192 pVCpu->iem.s.enmEffOpSize = !(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_OP) ? IEMMODE_64BIT : IEMMODE_16BIT;
2193 }
2194
2195 switch (pVCpu->iem.s.enmEffOpSize)
2196 {
2197 case IEMMODE_16BIT:
2198 IEM_MC_BEGIN(0, 1, 0, 0);
2199 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2200 IEM_MC_LOCAL(uint16_t, u16Value);
2201 IEM_MC_FETCH_GREG_U16(u16Value, iReg);
2202 IEM_MC_PUSH_U16(u16Value);
2203 IEM_MC_ADVANCE_RIP_AND_FINISH();
2204 IEM_MC_END();
2205 break;
2206
2207 case IEMMODE_32BIT:
2208 IEM_MC_BEGIN(0, 1, IEM_MC_F_MIN_386 | IEM_MC_F_NOT_64BIT, 0);
2209 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2210 IEM_MC_LOCAL(uint32_t, u32Value);
2211 IEM_MC_FETCH_GREG_U32(u32Value, iReg);
2212 IEM_MC_PUSH_U32(u32Value);
2213 IEM_MC_ADVANCE_RIP_AND_FINISH();
2214 IEM_MC_END();
2215 break;
2216
2217 case IEMMODE_64BIT:
2218 IEM_MC_BEGIN(0, 1, IEM_MC_F_64BIT, 0);
2219 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2220 IEM_MC_LOCAL(uint64_t, u64Value);
2221 IEM_MC_FETCH_GREG_U64(u64Value, iReg);
2222 IEM_MC_PUSH_U64(u64Value);
2223 IEM_MC_ADVANCE_RIP_AND_FINISH();
2224 IEM_MC_END();
2225 break;
2226
2227 IEM_NOT_REACHED_DEFAULT_CASE_RET();
2228 }
2229}
2230
2231
2232/**
2233 * @opcode 0x50
2234 */
2235FNIEMOP_DEF(iemOp_push_eAX)
2236{
2237 IEMOP_MNEMONIC(push_rAX, "push rAX");
2238 return FNIEMOP_CALL_1(iemOpCommonPushGReg, X86_GREG_xAX);
2239}
2240
2241
2242/**
2243 * @opcode 0x51
2244 */
2245FNIEMOP_DEF(iemOp_push_eCX)
2246{
2247 IEMOP_MNEMONIC(push_rCX, "push rCX");
2248 return FNIEMOP_CALL_1(iemOpCommonPushGReg, X86_GREG_xCX);
2249}
2250
2251
2252/**
2253 * @opcode 0x52
2254 */
2255FNIEMOP_DEF(iemOp_push_eDX)
2256{
2257 IEMOP_MNEMONIC(push_rDX, "push rDX");
2258 return FNIEMOP_CALL_1(iemOpCommonPushGReg, X86_GREG_xDX);
2259}
2260
2261
2262/**
2263 * @opcode 0x53
2264 */
2265FNIEMOP_DEF(iemOp_push_eBX)
2266{
2267 IEMOP_MNEMONIC(push_rBX, "push rBX");
2268 return FNIEMOP_CALL_1(iemOpCommonPushGReg, X86_GREG_xBX);
2269}
2270
2271
2272/**
2273 * @opcode 0x54
2274 */
2275FNIEMOP_DEF(iemOp_push_eSP)
2276{
2277 IEMOP_MNEMONIC(push_rSP, "push rSP");
2278 if (IEM_GET_TARGET_CPU(pVCpu) == IEMTARGETCPU_8086)
2279 {
2280 IEM_MC_BEGIN(0, 1, IEM_MC_F_ONLY_8086, 0);
2281 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2282 IEM_MC_LOCAL(uint16_t, u16Value);
2283 IEM_MC_FETCH_GREG_U16(u16Value, X86_GREG_xSP);
2284 IEM_MC_SUB_LOCAL_U16(u16Value, 2);
2285 IEM_MC_PUSH_U16(u16Value);
2286 IEM_MC_ADVANCE_RIP_AND_FINISH();
2287 IEM_MC_END();
2288 }
2289 return FNIEMOP_CALL_1(iemOpCommonPushGReg, X86_GREG_xSP);
2290}
2291
2292
2293/**
2294 * @opcode 0x55
2295 */
2296FNIEMOP_DEF(iemOp_push_eBP)
2297{
2298 IEMOP_MNEMONIC(push_rBP, "push rBP");
2299 return FNIEMOP_CALL_1(iemOpCommonPushGReg, X86_GREG_xBP);
2300}
2301
2302
2303/**
2304 * @opcode 0x56
2305 */
2306FNIEMOP_DEF(iemOp_push_eSI)
2307{
2308 IEMOP_MNEMONIC(push_rSI, "push rSI");
2309 return FNIEMOP_CALL_1(iemOpCommonPushGReg, X86_GREG_xSI);
2310}
2311
2312
2313/**
2314 * @opcode 0x57
2315 */
2316FNIEMOP_DEF(iemOp_push_eDI)
2317{
2318 IEMOP_MNEMONIC(push_rDI, "push rDI");
2319 return FNIEMOP_CALL_1(iemOpCommonPushGReg, X86_GREG_xDI);
2320}
2321
2322
2323/**
2324 * Common 'pop register' helper.
2325 */
2326FNIEMOP_DEF_1(iemOpCommonPopGReg, uint8_t, iReg)
2327{
2328 if (IEM_IS_64BIT_CODE(pVCpu))
2329 {
2330 iReg |= pVCpu->iem.s.uRexB;
2331 pVCpu->iem.s.enmDefOpSize = IEMMODE_64BIT;
2332 pVCpu->iem.s.enmEffOpSize = !(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_OP) ? IEMMODE_64BIT : IEMMODE_16BIT;
2333 }
2334
2335 switch (pVCpu->iem.s.enmEffOpSize)
2336 {
2337 case IEMMODE_16BIT:
2338 IEM_MC_BEGIN(0, 1, 0, 0);
2339 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2340 IEM_MC_LOCAL(uint16_t *, pu16Dst);
2341 IEM_MC_REF_GREG_U16(pu16Dst, iReg);
2342 IEM_MC_POP_U16(pu16Dst);
2343 IEM_MC_ADVANCE_RIP_AND_FINISH();
2344 IEM_MC_END();
2345 break;
2346
2347 case IEMMODE_32BIT:
2348 IEM_MC_BEGIN(0, 1, IEM_MC_F_MIN_386 | IEM_MC_F_NOT_64BIT, 0);
2349 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2350 IEM_MC_LOCAL(uint32_t *, pu32Dst);
2351 IEM_MC_REF_GREG_U32(pu32Dst, iReg);
2352 IEM_MC_POP_U32(pu32Dst);
2353 IEM_MC_CLEAR_HIGH_GREG_U64(iReg); /** @todo testcase*/
2354 IEM_MC_ADVANCE_RIP_AND_FINISH();
2355 IEM_MC_END();
2356 break;
2357
2358 case IEMMODE_64BIT:
2359 IEM_MC_BEGIN(0, 1, IEM_MC_F_64BIT, 0);
2360 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2361 IEM_MC_LOCAL(uint64_t *, pu64Dst);
2362 IEM_MC_REF_GREG_U64(pu64Dst, iReg);
2363 IEM_MC_POP_U64(pu64Dst);
2364 IEM_MC_ADVANCE_RIP_AND_FINISH();
2365 IEM_MC_END();
2366 break;
2367
2368 IEM_NOT_REACHED_DEFAULT_CASE_RET();
2369 }
2370}
2371
2372
2373/**
2374 * @opcode 0x58
2375 */
2376FNIEMOP_DEF(iemOp_pop_eAX)
2377{
2378 IEMOP_MNEMONIC(pop_rAX, "pop rAX");
2379 return FNIEMOP_CALL_1(iemOpCommonPopGReg, X86_GREG_xAX);
2380}
2381
2382
2383/**
2384 * @opcode 0x59
2385 */
2386FNIEMOP_DEF(iemOp_pop_eCX)
2387{
2388 IEMOP_MNEMONIC(pop_rCX, "pop rCX");
2389 return FNIEMOP_CALL_1(iemOpCommonPopGReg, X86_GREG_xCX);
2390}
2391
2392
2393/**
2394 * @opcode 0x5a
2395 */
2396FNIEMOP_DEF(iemOp_pop_eDX)
2397{
2398 IEMOP_MNEMONIC(pop_rDX, "pop rDX");
2399 return FNIEMOP_CALL_1(iemOpCommonPopGReg, X86_GREG_xDX);
2400}
2401
2402
2403/**
2404 * @opcode 0x5b
2405 */
2406FNIEMOP_DEF(iemOp_pop_eBX)
2407{
2408 IEMOP_MNEMONIC(pop_rBX, "pop rBX");
2409 return FNIEMOP_CALL_1(iemOpCommonPopGReg, X86_GREG_xBX);
2410}
2411
2412
2413/**
2414 * @opcode 0x5c
2415 */
2416FNIEMOP_DEF(iemOp_pop_eSP)
2417{
2418 IEMOP_MNEMONIC(pop_rSP, "pop rSP");
2419 if (IEM_IS_64BIT_CODE(pVCpu))
2420 {
2421 if (pVCpu->iem.s.uRexB)
2422 return FNIEMOP_CALL_1(iemOpCommonPopGReg, X86_GREG_xSP);
2423 pVCpu->iem.s.enmDefOpSize = IEMMODE_64BIT;
2424 pVCpu->iem.s.enmEffOpSize = !(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_OP) ? IEMMODE_64BIT : IEMMODE_16BIT;
2425 }
2426
2427 /** @todo add testcase for this instruction. */
2428 switch (pVCpu->iem.s.enmEffOpSize)
2429 {
2430 case IEMMODE_16BIT:
2431 IEM_MC_BEGIN(0, 1, 0, 0);
2432 IEMOP_HLP_DECODED_NL_1(OP_POP, IEMOPFORM_FIXED, OP_PARM_REG_ESP,
2433 DISOPTYPE_HARMLESS | DISOPTYPE_X86_DEFAULT_64_OP_SIZE | DISOPTYPE_X86_REXB_EXTENDS_OPREG);
2434 IEM_MC_LOCAL(uint16_t, u16Dst);
2435 IEM_MC_POP_U16(&u16Dst); /** @todo not correct MC, fix later. */
2436 IEM_MC_STORE_GREG_U16(X86_GREG_xSP, u16Dst);
2437 IEM_MC_ADVANCE_RIP_AND_FINISH();
2438 IEM_MC_END();
2439 break;
2440
2441 case IEMMODE_32BIT:
2442 IEM_MC_BEGIN(0, 1, IEM_MC_F_MIN_386, 0);
2443 IEMOP_HLP_DECODED_NL_1(OP_POP, IEMOPFORM_FIXED, OP_PARM_REG_ESP,
2444 DISOPTYPE_HARMLESS | DISOPTYPE_X86_DEFAULT_64_OP_SIZE | DISOPTYPE_X86_REXB_EXTENDS_OPREG);
2445 IEM_MC_LOCAL(uint32_t, u32Dst);
2446 IEM_MC_POP_U32(&u32Dst);
2447 IEM_MC_STORE_GREG_U32(X86_GREG_xSP, u32Dst);
2448 IEM_MC_ADVANCE_RIP_AND_FINISH();
2449 IEM_MC_END();
2450 break;
2451
2452 case IEMMODE_64BIT:
2453 IEM_MC_BEGIN(0, 1, IEM_MC_F_64BIT, 0);
2454 IEMOP_HLP_DECODED_NL_1(OP_POP, IEMOPFORM_FIXED, OP_PARM_REG_ESP,
2455 DISOPTYPE_HARMLESS | DISOPTYPE_X86_DEFAULT_64_OP_SIZE | DISOPTYPE_X86_REXB_EXTENDS_OPREG);
2456 IEM_MC_LOCAL(uint64_t, u64Dst);
2457 IEM_MC_POP_U64(&u64Dst);
2458 IEM_MC_STORE_GREG_U64(X86_GREG_xSP, u64Dst);
2459 IEM_MC_ADVANCE_RIP_AND_FINISH();
2460 IEM_MC_END();
2461 break;
2462
2463 IEM_NOT_REACHED_DEFAULT_CASE_RET();
2464 }
2465}
2466
2467
2468/**
2469 * @opcode 0x5d
2470 */
2471FNIEMOP_DEF(iemOp_pop_eBP)
2472{
2473 IEMOP_MNEMONIC(pop_rBP, "pop rBP");
2474 return FNIEMOP_CALL_1(iemOpCommonPopGReg, X86_GREG_xBP);
2475}
2476
2477
2478/**
2479 * @opcode 0x5e
2480 */
2481FNIEMOP_DEF(iemOp_pop_eSI)
2482{
2483 IEMOP_MNEMONIC(pop_rSI, "pop rSI");
2484 return FNIEMOP_CALL_1(iemOpCommonPopGReg, X86_GREG_xSI);
2485}
2486
2487
2488/**
2489 * @opcode 0x5f
2490 */
2491FNIEMOP_DEF(iemOp_pop_eDI)
2492{
2493 IEMOP_MNEMONIC(pop_rDI, "pop rDI");
2494 return FNIEMOP_CALL_1(iemOpCommonPopGReg, X86_GREG_xDI);
2495}
2496
2497
2498/**
2499 * @opcode 0x60
2500 */
2501FNIEMOP_DEF(iemOp_pusha)
2502{
2503 IEMOP_MNEMONIC(pusha, "pusha");
2504 IEMOP_HLP_MIN_186();
2505 IEMOP_HLP_NO_64BIT();
2506 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
2507 IEM_MC_DEFER_TO_CIMPL_0_RET(0, iemCImpl_pusha_16);
2508 Assert(pVCpu->iem.s.enmEffOpSize == IEMMODE_32BIT);
2509 IEM_MC_DEFER_TO_CIMPL_0_RET(0, iemCImpl_pusha_32);
2510}
2511
2512
2513/**
2514 * @opcode 0x61
2515 */
2516FNIEMOP_DEF(iemOp_popa__mvex)
2517{
2518 if (!IEM_IS_64BIT_CODE(pVCpu))
2519 {
2520 IEMOP_MNEMONIC(popa, "popa");
2521 IEMOP_HLP_MIN_186();
2522 IEMOP_HLP_NO_64BIT();
2523 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
2524 IEM_MC_DEFER_TO_CIMPL_0_RET(0, iemCImpl_popa_16);
2525 Assert(pVCpu->iem.s.enmEffOpSize == IEMMODE_32BIT);
2526 IEM_MC_DEFER_TO_CIMPL_0_RET(0, iemCImpl_popa_32);
2527 }
2528 IEMOP_MNEMONIC(mvex, "mvex");
2529 Log(("mvex prefix is not supported!\n"));
2530 IEMOP_RAISE_INVALID_OPCODE_RET();
2531}
2532
2533
2534/**
2535 * @opcode 0x62
2536 * @opmnemonic bound
2537 * @op1 Gv_RO
2538 * @op2 Ma
2539 * @opmincpu 80186
2540 * @ophints harmless x86_invalid_64
2541 * @optest op1=0 op2=0 ->
2542 * @optest op1=1 op2=0 -> value.xcpt=5
2543 * @optest o16 / op1=0xffff op2=0x0000fffe ->
2544 * @optest o16 / op1=0xfffe op2=0x0000fffe ->
2545 * @optest o16 / op1=0x7fff op2=0x0000fffe -> value.xcpt=5
2546 * @optest o16 / op1=0x7fff op2=0x7ffffffe ->
2547 * @optest o16 / op1=0x7fff op2=0xfffe8000 -> value.xcpt=5
2548 * @optest o16 / op1=0x8000 op2=0xfffe8000 ->
2549 * @optest o16 / op1=0xffff op2=0xfffe8000 -> value.xcpt=5
2550 * @optest o16 / op1=0xfffe op2=0xfffe8000 ->
2551 * @optest o16 / op1=0xfffe op2=0x8000fffe -> value.xcpt=5
2552 * @optest o16 / op1=0x8000 op2=0x8000fffe -> value.xcpt=5
2553 * @optest o16 / op1=0x0000 op2=0x8000fffe -> value.xcpt=5
2554 * @optest o16 / op1=0x0001 op2=0x8000fffe -> value.xcpt=5
2555 * @optest o16 / op1=0xffff op2=0x0001000f -> value.xcpt=5
2556 * @optest o16 / op1=0x0000 op2=0x0001000f -> value.xcpt=5
2557 * @optest o16 / op1=0x0001 op2=0x0001000f -> value.xcpt=5
2558 * @optest o16 / op1=0x0002 op2=0x0001000f -> value.xcpt=5
2559 * @optest o16 / op1=0x0003 op2=0x0001000f -> value.xcpt=5
2560 * @optest o16 / op1=0x0004 op2=0x0001000f -> value.xcpt=5
2561 * @optest o16 / op1=0x000e op2=0x0001000f -> value.xcpt=5
2562 * @optest o16 / op1=0x000f op2=0x0001000f -> value.xcpt=5
2563 * @optest o16 / op1=0x0010 op2=0x0001000f -> value.xcpt=5
2564 * @optest o16 / op1=0x0011 op2=0x0001000f -> value.xcpt=5
2565 * @optest o32 / op1=0xffffffff op2=0x00000000fffffffe ->
2566 * @optest o32 / op1=0xfffffffe op2=0x00000000fffffffe ->
2567 * @optest o32 / op1=0x7fffffff op2=0x00000000fffffffe -> value.xcpt=5
2568 * @optest o32 / op1=0x7fffffff op2=0x7ffffffffffffffe ->
2569 * @optest o32 / op1=0x7fffffff op2=0xfffffffe80000000 -> value.xcpt=5
2570 * @optest o32 / op1=0x80000000 op2=0xfffffffe80000000 ->
2571 * @optest o32 / op1=0xffffffff op2=0xfffffffe80000000 -> value.xcpt=5
2572 * @optest o32 / op1=0xfffffffe op2=0xfffffffe80000000 ->
2573 * @optest o32 / op1=0xfffffffe op2=0x80000000fffffffe -> value.xcpt=5
2574 * @optest o32 / op1=0x80000000 op2=0x80000000fffffffe -> value.xcpt=5
2575 * @optest o32 / op1=0x00000000 op2=0x80000000fffffffe -> value.xcpt=5
2576 * @optest o32 / op1=0x00000002 op2=0x80000000fffffffe -> value.xcpt=5
2577 * @optest o32 / op1=0x00000001 op2=0x0000000100000003 -> value.xcpt=5
2578 * @optest o32 / op1=0x00000002 op2=0x0000000100000003 -> value.xcpt=5
2579 * @optest o32 / op1=0x00000003 op2=0x0000000100000003 -> value.xcpt=5
2580 * @optest o32 / op1=0x00000004 op2=0x0000000100000003 -> value.xcpt=5
2581 * @optest o32 / op1=0x00000005 op2=0x0000000100000003 -> value.xcpt=5
2582 * @optest o32 / op1=0x0000000e op2=0x0000000100000003 -> value.xcpt=5
2583 * @optest o32 / op1=0x0000000f op2=0x0000000100000003 -> value.xcpt=5
2584 * @optest o32 / op1=0x00000010 op2=0x0000000100000003 -> value.xcpt=5
2585 */
FNIEMOP_DEF(iemOp_bound_Gv_Ma__evex)
{
    /* The BOUND instruction is invalid 64-bit mode. In legacy and
       compatibility mode it is invalid with MOD=3.

       In 32-bit mode, the EVEX prefix works by having the top two bits (MOD)
       both be set.  In the Intel EVEX documentation (sdm vol 2) these are simply
       given as R and X without an exact description, so we assume it builds on
       the VEX one and means they are inverted wrt REX.R and REX.X.  Thus, just
       like with the 3-byte VEX, 32-bit code is restricted wrt addressable registers. */
    uint8_t bRm;
    if (!IEM_IS_64BIT_CODE(pVCpu))
    {
        IEMOP_MNEMONIC2(RM_MEM, BOUND, bound, Gv_RO, Ma, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
        IEMOP_HLP_MIN_186();
        IEM_OPCODE_GET_NEXT_U8(&bRm);
        if (IEM_IS_MODRM_MEM_MODE(bRm))
        {
            /** @todo testcase: check that there are two memory accesses involved. Check
             *        whether they're both read before the \#BR triggers. */
            if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
            {
                /* 16-bit: index in Gv, signed lower/upper bounds as two
                   consecutive words at the memory operand. */
                IEM_MC_BEGIN(3, 1, IEM_MC_F_MIN_186 | IEM_MC_F_NOT_64BIT, 0);
                IEM_MC_ARG(uint16_t, u16Index, 0); /* Note! All operands are actually signed. Lazy unsigned bird. */
                IEM_MC_ARG(uint16_t, u16LowerBounds, 1);
                IEM_MC_ARG(uint16_t, u16UpperBounds, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

                IEM_MC_FETCH_GREG_U16(u16Index, IEM_GET_MODRM_REG_8(bRm));
                IEM_MC_FETCH_MEM_U16(u16LowerBounds, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
                IEM_MC_FETCH_MEM_U16_DISP(u16UpperBounds, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, 2);

                IEM_MC_CALL_CIMPL_3(0, iemCImpl_bound_16, u16Index, u16LowerBounds, u16UpperBounds); /* returns */
                IEM_MC_END();
            }
            else /* 32-bit operands */
            {
                /* 32-bit: same layout with dword bounds. */
                IEM_MC_BEGIN(3, 1, IEM_MC_F_MIN_386 | IEM_MC_F_NOT_64BIT, 0);
                IEM_MC_ARG(uint32_t, u32Index, 0); /* Note! All operands are actually signed. Lazy unsigned bird. */
                IEM_MC_ARG(uint32_t, u32LowerBounds, 1);
                IEM_MC_ARG(uint32_t, u32UpperBounds, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

                IEM_MC_FETCH_GREG_U32(u32Index, IEM_GET_MODRM_REG_8(bRm));
                IEM_MC_FETCH_MEM_U32(u32LowerBounds, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
                IEM_MC_FETCH_MEM_U32_DISP(u32UpperBounds, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, 4);

                IEM_MC_CALL_CIMPL_3(0, iemCImpl_bound_32, u32Index, u32LowerBounds, u32UpperBounds); /* returns */
                IEM_MC_END();
            }
        }

        /*
         * @opdone
         */
        if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fAvx512Foundation)
        {
            /* Note that there is no need for the CPU to fetch further bytes
               here because MODRM.MOD == 3. */
            Log(("evex not supported by the guest CPU!\n"));
            IEMOP_RAISE_INVALID_OPCODE_RET();
        }
    }
    else
    {
        /** @todo check how this is decoded in 64-bit mode w/o EVEX. Intel probably
         *        does modr/m read, whereas AMD probably doesn't... */
        if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fAvx512Foundation)
        {
            Log(("evex not supported by the guest CPU!\n"));
            return FNIEMOP_CALL(iemOp_InvalidAllNeedRM);
        }
        IEM_OPCODE_GET_NEXT_U8(&bRm);
    }

    /* Fall through: MOD=3 with AVX-512 available means an EVEX prefix; fetch
       the remaining two payload bytes, but decoding it is not implemented. */
    IEMOP_MNEMONIC(evex, "evex");
    uint8_t bP2; IEM_OPCODE_GET_NEXT_U8(&bP2);
    uint8_t bP3; IEM_OPCODE_GET_NEXT_U8(&bP3);
    Log(("evex prefix is not implemented!\n"));
    return VERR_IEM_INSTR_NOT_IMPLEMENTED;
}
2673
2674
/** Opcode 0x63 - non-64-bit modes.
 * ARPL Ew,Gw: adjusts the RPL of the destination selector; requires 286+ and
 * protected mode (invalid in real and V86 mode). */
FNIEMOP_DEF(iemOp_arpl_Ew_Gw)
{
    IEMOP_MNEMONIC(arpl_Ew_Gw, "arpl Ew,Gw");
    IEMOP_HLP_MIN_286();
    IEMOP_HLP_NO_REAL_OR_V86_MODE();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /* Register */
        IEM_MC_BEGIN(3, 0, IEM_MC_F_MIN_286 | IEM_MC_F_NOT_64BIT, 0);
        IEMOP_HLP_DECODED_NL_2(OP_ARPL, IEMOPFORM_MR_REG, OP_PARM_Ew, OP_PARM_Gw, DISOPTYPE_HARMLESS);
        IEM_MC_ARG(uint16_t *, pu16Dst, 0);
        IEM_MC_ARG(uint16_t, u16Src, 1);
        IEM_MC_ARG(uint32_t *, pEFlags, 2);

        IEM_MC_FETCH_GREG_U16(u16Src, IEM_GET_MODRM_REG_8(bRm));
        IEM_MC_REF_GREG_U16(pu16Dst, IEM_GET_MODRM_RM_8(bRm));
        IEM_MC_REF_EFLAGS(pEFlags);
        IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_arpl, pu16Dst, u16Src, pEFlags);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /* Memory: map the destination word read-write, apply arpl, then
           commit the result and the flags. */
        IEM_MC_BEGIN(3, 3, IEM_MC_F_MIN_286 | IEM_MC_F_NOT_64BIT, 0);
        IEM_MC_ARG(uint16_t *, pu16Dst, 0);
        IEM_MC_ARG(uint16_t, u16Src, 1);
        IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_LOCAL(uint8_t, bUnmapInfo);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DECODED_NL_2(OP_ARPL, IEMOPFORM_MR_REG, OP_PARM_Ew, OP_PARM_Gw, DISOPTYPE_HARMLESS);
        IEM_MC_MEM_MAP_U16_RW(pu16Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
        IEM_MC_FETCH_GREG_U16(u16Src, IEM_GET_MODRM_REG_8(bRm));
        IEM_MC_FETCH_EFLAGS(EFlags);
        IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_arpl, pu16Dst, u16Src, pEFlags);

        IEM_MC_MEM_COMMIT_AND_UNMAP_RW(pu16Dst, bUnmapInfo);
        IEM_MC_COMMIT_EFLAGS(EFlags);
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}
2723
2724
2725/**
2726 * @opcode 0x63
2727 *
2728 * @note This is a weird one. It works like a regular move instruction if
2729 * REX.W isn't set, at least according to AMD docs (rev 3.15, 2009-11).
2730 * @todo This definitely needs a testcase to verify the odd cases. */
2731FNIEMOP_DEF(iemOp_movsxd_Gv_Ev)
2732{
2733 Assert(pVCpu->iem.s.enmEffOpSize == IEMMODE_64BIT); /* Caller branched already . */
2734
2735 IEMOP_MNEMONIC(movsxd_Gv_Ev, "movsxd Gv,Ev");
2736 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2737
2738 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
2739 {
2740 if (IEM_IS_MODRM_REG_MODE(bRm))
2741 {
2742 /*
2743 * Register to register.
2744 */
2745 IEM_MC_BEGIN(0, 1, IEM_MC_F_64BIT, 0);
2746 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2747 IEM_MC_LOCAL(uint64_t, u64Value);
2748 IEM_MC_FETCH_GREG_U32_SX_U64(u64Value, IEM_GET_MODRM_RM(pVCpu, bRm));
2749 IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), u64Value);
2750 IEM_MC_ADVANCE_RIP_AND_FINISH();
2751 IEM_MC_END();
2752 }
2753 else
2754 {
2755 /*
2756 * We're loading a register from memory.
2757 */
2758 IEM_MC_BEGIN(0, 2, IEM_MC_F_64BIT, 0);
2759 IEM_MC_LOCAL(uint64_t, u64Value);
2760 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
2761 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
2762 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2763 IEM_MC_FETCH_MEM_U32_SX_U64(u64Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
2764 IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), u64Value);
2765 IEM_MC_ADVANCE_RIP_AND_FINISH();
2766 IEM_MC_END();
2767 }
2768 }
2769 else
2770 AssertFailedReturn(VERR_IEM_INSTR_NOT_IMPLEMENTED);
2771}
2772
2773
2774/**
2775 * @opcode 0x64
2776 * @opmnemonic segfs
2777 * @opmincpu 80386
2778 * @opgroup og_prefixes
2779 */
2780FNIEMOP_DEF(iemOp_seg_FS)
2781{
2782 IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("seg fs");
2783 IEMOP_HLP_MIN_386();
2784
2785 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_SEG_FS;
2786 pVCpu->iem.s.iEffSeg = X86_SREG_FS;
2787
2788 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
2789 return FNIEMOP_CALL(g_apfnOneByteMap[b]);
2790}
2791
2792
2793/**
2794 * @opcode 0x65
2795 * @opmnemonic seggs
2796 * @opmincpu 80386
2797 * @opgroup og_prefixes
2798 */
2799FNIEMOP_DEF(iemOp_seg_GS)
2800{
2801 IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("seg gs");
2802 IEMOP_HLP_MIN_386();
2803
2804 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_SEG_GS;
2805 pVCpu->iem.s.iEffSeg = X86_SREG_GS;
2806
2807 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
2808 return FNIEMOP_CALL(g_apfnOneByteMap[b]);
2809}
2810
2811
2812/**
2813 * @opcode 0x66
2814 * @opmnemonic opsize
2815 * @openc prefix
2816 * @opmincpu 80386
2817 * @ophints harmless
2818 * @opgroup og_prefixes
2819 */
2820FNIEMOP_DEF(iemOp_op_size)
2821{
2822 IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("op size");
2823 IEMOP_HLP_MIN_386();
2824
2825 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_SIZE_OP;
2826 iemRecalEffOpSize(pVCpu);
2827
2828 /* For the 4 entry opcode tables, the operand prefix doesn't not count
2829 when REPZ or REPNZ are present. */
2830 if (pVCpu->iem.s.idxPrefix == 0)
2831 pVCpu->iem.s.idxPrefix = 1;
2832
2833 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
2834 return FNIEMOP_CALL(g_apfnOneByteMap[b]);
2835}
2836
2837
2838/**
2839 * @opcode 0x67
2840 * @opmnemonic addrsize
2841 * @openc prefix
2842 * @opmincpu 80386
2843 * @ophints harmless
2844 * @opgroup og_prefixes
2845 */
2846FNIEMOP_DEF(iemOp_addr_size)
2847{
2848 IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("addr size");
2849 IEMOP_HLP_MIN_386();
2850
2851 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_SIZE_ADDR;
2852 switch (pVCpu->iem.s.enmDefAddrMode)
2853 {
2854 case IEMMODE_16BIT: pVCpu->iem.s.enmEffAddrMode = IEMMODE_32BIT; break;
2855 case IEMMODE_32BIT: pVCpu->iem.s.enmEffAddrMode = IEMMODE_16BIT; break;
2856 case IEMMODE_64BIT: pVCpu->iem.s.enmEffAddrMode = IEMMODE_32BIT; break;
2857 default: AssertFailed();
2858 }
2859
2860 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
2861 return FNIEMOP_CALL(g_apfnOneByteMap[b]);
2862}
2863
2864
2865/**
2866 * @opcode 0x68
2867 */
2868FNIEMOP_DEF(iemOp_push_Iz)
2869{
2870 IEMOP_MNEMONIC(push_Iz, "push Iz");
2871 IEMOP_HLP_MIN_186();
2872 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
2873 switch (pVCpu->iem.s.enmEffOpSize)
2874 {
2875 case IEMMODE_16BIT:
2876 IEM_MC_BEGIN(0, 0, IEM_MC_F_MIN_186, 0);
2877 uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
2878 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2879 IEM_MC_PUSH_U16(u16Imm);
2880 IEM_MC_ADVANCE_RIP_AND_FINISH();
2881 IEM_MC_END();
2882 break;
2883
2884 case IEMMODE_32BIT:
2885 IEM_MC_BEGIN(0, 0, IEM_MC_F_MIN_386 | IEM_MC_F_NOT_64BIT, 0);
2886 uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
2887 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2888 IEM_MC_PUSH_U32(u32Imm);
2889 IEM_MC_ADVANCE_RIP_AND_FINISH();
2890 IEM_MC_END();
2891 break;
2892
2893 case IEMMODE_64BIT:
2894 IEM_MC_BEGIN(0, 0, IEM_MC_F_64BIT, 0);
2895 uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm);
2896 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2897 IEM_MC_PUSH_U64(u64Imm);
2898 IEM_MC_ADVANCE_RIP_AND_FINISH();
2899 IEM_MC_END();
2900 break;
2901
2902 IEM_NOT_REACHED_DEFAULT_CASE_RET();
2903 }
2904}
2905
2906
2907/**
2908 * @opcode 0x69
2909 */
2910FNIEMOP_DEF(iemOp_imul_Gv_Ev_Iz)
2911{
2912 IEMOP_MNEMONIC(imul_Gv_Ev_Iz, "imul Gv,Ev,Iz"); /* Gv = Ev * Iz; */
2913 IEMOP_HLP_MIN_186();
2914 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2915 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);
2916
2917 switch (pVCpu->iem.s.enmEffOpSize)
2918 {
2919 case IEMMODE_16BIT:
2920 {
2921 PFNIEMAIMPLBINU16 const pfnAImplU16 = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_imul_two_u16_eflags);
2922 if (IEM_IS_MODRM_REG_MODE(bRm))
2923 {
2924 /* register operand */
2925 uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
2926 IEM_MC_BEGIN(3, 1, IEM_MC_F_MIN_186, 0);
2927 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2928 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
2929 IEM_MC_ARG_CONST(uint16_t, u16Src,/*=*/ u16Imm,1);
2930 IEM_MC_ARG(uint32_t *, pEFlags, 2);
2931 IEM_MC_LOCAL(uint16_t, u16Tmp);
2932
2933 IEM_MC_FETCH_GREG_U16(u16Tmp, IEM_GET_MODRM_RM(pVCpu, bRm));
2934 IEM_MC_REF_LOCAL(pu16Dst, u16Tmp);
2935 IEM_MC_REF_EFLAGS(pEFlags);
2936 IEM_MC_CALL_VOID_AIMPL_3(pfnAImplU16, pu16Dst, u16Src, pEFlags);
2937 IEM_MC_STORE_GREG_U16(IEM_GET_MODRM_REG(pVCpu, bRm), u16Tmp);
2938
2939 IEM_MC_ADVANCE_RIP_AND_FINISH();
2940 IEM_MC_END();
2941 }
2942 else
2943 {
2944 /* memory operand */
2945 IEM_MC_BEGIN(3, 2, IEM_MC_F_MIN_186, 0);
2946 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
2947 IEM_MC_ARG(uint16_t, u16Src, 1);
2948 IEM_MC_ARG(uint32_t *, pEFlags, 2);
2949 IEM_MC_LOCAL(uint16_t, u16Tmp);
2950 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
2951
2952 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 2);
2953 uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
2954 IEM_MC_ASSIGN(u16Src, u16Imm);
2955 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2956 IEM_MC_FETCH_MEM_U16(u16Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
2957 IEM_MC_REF_LOCAL(pu16Dst, u16Tmp);
2958 IEM_MC_REF_EFLAGS(pEFlags);
2959 IEM_MC_CALL_VOID_AIMPL_3(pfnAImplU16, pu16Dst, u16Src, pEFlags);
2960 IEM_MC_STORE_GREG_U16(IEM_GET_MODRM_REG(pVCpu, bRm), u16Tmp);
2961
2962 IEM_MC_ADVANCE_RIP_AND_FINISH();
2963 IEM_MC_END();
2964 }
2965 break;
2966 }
2967
2968 case IEMMODE_32BIT:
2969 {
2970 PFNIEMAIMPLBINU32 const pfnAImplU32 = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_imul_two_u32_eflags);
2971 if (IEM_IS_MODRM_REG_MODE(bRm))
2972 {
2973 /* register operand */
2974 uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
2975 IEM_MC_BEGIN(3, 1, IEM_MC_F_MIN_386, 0);
2976 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2977 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
2978 IEM_MC_ARG_CONST(uint32_t, u32Src,/*=*/ u32Imm,1);
2979 IEM_MC_ARG(uint32_t *, pEFlags, 2);
2980 IEM_MC_LOCAL(uint32_t, u32Tmp);
2981
2982 IEM_MC_FETCH_GREG_U32(u32Tmp, IEM_GET_MODRM_RM(pVCpu, bRm));
2983 IEM_MC_REF_LOCAL(pu32Dst, u32Tmp);
2984 IEM_MC_REF_EFLAGS(pEFlags);
2985 IEM_MC_CALL_VOID_AIMPL_3(pfnAImplU32, pu32Dst, u32Src, pEFlags);
2986 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), u32Tmp);
2987
2988 IEM_MC_ADVANCE_RIP_AND_FINISH();
2989 IEM_MC_END();
2990 }
2991 else
2992 {
2993 /* memory operand */
2994 IEM_MC_BEGIN(3, 2, IEM_MC_F_MIN_386, 0);
2995 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
2996 IEM_MC_ARG(uint32_t, u32Src, 1);
2997 IEM_MC_ARG(uint32_t *, pEFlags, 2);
2998 IEM_MC_LOCAL(uint32_t, u32Tmp);
2999 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
3000
3001 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 4);
3002 uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
3003 IEM_MC_ASSIGN(u32Src, u32Imm);
3004 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3005 IEM_MC_FETCH_MEM_U32(u32Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
3006 IEM_MC_REF_LOCAL(pu32Dst, u32Tmp);
3007 IEM_MC_REF_EFLAGS(pEFlags);
3008 IEM_MC_CALL_VOID_AIMPL_3(pfnAImplU32, pu32Dst, u32Src, pEFlags);
3009 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), u32Tmp);
3010
3011 IEM_MC_ADVANCE_RIP_AND_FINISH();
3012 IEM_MC_END();
3013 }
3014 break;
3015 }
3016
3017 case IEMMODE_64BIT:
3018 {
3019 PFNIEMAIMPLBINU64 const pfnAImplU64 = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_imul_two_u64_eflags);
3020 if (IEM_IS_MODRM_REG_MODE(bRm))
3021 {
3022 /* register operand */
3023 uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm);
3024 IEM_MC_BEGIN(3, 1, IEM_MC_F_64BIT, 0);
3025 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3026 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
3027 IEM_MC_ARG_CONST(uint64_t, u64Src,/*=*/ u64Imm,1);
3028 IEM_MC_ARG(uint32_t *, pEFlags, 2);
3029 IEM_MC_LOCAL(uint64_t, u64Tmp);
3030
3031 IEM_MC_FETCH_GREG_U64(u64Tmp, IEM_GET_MODRM_RM(pVCpu, bRm));
3032 IEM_MC_REF_LOCAL(pu64Dst, u64Tmp);
3033 IEM_MC_REF_EFLAGS(pEFlags);
3034 IEM_MC_CALL_VOID_AIMPL_3(pfnAImplU64, pu64Dst, u64Src, pEFlags);
3035 IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), u64Tmp);
3036
3037 IEM_MC_ADVANCE_RIP_AND_FINISH();
3038 IEM_MC_END();
3039 }
3040 else
3041 {
3042 /* memory operand */
3043 IEM_MC_BEGIN(3, 2, IEM_MC_F_64BIT, 0);
3044 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
3045 IEM_MC_ARG(uint64_t, u64Src, 1);
3046 IEM_MC_ARG(uint32_t *, pEFlags, 2);
3047 IEM_MC_LOCAL(uint64_t, u64Tmp);
3048 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
3049
3050 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 4);
3051 uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm); /* Not using IEM_OPCODE_GET_NEXT_S32_SX_U64 to reduce the */
3052 IEM_MC_ASSIGN_U32_SX_U64(u64Src, u32Imm); /* parameter count for the threaded function for this block. */
3053 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3054 IEM_MC_FETCH_MEM_U64(u64Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
3055 IEM_MC_REF_LOCAL(pu64Dst, u64Tmp);
3056 IEM_MC_REF_EFLAGS(pEFlags);
3057 IEM_MC_CALL_VOID_AIMPL_3(pfnAImplU64, pu64Dst, u64Src, pEFlags);
3058 IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), u64Tmp);
3059
3060 IEM_MC_ADVANCE_RIP_AND_FINISH();
3061 IEM_MC_END();
3062 }
3063 break;
3064 }
3065
3066 IEM_NOT_REACHED_DEFAULT_CASE_RET();
3067 }
3068}
3069
3070
3071/**
3072 * @opcode 0x6a
3073 */
3074FNIEMOP_DEF(iemOp_push_Ib)
3075{
3076 IEMOP_MNEMONIC(push_Ib, "push Ib");
3077 IEMOP_HLP_MIN_186();
3078 int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
3079 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
3080
3081 switch (pVCpu->iem.s.enmEffOpSize)
3082 {
3083 case IEMMODE_16BIT:
3084 IEM_MC_BEGIN(0, 0, IEM_MC_F_MIN_186, 0);
3085 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3086 IEM_MC_PUSH_U16(i8Imm);
3087 IEM_MC_ADVANCE_RIP_AND_FINISH();
3088 IEM_MC_END();
3089 break;
3090 case IEMMODE_32BIT:
3091 IEM_MC_BEGIN(0, 0, IEM_MC_F_MIN_386 | IEM_MC_F_NOT_64BIT, 0);
3092 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3093 IEM_MC_PUSH_U32(i8Imm);
3094 IEM_MC_ADVANCE_RIP_AND_FINISH();
3095 IEM_MC_END();
3096 break;
3097 case IEMMODE_64BIT:
3098 IEM_MC_BEGIN(0, 0, IEM_MC_F_64BIT, 0);
3099 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3100 IEM_MC_PUSH_U64(i8Imm);
3101 IEM_MC_ADVANCE_RIP_AND_FINISH();
3102 IEM_MC_END();
3103 break;
3104 IEM_NOT_REACHED_DEFAULT_CASE_RET();
3105 }
3106}
3107
3108
3109/**
3110 * @opcode 0x6b
3111 */
3112FNIEMOP_DEF(iemOp_imul_Gv_Ev_Ib)
3113{
3114 IEMOP_MNEMONIC(imul_Gv_Ev_Ib, "imul Gv,Ev,Ib"); /* Gv = Ev * Iz; */
3115 IEMOP_HLP_MIN_186();
3116 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3117 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);
3118
3119 switch (pVCpu->iem.s.enmEffOpSize)
3120 {
3121 case IEMMODE_16BIT:
3122 {
3123 PFNIEMAIMPLBINU16 const pfnAImplU16 = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_imul_two_u16_eflags);
3124 if (IEM_IS_MODRM_REG_MODE(bRm))
3125 {
3126 /* register operand */
3127 IEM_MC_BEGIN(3, 1, IEM_MC_F_MIN_186, 0);
3128 uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
3129 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3130 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
3131 IEM_MC_ARG_CONST(uint16_t, u16Src,/*=*/ (int8_t)u8Imm, 1);
3132 IEM_MC_ARG(uint32_t *, pEFlags, 2);
3133 IEM_MC_LOCAL(uint16_t, u16Tmp);
3134
3135 IEM_MC_FETCH_GREG_U16(u16Tmp, IEM_GET_MODRM_RM(pVCpu, bRm));
3136 IEM_MC_REF_LOCAL(pu16Dst, u16Tmp);
3137 IEM_MC_REF_EFLAGS(pEFlags);
3138 IEM_MC_CALL_VOID_AIMPL_3(pfnAImplU16, pu16Dst, u16Src, pEFlags);
3139 IEM_MC_STORE_GREG_U16(IEM_GET_MODRM_REG(pVCpu, bRm), u16Tmp);
3140
3141 IEM_MC_ADVANCE_RIP_AND_FINISH();
3142 IEM_MC_END();
3143 }
3144 else
3145 {
3146 /* memory operand */
3147 IEM_MC_BEGIN(3, 2, IEM_MC_F_MIN_186, 0);
3148 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
3149 IEM_MC_ARG(uint16_t, u16Src, 1);
3150 IEM_MC_ARG(uint32_t *, pEFlags, 2);
3151 IEM_MC_LOCAL(uint16_t, u16Tmp);
3152 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
3153
3154 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
3155 uint16_t u16Imm; IEM_OPCODE_GET_NEXT_S8_SX_U16(&u16Imm);
3156 IEM_MC_ASSIGN(u16Src, u16Imm);
3157 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3158 IEM_MC_FETCH_MEM_U16(u16Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
3159 IEM_MC_REF_LOCAL(pu16Dst, u16Tmp);
3160 IEM_MC_REF_EFLAGS(pEFlags);
3161 IEM_MC_CALL_VOID_AIMPL_3(pfnAImplU16, pu16Dst, u16Src, pEFlags);
3162 IEM_MC_STORE_GREG_U16(IEM_GET_MODRM_REG(pVCpu, bRm), u16Tmp);
3163
3164 IEM_MC_ADVANCE_RIP_AND_FINISH();
3165 IEM_MC_END();
3166 }
3167 break;
3168 }
3169
3170 case IEMMODE_32BIT:
3171 {
3172 PFNIEMAIMPLBINU32 const pfnAImplU32 = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_imul_two_u32_eflags);
3173 if (IEM_IS_MODRM_REG_MODE(bRm))
3174 {
3175 /* register operand */
3176 uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
3177 IEM_MC_BEGIN(3, 1, IEM_MC_F_MIN_386, 0);
3178 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3179 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
3180 IEM_MC_ARG_CONST(uint32_t, u32Src,/*=*/ (int8_t)u8Imm, 1);
3181 IEM_MC_ARG(uint32_t *, pEFlags, 2);
3182 IEM_MC_LOCAL(uint32_t, u32Tmp);
3183
3184 IEM_MC_FETCH_GREG_U32(u32Tmp, IEM_GET_MODRM_RM(pVCpu, bRm));
3185 IEM_MC_REF_LOCAL(pu32Dst, u32Tmp);
3186 IEM_MC_REF_EFLAGS(pEFlags);
3187 IEM_MC_CALL_VOID_AIMPL_3(pfnAImplU32, pu32Dst, u32Src, pEFlags);
3188 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), u32Tmp);
3189
3190 IEM_MC_ADVANCE_RIP_AND_FINISH();
3191 IEM_MC_END();
3192 }
3193 else
3194 {
3195 /* memory operand */
3196 IEM_MC_BEGIN(3, 2, IEM_MC_F_MIN_386, 0);
3197 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
3198 IEM_MC_ARG(uint32_t, u32Src, 1);
3199 IEM_MC_ARG(uint32_t *, pEFlags, 2);
3200 IEM_MC_LOCAL(uint32_t, u32Tmp);
3201 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
3202
3203 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
3204 uint32_t u32Imm; IEM_OPCODE_GET_NEXT_S8_SX_U32(&u32Imm);
3205 IEM_MC_ASSIGN(u32Src, u32Imm);
3206 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3207 IEM_MC_FETCH_MEM_U32(u32Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
3208 IEM_MC_REF_LOCAL(pu32Dst, u32Tmp);
3209 IEM_MC_REF_EFLAGS(pEFlags);
3210 IEM_MC_CALL_VOID_AIMPL_3(pfnAImplU32, pu32Dst, u32Src, pEFlags);
3211 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), u32Tmp);
3212
3213 IEM_MC_ADVANCE_RIP_AND_FINISH();
3214 IEM_MC_END();
3215 }
3216 break;
3217 }
3218
3219 case IEMMODE_64BIT:
3220 {
3221 PFNIEMAIMPLBINU64 const pfnAImplU64 = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_imul_two_u64_eflags);
3222 if (IEM_IS_MODRM_REG_MODE(bRm))
3223 {
3224 /* register operand */
3225 uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
3226 IEM_MC_BEGIN(3, 1, IEM_MC_F_64BIT, 0);
3227 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3228 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
3229 IEM_MC_ARG_CONST(uint64_t, u64Src,/*=*/ (int8_t)u8Imm, 1);
3230 IEM_MC_ARG(uint32_t *, pEFlags, 2);
3231 IEM_MC_LOCAL(uint64_t, u64Tmp);
3232
3233 IEM_MC_FETCH_GREG_U64(u64Tmp, IEM_GET_MODRM_RM(pVCpu, bRm));
3234 IEM_MC_REF_LOCAL(pu64Dst, u64Tmp);
3235 IEM_MC_REF_EFLAGS(pEFlags);
3236 IEM_MC_CALL_VOID_AIMPL_3(pfnAImplU64, pu64Dst, u64Src, pEFlags);
3237 IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), u64Tmp);
3238
3239 IEM_MC_ADVANCE_RIP_AND_FINISH();
3240 IEM_MC_END();
3241 }
3242 else
3243 {
3244 /* memory operand */
3245 IEM_MC_BEGIN(3, 2, IEM_MC_F_64BIT, 0);
3246 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
3247 IEM_MC_ARG(uint64_t, u64Src, 1);
3248 IEM_MC_ARG(uint32_t *, pEFlags, 2);
3249 IEM_MC_LOCAL(uint64_t, u64Tmp);
3250 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
3251
3252 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
3253 uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm); /* Not using IEM_OPCODE_GET_NEXT_S8_SX_U64 to reduce the */
3254 IEM_MC_ASSIGN_U8_SX_U64(u64Src, u8Imm); /* parameter count for the threaded function for this block. */
3255 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3256 IEM_MC_FETCH_MEM_U64(u64Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
3257 IEM_MC_REF_LOCAL(pu64Dst, u64Tmp);
3258 IEM_MC_REF_EFLAGS(pEFlags);
3259 IEM_MC_CALL_VOID_AIMPL_3(pfnAImplU64, pu64Dst, u64Src, pEFlags);
3260 IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), u64Tmp);
3261
3262 IEM_MC_ADVANCE_RIP_AND_FINISH();
3263 IEM_MC_END();
3264 }
3265 break;
3266 }
3267
3268 IEM_NOT_REACHED_DEFAULT_CASE_RET();
3269 }
3270}
3271
3272
3273/**
3274 * @opcode 0x6c
3275 */
3276FNIEMOP_DEF(iemOp_insb_Yb_DX)
3277{
3278 IEMOP_HLP_MIN_186();
3279 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3280 if (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
3281 {
3282 IEMOP_MNEMONIC(rep_insb_Yb_DX, "rep ins Yb,DX");
3283 switch (pVCpu->iem.s.enmEffAddrMode)
3284 {
3285 case IEMMODE_16BIT: IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
3286 iemCImpl_rep_ins_op8_addr16, false);
3287 case IEMMODE_32BIT: IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
3288 iemCImpl_rep_ins_op8_addr32, false);
3289 case IEMMODE_64BIT: IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
3290 iemCImpl_rep_ins_op8_addr64, false);
3291 IEM_NOT_REACHED_DEFAULT_CASE_RET();
3292 }
3293 }
3294 else
3295 {
3296 IEMOP_MNEMONIC(ins_Yb_DX, "ins Yb,DX");
3297 switch (pVCpu->iem.s.enmEffAddrMode)
3298 {
3299 case IEMMODE_16BIT: IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
3300 iemCImpl_ins_op8_addr16, false);
3301 case IEMMODE_32BIT: IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
3302 iemCImpl_ins_op8_addr32, false);
3303 case IEMMODE_64BIT: IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
3304 iemCImpl_ins_op8_addr64, false);
3305 IEM_NOT_REACHED_DEFAULT_CASE_RET();
3306 }
3307 }
3308}
3309
3310
3311/**
3312 * @opcode 0x6d
3313 */
3314FNIEMOP_DEF(iemOp_inswd_Yv_DX)
3315{
3316 IEMOP_HLP_MIN_186();
3317 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3318 if (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_REPZ | IEM_OP_PRF_REPNZ))
3319 {
3320 IEMOP_MNEMONIC(rep_ins_Yv_DX, "rep ins Yv,DX");
3321 switch (pVCpu->iem.s.enmEffOpSize)
3322 {
3323 case IEMMODE_16BIT:
3324 switch (pVCpu->iem.s.enmEffAddrMode)
3325 {
3326 case IEMMODE_16BIT:
3327 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
3328 iemCImpl_rep_ins_op16_addr16, false);
3329 case IEMMODE_32BIT:
3330 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
3331 iemCImpl_rep_ins_op16_addr32, false);
3332 case IEMMODE_64BIT:
3333 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
3334 iemCImpl_rep_ins_op16_addr64, false);
3335 IEM_NOT_REACHED_DEFAULT_CASE_RET();
3336 }
3337 break;
3338 case IEMMODE_64BIT:
3339 case IEMMODE_32BIT:
3340 switch (pVCpu->iem.s.enmEffAddrMode)
3341 {
3342 case IEMMODE_16BIT:
3343 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
3344 iemCImpl_rep_ins_op32_addr16, false);
3345 case IEMMODE_32BIT:
3346 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
3347 iemCImpl_rep_ins_op32_addr32, false);
3348 case IEMMODE_64BIT:
3349 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
3350 iemCImpl_rep_ins_op32_addr64, false);
3351 IEM_NOT_REACHED_DEFAULT_CASE_RET();
3352 }
3353 break;
3354 IEM_NOT_REACHED_DEFAULT_CASE_RET();
3355 }
3356 }
3357 else
3358 {
3359 IEMOP_MNEMONIC(ins_Yv_DX, "ins Yv,DX");
3360 switch (pVCpu->iem.s.enmEffOpSize)
3361 {
3362 case IEMMODE_16BIT:
3363 switch (pVCpu->iem.s.enmEffAddrMode)
3364 {
3365 case IEMMODE_16BIT: IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
3366 iemCImpl_ins_op16_addr16, false);
3367 case IEMMODE_32BIT: IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
3368 iemCImpl_ins_op16_addr32, false);
3369 case IEMMODE_64BIT: IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
3370 iemCImpl_ins_op16_addr64, false);
3371 IEM_NOT_REACHED_DEFAULT_CASE_RET();
3372 }
3373 break;
3374 case IEMMODE_64BIT:
3375 case IEMMODE_32BIT:
3376 switch (pVCpu->iem.s.enmEffAddrMode)
3377 {
3378 case IEMMODE_16BIT: IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
3379 iemCImpl_ins_op32_addr16, false);
3380 case IEMMODE_32BIT: IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
3381 iemCImpl_ins_op32_addr32, false);
3382 case IEMMODE_64BIT: IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
3383 iemCImpl_ins_op32_addr64, false);
3384 IEM_NOT_REACHED_DEFAULT_CASE_RET();
3385 }
3386 break;
3387 IEM_NOT_REACHED_DEFAULT_CASE_RET();
3388 }
3389 }
3390}
3391
3392
3393/**
3394 * @opcode 0x6e
3395 */
3396FNIEMOP_DEF(iemOp_outsb_Yb_DX)
3397{
3398 IEMOP_HLP_MIN_186();
3399 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3400 if (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
3401 {
3402 IEMOP_MNEMONIC(rep_outsb_DX_Yb, "rep outs DX,Yb");
3403 switch (pVCpu->iem.s.enmEffAddrMode)
3404 {
3405 case IEMMODE_16BIT:
3406 IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
3407 iemCImpl_rep_outs_op8_addr16, pVCpu->iem.s.iEffSeg, false);
3408 case IEMMODE_32BIT:
3409 IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
3410 iemCImpl_rep_outs_op8_addr32, pVCpu->iem.s.iEffSeg, false);
3411 case IEMMODE_64BIT:
3412 IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
3413 iemCImpl_rep_outs_op8_addr64, pVCpu->iem.s.iEffSeg, false);
3414 IEM_NOT_REACHED_DEFAULT_CASE_RET();
3415 }
3416 }
3417 else
3418 {
3419 IEMOP_MNEMONIC(outs_DX_Yb, "outs DX,Yb");
3420 switch (pVCpu->iem.s.enmEffAddrMode)
3421 {
3422 case IEMMODE_16BIT:
3423 IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
3424 iemCImpl_outs_op8_addr16, pVCpu->iem.s.iEffSeg, false);
3425 case IEMMODE_32BIT:
3426 IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
3427 iemCImpl_outs_op8_addr32, pVCpu->iem.s.iEffSeg, false);
3428 case IEMMODE_64BIT:
3429 IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
3430 iemCImpl_outs_op8_addr64, pVCpu->iem.s.iEffSeg, false);
3431 IEM_NOT_REACHED_DEFAULT_CASE_RET();
3432 }
3433 }
3434}
3435
3436
3437/**
3438 * @opcode 0x6f
3439 */
3440FNIEMOP_DEF(iemOp_outswd_Yv_DX)
3441{
3442 IEMOP_HLP_MIN_186();
3443 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3444 if (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_REPZ | IEM_OP_PRF_REPNZ))
3445 {
3446 IEMOP_MNEMONIC(rep_outs_DX_Yv, "rep outs DX,Yv");
3447 switch (pVCpu->iem.s.enmEffOpSize)
3448 {
3449 case IEMMODE_16BIT:
3450 switch (pVCpu->iem.s.enmEffAddrMode)
3451 {
3452 case IEMMODE_16BIT:
3453 IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
3454 iemCImpl_rep_outs_op16_addr16, pVCpu->iem.s.iEffSeg, false);
3455 case IEMMODE_32BIT:
3456 IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
3457 iemCImpl_rep_outs_op16_addr32, pVCpu->iem.s.iEffSeg, false);
3458 case IEMMODE_64BIT:
3459 IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
3460 iemCImpl_rep_outs_op16_addr64, pVCpu->iem.s.iEffSeg, false);
3461 IEM_NOT_REACHED_DEFAULT_CASE_RET();
3462 }
3463 break;
3464 case IEMMODE_64BIT:
3465 case IEMMODE_32BIT:
3466 switch (pVCpu->iem.s.enmEffAddrMode)
3467 {
3468 case IEMMODE_16BIT:
3469 IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
3470 iemCImpl_rep_outs_op32_addr16, pVCpu->iem.s.iEffSeg, false);
3471 case IEMMODE_32BIT:
3472 IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
3473 iemCImpl_rep_outs_op32_addr32, pVCpu->iem.s.iEffSeg, false);
3474 case IEMMODE_64BIT:
3475 IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
3476 iemCImpl_rep_outs_op32_addr64, pVCpu->iem.s.iEffSeg, false);
3477 IEM_NOT_REACHED_DEFAULT_CASE_RET();
3478 }
3479 break;
3480 IEM_NOT_REACHED_DEFAULT_CASE_RET();
3481 }
3482 }
3483 else
3484 {
3485 IEMOP_MNEMONIC(outs_DX_Yv, "outs DX,Yv");
3486 switch (pVCpu->iem.s.enmEffOpSize)
3487 {
3488 case IEMMODE_16BIT:
3489 switch (pVCpu->iem.s.enmEffAddrMode)
3490 {
3491 case IEMMODE_16BIT:
3492 IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
3493 iemCImpl_outs_op16_addr16, pVCpu->iem.s.iEffSeg, false);
3494 case IEMMODE_32BIT:
3495 IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
3496 iemCImpl_outs_op16_addr32, pVCpu->iem.s.iEffSeg, false);
3497 case IEMMODE_64BIT:
3498 IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
3499 iemCImpl_outs_op16_addr64, pVCpu->iem.s.iEffSeg, false);
3500 IEM_NOT_REACHED_DEFAULT_CASE_RET();
3501 }
3502 break;
3503 case IEMMODE_64BIT:
3504 case IEMMODE_32BIT:
3505 switch (pVCpu->iem.s.enmEffAddrMode)
3506 {
3507 case IEMMODE_16BIT:
3508 IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
3509 iemCImpl_outs_op32_addr16, pVCpu->iem.s.iEffSeg, false);
3510 case IEMMODE_32BIT:
3511 IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
3512 iemCImpl_outs_op32_addr32, pVCpu->iem.s.iEffSeg, false);
3513 case IEMMODE_64BIT:
3514 IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
3515 iemCImpl_outs_op32_addr64, pVCpu->iem.s.iEffSeg, false);
3516 IEM_NOT_REACHED_DEFAULT_CASE_RET();
3517 }
3518 break;
3519 IEM_NOT_REACHED_DEFAULT_CASE_RET();
3520 }
3521 }
3522}
3523
3524
3525/**
3526 * @opcode 0x70
3527 */
3528FNIEMOP_DEF(iemOp_jo_Jb)
3529{
3530 IEMOP_MNEMONIC(jo_Jb, "jo Jb");
3531 int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
3532 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();
3533
3534 IEM_MC_BEGIN(0, 0, 0, 0);
3535 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3536 IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
3537 IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
3538 } IEM_MC_ELSE() {
3539 IEM_MC_ADVANCE_RIP_AND_FINISH();
3540 } IEM_MC_ENDIF();
3541 IEM_MC_END();
3542}
3543
3544
3545/**
3546 * @opcode 0x71
3547 */
3548FNIEMOP_DEF(iemOp_jno_Jb)
3549{
3550 IEMOP_MNEMONIC(jno_Jb, "jno Jb");
3551 int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
3552 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();
3553
3554 IEM_MC_BEGIN(0, 0, 0, 0);
3555 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3556 IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
3557 IEM_MC_ADVANCE_RIP_AND_FINISH();
3558 } IEM_MC_ELSE() {
3559 IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
3560 } IEM_MC_ENDIF();
3561 IEM_MC_END();
3562}
3563
3564/**
3565 * @opcode 0x72
3566 */
3567FNIEMOP_DEF(iemOp_jc_Jb)
3568{
3569 IEMOP_MNEMONIC(jc_Jb, "jc/jnae Jb");
3570 int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
3571 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();
3572
3573 IEM_MC_BEGIN(0, 0, 0, 0);
3574 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3575 IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
3576 IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
3577 } IEM_MC_ELSE() {
3578 IEM_MC_ADVANCE_RIP_AND_FINISH();
3579 } IEM_MC_ENDIF();
3580 IEM_MC_END();
3581}
3582
3583
3584/**
3585 * @opcode 0x73
3586 */
3587FNIEMOP_DEF(iemOp_jnc_Jb)
3588{
3589 IEMOP_MNEMONIC(jnc_Jb, "jnc/jnb Jb");
3590 int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
3591 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();
3592
3593 IEM_MC_BEGIN(0, 0, 0, 0);
3594 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3595 IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
3596 IEM_MC_ADVANCE_RIP_AND_FINISH();
3597 } IEM_MC_ELSE() {
3598 IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
3599 } IEM_MC_ENDIF();
3600 IEM_MC_END();
3601}
3602
3603
3604/**
3605 * @opcode 0x74
3606 */
3607FNIEMOP_DEF(iemOp_je_Jb)
3608{
3609 IEMOP_MNEMONIC(je_Jb, "je/jz Jb");
3610 int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
3611 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();
3612
3613 IEM_MC_BEGIN(0, 0, 0, 0);
3614 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3615 IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
3616 IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
3617 } IEM_MC_ELSE() {
3618 IEM_MC_ADVANCE_RIP_AND_FINISH();
3619 } IEM_MC_ENDIF();
3620 IEM_MC_END();
3621}
3622
3623
3624/**
3625 * @opcode 0x75
3626 */
3627FNIEMOP_DEF(iemOp_jne_Jb)
3628{
3629 IEMOP_MNEMONIC(jne_Jb, "jne/jnz Jb");
3630 int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
3631 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();
3632
3633 IEM_MC_BEGIN(0, 0, 0, 0);
3634 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3635 IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
3636 IEM_MC_ADVANCE_RIP_AND_FINISH();
3637 } IEM_MC_ELSE() {
3638 IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
3639 } IEM_MC_ENDIF();
3640 IEM_MC_END();
3641}
3642
3643
3644/**
3645 * @opcode 0x76
3646 */
3647FNIEMOP_DEF(iemOp_jbe_Jb)
3648{
3649 IEMOP_MNEMONIC(jbe_Jb, "jbe/jna Jb");
3650 int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
3651 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();
3652
3653 IEM_MC_BEGIN(0, 0, 0, 0);
3654 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3655 IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
3656 IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
3657 } IEM_MC_ELSE() {
3658 IEM_MC_ADVANCE_RIP_AND_FINISH();
3659 } IEM_MC_ENDIF();
3660 IEM_MC_END();
3661}
3662
3663
3664/**
3665 * @opcode 0x77
3666 */
3667FNIEMOP_DEF(iemOp_jnbe_Jb)
3668{
3669 IEMOP_MNEMONIC(ja_Jb, "ja/jnbe Jb");
3670 int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
3671 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();
3672
3673 IEM_MC_BEGIN(0, 0, 0, 0);
3674 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3675 IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
3676 IEM_MC_ADVANCE_RIP_AND_FINISH();
3677 } IEM_MC_ELSE() {
3678 IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
3679 } IEM_MC_ENDIF();
3680 IEM_MC_END();
3681}
3682
3683
3684/**
3685 * @opcode 0x78
3686 */
3687FNIEMOP_DEF(iemOp_js_Jb)
3688{
3689 IEMOP_MNEMONIC(js_Jb, "js Jb");
3690 int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
3691 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();
3692
3693 IEM_MC_BEGIN(0, 0, 0, 0);
3694 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3695 IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
3696 IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
3697 } IEM_MC_ELSE() {
3698 IEM_MC_ADVANCE_RIP_AND_FINISH();
3699 } IEM_MC_ENDIF();
3700 IEM_MC_END();
3701}
3702
3703
3704/**
3705 * @opcode 0x79
3706 */
3707FNIEMOP_DEF(iemOp_jns_Jb)
3708{
3709 IEMOP_MNEMONIC(jns_Jb, "jns Jb");
3710 int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
3711 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();
3712
3713 IEM_MC_BEGIN(0, 0, 0, 0);
3714 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3715 IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
3716 IEM_MC_ADVANCE_RIP_AND_FINISH();
3717 } IEM_MC_ELSE() {
3718 IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
3719 } IEM_MC_ENDIF();
3720 IEM_MC_END();
3721}
3722
3723
3724/**
3725 * @opcode 0x7a
3726 */
3727FNIEMOP_DEF(iemOp_jp_Jb)
3728{
3729 IEMOP_MNEMONIC(jp_Jb, "jp Jb");
3730 int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
3731 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();
3732
3733 IEM_MC_BEGIN(0, 0, 0, 0);
3734 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3735 IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
3736 IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
3737 } IEM_MC_ELSE() {
3738 IEM_MC_ADVANCE_RIP_AND_FINISH();
3739 } IEM_MC_ENDIF();
3740 IEM_MC_END();
3741}
3742
3743
3744/**
3745 * @opcode 0x7b
3746 */
3747FNIEMOP_DEF(iemOp_jnp_Jb)
3748{
3749 IEMOP_MNEMONIC(jnp_Jb, "jnp Jb");
3750 int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
3751 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();
3752
3753 IEM_MC_BEGIN(0, 0, 0, 0);
3754 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3755 IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
3756 IEM_MC_ADVANCE_RIP_AND_FINISH();
3757 } IEM_MC_ELSE() {
3758 IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
3759 } IEM_MC_ENDIF();
3760 IEM_MC_END();
3761}
3762
3763
3764/**
3765 * @opcode 0x7c
3766 */
3767FNIEMOP_DEF(iemOp_jl_Jb)
3768{
3769 IEMOP_MNEMONIC(jl_Jb, "jl/jnge Jb");
3770 int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
3771 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();
3772
3773 IEM_MC_BEGIN(0, 0, 0, 0);
3774 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3775 IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
3776 IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
3777 } IEM_MC_ELSE() {
3778 IEM_MC_ADVANCE_RIP_AND_FINISH();
3779 } IEM_MC_ENDIF();
3780 IEM_MC_END();
3781}
3782
3783
3784/**
3785 * @opcode 0x7d
3786 */
3787FNIEMOP_DEF(iemOp_jnl_Jb)
3788{
3789 IEMOP_MNEMONIC(jge_Jb, "jnl/jge Jb");
3790 int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
3791 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();
3792
3793 IEM_MC_BEGIN(0, 0, 0, 0);
3794 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3795 IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
3796 IEM_MC_ADVANCE_RIP_AND_FINISH();
3797 } IEM_MC_ELSE() {
3798 IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
3799 } IEM_MC_ENDIF();
3800 IEM_MC_END();
3801}
3802
3803
3804/**
3805 * @opcode 0x7e
3806 */
3807FNIEMOP_DEF(iemOp_jle_Jb)
3808{
3809 IEMOP_MNEMONIC(jle_Jb, "jle/jng Jb");
3810 int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
3811 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();
3812
3813 IEM_MC_BEGIN(0, 0, 0, 0);
3814 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3815 IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
3816 IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
3817 } IEM_MC_ELSE() {
3818 IEM_MC_ADVANCE_RIP_AND_FINISH();
3819 } IEM_MC_ENDIF();
3820 IEM_MC_END();
3821}
3822
3823
3824/**
3825 * @opcode 0x7f
3826 */
3827FNIEMOP_DEF(iemOp_jnle_Jb)
3828{
3829 IEMOP_MNEMONIC(jg_Jb, "jnle/jg Jb");
3830 int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
3831 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();
3832
3833 IEM_MC_BEGIN(0, 0, 0, 0);
3834 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3835 IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
3836 IEM_MC_ADVANCE_RIP_AND_FINISH();
3837 } IEM_MC_ELSE() {
3838 IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
3839 } IEM_MC_ENDIF();
3840 IEM_MC_END();
3841}
3842
3843
3844/**
3845 * Body for group 1 instruction (binary) w/ byte imm operand, dispatched via
3846 * iemOp_Grp1_Eb_Ib_80.
3847 */
3848#define IEMOP_BODY_BINARY_Eb_Ib_RW(a_fnNormalU8) \
3849 if (IEM_IS_MODRM_REG_MODE(bRm)) \
3850 { \
3851 /* register target */ \
3852 uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm); \
3853 IEM_MC_BEGIN(3, 0, 0, 0); \
3854 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
3855 IEM_MC_ARG(uint8_t *, pu8Dst, 0); \
3856 IEM_MC_ARG_CONST(uint8_t, u8Src, /*=*/ u8Imm, 1); \
3857 IEM_MC_ARG(uint32_t *, pEFlags, 2); \
3858 \
3859 IEM_MC_REF_GREG_U8(pu8Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
3860 IEM_MC_REF_EFLAGS(pEFlags); \
3861 IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU8, pu8Dst, u8Src, pEFlags); \
3862 \
3863 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
3864 IEM_MC_END(); \
3865 } \
3866 else \
3867 { \
3868 /* memory target */ \
3869 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK)) \
3870 { \
3871 IEM_MC_BEGIN(3, 3, 0, 0); \
3872 IEM_MC_ARG(uint8_t *, pu8Dst, 0); \
3873 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2); \
3874 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
3875 IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
3876 \
3877 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1); \
3878 uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm); \
3879 IEM_MC_ARG_CONST(uint8_t, u8Src, /*=*/ u8Imm, 1); \
3880 IEMOP_HLP_DONE_DECODING(); \
3881 \
3882 IEM_MC_MEM_MAP_U8_RW(pu8Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
3883 IEM_MC_FETCH_EFLAGS(EFlags); \
3884 IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU8, pu8Dst, u8Src, pEFlags); \
3885 \
3886 IEM_MC_MEM_COMMIT_AND_UNMAP_RW(pu8Dst, bUnmapInfo); \
3887 IEM_MC_COMMIT_EFLAGS(EFlags); \
3888 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
3889 IEM_MC_END(); \
3890 } \
3891 else \
3892 { \
3893 (void)0
3894
3895#define IEMOP_BODY_BINARY_Eb_Ib_LOCKED(a_fnLockedU8) \
3896 IEM_MC_BEGIN(3, 3, 0, 0); \
3897 IEM_MC_ARG(uint8_t *, pu8Dst, 0); \
3898 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2); \
3899 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
3900 IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
3901 \
3902 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1); \
3903 uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm); \
3904 IEM_MC_ARG_CONST(uint8_t, u8Src, /*=*/ u8Imm, 1); \
3905 IEMOP_HLP_DONE_DECODING(); \
3906 \
3907 IEM_MC_MEM_MAP_U8_RW(pu8Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
3908 IEM_MC_FETCH_EFLAGS(EFlags); \
3909 IEM_MC_CALL_VOID_AIMPL_3(a_fnLockedU8, pu8Dst, u8Src, pEFlags); \
3910 \
3911 IEM_MC_MEM_COMMIT_AND_UNMAP_RW(pu8Dst, bUnmapInfo); \
3912 IEM_MC_COMMIT_EFLAGS(EFlags); \
3913 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
3914 IEM_MC_END(); \
3915 } \
3916 } \
3917 (void)0
3918
/**
 * Read-only variant of IEMOP_BODY_BINARY_Eb_Ib_RW for workers that only set
 * EFLAGS and never write the destination (i.e. cmp).  Like the RW macro it
 * is deliberately unbalanced and MUST be followed by
 * IEMOP_BODY_BINARY_Eb_Ib_NO_LOCK, which raises \#UD for a LOCK prefix.
 *
 * @param   a_fnNormalU8    The 8-bit assembly worker (e.g. iemAImpl_cmp_u8).
 */
#define IEMOP_BODY_BINARY_Eb_Ib_RO(a_fnNormalU8) \
    if (IEM_IS_MODRM_REG_MODE(bRm)) \
    { \
        /* register target */ \
        uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm); \
        IEM_MC_BEGIN(3, 0, 0, 0); \
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
        IEM_MC_ARG(uint8_t *, pu8Dst, 0); \
        IEM_MC_ARG_CONST(uint8_t, u8Src, /*=*/ u8Imm, 1); \
        IEM_MC_ARG(uint32_t *, pEFlags, 2); \
        \
        IEM_MC_REF_GREG_U8(pu8Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
        IEM_MC_REF_EFLAGS(pEFlags); \
        IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU8, pu8Dst, u8Src, pEFlags); \
        \
        IEM_MC_ADVANCE_RIP_AND_FINISH(); \
        IEM_MC_END(); \
    } \
    else \
    { \
        /* memory target */ \
        if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK)) \
        { \
            IEM_MC_BEGIN(3, 3, 0, 0); \
            IEM_MC_ARG(uint8_t const *, pu8Dst, 0); \
            IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2); \
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
            IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
            \
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1); \
            uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm); \
            IEM_MC_ARG_CONST(uint8_t, u8Src, /*=*/ u8Imm, 1); \
            IEMOP_HLP_DONE_DECODING(); \
            \
            IEM_MC_MEM_MAP_U8_RO(pu8Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
            IEM_MC_FETCH_EFLAGS(EFlags); \
            IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU8, pu8Dst, u8Src, pEFlags); \
            \
            IEM_MC_MEM_COMMIT_AND_UNMAP_RO(pu8Dst, bUnmapInfo); \
            IEM_MC_COMMIT_EFLAGS(EFlags); \
            IEM_MC_ADVANCE_RIP_AND_FINISH(); \
            IEM_MC_END(); \
        } \
        else \
        { \
            (void)0

/**
 * Closing counterpart of IEMOP_BODY_BINARY_Eb_Ib_RO: a LOCK prefix on a
 * read-only operation is invalid, so raise \#UD.
 */
#define IEMOP_BODY_BINARY_Eb_Ib_NO_LOCK() \
            IEMOP_HLP_DONE_DECODING(); \
            IEMOP_RAISE_INVALID_LOCK_PREFIX_RET(); \
        } \
    } \
    (void)0
3972
3973
3974
3975/**
3976 * @opmaps grp1_80,grp1_83
3977 * @opcode /0
3978 */
3979FNIEMOP_DEF_1(iemOp_Grp1_add_Eb_Ib, uint8_t, bRm)
3980{
3981 IEMOP_MNEMONIC(add_Eb_Ib, "add Eb,Ib");
3982 IEMOP_BODY_BINARY_Eb_Ib_RW( iemAImpl_add_u8);
3983 IEMOP_BODY_BINARY_Eb_Ib_LOCKED(iemAImpl_add_u8_locked);
3984}
3985
3986
3987/**
3988 * @opmaps grp1_80,grp1_83
3989 * @opcode /1
3990 */
3991FNIEMOP_DEF_1(iemOp_Grp1_or_Eb_Ib, uint8_t, bRm)
3992{
3993 IEMOP_MNEMONIC(or_Eb_Ib, "or Eb,Ib");
3994 IEMOP_BODY_BINARY_Eb_Ib_RW( iemAImpl_or_u8);
3995 IEMOP_BODY_BINARY_Eb_Ib_LOCKED(iemAImpl_or_u8_locked);
3996}
3997
3998
3999/**
4000 * @opmaps grp1_80,grp1_83
4001 * @opcode /2
4002 */
4003FNIEMOP_DEF_1(iemOp_Grp1_adc_Eb_Ib, uint8_t, bRm)
4004{
4005 IEMOP_MNEMONIC(adc_Eb_Ib, "adc Eb,Ib");
4006 IEMOP_BODY_BINARY_Eb_Ib_RW( iemAImpl_adc_u8);
4007 IEMOP_BODY_BINARY_Eb_Ib_LOCKED(iemAImpl_adc_u8_locked);
4008}
4009
4010
4011/**
4012 * @opmaps grp1_80,grp1_83
4013 * @opcode /3
4014 */
4015FNIEMOP_DEF_1(iemOp_Grp1_sbb_Eb_Ib, uint8_t, bRm)
4016{
4017 IEMOP_MNEMONIC(sbb_Eb_Ib, "sbb Eb,Ib");
4018 IEMOP_BODY_BINARY_Eb_Ib_RW( iemAImpl_sbb_u8);
4019 IEMOP_BODY_BINARY_Eb_Ib_LOCKED(iemAImpl_sbb_u8_locked);
4020}
4021
4022
4023/**
4024 * @opmaps grp1_80,grp1_83
4025 * @opcode /4
4026 */
4027FNIEMOP_DEF_1(iemOp_Grp1_and_Eb_Ib, uint8_t, bRm)
4028{
4029 IEMOP_MNEMONIC(and_Eb_Ib, "and Eb,Ib");
4030 IEMOP_BODY_BINARY_Eb_Ib_RW( iemAImpl_and_u8);
4031 IEMOP_BODY_BINARY_Eb_Ib_LOCKED(iemAImpl_and_u8_locked);
4032}
4033
4034
4035/**
4036 * @opmaps grp1_80,grp1_83
4037 * @opcode /5
4038 */
FNIEMOP_DEF_1(iemOp_Grp1_sub_Eb_Ib, uint8_t, bRm)
{
    IEMOP_MNEMONIC(sub_Eb_Ib, "sub Eb,Ib");
    /* Paired bodies: _RW opens, _LOCKED closes (LOCKed memory variant). */
    IEMOP_BODY_BINARY_Eb_Ib_RW(    iemAImpl_sub_u8);
    IEMOP_BODY_BINARY_Eb_Ib_LOCKED(iemAImpl_sub_u8_locked);
}
4045
4046
4047/**
4048 * @opmaps grp1_80,grp1_83
4049 * @opcode /6
4050 */
FNIEMOP_DEF_1(iemOp_Grp1_xor_Eb_Ib, uint8_t, bRm)
{
    IEMOP_MNEMONIC(xor_Eb_Ib, "xor Eb,Ib");
    /* Paired bodies: _RW opens, _LOCKED closes (LOCKed memory variant). */
    IEMOP_BODY_BINARY_Eb_Ib_RW(    iemAImpl_xor_u8);
    IEMOP_BODY_BINARY_Eb_Ib_LOCKED(iemAImpl_xor_u8_locked);
}
4057
4058
4059/**
4060 * @opmaps grp1_80,grp1_83
4061 * @opcode /7
4062 */
FNIEMOP_DEF_1(iemOp_Grp1_cmp_Eb_Ib, uint8_t, bRm)
{
    IEMOP_MNEMONIC(cmp_Eb_Ib, "cmp Eb,Ib");
    /* CMP only reads the destination, so the read-only body is used and the
       _NO_LOCK tail rejects any LOCK prefix on the memory form. */
    IEMOP_BODY_BINARY_Eb_Ib_RO(iemAImpl_cmp_u8);
    IEMOP_BODY_BINARY_Eb_Ib_NO_LOCK();
}
4069
4070
4071/**
4072 * @opcode 0x80
4073 */
FNIEMOP_DEF(iemOp_Grp1_Eb_Ib_80)
{
    /* Group 1: the ModR/M reg field selects which operation to perform. */
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    switch (IEM_GET_MODRM_REG_8(bRm))
    {
        case 0: return FNIEMOP_CALL_1(iemOp_Grp1_add_Eb_Ib, bRm);
        case 1: return FNIEMOP_CALL_1(iemOp_Grp1_or_Eb_Ib,  bRm);
        case 2: return FNIEMOP_CALL_1(iemOp_Grp1_adc_Eb_Ib, bRm);
        case 3: return FNIEMOP_CALL_1(iemOp_Grp1_sbb_Eb_Ib, bRm);
        case 4: return FNIEMOP_CALL_1(iemOp_Grp1_and_Eb_Ib, bRm);
        case 5: return FNIEMOP_CALL_1(iemOp_Grp1_sub_Eb_Ib, bRm);
        case 6: return FNIEMOP_CALL_1(iemOp_Grp1_xor_Eb_Ib, bRm);
        case 7: return FNIEMOP_CALL_1(iemOp_Grp1_cmp_Eb_Ib, bRm);
        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
4090
4091
/**
 * Body for a group 1 binary operator with a word/dword immediate (Ev,Iz),
 * read-write destination.
 *
 * Emits the register-target and non-LOCKed memory-target variants, then
 * leaves an 'else' branch open which the matching
 * IEMOP_BODY_BINARY_Ev_Iz_LOCKED invocation must complete.  In 64-bit mode
 * the immediate is a sign-extended imm32 (IEM_OPCODE_GET_NEXT_S32_SX_U64);
 * the 32-bit register path clears the high half of the 64-bit GPR after the
 * operation (IEM_MC_CLEAR_HIGH_GREG_U64).  The third argument to
 * IEM_MC_CALC_RM_EFF_ADDR (2/4/4) is the number of immediate bytes still to
 * be fetched after the ModR/M operand.
 */
#define IEMOP_BODY_BINARY_Ev_Iz_RW(a_fnNormalU16, a_fnNormalU32, a_fnNormalU64) \
    if (IEM_IS_MODRM_REG_MODE(bRm)) \
    { \
        /* register target */ \
        switch (pVCpu->iem.s.enmEffOpSize) \
        { \
            case IEMMODE_16BIT: \
            { \
                uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm); \
                IEM_MC_BEGIN(3, 0, 0, 0); \
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
                IEM_MC_ARG(uint16_t *,      pu16Dst,               0); \
                IEM_MC_ARG_CONST(uint16_t,  u16Src, /*=*/ u16Imm,  1); \
                IEM_MC_ARG(uint32_t *,      pEFlags,               2); \
                \
                IEM_MC_REF_GREG_U16(pu16Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
                IEM_MC_REF_EFLAGS(pEFlags); \
                IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU16, pu16Dst, u16Src, pEFlags); \
                \
                IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                IEM_MC_END(); \
                break; \
            } \
            \
            case IEMMODE_32BIT: \
            { \
                uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm); \
                IEM_MC_BEGIN(3, 0, IEM_MC_F_MIN_386, 0); \
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
                IEM_MC_ARG(uint32_t *,      pu32Dst,               0); \
                IEM_MC_ARG_CONST(uint32_t,  u32Src, /*=*/ u32Imm,  1); \
                IEM_MC_ARG(uint32_t *,      pEFlags,               2); \
                \
                IEM_MC_REF_GREG_U32(pu32Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
                IEM_MC_REF_EFLAGS(pEFlags); \
                IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU32, pu32Dst, u32Src, pEFlags); \
                IEM_MC_CLEAR_HIGH_GREG_U64(IEM_GET_MODRM_RM(pVCpu, bRm)); \
                \
                IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                IEM_MC_END(); \
                break; \
            } \
            \
            case IEMMODE_64BIT: \
            { \
                uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm); \
                IEM_MC_BEGIN(3, 0, IEM_MC_F_64BIT, 0); \
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
                IEM_MC_ARG(uint64_t *,      pu64Dst,               0); \
                IEM_MC_ARG_CONST(uint64_t,  u64Src, /*=*/ u64Imm,  1); \
                IEM_MC_ARG(uint32_t *,      pEFlags,               2); \
                \
                IEM_MC_REF_GREG_U64(pu64Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
                IEM_MC_REF_EFLAGS(pEFlags); \
                IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU64, pu64Dst, u64Src, pEFlags); \
                \
                IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                IEM_MC_END(); \
                break; \
            } \
            \
            IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
        } \
    } \
    else \
    { \
        /* memory target */ \
        if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK)) \
        { \
            switch (pVCpu->iem.s.enmEffOpSize) \
            { \
                case IEMMODE_16BIT: \
                { \
                    IEM_MC_BEGIN(3, 3, 0, 0); \
                    IEM_MC_ARG(uint16_t *,      pu16Dst,          0); \
                    IEM_MC_ARG(uint16_t,        u16Src,           1); \
                    IEM_MC_ARG_LOCAL_EFLAGS(    pEFlags, EFlags,  2); \
                    IEM_MC_LOCAL(RTGCPTR,       GCPtrEffDst); \
                    IEM_MC_LOCAL(uint8_t,       bUnmapInfo); \
                    \
                    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 2); \
                    uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm); \
                    IEM_MC_ASSIGN(u16Src, u16Imm); \
                    IEMOP_HLP_DONE_DECODING(); \
                    IEM_MC_MEM_MAP_U16_RW(pu16Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
                    IEM_MC_FETCH_EFLAGS(EFlags); \
                    IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU16, pu16Dst, u16Src, pEFlags); \
                    \
                    IEM_MC_MEM_COMMIT_AND_UNMAP_RW(pu16Dst, bUnmapInfo); \
                    IEM_MC_COMMIT_EFLAGS(EFlags); \
                    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                    IEM_MC_END(); \
                    break; \
                } \
                \
                case IEMMODE_32BIT: \
                { \
                    IEM_MC_BEGIN(3, 3, IEM_MC_F_MIN_386, 0); \
                    IEM_MC_ARG(uint32_t *,      pu32Dst,          0); \
                    IEM_MC_ARG(uint32_t,        u32Src,           1); \
                    IEM_MC_ARG_LOCAL_EFLAGS(    pEFlags, EFlags,  2); \
                    IEM_MC_LOCAL(RTGCPTR,       GCPtrEffDst); \
                    IEM_MC_LOCAL(uint8_t,       bUnmapInfo); \
                    \
                    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 4); \
                    uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm); \
                    IEM_MC_ASSIGN(u32Src, u32Imm); \
                    IEMOP_HLP_DONE_DECODING(); \
                    IEM_MC_MEM_MAP_U32_RW(pu32Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
                    IEM_MC_FETCH_EFLAGS(EFlags); \
                    IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU32, pu32Dst, u32Src, pEFlags); \
                    \
                    IEM_MC_MEM_COMMIT_AND_UNMAP_RW(pu32Dst, bUnmapInfo); \
                    IEM_MC_COMMIT_EFLAGS(EFlags); \
                    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                    IEM_MC_END(); \
                    break; \
                } \
                \
                case IEMMODE_64BIT: \
                { \
                    IEM_MC_BEGIN(3, 3, IEM_MC_F_64BIT, 0); \
                    IEM_MC_ARG(uint64_t *,      pu64Dst,          0); \
                    IEM_MC_ARG(uint64_t,        u64Src,           1); \
                    IEM_MC_ARG_LOCAL_EFLAGS(    pEFlags, EFlags,  2); \
                    IEM_MC_LOCAL(RTGCPTR,       GCPtrEffDst); \
                    IEM_MC_LOCAL(uint8_t,       bUnmapInfo); \
                    \
                    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 4); \
                    uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm); \
                    IEMOP_HLP_DONE_DECODING(); \
                    IEM_MC_ASSIGN(u64Src, u64Imm); \
                    IEM_MC_MEM_MAP_U64_RW(pu64Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
                    IEM_MC_FETCH_EFLAGS(EFlags); \
                    IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU64, pu64Dst, u64Src, pEFlags); \
                    \
                    IEM_MC_MEM_COMMIT_AND_UNMAP_RW(pu64Dst, bUnmapInfo); \
                    IEM_MC_COMMIT_EFLAGS(EFlags); \
                    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                    IEM_MC_END(); \
                    break; \
                } \
                \
                IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
            } \
        } \
        else \
        { \
            (void)0
/* LOCKed memory-target continuation: completes the 'else' branch left open
   by IEMOP_BODY_BINARY_Ev_Iz_RW, using the atomic (locked) workers.
   This must be a separate macro due to parsing restrictions in
   IEMAllInstPython.py. */
#define IEMOP_BODY_BINARY_Ev_Iz_LOCKED(a_fnLockedU16, a_fnLockedU32, a_fnLockedU64) \
            switch (pVCpu->iem.s.enmEffOpSize) \
            { \
                case IEMMODE_16BIT: \
                { \
                    IEM_MC_BEGIN(3, 3, 0, 0); \
                    IEM_MC_ARG(uint16_t *,      pu16Dst,          0); \
                    IEM_MC_ARG(uint16_t,        u16Src,           1); \
                    IEM_MC_ARG_LOCAL_EFLAGS(    pEFlags, EFlags,  2); \
                    IEM_MC_LOCAL(RTGCPTR,       GCPtrEffDst); \
                    IEM_MC_LOCAL(uint8_t,       bUnmapInfo); \
                    \
                    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 2); \
                    uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm); \
                    IEM_MC_ASSIGN(u16Src, u16Imm); \
                    IEMOP_HLP_DONE_DECODING(); \
                    IEM_MC_MEM_MAP_U16_RW(pu16Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
                    IEM_MC_FETCH_EFLAGS(EFlags); \
                    IEM_MC_CALL_VOID_AIMPL_3(a_fnLockedU16, pu16Dst, u16Src, pEFlags); \
                    \
                    IEM_MC_MEM_COMMIT_AND_UNMAP_RW(pu16Dst, bUnmapInfo); \
                    IEM_MC_COMMIT_EFLAGS(EFlags); \
                    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                    IEM_MC_END(); \
                    break; \
                } \
                \
                case IEMMODE_32BIT: \
                { \
                    IEM_MC_BEGIN(3, 3, IEM_MC_F_MIN_386, 0); \
                    IEM_MC_ARG(uint32_t *,      pu32Dst,          0); \
                    IEM_MC_ARG(uint32_t,        u32Src,           1); \
                    IEM_MC_ARG_LOCAL_EFLAGS(    pEFlags, EFlags,  2); \
                    IEM_MC_LOCAL(RTGCPTR,       GCPtrEffDst); \
                    IEM_MC_LOCAL(uint8_t,       bUnmapInfo); \
                    \
                    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 4); \
                    uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm); \
                    IEM_MC_ASSIGN(u32Src, u32Imm); \
                    IEMOP_HLP_DONE_DECODING(); \
                    IEM_MC_MEM_MAP_U32_RW(pu32Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
                    IEM_MC_FETCH_EFLAGS(EFlags); \
                    IEM_MC_CALL_VOID_AIMPL_3(a_fnLockedU32, pu32Dst, u32Src, pEFlags); \
                    \
                    IEM_MC_MEM_COMMIT_AND_UNMAP_RW(pu32Dst, bUnmapInfo); \
                    IEM_MC_COMMIT_EFLAGS(EFlags); \
                    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                    IEM_MC_END(); \
                    break; \
                } \
                \
                case IEMMODE_64BIT: \
                { \
                    IEM_MC_BEGIN(3, 3, IEM_MC_F_64BIT, 0); \
                    IEM_MC_ARG(uint64_t *,      pu64Dst,          0); \
                    IEM_MC_ARG(uint64_t,        u64Src,           1); \
                    IEM_MC_ARG_LOCAL_EFLAGS(    pEFlags, EFlags,  2); \
                    IEM_MC_LOCAL(RTGCPTR,       GCPtrEffDst); \
                    IEM_MC_LOCAL(uint8_t,       bUnmapInfo); \
                    \
                    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 4); \
                    uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm); \
                    IEMOP_HLP_DONE_DECODING(); \
                    IEM_MC_ASSIGN(u64Src, u64Imm); \
                    IEM_MC_MEM_MAP_U64_RW(pu64Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
                    IEM_MC_FETCH_EFLAGS(EFlags); \
                    IEM_MC_CALL_VOID_AIMPL_3(a_fnLockedU64, pu64Dst, u64Src, pEFlags); \
                    \
                    IEM_MC_MEM_COMMIT_AND_UNMAP_RW(pu64Dst, bUnmapInfo); \
                    IEM_MC_COMMIT_EFLAGS(EFlags); \
                    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                    IEM_MC_END(); \
                    break; \
                } \
                \
                IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
            } \
        } \
    } \
    (void)0
4325
/* Read-only version of the Ev,Iz body (for CMP): the destination is only
   read, so the 32-bit register path does not clear the high GPR half, the
   memory operand is mapped read-only, and a LOCK prefix is rejected in the
   final 'else'.  Self-contained -- no _LOCKED continuation is needed. */
#define IEMOP_BODY_BINARY_Ev_Iz_RO(a_fnNormalU16, a_fnNormalU32, a_fnNormalU64) \
    if (IEM_IS_MODRM_REG_MODE(bRm)) \
    { \
        /* register target */ \
        switch (pVCpu->iem.s.enmEffOpSize) \
        { \
            case IEMMODE_16BIT: \
            { \
                uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm); \
                IEM_MC_BEGIN(3, 0, 0, 0); \
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
                IEM_MC_ARG(uint16_t *,      pu16Dst,               0); \
                IEM_MC_ARG_CONST(uint16_t,  u16Src, /*=*/ u16Imm,  1); \
                IEM_MC_ARG(uint32_t *,      pEFlags,               2); \
                \
                IEM_MC_REF_GREG_U16(pu16Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
                IEM_MC_REF_EFLAGS(pEFlags); \
                IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU16, pu16Dst, u16Src, pEFlags); \
                \
                IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                IEM_MC_END(); \
                break; \
            } \
            \
            case IEMMODE_32BIT: \
            { \
                uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm); \
                IEM_MC_BEGIN(3, 0, IEM_MC_F_MIN_386, 0); \
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
                IEM_MC_ARG(uint32_t *,      pu32Dst,               0); \
                IEM_MC_ARG_CONST(uint32_t,  u32Src, /*=*/ u32Imm,  1); \
                IEM_MC_ARG(uint32_t *,      pEFlags,               2); \
                \
                IEM_MC_REF_GREG_U32(pu32Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
                IEM_MC_REF_EFLAGS(pEFlags); \
                IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU32, pu32Dst, u32Src, pEFlags); \
                \
                IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                IEM_MC_END(); \
                break; \
            } \
            \
            case IEMMODE_64BIT: \
            { \
                uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm); \
                IEM_MC_BEGIN(3, 0, IEM_MC_F_64BIT, 0); \
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
                IEM_MC_ARG(uint64_t *,      pu64Dst,               0); \
                IEM_MC_ARG_CONST(uint64_t,  u64Src, /*=*/ u64Imm,  1); \
                IEM_MC_ARG(uint32_t *,      pEFlags,               2); \
                \
                IEM_MC_REF_GREG_U64(pu64Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
                IEM_MC_REF_EFLAGS(pEFlags); \
                IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU64, pu64Dst, u64Src, pEFlags); \
                \
                IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                IEM_MC_END(); \
                break; \
            } \
            \
            IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
        } \
    } \
    else \
    { \
        /* memory target */ \
        if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK)) \
        { \
            switch (pVCpu->iem.s.enmEffOpSize) \
            { \
                case IEMMODE_16BIT: \
                { \
                    IEM_MC_BEGIN(3, 3, 0, 0); \
                    IEM_MC_ARG(uint16_t const *, pu16Dst,          0); \
                    IEM_MC_ARG(uint16_t,         u16Src,           1); \
                    IEM_MC_ARG_LOCAL_EFLAGS(     pEFlags, EFlags,  2); \
                    IEM_MC_LOCAL(RTGCPTR,        GCPtrEffDst); \
                    IEM_MC_LOCAL(uint8_t,        bUnmapInfo); \
                    \
                    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 2); \
                    uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm); \
                    IEM_MC_ASSIGN(u16Src, u16Imm); \
                    IEMOP_HLP_DONE_DECODING(); \
                    IEM_MC_MEM_MAP_U16_RO(pu16Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
                    IEM_MC_FETCH_EFLAGS(EFlags); \
                    IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU16, pu16Dst, u16Src, pEFlags); \
                    \
                    IEM_MC_MEM_COMMIT_AND_UNMAP_RO(pu16Dst, bUnmapInfo); \
                    IEM_MC_COMMIT_EFLAGS(EFlags); \
                    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                    IEM_MC_END(); \
                    break; \
                } \
                \
                case IEMMODE_32BIT: \
                { \
                    IEM_MC_BEGIN(3, 3, IEM_MC_F_MIN_386, 0); \
                    IEM_MC_ARG(uint32_t const *, pu32Dst,          0); \
                    IEM_MC_ARG(uint32_t,         u32Src,           1); \
                    IEM_MC_ARG_LOCAL_EFLAGS(     pEFlags, EFlags,  2); \
                    IEM_MC_LOCAL(RTGCPTR,        GCPtrEffDst); \
                    IEM_MC_LOCAL(uint8_t,        bUnmapInfo); \
                    \
                    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 4); \
                    uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm); \
                    IEM_MC_ASSIGN(u32Src, u32Imm); \
                    IEMOP_HLP_DONE_DECODING(); \
                    IEM_MC_MEM_MAP_U32_RO(pu32Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
                    IEM_MC_FETCH_EFLAGS(EFlags); \
                    IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU32, pu32Dst, u32Src, pEFlags); \
                    \
                    IEM_MC_MEM_COMMIT_AND_UNMAP_RO(pu32Dst, bUnmapInfo); \
                    IEM_MC_COMMIT_EFLAGS(EFlags); \
                    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                    IEM_MC_END(); \
                    break; \
                } \
                \
                case IEMMODE_64BIT: \
                { \
                    IEM_MC_BEGIN(3, 3, IEM_MC_F_64BIT, 0); \
                    IEM_MC_ARG(uint64_t const *, pu64Dst,          0); \
                    IEM_MC_ARG(uint64_t,         u64Src,           1); \
                    IEM_MC_ARG_LOCAL_EFLAGS(     pEFlags, EFlags,  2); \
                    IEM_MC_LOCAL(RTGCPTR,        GCPtrEffDst); \
                    IEM_MC_LOCAL(uint8_t,        bUnmapInfo); \
                    \
                    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 4); \
                    uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm); \
                    IEMOP_HLP_DONE_DECODING(); \
                    IEM_MC_ASSIGN(u64Src, u64Imm); \
                    IEM_MC_MEM_MAP_U64_RO(pu64Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
                    IEM_MC_FETCH_EFLAGS(EFlags); \
                    IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU64, pu64Dst, u64Src, pEFlags); \
                    \
                    IEM_MC_MEM_COMMIT_AND_UNMAP_RO(pu64Dst, bUnmapInfo); \
                    IEM_MC_COMMIT_EFLAGS(EFlags); \
                    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                    IEM_MC_END(); \
                    break; \
                } \
                \
                IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
            } \
        } \
        else \
        { \
            IEMOP_HLP_DONE_DECODING(); \
            IEMOP_RAISE_INVALID_LOCK_PREFIX_RET(); \
        } \
    } \
    (void)0
4479
4480
4481/**
4482 * @opmaps grp1_81
4483 * @opcode /0
4484 */
FNIEMOP_DEF_1(iemOp_Grp1_add_Ev_Iz, uint8_t, bRm)
{
    IEMOP_MNEMONIC(add_Ev_Iz, "add Ev,Iz");
    /* _RW emits register + plain memory variants and opens an 'else' which
       _LOCKED completes with the LOCK-prefixed memory variants. */
    IEMOP_BODY_BINARY_Ev_Iz_RW(    iemAImpl_add_u16,        iemAImpl_add_u32,        iemAImpl_add_u64);
    IEMOP_BODY_BINARY_Ev_Iz_LOCKED(iemAImpl_add_u16_locked, iemAImpl_add_u32_locked, iemAImpl_add_u64_locked);
}
4491
4492
4493/**
4494 * @opmaps grp1_81
4495 * @opcode /1
4496 */
FNIEMOP_DEF_1(iemOp_Grp1_or_Ev_Iz, uint8_t, bRm)
{
    IEMOP_MNEMONIC(or_Ev_Iz, "or Ev,Iz");
    /* Paired bodies: _RW opens, _LOCKED closes (LOCKed memory variants). */
    IEMOP_BODY_BINARY_Ev_Iz_RW(    iemAImpl_or_u16,        iemAImpl_or_u32,        iemAImpl_or_u64);
    IEMOP_BODY_BINARY_Ev_Iz_LOCKED(iemAImpl_or_u16_locked, iemAImpl_or_u32_locked, iemAImpl_or_u64_locked);
}
4503
4504
4505/**
4506 * @opmaps grp1_81
4507 * @opcode /2
4508 */
FNIEMOP_DEF_1(iemOp_Grp1_adc_Ev_Iz, uint8_t, bRm)
{
    IEMOP_MNEMONIC(adc_Ev_Iz, "adc Ev,Iz");
    /* Paired bodies: _RW opens, _LOCKED closes (LOCKed memory variants). */
    IEMOP_BODY_BINARY_Ev_Iz_RW(    iemAImpl_adc_u16,        iemAImpl_adc_u32,        iemAImpl_adc_u64);
    IEMOP_BODY_BINARY_Ev_Iz_LOCKED(iemAImpl_adc_u16_locked, iemAImpl_adc_u32_locked, iemAImpl_adc_u64_locked);
}
4515
4516
4517/**
4518 * @opmaps grp1_81
4519 * @opcode /3
4520 */
FNIEMOP_DEF_1(iemOp_Grp1_sbb_Ev_Iz, uint8_t, bRm)
{
    IEMOP_MNEMONIC(sbb_Ev_Iz, "sbb Ev,Iz");
    /* Paired bodies: _RW opens, _LOCKED closes (LOCKed memory variants). */
    IEMOP_BODY_BINARY_Ev_Iz_RW(    iemAImpl_sbb_u16,        iemAImpl_sbb_u32,        iemAImpl_sbb_u64);
    IEMOP_BODY_BINARY_Ev_Iz_LOCKED(iemAImpl_sbb_u16_locked, iemAImpl_sbb_u32_locked, iemAImpl_sbb_u64_locked);
}
4527
4528
4529/**
4530 * @opmaps grp1_81
4531 * @opcode /4
4532 */
FNIEMOP_DEF_1(iemOp_Grp1_and_Ev_Iz, uint8_t, bRm)
{
    IEMOP_MNEMONIC(and_Ev_Iz, "and Ev,Iz");
    /* Paired bodies: _RW opens, _LOCKED closes (LOCKed memory variants). */
    IEMOP_BODY_BINARY_Ev_Iz_RW(    iemAImpl_and_u16,        iemAImpl_and_u32,        iemAImpl_and_u64);
    IEMOP_BODY_BINARY_Ev_Iz_LOCKED(iemAImpl_and_u16_locked, iemAImpl_and_u32_locked, iemAImpl_and_u64_locked);
}
4539
4540
4541/**
4542 * @opmaps grp1_81
4543 * @opcode /5
4544 */
FNIEMOP_DEF_1(iemOp_Grp1_sub_Ev_Iz, uint8_t, bRm)
{
    IEMOP_MNEMONIC(sub_Ev_Iz, "sub Ev,Iz");
    /* Paired bodies: _RW opens, _LOCKED closes (LOCKed memory variants). */
    IEMOP_BODY_BINARY_Ev_Iz_RW(    iemAImpl_sub_u16,        iemAImpl_sub_u32,        iemAImpl_sub_u64);
    IEMOP_BODY_BINARY_Ev_Iz_LOCKED(iemAImpl_sub_u16_locked, iemAImpl_sub_u32_locked, iemAImpl_sub_u64_locked);
}
4551
4552
4553/**
4554 * @opmaps grp1_81
4555 * @opcode /6
4556 */
FNIEMOP_DEF_1(iemOp_Grp1_xor_Ev_Iz, uint8_t, bRm)
{
    IEMOP_MNEMONIC(xor_Ev_Iz, "xor Ev,Iz");
    /* Paired bodies: _RW opens, _LOCKED closes (LOCKed memory variants). */
    IEMOP_BODY_BINARY_Ev_Iz_RW(    iemAImpl_xor_u16,        iemAImpl_xor_u32,        iemAImpl_xor_u64);
    IEMOP_BODY_BINARY_Ev_Iz_LOCKED(iemAImpl_xor_u16_locked, iemAImpl_xor_u32_locked, iemAImpl_xor_u64_locked);
}
4563
4564
4565/**
4566 * @opmaps grp1_81
4567 * @opcode /7
4568 */
FNIEMOP_DEF_1(iemOp_Grp1_cmp_Ev_Iz, uint8_t, bRm)
{
    IEMOP_MNEMONIC(cmp_Ev_Iz, "cmp Ev,Iz");
    /* CMP only reads the destination; the _RO body is self-contained and
       itself rejects any LOCK prefix, so no _LOCKED/_NO_LOCK tail follows. */
    IEMOP_BODY_BINARY_Ev_Iz_RO(iemAImpl_cmp_u16, iemAImpl_cmp_u32, iemAImpl_cmp_u64);
}
4574
4575
4576/**
4577 * @opcode 0x81
4578 */
FNIEMOP_DEF(iemOp_Grp1_Ev_Iz)
{
    /* Group 1: the ModR/M reg field selects which operation to perform. */
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    switch (IEM_GET_MODRM_REG_8(bRm))
    {
        case 0: return FNIEMOP_CALL_1(iemOp_Grp1_add_Ev_Iz, bRm);
        case 1: return FNIEMOP_CALL_1(iemOp_Grp1_or_Ev_Iz,  bRm);
        case 2: return FNIEMOP_CALL_1(iemOp_Grp1_adc_Ev_Iz, bRm);
        case 3: return FNIEMOP_CALL_1(iemOp_Grp1_sbb_Ev_Iz, bRm);
        case 4: return FNIEMOP_CALL_1(iemOp_Grp1_and_Ev_Iz, bRm);
        case 5: return FNIEMOP_CALL_1(iemOp_Grp1_sub_Ev_Iz, bRm);
        case 6: return FNIEMOP_CALL_1(iemOp_Grp1_xor_Ev_Iz, bRm);
        case 7: return FNIEMOP_CALL_1(iemOp_Grp1_cmp_Ev_Iz, bRm);
        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
4595
4596
4597/**
4598 * @opcode 0x82
4599 * @opmnemonic grp1_82
4600 * @opgroup og_groups
4601 */
FNIEMOP_DEF(iemOp_Grp1_Eb_Ib_82)
{
    /* Opcode 0x82 is an alias of 0x80 that is invalid in 64-bit mode, hence
       the IEMOP_HLP_NO_64BIT check before forwarding to the 0x80 handler. */
    IEMOP_HLP_NO_64BIT(); /** @todo do we need to decode the whole instruction or is this ok? */
    return FNIEMOP_CALL(iemOp_Grp1_Eb_Ib_80);
}
4607
4608
/**
 * Body for group 1 instruction (binary) w/ byte imm operand, dispatched via
 * iemOp_Grp1_Ev_Ib (opcode 0x83).
 *
 * The imm8 is sign-extended ((int8_t)u8Imm) to the effective operand size.
 * Emits the register-target and non-LOCKed memory-target variants, then
 * leaves an 'else' open which the matching IEMOP_BODY_BINARY_Ev_Ib_LOCKED
 * invocation must complete.  The 32-bit register path clears the high half
 * of the 64-bit GPR; the '1' passed to IEM_MC_CALC_RM_EFF_ADDR is the one
 * immediate byte still to be fetched after the ModR/M operand.
 */
#define IEMOP_BODY_BINARY_Ev_Ib_RW(a_fnNormalU16, a_fnNormalU32, a_fnNormalU64) \
    if (IEM_IS_MODRM_REG_MODE(bRm)) \
    { \
        /* \
         * Register target \
         */ \
        uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm); \
        switch (pVCpu->iem.s.enmEffOpSize) \
        { \
            case IEMMODE_16BIT: \
                IEM_MC_BEGIN(3, 0, 0, 0); \
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
                IEM_MC_ARG(uint16_t *,      pu16Dst,                    0); \
                IEM_MC_ARG_CONST(uint16_t,  u16Src, /*=*/ (int8_t)u8Imm,1); \
                IEM_MC_ARG(uint32_t *,      pEFlags,                    2); \
                \
                IEM_MC_REF_GREG_U16(pu16Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
                IEM_MC_REF_EFLAGS(pEFlags); \
                IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU16, pu16Dst, u16Src, pEFlags); \
                \
                IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                IEM_MC_END(); \
                break; \
            \
            case IEMMODE_32BIT: \
                IEM_MC_BEGIN(3, 0, IEM_MC_F_MIN_386, 0); \
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
                IEM_MC_ARG(uint32_t *,      pu32Dst,                    0); \
                IEM_MC_ARG_CONST(uint32_t,  u32Src, /*=*/ (int8_t)u8Imm,1); \
                IEM_MC_ARG(uint32_t *,      pEFlags,                    2); \
                \
                IEM_MC_REF_GREG_U32(pu32Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
                IEM_MC_REF_EFLAGS(pEFlags); \
                IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU32, pu32Dst, u32Src, pEFlags); \
                IEM_MC_CLEAR_HIGH_GREG_U64(IEM_GET_MODRM_RM(pVCpu, bRm)); \
                \
                IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                IEM_MC_END(); \
                break; \
            \
            case IEMMODE_64BIT: \
                IEM_MC_BEGIN(3, 0, IEM_MC_F_64BIT, 0); \
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
                IEM_MC_ARG(uint64_t *,      pu64Dst,                    0); \
                IEM_MC_ARG_CONST(uint64_t,  u64Src, /*=*/ (int8_t)u8Imm,1); \
                IEM_MC_ARG(uint32_t *,      pEFlags,                    2); \
                \
                IEM_MC_REF_GREG_U64(pu64Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
                IEM_MC_REF_EFLAGS(pEFlags); \
                IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU64, pu64Dst, u64Src, pEFlags); \
                \
                IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                IEM_MC_END(); \
                break; \
            \
            IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
        } \
    } \
    else \
    { \
        /* \
         * Memory target. \
         */ \
        if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK)) \
        { \
            switch (pVCpu->iem.s.enmEffOpSize) \
            { \
                case IEMMODE_16BIT: \
                    IEM_MC_BEGIN(3, 3, 0, 0); \
                    IEM_MC_ARG(uint16_t *,      pu16Dst,          0); \
                    IEM_MC_ARG(uint16_t,        u16Src,           1); \
                    IEM_MC_ARG_LOCAL_EFLAGS(    pEFlags, EFlags,  2); \
                    IEM_MC_LOCAL(RTGCPTR,       GCPtrEffDst); \
                    IEM_MC_LOCAL(uint8_t,       bUnmapInfo); \
                    \
                    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1); \
                    uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm); \
                    IEM_MC_ASSIGN(u16Src, (int8_t)u8Imm); \
                    IEMOP_HLP_DONE_DECODING(); \
                    IEM_MC_MEM_MAP_U16_RW(pu16Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
                    IEM_MC_FETCH_EFLAGS(EFlags); \
                    IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU16, pu16Dst, u16Src, pEFlags); \
                    \
                    IEM_MC_MEM_COMMIT_AND_UNMAP_RW(pu16Dst, bUnmapInfo); \
                    IEM_MC_COMMIT_EFLAGS(EFlags); \
                    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                    IEM_MC_END(); \
                    break; \
                \
                case IEMMODE_32BIT: \
                    IEM_MC_BEGIN(3, 3, IEM_MC_F_MIN_386, 0); \
                    IEM_MC_ARG(uint32_t *,      pu32Dst,          0); \
                    IEM_MC_ARG(uint32_t,        u32Src,           1); \
                    IEM_MC_ARG_LOCAL_EFLAGS(    pEFlags, EFlags,  2); \
                    IEM_MC_LOCAL(RTGCPTR,       GCPtrEffDst); \
                    IEM_MC_LOCAL(uint8_t,       bUnmapInfo); \
                    \
                    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1); \
                    uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm); \
                    IEM_MC_ASSIGN(u32Src, (int8_t)u8Imm); \
                    IEMOP_HLP_DONE_DECODING(); \
                    IEM_MC_MEM_MAP_U32_RW(pu32Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
                    IEM_MC_FETCH_EFLAGS(EFlags); \
                    IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU32, pu32Dst, u32Src, pEFlags); \
                    \
                    IEM_MC_MEM_COMMIT_AND_UNMAP_RW(pu32Dst, bUnmapInfo); \
                    IEM_MC_COMMIT_EFLAGS(EFlags); \
                    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                    IEM_MC_END(); \
                    break; \
                \
                case IEMMODE_64BIT: \
                    IEM_MC_BEGIN(3, 3, IEM_MC_F_64BIT, 0); \
                    IEM_MC_ARG(uint64_t *,      pu64Dst,          0); \
                    IEM_MC_ARG(uint64_t,        u64Src,           1); \
                    IEM_MC_ARG_LOCAL_EFLAGS(    pEFlags, EFlags,  2); \
                    IEM_MC_LOCAL(RTGCPTR,       GCPtrEffDst); \
                    IEM_MC_LOCAL(uint8_t,       bUnmapInfo); \
                    \
                    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1); \
                    uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm); \
                    IEM_MC_ASSIGN(u64Src, (int8_t)u8Imm); \
                    IEMOP_HLP_DONE_DECODING(); \
                    IEM_MC_MEM_MAP_U64_RW(pu64Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
                    IEM_MC_FETCH_EFLAGS(EFlags); \
                    IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU64, pu64Dst, u64Src, pEFlags); \
                    \
                    IEM_MC_MEM_COMMIT_AND_UNMAP_RW(pu64Dst, bUnmapInfo); \
                    IEM_MC_COMMIT_EFLAGS(EFlags); \
                    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                    IEM_MC_END(); \
                    break; \
                \
                IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
            } \
        } \
        else \
        { \
            (void)0
/* LOCKed memory-target continuation: completes the 'else' branch left open
   by IEMOP_BODY_BINARY_Ev_Ib_RW, using the atomic (locked) workers.
   Separate macro to work around parsing issue in IEMAllInstPython.py. */
#define IEMOP_BODY_BINARY_Ev_Ib_LOCKED(a_fnLockedU16, a_fnLockedU32, a_fnLockedU64) \
            switch (pVCpu->iem.s.enmEffOpSize) \
            { \
                case IEMMODE_16BIT: \
                    IEM_MC_BEGIN(3, 3, 0, 0); \
                    IEM_MC_ARG(uint16_t *,      pu16Dst,          0); \
                    IEM_MC_ARG(uint16_t,        u16Src,           1); \
                    IEM_MC_ARG_LOCAL_EFLAGS(    pEFlags, EFlags,  2); \
                    IEM_MC_LOCAL(RTGCPTR,       GCPtrEffDst); \
                    IEM_MC_LOCAL(uint8_t,       bUnmapInfo); \
                    \
                    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1); \
                    uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm); \
                    IEM_MC_ASSIGN(u16Src, (int8_t)u8Imm); \
                    IEMOP_HLP_DONE_DECODING(); \
                    IEM_MC_MEM_MAP_U16_RW(pu16Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
                    IEM_MC_FETCH_EFLAGS(EFlags); \
                    IEM_MC_CALL_VOID_AIMPL_3(a_fnLockedU16, pu16Dst, u16Src, pEFlags); \
                    \
                    IEM_MC_MEM_COMMIT_AND_UNMAP_RW(pu16Dst, bUnmapInfo); \
                    IEM_MC_COMMIT_EFLAGS(EFlags); \
                    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                    IEM_MC_END(); \
                    break; \
                \
                case IEMMODE_32BIT: \
                    IEM_MC_BEGIN(3, 3, IEM_MC_F_MIN_386, 0); \
                    IEM_MC_ARG(uint32_t *,      pu32Dst,          0); \
                    IEM_MC_ARG(uint32_t,        u32Src,           1); \
                    IEM_MC_ARG_LOCAL_EFLAGS(    pEFlags, EFlags,  2); \
                    IEM_MC_LOCAL(RTGCPTR,       GCPtrEffDst); \
                    IEM_MC_LOCAL(uint8_t,       bUnmapInfo); \
                    \
                    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1); \
                    uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm); \
                    IEM_MC_ASSIGN(u32Src, (int8_t)u8Imm); \
                    IEMOP_HLP_DONE_DECODING(); \
                    IEM_MC_MEM_MAP_U32_RW(pu32Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
                    IEM_MC_FETCH_EFLAGS(EFlags); \
                    IEM_MC_CALL_VOID_AIMPL_3(a_fnLockedU32, pu32Dst, u32Src, pEFlags); \
                    \
                    IEM_MC_MEM_COMMIT_AND_UNMAP_RW(pu32Dst, bUnmapInfo); \
                    IEM_MC_COMMIT_EFLAGS(EFlags); \
                    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                    IEM_MC_END(); \
                    break; \
                \
                case IEMMODE_64BIT: \
                    IEM_MC_BEGIN(3, 3, IEM_MC_F_64BIT, 0); \
                    IEM_MC_ARG(uint64_t *,      pu64Dst,          0); \
                    IEM_MC_ARG(uint64_t,        u64Src,           1); \
                    IEM_MC_ARG_LOCAL_EFLAGS(    pEFlags, EFlags,  2); \
                    IEM_MC_LOCAL(RTGCPTR,       GCPtrEffDst); \
                    IEM_MC_LOCAL(uint8_t,       bUnmapInfo); \
                    \
                    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1); \
                    uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm); \
                    IEM_MC_ASSIGN(u64Src, (int8_t)u8Imm); \
                    IEMOP_HLP_DONE_DECODING(); \
                    IEM_MC_MEM_MAP_U64_RW(pu64Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
                    IEM_MC_FETCH_EFLAGS(EFlags); \
                    IEM_MC_CALL_VOID_AIMPL_3(a_fnLockedU64, pu64Dst, u64Src, pEFlags); \
                    \
                    IEM_MC_MEM_COMMIT_AND_UNMAP_RW(pu64Dst, bUnmapInfo); \
                    IEM_MC_COMMIT_EFLAGS(EFlags); \
                    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                    IEM_MC_END(); \
                    break; \
                \
                IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
            } \
        } \
    } \
    (void)0
4827
/* Read-only variant of the Ev,Ib body (for CMP): destination only read, so
   no high-GPR clearing in the 32-bit register path, the memory operand is
   mapped read-only, and a LOCK prefix is rejected in the final 'else'.
   Self-contained -- no _LOCKED continuation is needed. */
#define IEMOP_BODY_BINARY_Ev_Ib_RO(a_fnNormalU16, a_fnNormalU32, a_fnNormalU64) \
    if (IEM_IS_MODRM_REG_MODE(bRm)) \
    { \
        /* \
         * Register target \
         */ \
        uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm); \
        switch (pVCpu->iem.s.enmEffOpSize) \
        { \
            case IEMMODE_16BIT: \
                IEM_MC_BEGIN(3, 0, 0, 0); \
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
                IEM_MC_ARG(uint16_t *,      pu16Dst,                    0); \
                IEM_MC_ARG_CONST(uint16_t,  u16Src, /*=*/ (int8_t)u8Imm,1); \
                IEM_MC_ARG(uint32_t *,      pEFlags,                    2); \
                \
                IEM_MC_REF_GREG_U16(pu16Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
                IEM_MC_REF_EFLAGS(pEFlags); \
                IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU16, pu16Dst, u16Src, pEFlags); \
                \
                IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                IEM_MC_END(); \
                break; \
            \
            case IEMMODE_32BIT: \
                IEM_MC_BEGIN(3, 0, IEM_MC_F_MIN_386, 0); \
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
                IEM_MC_ARG(uint32_t *,      pu32Dst,                    0); \
                IEM_MC_ARG_CONST(uint32_t,  u32Src, /*=*/ (int8_t)u8Imm,1); \
                IEM_MC_ARG(uint32_t *,      pEFlags,                    2); \
                \
                IEM_MC_REF_GREG_U32(pu32Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
                IEM_MC_REF_EFLAGS(pEFlags); \
                IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU32, pu32Dst, u32Src, pEFlags); \
                \
                IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                IEM_MC_END(); \
                break; \
            \
            case IEMMODE_64BIT: \
                IEM_MC_BEGIN(3, 0, IEM_MC_F_64BIT, 0); \
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
                IEM_MC_ARG(uint64_t *,      pu64Dst,                    0); \
                IEM_MC_ARG_CONST(uint64_t,  u64Src, /*=*/ (int8_t)u8Imm,1); \
                IEM_MC_ARG(uint32_t *,      pEFlags,                    2); \
                \
                IEM_MC_REF_GREG_U64(pu64Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
                IEM_MC_REF_EFLAGS(pEFlags); \
                IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU64, pu64Dst, u64Src, pEFlags); \
                \
                IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                IEM_MC_END(); \
                break; \
            \
            IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
        } \
    } \
    else \
    { \
        /* \
         * Memory target. \
         */ \
        if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK)) \
        { \
            switch (pVCpu->iem.s.enmEffOpSize) \
            { \
                case IEMMODE_16BIT: \
                    IEM_MC_BEGIN(3, 3, 0, 0); \
                    IEM_MC_ARG(uint16_t const *, pu16Dst,          0); \
                    IEM_MC_ARG(uint16_t,         u16Src,           1); \
                    IEM_MC_ARG_LOCAL_EFLAGS(     pEFlags, EFlags,  2); \
                    IEM_MC_LOCAL(RTGCPTR,        GCPtrEffDst); \
                    IEM_MC_LOCAL(uint8_t,        bUnmapInfo); \
                    \
                    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1); \
                    uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm); \
                    IEM_MC_ASSIGN(u16Src, (int8_t)u8Imm); \
                    IEMOP_HLP_DONE_DECODING(); \
                    IEM_MC_MEM_MAP_U16_RO(pu16Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
                    IEM_MC_FETCH_EFLAGS(EFlags); \
                    IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU16, pu16Dst, u16Src, pEFlags); \
                    \
                    IEM_MC_MEM_COMMIT_AND_UNMAP_RO(pu16Dst, bUnmapInfo); \
                    IEM_MC_COMMIT_EFLAGS(EFlags); \
                    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                    IEM_MC_END(); \
                    break; \
                \
                case IEMMODE_32BIT: \
                    IEM_MC_BEGIN(3, 3, IEM_MC_F_MIN_386, 0); \
                    IEM_MC_ARG(uint32_t const *, pu32Dst,          0); \
                    IEM_MC_ARG(uint32_t,         u32Src,           1); \
                    IEM_MC_ARG_LOCAL_EFLAGS(     pEFlags, EFlags,  2); \
                    IEM_MC_LOCAL(RTGCPTR,        GCPtrEffDst); \
                    IEM_MC_LOCAL(uint8_t,        bUnmapInfo); \
                    \
                    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1); \
                    uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm); \
                    IEM_MC_ASSIGN(u32Src, (int8_t)u8Imm); \
                    IEMOP_HLP_DONE_DECODING(); \
                    IEM_MC_MEM_MAP_U32_RO(pu32Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
                    IEM_MC_FETCH_EFLAGS(EFlags); \
                    IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU32, pu32Dst, u32Src, pEFlags); \
                    \
                    IEM_MC_MEM_COMMIT_AND_UNMAP_RO(pu32Dst, bUnmapInfo); \
                    IEM_MC_COMMIT_EFLAGS(EFlags); \
                    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                    IEM_MC_END(); \
                    break; \
                \
                case IEMMODE_64BIT: \
                    IEM_MC_BEGIN(3, 3, IEM_MC_F_64BIT, 0); \
                    IEM_MC_ARG(uint64_t const *, pu64Dst,          0); \
                    IEM_MC_ARG(uint64_t,         u64Src,           1); \
                    IEM_MC_ARG_LOCAL_EFLAGS(     pEFlags, EFlags,  2); \
                    IEM_MC_LOCAL(RTGCPTR,        GCPtrEffDst); \
                    IEM_MC_LOCAL(uint8_t,        bUnmapInfo); \
                    \
                    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1); \
                    uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm); \
                    IEM_MC_ASSIGN(u64Src, (int8_t)u8Imm); \
                    IEMOP_HLP_DONE_DECODING(); \
                    IEM_MC_MEM_MAP_U64_RO(pu64Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
                    IEM_MC_FETCH_EFLAGS(EFlags); \
                    IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU64, pu64Dst, u64Src, pEFlags); \
                    \
                    IEM_MC_MEM_COMMIT_AND_UNMAP_RO(pu64Dst, bUnmapInfo); \
                    IEM_MC_COMMIT_EFLAGS(EFlags); \
                    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                    IEM_MC_END(); \
                    break; \
                \
                IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
            } \
        } \
        else \
        { \
            IEMOP_HLP_DONE_DECODING(); \
            IEMOP_RAISE_INVALID_LOCK_PREFIX_RET(); \
        } \
    } \
    (void)0
4971
4972/**
4973 * @opmaps grp1_83
4974 * @opcode /0
4975 */
4976FNIEMOP_DEF_1(iemOp_Grp1_add_Ev_Ib, uint8_t, bRm)
4977{
4978 IEMOP_MNEMONIC(add_Ev_Ib, "add Ev,Ib");
4979 IEMOP_BODY_BINARY_Ev_Ib_RW( iemAImpl_add_u16, iemAImpl_add_u32, iemAImpl_add_u64);
4980 IEMOP_BODY_BINARY_Ev_Ib_LOCKED(iemAImpl_add_u16_locked, iemAImpl_add_u32_locked, iemAImpl_add_u64_locked);
4981}
4982
4983
4984/**
4985 * @opmaps grp1_83
4986 * @opcode /1
4987 */
4988FNIEMOP_DEF_1(iemOp_Grp1_or_Ev_Ib, uint8_t, bRm)
4989{
4990 IEMOP_MNEMONIC(or_Ev_Ib, "or Ev,Ib");
4991 IEMOP_BODY_BINARY_Ev_Ib_RW( iemAImpl_or_u16, iemAImpl_or_u32, iemAImpl_or_u64);
4992 IEMOP_BODY_BINARY_Ev_Ib_LOCKED(iemAImpl_or_u16_locked, iemAImpl_or_u32_locked, iemAImpl_or_u64_locked);
4993}
4994
4995
4996/**
4997 * @opmaps grp1_83
4998 * @opcode /2
4999 */
5000FNIEMOP_DEF_1(iemOp_Grp1_adc_Ev_Ib, uint8_t, bRm)
5001{
5002 IEMOP_MNEMONIC(adc_Ev_Ib, "adc Ev,Ib");
5003 IEMOP_BODY_BINARY_Ev_Ib_RW( iemAImpl_adc_u16, iemAImpl_adc_u32, iemAImpl_adc_u64);
5004 IEMOP_BODY_BINARY_Ev_Ib_LOCKED(iemAImpl_adc_u16_locked, iemAImpl_adc_u32_locked, iemAImpl_adc_u64_locked);
5005}
5006
5007
5008/**
5009 * @opmaps grp1_83
5010 * @opcode /3
5011 */
5012FNIEMOP_DEF_1(iemOp_Grp1_sbb_Ev_Ib, uint8_t, bRm)
5013{
5014 IEMOP_MNEMONIC(sbb_Ev_Ib, "sbb Ev,Ib");
5015 IEMOP_BODY_BINARY_Ev_Ib_RW( iemAImpl_sbb_u16, iemAImpl_sbb_u32, iemAImpl_sbb_u64);
5016 IEMOP_BODY_BINARY_Ev_Ib_LOCKED(iemAImpl_sbb_u16_locked, iemAImpl_sbb_u32_locked, iemAImpl_sbb_u64_locked);
5017}
5018
5019
5020/**
5021 * @opmaps grp1_83
5022 * @opcode /4
5023 */
5024FNIEMOP_DEF_1(iemOp_Grp1_and_Ev_Ib, uint8_t, bRm)
5025{
5026 IEMOP_MNEMONIC(and_Ev_Ib, "and Ev,Ib");
5027 IEMOP_BODY_BINARY_Ev_Ib_RW( iemAImpl_and_u16, iemAImpl_and_u32, iemAImpl_and_u64);
5028 IEMOP_BODY_BINARY_Ev_Ib_LOCKED(iemAImpl_and_u16_locked, iemAImpl_and_u32_locked, iemAImpl_and_u64_locked);
5029}
5030
5031
5032/**
5033 * @opmaps grp1_83
5034 * @opcode /5
5035 */
5036FNIEMOP_DEF_1(iemOp_Grp1_sub_Ev_Ib, uint8_t, bRm)
5037{
5038 IEMOP_MNEMONIC(sub_Ev_Ib, "sub Ev,Ib");
5039 IEMOP_BODY_BINARY_Ev_Ib_RW( iemAImpl_sub_u16, iemAImpl_sub_u32, iemAImpl_sub_u64);
5040 IEMOP_BODY_BINARY_Ev_Ib_LOCKED(iemAImpl_sub_u16_locked, iemAImpl_sub_u32_locked, iemAImpl_sub_u64_locked);
5041}
5042
5043
5044/**
5045 * @opmaps grp1_83
5046 * @opcode /6
5047 */
5048FNIEMOP_DEF_1(iemOp_Grp1_xor_Ev_Ib, uint8_t, bRm)
5049{
5050 IEMOP_MNEMONIC(xor_Ev_Ib, "xor Ev,Ib");
5051 IEMOP_BODY_BINARY_Ev_Ib_RW( iemAImpl_xor_u16, iemAImpl_xor_u32, iemAImpl_xor_u64);
5052 IEMOP_BODY_BINARY_Ev_Ib_LOCKED(iemAImpl_xor_u16_locked, iemAImpl_xor_u32_locked, iemAImpl_xor_u64_locked);
5053}
5054
5055
5056/**
5057 * @opmaps grp1_83
5058 * @opcode /7
5059 */
5060FNIEMOP_DEF_1(iemOp_Grp1_cmp_Ev_Ib, uint8_t, bRm)
5061{
5062 IEMOP_MNEMONIC(cmp_Ev_Ib, "cmp Ev,Ib");
5063 IEMOP_BODY_BINARY_Ev_Ib_RO(iemAImpl_cmp_u16, iemAImpl_cmp_u32, iemAImpl_cmp_u64);
5064}
5065
5066
5067/**
5068 * @opcode 0x83
5069 */
5070FNIEMOP_DEF(iemOp_Grp1_Ev_Ib)
5071{
5072 /* Note! Seems the OR, AND, and XOR instructions are present on CPUs prior
5073 to the 386 even if absent in the intel reference manuals and some
5074 3rd party opcode listings. */
5075 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5076 switch (IEM_GET_MODRM_REG_8(bRm))
5077 {
5078 case 0: return FNIEMOP_CALL_1(iemOp_Grp1_add_Ev_Ib, bRm);
5079 case 1: return FNIEMOP_CALL_1(iemOp_Grp1_or_Ev_Ib, bRm);
5080 case 2: return FNIEMOP_CALL_1(iemOp_Grp1_adc_Ev_Ib, bRm);
5081 case 3: return FNIEMOP_CALL_1(iemOp_Grp1_sbb_Ev_Ib, bRm);
5082 case 4: return FNIEMOP_CALL_1(iemOp_Grp1_and_Ev_Ib, bRm);
5083 case 5: return FNIEMOP_CALL_1(iemOp_Grp1_sub_Ev_Ib, bRm);
5084 case 6: return FNIEMOP_CALL_1(iemOp_Grp1_xor_Ev_Ib, bRm);
5085 case 7: return FNIEMOP_CALL_1(iemOp_Grp1_cmp_Ev_Ib, bRm);
5086 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5087 }
5088}
5089
5090
5091/**
5092 * @opcode 0x84
5093 */
5094FNIEMOP_DEF(iemOp_test_Eb_Gb)
5095{
5096 IEMOP_MNEMONIC(test_Eb_Gb, "test Eb,Gb");
5097 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
5098 IEMOP_BODY_BINARY_rm_r8_RO(iemAImpl_test_u8);
5099 IEMOP_BODY_BINARY_rm_r8_NO_LOCK();
5100}
5101
5102
5103/**
5104 * @opcode 0x85
5105 */
5106FNIEMOP_DEF(iemOp_test_Ev_Gv)
5107{
5108 IEMOP_MNEMONIC(test_Ev_Gv, "test Ev,Gv");
5109 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
5110 IEMOP_BODY_BINARY_rm_rv_RO(iemAImpl_test_u16, iemAImpl_test_u32, iemAImpl_test_u64);
5111}
5112
5113
5114/**
5115 * @opcode 0x86
5116 */
5117FNIEMOP_DEF(iemOp_xchg_Eb_Gb)
5118{
5119 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5120 IEMOP_MNEMONIC(xchg_Eb_Gb, "xchg Eb,Gb");
5121
5122 /*
5123 * If rm is denoting a register, no more instruction bytes.
5124 */
5125 if (IEM_IS_MODRM_REG_MODE(bRm))
5126 {
5127 IEM_MC_BEGIN(0, 2, 0, 0);
5128 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5129 IEM_MC_LOCAL(uint8_t, uTmp1);
5130 IEM_MC_LOCAL(uint8_t, uTmp2);
5131
5132 IEM_MC_FETCH_GREG_U8(uTmp1, IEM_GET_MODRM_REG(pVCpu, bRm));
5133 IEM_MC_FETCH_GREG_U8(uTmp2, IEM_GET_MODRM_RM(pVCpu, bRm));
5134 IEM_MC_STORE_GREG_U8(IEM_GET_MODRM_RM(pVCpu, bRm), uTmp1);
5135 IEM_MC_STORE_GREG_U8(IEM_GET_MODRM_REG(pVCpu, bRm), uTmp2);
5136
5137 IEM_MC_ADVANCE_RIP_AND_FINISH();
5138 IEM_MC_END();
5139 }
5140 else
5141 {
5142 /*
5143 * We're accessing memory.
5144 */
5145 IEM_MC_BEGIN(2, 4, 0, 0);
5146 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5147 IEM_MC_LOCAL(uint8_t, bUnmapInfo);
5148 IEM_MC_LOCAL(uint8_t, uTmpReg);
5149 IEM_MC_ARG(uint8_t *, pu8Mem, 0);
5150 IEM_MC_ARG_LOCAL_REF(uint8_t *, pu8Reg, uTmpReg, 1);
5151
5152 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
5153 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5154 IEM_MC_MEM_MAP_U8_RW(pu8Mem, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
5155 IEM_MC_FETCH_GREG_U8(uTmpReg, IEM_GET_MODRM_REG(pVCpu, bRm));
5156 if (!(pVCpu->iem.s.fExec & IEM_F_X86_DISREGARD_LOCK))
5157 IEM_MC_CALL_VOID_AIMPL_2(iemAImpl_xchg_u8_locked, pu8Mem, pu8Reg);
5158 else
5159 IEM_MC_CALL_VOID_AIMPL_2(iemAImpl_xchg_u8_unlocked, pu8Mem, pu8Reg);
5160 IEM_MC_MEM_COMMIT_AND_UNMAP_RW(pu8Mem, bUnmapInfo);
5161 IEM_MC_STORE_GREG_U8(IEM_GET_MODRM_REG(pVCpu, bRm), uTmpReg);
5162
5163 IEM_MC_ADVANCE_RIP_AND_FINISH();
5164 IEM_MC_END();
5165 }
5166}
5167
5168
5169/**
5170 * @opcode 0x87
5171 */
5172FNIEMOP_DEF(iemOp_xchg_Ev_Gv)
5173{
5174 IEMOP_MNEMONIC(xchg_Ev_Gv, "xchg Ev,Gv");
5175 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5176
5177 /*
5178 * If rm is denoting a register, no more instruction bytes.
5179 */
5180 if (IEM_IS_MODRM_REG_MODE(bRm))
5181 {
5182 switch (pVCpu->iem.s.enmEffOpSize)
5183 {
5184 case IEMMODE_16BIT:
5185 IEM_MC_BEGIN(0, 2, 0, 0);
5186 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5187 IEM_MC_LOCAL(uint16_t, uTmp1);
5188 IEM_MC_LOCAL(uint16_t, uTmp2);
5189
5190 IEM_MC_FETCH_GREG_U16(uTmp1, IEM_GET_MODRM_REG(pVCpu, bRm));
5191 IEM_MC_FETCH_GREG_U16(uTmp2, IEM_GET_MODRM_RM(pVCpu, bRm));
5192 IEM_MC_STORE_GREG_U16(IEM_GET_MODRM_RM(pVCpu, bRm), uTmp1);
5193 IEM_MC_STORE_GREG_U16(IEM_GET_MODRM_REG(pVCpu, bRm), uTmp2);
5194
5195 IEM_MC_ADVANCE_RIP_AND_FINISH();
5196 IEM_MC_END();
5197 break;
5198
5199 case IEMMODE_32BIT:
5200 IEM_MC_BEGIN(0, 2, IEM_MC_F_MIN_386, 0);
5201 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5202 IEM_MC_LOCAL(uint32_t, uTmp1);
5203 IEM_MC_LOCAL(uint32_t, uTmp2);
5204
5205 IEM_MC_FETCH_GREG_U32(uTmp1, IEM_GET_MODRM_REG(pVCpu, bRm));
5206 IEM_MC_FETCH_GREG_U32(uTmp2, IEM_GET_MODRM_RM(pVCpu, bRm));
5207 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_RM(pVCpu, bRm), uTmp1);
5208 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), uTmp2);
5209
5210 IEM_MC_ADVANCE_RIP_AND_FINISH();
5211 IEM_MC_END();
5212 break;
5213
5214 case IEMMODE_64BIT:
5215 IEM_MC_BEGIN(0, 2, IEM_MC_F_64BIT, 0);
5216 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5217 IEM_MC_LOCAL(uint64_t, uTmp1);
5218 IEM_MC_LOCAL(uint64_t, uTmp2);
5219
5220 IEM_MC_FETCH_GREG_U64(uTmp1, IEM_GET_MODRM_REG(pVCpu, bRm));
5221 IEM_MC_FETCH_GREG_U64(uTmp2, IEM_GET_MODRM_RM(pVCpu, bRm));
5222 IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_RM(pVCpu, bRm), uTmp1);
5223 IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), uTmp2);
5224
5225 IEM_MC_ADVANCE_RIP_AND_FINISH();
5226 IEM_MC_END();
5227 break;
5228
5229 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5230 }
5231 }
5232 else
5233 {
5234 /*
5235 * We're accessing memory.
5236 */
5237 switch (pVCpu->iem.s.enmEffOpSize)
5238 {
5239 case IEMMODE_16BIT:
5240 IEM_MC_BEGIN(2, 4, 0, 0);
5241 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5242 IEM_MC_LOCAL(uint8_t, bUnmapInfo);
5243 IEM_MC_LOCAL(uint16_t, uTmpReg);
5244 IEM_MC_ARG(uint16_t *, pu16Mem, 0);
5245 IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Reg, uTmpReg, 1);
5246
5247 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
5248 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5249 IEM_MC_MEM_MAP_U16_RW(pu16Mem, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
5250 IEM_MC_FETCH_GREG_U16(uTmpReg, IEM_GET_MODRM_REG(pVCpu, bRm));
5251 if (!(pVCpu->iem.s.fExec & IEM_F_X86_DISREGARD_LOCK))
5252 IEM_MC_CALL_VOID_AIMPL_2(iemAImpl_xchg_u16_locked, pu16Mem, pu16Reg);
5253 else
5254 IEM_MC_CALL_VOID_AIMPL_2(iemAImpl_xchg_u16_unlocked, pu16Mem, pu16Reg);
5255 IEM_MC_MEM_COMMIT_AND_UNMAP_RW(pu16Mem, bUnmapInfo);
5256 IEM_MC_STORE_GREG_U16(IEM_GET_MODRM_REG(pVCpu, bRm), uTmpReg);
5257
5258 IEM_MC_ADVANCE_RIP_AND_FINISH();
5259 IEM_MC_END();
5260 break;
5261
5262 case IEMMODE_32BIT:
5263 IEM_MC_BEGIN(2, 4, IEM_MC_F_MIN_386, 0);
5264 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5265 IEM_MC_LOCAL(uint8_t, bUnmapInfo);
5266 IEM_MC_LOCAL(uint32_t, uTmpReg);
5267 IEM_MC_ARG(uint32_t *, pu32Mem, 0);
5268 IEM_MC_ARG_LOCAL_REF(uint32_t *, pu32Reg, uTmpReg, 1);
5269
5270 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
5271 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5272 IEM_MC_MEM_MAP_U32_RW(pu32Mem, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
5273 IEM_MC_FETCH_GREG_U32(uTmpReg, IEM_GET_MODRM_REG(pVCpu, bRm));
5274 if (!(pVCpu->iem.s.fExec & IEM_F_X86_DISREGARD_LOCK))
5275 IEM_MC_CALL_VOID_AIMPL_2(iemAImpl_xchg_u32_locked, pu32Mem, pu32Reg);
5276 else
5277 IEM_MC_CALL_VOID_AIMPL_2(iemAImpl_xchg_u32_unlocked, pu32Mem, pu32Reg);
5278 IEM_MC_MEM_COMMIT_AND_UNMAP_RW(pu32Mem, bUnmapInfo);
5279 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), uTmpReg);
5280
5281 IEM_MC_ADVANCE_RIP_AND_FINISH();
5282 IEM_MC_END();
5283 break;
5284
5285 case IEMMODE_64BIT:
5286 IEM_MC_BEGIN(2, 4, IEM_MC_F_64BIT, 0);
5287 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5288 IEM_MC_LOCAL(uint8_t, bUnmapInfo);
5289 IEM_MC_LOCAL(uint64_t, uTmpReg);
5290 IEM_MC_ARG(uint64_t *, pu64Mem, 0);
5291 IEM_MC_ARG_LOCAL_REF(uint64_t *, pu64Reg, uTmpReg, 1);
5292
5293 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
5294 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5295 IEM_MC_MEM_MAP_U64_RW(pu64Mem, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
5296 IEM_MC_FETCH_GREG_U64(uTmpReg, IEM_GET_MODRM_REG(pVCpu, bRm));
5297 if (!(pVCpu->iem.s.fExec & IEM_F_X86_DISREGARD_LOCK))
5298 IEM_MC_CALL_VOID_AIMPL_2(iemAImpl_xchg_u64_locked, pu64Mem, pu64Reg);
5299 else
5300 IEM_MC_CALL_VOID_AIMPL_2(iemAImpl_xchg_u64_unlocked, pu64Mem, pu64Reg);
5301 IEM_MC_MEM_COMMIT_AND_UNMAP_RW(pu64Mem, bUnmapInfo);
5302 IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), uTmpReg);
5303
5304 IEM_MC_ADVANCE_RIP_AND_FINISH();
5305 IEM_MC_END();
5306 break;
5307
5308 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5309 }
5310 }
5311}
5312
5313
5314/**
5315 * @opcode 0x88
5316 */
5317FNIEMOP_DEF(iemOp_mov_Eb_Gb)
5318{
5319 IEMOP_MNEMONIC(mov_Eb_Gb, "mov Eb,Gb");
5320
5321 uint8_t bRm;
5322 IEM_OPCODE_GET_NEXT_U8(&bRm);
5323
5324 /*
5325 * If rm is denoting a register, no more instruction bytes.
5326 */
5327 if (IEM_IS_MODRM_REG_MODE(bRm))
5328 {
5329 IEM_MC_BEGIN(0, 1, 0, 0);
5330 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5331 IEM_MC_LOCAL(uint8_t, u8Value);
5332 IEM_MC_FETCH_GREG_U8(u8Value, IEM_GET_MODRM_REG(pVCpu, bRm));
5333 IEM_MC_STORE_GREG_U8(IEM_GET_MODRM_RM(pVCpu, bRm), u8Value);
5334 IEM_MC_ADVANCE_RIP_AND_FINISH();
5335 IEM_MC_END();
5336 }
5337 else
5338 {
5339 /*
5340 * We're writing a register to memory.
5341 */
5342 IEM_MC_BEGIN(0, 2, 0, 0);
5343 IEM_MC_LOCAL(uint8_t, u8Value);
5344 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5345 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
5346 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5347 IEM_MC_FETCH_GREG_U8(u8Value, IEM_GET_MODRM_REG(pVCpu, bRm));
5348 IEM_MC_STORE_MEM_U8(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u8Value);
5349 IEM_MC_ADVANCE_RIP_AND_FINISH();
5350 IEM_MC_END();
5351 }
5352}
5353
5354
5355/**
5356 * @opcode 0x89
5357 */
5358FNIEMOP_DEF(iemOp_mov_Ev_Gv)
5359{
5360 IEMOP_MNEMONIC(mov_Ev_Gv, "mov Ev,Gv");
5361
5362 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5363
5364 /*
5365 * If rm is denoting a register, no more instruction bytes.
5366 */
5367 if (IEM_IS_MODRM_REG_MODE(bRm))
5368 {
5369 switch (pVCpu->iem.s.enmEffOpSize)
5370 {
5371 case IEMMODE_16BIT:
5372 IEM_MC_BEGIN(0, 1, 0, 0);
5373 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5374 IEM_MC_LOCAL(uint16_t, u16Value);
5375 IEM_MC_FETCH_GREG_U16(u16Value, IEM_GET_MODRM_REG(pVCpu, bRm));
5376 IEM_MC_STORE_GREG_U16(IEM_GET_MODRM_RM(pVCpu, bRm), u16Value);
5377 IEM_MC_ADVANCE_RIP_AND_FINISH();
5378 IEM_MC_END();
5379 break;
5380
5381 case IEMMODE_32BIT:
5382 IEM_MC_BEGIN(0, 1, IEM_MC_F_MIN_386, 0);
5383 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5384 IEM_MC_LOCAL(uint32_t, u32Value);
5385 IEM_MC_FETCH_GREG_U32(u32Value, IEM_GET_MODRM_REG(pVCpu, bRm));
5386 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_RM(pVCpu, bRm), u32Value);
5387 IEM_MC_ADVANCE_RIP_AND_FINISH();
5388 IEM_MC_END();
5389 break;
5390
5391 case IEMMODE_64BIT:
5392 IEM_MC_BEGIN(0, 1, IEM_MC_F_64BIT, 0);
5393 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5394 IEM_MC_LOCAL(uint64_t, u64Value);
5395 IEM_MC_FETCH_GREG_U64(u64Value, IEM_GET_MODRM_REG(pVCpu, bRm));
5396 IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_RM(pVCpu, bRm), u64Value);
5397 IEM_MC_ADVANCE_RIP_AND_FINISH();
5398 IEM_MC_END();
5399 break;
5400
5401 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5402 }
5403 }
5404 else
5405 {
5406 /*
5407 * We're writing a register to memory.
5408 */
5409 switch (pVCpu->iem.s.enmEffOpSize)
5410 {
5411 case IEMMODE_16BIT:
5412 IEM_MC_BEGIN(0, 2, 0, 0);
5413 IEM_MC_LOCAL(uint16_t, u16Value);
5414 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5415 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
5416 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5417 IEM_MC_FETCH_GREG_U16(u16Value, IEM_GET_MODRM_REG(pVCpu, bRm));
5418 IEM_MC_STORE_MEM_U16(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u16Value);
5419 IEM_MC_ADVANCE_RIP_AND_FINISH();
5420 IEM_MC_END();
5421 break;
5422
5423 case IEMMODE_32BIT:
5424 IEM_MC_BEGIN(0, 2, IEM_MC_F_MIN_386, 0);
5425 IEM_MC_LOCAL(uint32_t, u32Value);
5426 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5427 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
5428 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5429 IEM_MC_FETCH_GREG_U32(u32Value, IEM_GET_MODRM_REG(pVCpu, bRm));
5430 IEM_MC_STORE_MEM_U32(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u32Value);
5431 IEM_MC_ADVANCE_RIP_AND_FINISH();
5432 IEM_MC_END();
5433 break;
5434
5435 case IEMMODE_64BIT:
5436 IEM_MC_BEGIN(0, 2, IEM_MC_F_64BIT, 0);
5437 IEM_MC_LOCAL(uint64_t, u64Value);
5438 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5439 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
5440 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5441 IEM_MC_FETCH_GREG_U64(u64Value, IEM_GET_MODRM_REG(pVCpu, bRm));
5442 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u64Value);
5443 IEM_MC_ADVANCE_RIP_AND_FINISH();
5444 IEM_MC_END();
5445 break;
5446
5447 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5448 }
5449 }
5450}
5451
5452
5453/**
5454 * @opcode 0x8a
5455 */
5456FNIEMOP_DEF(iemOp_mov_Gb_Eb)
5457{
5458 IEMOP_MNEMONIC(mov_Gb_Eb, "mov Gb,Eb");
5459
5460 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5461
5462 /*
5463 * If rm is denoting a register, no more instruction bytes.
5464 */
5465 if (IEM_IS_MODRM_REG_MODE(bRm))
5466 {
5467 IEM_MC_BEGIN(0, 1, 0, 0);
5468 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5469 IEM_MC_LOCAL(uint8_t, u8Value);
5470 IEM_MC_FETCH_GREG_U8(u8Value, IEM_GET_MODRM_RM(pVCpu, bRm));
5471 IEM_MC_STORE_GREG_U8(IEM_GET_MODRM_REG(pVCpu, bRm), u8Value);
5472 IEM_MC_ADVANCE_RIP_AND_FINISH();
5473 IEM_MC_END();
5474 }
5475 else
5476 {
5477 /*
5478 * We're loading a register from memory.
5479 */
5480 IEM_MC_BEGIN(0, 2, 0, 0);
5481 IEM_MC_LOCAL(uint8_t, u8Value);
5482 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5483 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
5484 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5485 IEM_MC_FETCH_MEM_U8(u8Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
5486 IEM_MC_STORE_GREG_U8(IEM_GET_MODRM_REG(pVCpu, bRm), u8Value);
5487 IEM_MC_ADVANCE_RIP_AND_FINISH();
5488 IEM_MC_END();
5489 }
5490}
5491
5492
5493/**
5494 * @opcode 0x8b
5495 */
5496FNIEMOP_DEF(iemOp_mov_Gv_Ev)
5497{
5498 IEMOP_MNEMONIC(mov_Gv_Ev, "mov Gv,Ev");
5499
5500 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5501
5502 /*
5503 * If rm is denoting a register, no more instruction bytes.
5504 */
5505 if (IEM_IS_MODRM_REG_MODE(bRm))
5506 {
5507 switch (pVCpu->iem.s.enmEffOpSize)
5508 {
5509 case IEMMODE_16BIT:
5510 IEM_MC_BEGIN(0, 1, 0, 0);
5511 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5512 IEM_MC_LOCAL(uint16_t, u16Value);
5513 IEM_MC_FETCH_GREG_U16(u16Value, IEM_GET_MODRM_RM(pVCpu, bRm));
5514 IEM_MC_STORE_GREG_U16(IEM_GET_MODRM_REG(pVCpu, bRm), u16Value);
5515 IEM_MC_ADVANCE_RIP_AND_FINISH();
5516 IEM_MC_END();
5517 break;
5518
5519 case IEMMODE_32BIT:
5520 IEM_MC_BEGIN(0, 1, IEM_MC_F_MIN_386, 0);
5521 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5522 IEM_MC_LOCAL(uint32_t, u32Value);
5523 IEM_MC_FETCH_GREG_U32(u32Value, IEM_GET_MODRM_RM(pVCpu, bRm));
5524 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), u32Value);
5525 IEM_MC_ADVANCE_RIP_AND_FINISH();
5526 IEM_MC_END();
5527 break;
5528
5529 case IEMMODE_64BIT:
5530 IEM_MC_BEGIN(0, 1, IEM_MC_F_64BIT, 0);
5531 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5532 IEM_MC_LOCAL(uint64_t, u64Value);
5533 IEM_MC_FETCH_GREG_U64(u64Value, IEM_GET_MODRM_RM(pVCpu, bRm));
5534 IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), u64Value);
5535 IEM_MC_ADVANCE_RIP_AND_FINISH();
5536 IEM_MC_END();
5537 break;
5538
5539 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5540 }
5541 }
5542 else
5543 {
5544 /*
5545 * We're loading a register from memory.
5546 */
5547 switch (pVCpu->iem.s.enmEffOpSize)
5548 {
5549 case IEMMODE_16BIT:
5550 IEM_MC_BEGIN(0, 2, 0, 0);
5551 IEM_MC_LOCAL(uint16_t, u16Value);
5552 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5553 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
5554 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5555 IEM_MC_FETCH_MEM_U16(u16Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
5556 IEM_MC_STORE_GREG_U16(IEM_GET_MODRM_REG(pVCpu, bRm), u16Value);
5557 IEM_MC_ADVANCE_RIP_AND_FINISH();
5558 IEM_MC_END();
5559 break;
5560
5561 case IEMMODE_32BIT:
5562 IEM_MC_BEGIN(0, 2, IEM_MC_F_MIN_386, 0);
5563 IEM_MC_LOCAL(uint32_t, u32Value);
5564 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5565 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
5566 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5567 IEM_MC_FETCH_MEM_U32(u32Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
5568 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), u32Value);
5569 IEM_MC_ADVANCE_RIP_AND_FINISH();
5570 IEM_MC_END();
5571 break;
5572
5573 case IEMMODE_64BIT:
5574 IEM_MC_BEGIN(0, 2, IEM_MC_F_64BIT, 0);
5575 IEM_MC_LOCAL(uint64_t, u64Value);
5576 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5577 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
5578 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5579 IEM_MC_FETCH_MEM_U64(u64Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
5580 IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), u64Value);
5581 IEM_MC_ADVANCE_RIP_AND_FINISH();
5582 IEM_MC_END();
5583 break;
5584
5585 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5586 }
5587 }
5588}
5589
5590
5591/**
5592 * opcode 0x63
5593 * @todo Table fixme
5594 */
5595FNIEMOP_DEF(iemOp_arpl_Ew_Gw_movsx_Gv_Ev)
5596{
5597 if (!IEM_IS_64BIT_CODE(pVCpu))
5598 return FNIEMOP_CALL(iemOp_arpl_Ew_Gw);
5599 if (pVCpu->iem.s.enmEffOpSize != IEMMODE_64BIT)
5600 return FNIEMOP_CALL(iemOp_mov_Gv_Ev);
5601 return FNIEMOP_CALL(iemOp_movsxd_Gv_Ev);
5602}
5603
5604
5605/**
5606 * @opcode 0x8c
5607 */
5608FNIEMOP_DEF(iemOp_mov_Ev_Sw)
5609{
5610 IEMOP_MNEMONIC(mov_Ev_Sw, "mov Ev,Sw");
5611
5612 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5613
5614 /*
5615 * Check that the destination register exists. The REX.R prefix is ignored.
5616 */
5617 uint8_t const iSegReg = IEM_GET_MODRM_REG_8(bRm);
5618 if (iSegReg > X86_SREG_GS)
5619 IEMOP_RAISE_INVALID_OPCODE_RET(); /** @todo should probably not be raised until we've fetched all the opcode bytes? */
5620
5621 /*
5622 * If rm is denoting a register, no more instruction bytes.
5623 * In that case, the operand size is respected and the upper bits are
5624 * cleared (starting with some pentium).
5625 */
5626 if (IEM_IS_MODRM_REG_MODE(bRm))
5627 {
5628 switch (pVCpu->iem.s.enmEffOpSize)
5629 {
5630 case IEMMODE_16BIT:
5631 IEM_MC_BEGIN(0, 1, 0, 0);
5632 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5633 IEM_MC_LOCAL(uint16_t, u16Value);
5634 IEM_MC_FETCH_SREG_U16(u16Value, iSegReg);
5635 IEM_MC_STORE_GREG_U16(IEM_GET_MODRM_RM(pVCpu, bRm), u16Value);
5636 IEM_MC_ADVANCE_RIP_AND_FINISH();
5637 IEM_MC_END();
5638 break;
5639
5640 case IEMMODE_32BIT:
5641 IEM_MC_BEGIN(0, 1, IEM_MC_F_MIN_386, 0);
5642 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5643 IEM_MC_LOCAL(uint32_t, u32Value);
5644 IEM_MC_FETCH_SREG_ZX_U32(u32Value, iSegReg);
5645 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_RM(pVCpu, bRm), u32Value);
5646 IEM_MC_ADVANCE_RIP_AND_FINISH();
5647 IEM_MC_END();
5648 break;
5649
5650 case IEMMODE_64BIT:
5651 IEM_MC_BEGIN(0, 1, IEM_MC_F_64BIT, 0);
5652 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5653 IEM_MC_LOCAL(uint64_t, u64Value);
5654 IEM_MC_FETCH_SREG_ZX_U64(u64Value, iSegReg);
5655 IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_RM(pVCpu, bRm), u64Value);
5656 IEM_MC_ADVANCE_RIP_AND_FINISH();
5657 IEM_MC_END();
5658 break;
5659
5660 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5661 }
5662 }
5663 else
5664 {
5665 /*
5666 * We're saving the register to memory. The access is word sized
5667 * regardless of operand size prefixes.
5668 */
5669#if 0 /* not necessary */
5670 pVCpu->iem.s.enmEffOpSize = pVCpu->iem.s.enmDefOpSize = IEMMODE_16BIT;
5671#endif
5672 IEM_MC_BEGIN(0, 2, 0, 0);
5673 IEM_MC_LOCAL(uint16_t, u16Value);
5674 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5675 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
5676 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5677 IEM_MC_FETCH_SREG_U16(u16Value, iSegReg);
5678 IEM_MC_STORE_MEM_U16(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u16Value);
5679 IEM_MC_ADVANCE_RIP_AND_FINISH();
5680 IEM_MC_END();
5681 }
5682}
5683
5684
5685
5686
5687/**
5688 * @opcode 0x8d
5689 */
5690FNIEMOP_DEF(iemOp_lea_Gv_M)
5691{
5692 IEMOP_MNEMONIC(lea_Gv_M, "lea Gv,M");
5693 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5694 if (IEM_IS_MODRM_REG_MODE(bRm))
5695 IEMOP_RAISE_INVALID_OPCODE_RET(); /* no register form */
5696
5697 switch (pVCpu->iem.s.enmEffOpSize)
5698 {
5699 case IEMMODE_16BIT:
5700 IEM_MC_BEGIN(0, 2, 0, 0);
5701 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
5702 IEM_MC_LOCAL(uint16_t, u16Cast);
5703 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
5704 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5705 IEM_MC_ASSIGN_TO_SMALLER(u16Cast, GCPtrEffSrc);
5706 IEM_MC_STORE_GREG_U16(IEM_GET_MODRM_REG(pVCpu, bRm), u16Cast);
5707 IEM_MC_ADVANCE_RIP_AND_FINISH();
5708 IEM_MC_END();
5709 break;
5710
5711 case IEMMODE_32BIT:
5712 IEM_MC_BEGIN(0, 2, IEM_MC_F_MIN_386, 0);
5713 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
5714 IEM_MC_LOCAL(uint32_t, u32Cast);
5715 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
5716 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5717 IEM_MC_ASSIGN_TO_SMALLER(u32Cast, GCPtrEffSrc);
5718 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), u32Cast);
5719 IEM_MC_ADVANCE_RIP_AND_FINISH();
5720 IEM_MC_END();
5721 break;
5722
5723 case IEMMODE_64BIT:
5724 IEM_MC_BEGIN(0, 1, IEM_MC_F_64BIT, 0);
5725 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
5726 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
5727 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5728 IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), GCPtrEffSrc);
5729 IEM_MC_ADVANCE_RIP_AND_FINISH();
5730 IEM_MC_END();
5731 break;
5732
5733 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5734 }
5735}
5736
5737
5738/**
5739 * @opcode 0x8e
5740 */
5741FNIEMOP_DEF(iemOp_mov_Sw_Ev)
5742{
5743 IEMOP_MNEMONIC(mov_Sw_Ev, "mov Sw,Ev");
5744
5745 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5746
5747 /*
5748 * The practical operand size is 16-bit.
5749 */
5750#if 0 /* not necessary */
5751 pVCpu->iem.s.enmEffOpSize = pVCpu->iem.s.enmDefOpSize = IEMMODE_16BIT;
5752#endif
5753
5754 /*
5755 * Check that the destination register exists and can be used with this
5756 * instruction. The REX.R prefix is ignored.
5757 */
5758 uint8_t const iSegReg = IEM_GET_MODRM_REG_8(bRm);
5759 /** @todo r=bird: What does 8086 do here wrt CS? */
5760 if ( iSegReg == X86_SREG_CS
5761 || iSegReg > X86_SREG_GS)
5762 IEMOP_RAISE_INVALID_OPCODE_RET(); /** @todo should probably not be raised until we've fetched all the opcode bytes? */
5763
5764 /*
5765 * If rm is denoting a register, no more instruction bytes.
5766 *
5767 * Note! Using IEMOP_MOV_SW_EV_REG_BODY here to specify different
5768 * IEM_CIMPL_F_XXX values depending on the CPU mode and target
5769 * register. This is a restriction of the current recompiler
5770 * approach.
5771 */
5772 if (IEM_IS_MODRM_REG_MODE(bRm))
5773 {
5774#define IEMOP_MOV_SW_EV_REG_BODY(a_fCImplFlags) \
5775 IEM_MC_BEGIN(2, 0, 0, a_fCImplFlags); \
5776 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
5777 IEM_MC_ARG_CONST(uint8_t, iSRegArg, iSegReg, 0); \
5778 IEM_MC_ARG(uint16_t, u16Value, 1); \
5779 IEM_MC_FETCH_GREG_U16(u16Value, IEM_GET_MODRM_RM(pVCpu, bRm)); \
5780 IEM_MC_CALL_CIMPL_2(a_fCImplFlags, iemCImpl_load_SReg, iSRegArg, u16Value); \
5781 IEM_MC_END()
5782
5783 if (iSegReg == X86_SREG_SS)
5784 {
5785 if (IEM_IS_32BIT_CODE(pVCpu))
5786 {
5787 IEMOP_MOV_SW_EV_REG_BODY(IEM_CIMPL_F_INHIBIT_SHADOW | IEM_CIMPL_F_MODE);
5788 }
5789 else
5790 {
5791 IEMOP_MOV_SW_EV_REG_BODY(IEM_CIMPL_F_INHIBIT_SHADOW);
5792 }
5793 }
5794 else if (iSegReg >= X86_SREG_FS || !IEM_IS_32BIT_CODE(pVCpu))
5795 {
5796 IEMOP_MOV_SW_EV_REG_BODY(0);
5797 }
5798 else
5799 {
5800 IEMOP_MOV_SW_EV_REG_BODY(IEM_CIMPL_F_MODE);
5801 }
5802#undef IEMOP_MOV_SW_EV_REG_BODY
5803 }
5804 else
5805 {
5806 /*
5807 * We're loading the register from memory. The access is word sized
5808 * regardless of operand size prefixes.
5809 */
5810#define IEMOP_MOV_SW_EV_MEM_BODY(a_fCImplFlags) \
5811 IEM_MC_BEGIN(2, 1, 0, a_fCImplFlags); \
5812 IEM_MC_ARG_CONST(uint8_t, iSRegArg, iSegReg, 0); \
5813 IEM_MC_ARG(uint16_t, u16Value, 1); \
5814 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
5815 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
5816 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
5817 IEM_MC_FETCH_MEM_U16(u16Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
5818 IEM_MC_CALL_CIMPL_2(a_fCImplFlags, iemCImpl_load_SReg, iSRegArg, u16Value); \
5819 IEM_MC_END()
5820
5821 if (iSegReg == X86_SREG_SS)
5822 {
5823 if (IEM_IS_32BIT_CODE(pVCpu))
5824 {
5825 IEMOP_MOV_SW_EV_MEM_BODY(IEM_CIMPL_F_INHIBIT_SHADOW | IEM_CIMPL_F_MODE);
5826 }
5827 else
5828 {
5829 IEMOP_MOV_SW_EV_MEM_BODY(IEM_CIMPL_F_INHIBIT_SHADOW);
5830 }
5831 }
5832 else if (iSegReg >= X86_SREG_FS || !IEM_IS_32BIT_CODE(pVCpu))
5833 {
5834 IEMOP_MOV_SW_EV_MEM_BODY(0);
5835 }
5836 else
5837 {
5838 IEMOP_MOV_SW_EV_MEM_BODY(IEM_CIMPL_F_MODE);
5839 }
5840#undef IEMOP_MOV_SW_EV_MEM_BODY
5841 }
5842}
5843
5844
/** Opcode 0x8f /0.
 *
 * POP Ev.  Register destinations share the common pop-GReg code path; memory
 * destinations go through iemCImpl_pop_mem16/32/64.  The high byte of the
 * third IEM_MC_CALC_RM_EFF_ADDR argument (2/4/8 << 8) carries the operand
 * size so the effective address is calculated with RSP already adjusted, as
 * Intel requires (see the comment below). */
FNIEMOP_DEF_1(iemOp_pop_Ev, uint8_t, bRm)
{
    /* This bugger is rather annoying as it requires rSP to be updated before
       doing the effective address calculations.  Will eventually require a
       split between the R/M+SIB decoding and the effective address
       calculation - which is something that is required for any attempt at
       reusing this code for a recompiler.  It may also be good to have if we
       need to delay #UD exception caused by invalid lock prefixes.

       For now, we'll do a mostly safe interpreter-only implementation here. */
    /** @todo What's the deal with the 'reg' field and pop Ev?  Ignorning it for
     *        now until tests show it's checked.. */
    IEMOP_MNEMONIC(pop_Ev, "pop Ev");

    /* Register access is relatively easy and can share code. */
    if (IEM_IS_MODRM_REG_MODE(bRm))
        return FNIEMOP_CALL_1(iemOpCommonPopGReg, IEM_GET_MODRM_RM(pVCpu, bRm));

    /*
     * Memory target.
     *
     * Intel says that RSP is incremented before it's used in any effective
     * address calcuations.  This means some serious extra annoyance here since
     * we decode and calculate the effective address in one step and like to
     * delay committing registers till everything is done.
     *
     * So, we'll decode and calculate the effective address twice.  This will
     * require some recoding if turned into a recompiler.
     */
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE(); /* The common code does this differently. */

#if 1 /* This can be compiled, optimize later if needed. */
    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT:
            IEM_MC_BEGIN(2, 0, 0, 0);
            IEM_MC_ARG(uint8_t, iEffSeg, 0);
            IEM_MC_ARG(RTGCPTR, GCPtrEffDst, 1);
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 2 << 8);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
            IEM_MC_CALL_CIMPL_2(0, iemCImpl_pop_mem16, iEffSeg, GCPtrEffDst);
            IEM_MC_END();
            break;

        case IEMMODE_32BIT:
            IEM_MC_BEGIN(2, 0, IEM_MC_F_MIN_386, 0);
            IEM_MC_ARG(uint8_t, iEffSeg, 0);
            IEM_MC_ARG(RTGCPTR, GCPtrEffDst, 1);
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 4 << 8);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
            IEM_MC_CALL_CIMPL_2(0, iemCImpl_pop_mem32, iEffSeg, GCPtrEffDst);
            IEM_MC_END();
            break;

        case IEMMODE_64BIT:
            IEM_MC_BEGIN(2, 0, IEM_MC_F_64BIT, 0);
            IEM_MC_ARG(uint8_t, iEffSeg, 0);
            IEM_MC_ARG(RTGCPTR, GCPtrEffDst, 1);
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 8 << 8);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
            IEM_MC_CALL_CIMPL_2(0, iemCImpl_pop_mem64, iEffSeg, GCPtrEffDst);
            IEM_MC_END();
            break;

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }

#else
# ifndef TST_IEM_CHECK_MC
    /* Calc effective address with modified ESP. */
/** @todo testcase */
    RTGCPTR GCPtrEff;
    VBOXSTRICTRC rcStrict;
    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT: rcStrict = iemOpHlpCalcRmEffAddr(pVCpu, bRm, 2 << 8, &GCPtrEff); break;
        case IEMMODE_32BIT: rcStrict = iemOpHlpCalcRmEffAddr(pVCpu, bRm, 4 << 8, &GCPtrEff); break;
        case IEMMODE_64BIT: rcStrict = iemOpHlpCalcRmEffAddr(pVCpu, bRm, 8 << 8, &GCPtrEff); break;
        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
    if (rcStrict != VINF_SUCCESS)
        return rcStrict;
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    /* Perform the operation - this should be CImpl. */
    RTUINT64U TmpRsp;
    TmpRsp.u = pVCpu->cpum.GstCtx.rsp;
    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT:
        {
            uint16_t u16Value;
            rcStrict = iemMemStackPopU16Ex(pVCpu, &u16Value, &TmpRsp);
            if (rcStrict == VINF_SUCCESS)
                rcStrict = iemMemStoreDataU16(pVCpu, pVCpu->iem.s.iEffSeg, GCPtrEff, u16Value);
            break;
        }

        case IEMMODE_32BIT:
        {
            uint32_t u32Value;
            rcStrict = iemMemStackPopU32Ex(pVCpu, &u32Value, &TmpRsp);
            if (rcStrict == VINF_SUCCESS)
                rcStrict = iemMemStoreDataU32(pVCpu, pVCpu->iem.s.iEffSeg, GCPtrEff, u32Value);
            break;
        }

        case IEMMODE_64BIT:
        {
            uint64_t u64Value;
            rcStrict = iemMemStackPopU64Ex(pVCpu, &u64Value, &TmpRsp);
            if (rcStrict == VINF_SUCCESS)
                rcStrict = iemMemStoreDataU64(pVCpu, pVCpu->iem.s.iEffSeg, GCPtrEff, u64Value);
            break;
        }

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
    if (rcStrict == VINF_SUCCESS)
    {
        pVCpu->cpum.GstCtx.rsp = TmpRsp.u;
        return iemRegUpdateRipAndFinishClearingRF(pVCpu);
    }
    return rcStrict;

# else
    return VERR_IEM_IPE_2;
# endif
#endif
}
5979
5980
/**
 * @opcode 0x8f
 *
 * Group 1A: modrm.reg == 0 is 'pop Ev'; AMD reuses /1 thru /7 as the XOP
 * escape prefix.
 */
FNIEMOP_DEF(iemOp_Grp1A__xop)
{
    /*
     * AMD has defined /1 thru /7 as XOP prefix. The prefix is similar to the
     * three byte VEX prefix, except that the mmmmm field cannot have the values
     * 0 thru 7, because it would then be confused with pop Ev (modrm.reg == 0).
     */
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_REG_MASK) == (0 << X86_MODRM_REG_SHIFT)) /* /0 */
        return FNIEMOP_CALL_1(iemOp_pop_Ev, bRm);

    IEMOP_MNEMONIC(xop, "xop");
    if (IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fXop)
    {
        /** @todo Test when exactly the XOP conformance checks kick in during
         * instruction decoding and fetching (using \#PF). */
        uint8_t bXop2;   IEM_OPCODE_GET_NEXT_U8(&bXop2);
        uint8_t bOpcode; IEM_OPCODE_GET_NEXT_U8(&bOpcode);
        /* XOP may not be combined with 66/F2/F3, LOCK or REX prefixes. */
        if (   (  pVCpu->iem.s.fPrefixes
                & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPZ | IEM_OP_PRF_REPNZ | IEM_OP_PRF_LOCK | IEM_OP_PRF_REX))
            == 0)
        {
            pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_XOP;
            if ((bXop2 & 0x80 /* XOP.W */) && IEM_IS_64BIT_CODE(pVCpu))
                pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_SIZE_REX_W;
            /* The register bits in the prefix bytes are stored inverted, like VEX. */
            pVCpu->iem.s.uRexReg    = (~bRm >> (7 - 3)) & 0x8;
            pVCpu->iem.s.uRexIndex  = (~bRm >> (6 - 3)) & 0x8;
            pVCpu->iem.s.uRexB      = (~bRm >> (5 - 3)) & 0x8;
            pVCpu->iem.s.uVex3rdReg = (~bXop2 >> 3) & 0xf;
            pVCpu->iem.s.uVexLength = (bXop2 >> 2) & 1;
            pVCpu->iem.s.idxPrefix  = bXop2 & 0x3;

            /** @todo XOP: Just use new tables and decoders. */
            switch (bRm & 0x1f) /* mmmmm - opcode map select */
            {
                case 8: /* xop opcode map 8. */
                    IEMOP_BITCH_ABOUT_STUB();
                    return VERR_IEM_INSTR_NOT_IMPLEMENTED;

                case 9: /* xop opcode map 9. */
                    IEMOP_BITCH_ABOUT_STUB();
                    return VERR_IEM_INSTR_NOT_IMPLEMENTED;

                case 10: /* xop opcode map 10. */
                    IEMOP_BITCH_ABOUT_STUB();
                    return VERR_IEM_INSTR_NOT_IMPLEMENTED;

                default:
                    Log(("XOP: Invalid vvvv value: %#x!\n", bRm & 0x1f));
                    IEMOP_RAISE_INVALID_OPCODE_RET();
            }
        }
        else
            Log(("XOP: Invalid prefix mix!\n"));
    }
    else
        Log(("XOP: XOP support disabled!\n"));
    IEMOP_RAISE_INVALID_OPCODE_RET();
}
6043
6044
/**
 * Common 'xchg reg,rAX' helper.
 *
 * Used by opcodes 0x90 thru 0x97 (and REX.B 0x90).
 *
 * @param   iReg    The general register to exchange with rAX (0..7);
 *                  REX.B is applied here to select r8..r15.
 */
FNIEMOP_DEF_1(iemOpCommonXchgGRegRax, uint8_t, iReg)
{
    iReg |= pVCpu->iem.s.uRexB;
    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT:
            IEM_MC_BEGIN(0, 2, 0, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_LOCAL(uint16_t, u16Tmp1);
            IEM_MC_LOCAL(uint16_t, u16Tmp2);
            IEM_MC_FETCH_GREG_U16(u16Tmp1, iReg);
            IEM_MC_FETCH_GREG_U16(u16Tmp2, X86_GREG_xAX);
            IEM_MC_STORE_GREG_U16(X86_GREG_xAX, u16Tmp1);
            IEM_MC_STORE_GREG_U16(iReg, u16Tmp2);
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
            break;

        case IEMMODE_32BIT:
            IEM_MC_BEGIN(0, 2, IEM_MC_F_MIN_386, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_LOCAL(uint32_t, u32Tmp1);
            IEM_MC_LOCAL(uint32_t, u32Tmp2);
            IEM_MC_FETCH_GREG_U32(u32Tmp1, iReg);
            IEM_MC_FETCH_GREG_U32(u32Tmp2, X86_GREG_xAX);
            IEM_MC_STORE_GREG_U32(X86_GREG_xAX, u32Tmp1);
            IEM_MC_STORE_GREG_U32(iReg, u32Tmp2);
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
            break;

        case IEMMODE_64BIT:
            IEM_MC_BEGIN(0, 2, IEM_MC_F_64BIT, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_LOCAL(uint64_t, u64Tmp1);
            IEM_MC_LOCAL(uint64_t, u64Tmp2);
            IEM_MC_FETCH_GREG_U64(u64Tmp1, iReg);
            IEM_MC_FETCH_GREG_U64(u64Tmp2, X86_GREG_xAX);
            IEM_MC_STORE_GREG_U64(X86_GREG_xAX, u64Tmp1);
            IEM_MC_STORE_GREG_U64(iReg, u64Tmp2);
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
            break;

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
6095
6096
6097/**
6098 * @opcode 0x90
6099 */
6100FNIEMOP_DEF(iemOp_nop)
6101{
6102 /* R8/R8D and RAX/EAX can be exchanged. */
6103 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_REX_B)
6104 {
6105 IEMOP_MNEMONIC(xchg_r8_rAX, "xchg r8,rAX");
6106 return FNIEMOP_CALL_1(iemOpCommonXchgGRegRax, X86_GREG_xAX);
6107 }
6108
6109 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK)
6110 {
6111 IEMOP_MNEMONIC(pause, "pause");
6112 /* ASSUMING that we keep the IEM_F_X86_CTX_IN_GUEST, IEM_F_X86_CTX_VMX
6113 and IEM_F_X86_CTX_SVM in the TB key, we can safely do the following: */
6114 if (!IEM_IS_IN_GUEST(pVCpu))
6115 { /* probable */ }
6116#ifdef VBOX_WITH_NESTED_HWVIRT_VMX
6117 else if (pVCpu->iem.s.fExec & IEM_F_X86_CTX_VMX)
6118 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_VMEXIT, iemCImpl_vmx_pause);
6119#endif
6120#ifdef VBOX_WITH_NESTED_HWVIRT_SVM
6121 else if (pVCpu->iem.s.fExec & IEM_F_X86_CTX_SVM)
6122 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_VMEXIT, iemCImpl_svm_pause);
6123#endif
6124 }
6125 else
6126 IEMOP_MNEMONIC(nop, "nop");
6127 /** @todo testcase: lock nop; lock pause */
6128 IEM_MC_BEGIN(0, 0, 0, 0);
6129 IEMOP_HLP_DONE_DECODING();
6130 IEM_MC_ADVANCE_RIP_AND_FINISH();
6131 IEM_MC_END();
6132}
6133
6134
/**
 * @opcode 0x91
 *
 * 'xchg rCX,rAX' - defers to the common xchg-with-rAX worker.
 */
FNIEMOP_DEF(iemOp_xchg_eCX_eAX)
{
    IEMOP_MNEMONIC(xchg_rCX_rAX, "xchg rCX,rAX");
    return FNIEMOP_CALL_1(iemOpCommonXchgGRegRax, X86_GREG_xCX);
}
6143
6144
/**
 * @opcode 0x92
 *
 * 'xchg rDX,rAX' - defers to the common xchg-with-rAX worker.
 */
FNIEMOP_DEF(iemOp_xchg_eDX_eAX)
{
    IEMOP_MNEMONIC(xchg_rDX_rAX, "xchg rDX,rAX");
    return FNIEMOP_CALL_1(iemOpCommonXchgGRegRax, X86_GREG_xDX);
}
6153
6154
/**
 * @opcode 0x93
 *
 * 'xchg rBX,rAX' - defers to the common xchg-with-rAX worker.
 */
FNIEMOP_DEF(iemOp_xchg_eBX_eAX)
{
    IEMOP_MNEMONIC(xchg_rBX_rAX, "xchg rBX,rAX");
    return FNIEMOP_CALL_1(iemOpCommonXchgGRegRax, X86_GREG_xBX);
}
6163
6164
6165/**
6166 * @opcode 0x94
6167 */
6168FNIEMOP_DEF(iemOp_xchg_eSP_eAX)
6169{
6170 IEMOP_MNEMONIC(xchg_rSX_rAX, "xchg rSX,rAX");
6171 return FNIEMOP_CALL_1(iemOpCommonXchgGRegRax, X86_GREG_xSP);
6172}
6173
6174
/**
 * @opcode 0x95
 *
 * 'xchg rBP,rAX' - defers to the common xchg-with-rAX worker.
 */
FNIEMOP_DEF(iemOp_xchg_eBP_eAX)
{
    IEMOP_MNEMONIC(xchg_rBP_rAX, "xchg rBP,rAX");
    return FNIEMOP_CALL_1(iemOpCommonXchgGRegRax, X86_GREG_xBP);
}
6183
6184
/**
 * @opcode 0x96
 *
 * 'xchg rSI,rAX' - defers to the common xchg-with-rAX worker.
 */
FNIEMOP_DEF(iemOp_xchg_eSI_eAX)
{
    IEMOP_MNEMONIC(xchg_rSI_rAX, "xchg rSI,rAX");
    return FNIEMOP_CALL_1(iemOpCommonXchgGRegRax, X86_GREG_xSI);
}
6193
6194
/**
 * @opcode 0x97
 *
 * 'xchg rDI,rAX' - defers to the common xchg-with-rAX worker.
 */
FNIEMOP_DEF(iemOp_xchg_eDI_eAX)
{
    IEMOP_MNEMONIC(xchg_rDI_rAX, "xchg rDI,rAX");
    return FNIEMOP_CALL_1(iemOpCommonXchgGRegRax, X86_GREG_xDI);
}
6203
6204
/**
 * @opcode 0x98
 *
 * CBW/CWDE/CDQE depending on operand size: sign extends AL into AX,
 * AX into EAX, or EAX into RAX.  Implemented by testing the sign bit of
 * the narrow value and OR-ing/AND-ing the destination's upper half.
 */
FNIEMOP_DEF(iemOp_cbw)
{
    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT:
            IEMOP_MNEMONIC(cbw, "cbw");
            IEM_MC_BEGIN(0, 1, 0, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_IF_GREG_BIT_SET(X86_GREG_xAX, 7) {        /* AL sign bit */
                IEM_MC_OR_GREG_U16(X86_GREG_xAX, UINT16_C(0xff00));
            } IEM_MC_ELSE() {
                IEM_MC_AND_GREG_U16(X86_GREG_xAX, UINT16_C(0x00ff));
            } IEM_MC_ENDIF();
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
            break;

        case IEMMODE_32BIT:
            IEMOP_MNEMONIC(cwde, "cwde");
            IEM_MC_BEGIN(0, 1, IEM_MC_F_MIN_386, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_IF_GREG_BIT_SET(X86_GREG_xAX, 15) {       /* AX sign bit */
                IEM_MC_OR_GREG_U32(X86_GREG_xAX, UINT32_C(0xffff0000));
            } IEM_MC_ELSE() {
                IEM_MC_AND_GREG_U32(X86_GREG_xAX, UINT32_C(0x0000ffff));
            } IEM_MC_ENDIF();
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
            break;

        case IEMMODE_64BIT:
            IEMOP_MNEMONIC(cdqe, "cdqe");
            IEM_MC_BEGIN(0, 1, IEM_MC_F_64BIT, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_IF_GREG_BIT_SET(X86_GREG_xAX, 31) {       /* EAX sign bit */
                IEM_MC_OR_GREG_U64(X86_GREG_xAX, UINT64_C(0xffffffff00000000));
            } IEM_MC_ELSE() {
                IEM_MC_AND_GREG_U64(X86_GREG_xAX, UINT64_C(0x00000000ffffffff));
            } IEM_MC_ENDIF();
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
            break;

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
6254
6255
/**
 * @opcode 0x99
 *
 * CWD/CDQ/CQO depending on operand size: fills DX/EDX/RDX with the sign
 * bit of AX/EAX/RAX (all ones or all zero).
 */
FNIEMOP_DEF(iemOp_cwd)
{
    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT:
            IEMOP_MNEMONIC(cwd, "cwd");
            IEM_MC_BEGIN(0, 1, 0, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_IF_GREG_BIT_SET(X86_GREG_xAX, 15) {       /* AX sign bit */
                IEM_MC_STORE_GREG_U16_CONST(X86_GREG_xDX, UINT16_C(0xffff));
            } IEM_MC_ELSE() {
                IEM_MC_STORE_GREG_U16_CONST(X86_GREG_xDX, 0);
            } IEM_MC_ENDIF();
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
            break;

        case IEMMODE_32BIT:
            IEMOP_MNEMONIC(cdq, "cdq");
            IEM_MC_BEGIN(0, 1, IEM_MC_F_MIN_386, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_IF_GREG_BIT_SET(X86_GREG_xAX, 31) {       /* EAX sign bit */
                IEM_MC_STORE_GREG_U32_CONST(X86_GREG_xDX, UINT32_C(0xffffffff));
            } IEM_MC_ELSE() {
                IEM_MC_STORE_GREG_U32_CONST(X86_GREG_xDX, 0);
            } IEM_MC_ENDIF();
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
            break;

        case IEMMODE_64BIT:
            IEMOP_MNEMONIC(cqo, "cqo");
            IEM_MC_BEGIN(0, 1, IEM_MC_F_64BIT, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_IF_GREG_BIT_SET(X86_GREG_xAX, 63) {       /* RAX sign bit */
                IEM_MC_STORE_GREG_U64_CONST(X86_GREG_xDX, UINT64_C(0xffffffffffffffff));
            } IEM_MC_ELSE() {
                IEM_MC_STORE_GREG_U64_CONST(X86_GREG_xDX, 0);
            } IEM_MC_ENDIF();
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
            break;

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
6305
6306
/**
 * @opcode 0x9a
 *
 * Direct far call with an immediate ptr16:16 / ptr16:32 operand.
 * Invalid in 64-bit mode (IEMOP_HLP_NO_64BIT).
 */
FNIEMOP_DEF(iemOp_call_Ap)
{
    IEMOP_MNEMONIC(call_Ap, "call Ap");
    IEMOP_HLP_NO_64BIT();

    /* Decode the far pointer address and pass it on to the far call C implementation. */
    uint32_t off32Seg;
    if (pVCpu->iem.s.enmEffOpSize != IEMMODE_16BIT)
        IEM_OPCODE_GET_NEXT_U32(&off32Seg);
    else
        IEM_OPCODE_GET_NEXT_U16_ZX_U32(&off32Seg);
    uint16_t u16Sel;  IEM_OPCODE_GET_NEXT_U16(&u16Sel);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_DEFER_TO_CIMPL_3_RET(IEM_CIMPL_F_BRANCH_DIRECT | IEM_CIMPL_F_BRANCH_FAR
                                | IEM_CIMPL_F_MODE | IEM_CIMPL_F_RFLAGS | IEM_CIMPL_F_VMEXIT,
                                iemCImpl_callf, u16Sel, off32Seg, pVCpu->iem.s.enmEffOpSize);
}
6327
6328
/** Opcode 0x9b. (aka fwait)
 *
 * Checks for pending x87 exceptions / device-not-available conditions,
 * otherwise a no-op. */
FNIEMOP_DEF(iemOp_wait)
{
    IEMOP_MNEMONIC(wait, "wait");
    IEM_MC_BEGIN(0, 0, 0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_WAIT_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_ADVANCE_RIP_AND_FINISH();
    IEM_MC_END();
}
6340
6341
/**
 * @opcode 0x9c
 *
 * PUSHF - deferred entirely to the C implementation (may VM-exit).
 * Operand size defaults to 64-bit in long mode.
 */
FNIEMOP_DEF(iemOp_pushf_Fv)
{
    IEMOP_MNEMONIC(pushf_Fv, "pushf Fv");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
    IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_VMEXIT, iemCImpl_pushf, pVCpu->iem.s.enmEffOpSize);
}
6352
6353
/**
 * @opcode 0x9d
 *
 * POPF - deferred entirely to the C implementation.  Modifies RFLAGS and
 * may unmask pending IRQs, hence the CHECK_IRQ_BEFORE_AND_AFTER flag.
 */
FNIEMOP_DEF(iemOp_popf_Fv)
{
    IEMOP_MNEMONIC(popf_Fv, "popf Fv");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
    IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_RFLAGS | IEM_CIMPL_F_CHECK_IRQ_BEFORE_AND_AFTER,
                                iemCImpl_popf, pVCpu->iem.s.enmEffOpSize);
}
6365
6366
/**
 * @opcode 0x9e
 *
 * SAHF - loads SF, ZF, AF, PF and CF from AH into EFLAGS; bit 1 is forced
 * to 1 and the upper 24 bits of EFLAGS are preserved.  Invalid in 64-bit
 * mode unless the CPU reports LAHF/SAHF support (CPUID).
 */
FNIEMOP_DEF(iemOp_sahf)
{
    IEMOP_MNEMONIC(sahf, "sahf");
    if (   IEM_IS_64BIT_CODE(pVCpu)
        && !IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fLahfSahf)
        IEMOP_RAISE_INVALID_OPCODE_RET();
    IEM_MC_BEGIN(0, 2, 0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_LOCAL(uint32_t, u32Flags);
    IEM_MC_LOCAL(uint32_t, EFlags);
    IEM_MC_FETCH_EFLAGS(EFlags);
    IEM_MC_FETCH_GREG_U8_ZX_U32(u32Flags, X86_GREG_xSP/*=AH*/);
    IEM_MC_AND_LOCAL_U32(u32Flags, X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF | X86_EFL_CF);
    IEM_MC_AND_LOCAL_U32(EFlags, UINT32_C(0xffffff00)); /* keep the upper 24 bits */
    IEM_MC_OR_LOCAL_U32(u32Flags, X86_EFL_1);           /* bit 1 is always set */
    IEM_MC_OR_2LOCS_U32(EFlags, u32Flags);
    IEM_MC_COMMIT_EFLAGS(EFlags);
    IEM_MC_ADVANCE_RIP_AND_FINISH();
    IEM_MC_END();
}
6390
6391
/**
 * @opcode 0x9f
 *
 * LAHF - stores the low byte of EFLAGS into AH.  Invalid in 64-bit mode
 * unless the CPU reports LAHF/SAHF support (CPUID).
 */
FNIEMOP_DEF(iemOp_lahf)
{
    IEMOP_MNEMONIC(lahf, "lahf");
    if (   IEM_IS_64BIT_CODE(pVCpu)
        && !IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fLahfSahf)
        IEMOP_RAISE_INVALID_OPCODE_RET();
    IEM_MC_BEGIN(0, 1, 0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_LOCAL(uint8_t, u8Flags);
    IEM_MC_FETCH_EFLAGS_U8(u8Flags);
    IEM_MC_STORE_GREG_U8(X86_GREG_xSP/*=AH*/, u8Flags);
    IEM_MC_ADVANCE_RIP_AND_FINISH();
    IEM_MC_END();
}
6409
6410
/**
 * Macro used by iemOp_mov_AL_Ob, iemOp_mov_rAX_Ov, iemOp_mov_Ob_AL and
 * iemOp_mov_Ov_rAX to fetch the moffsXX bit of the opcode.
 * Will return/throw on failures.
 *
 * The immediate offset width follows the effective address mode (16, 32 or
 * 64 bits) and is zero extended to 64 bits.
 *
 * @param a_GCPtrMemOff The variable to store the offset in.
 */
#define IEMOP_FETCH_MOFFS_XX(a_GCPtrMemOff) \
    do \
    { \
        switch (pVCpu->iem.s.enmEffAddrMode) \
        { \
            case IEMMODE_16BIT: \
                IEM_OPCODE_GET_NEXT_U16_ZX_U64(&(a_GCPtrMemOff)); \
                break; \
            case IEMMODE_32BIT: \
                IEM_OPCODE_GET_NEXT_U32_ZX_U64(&(a_GCPtrMemOff)); \
                break; \
            case IEMMODE_64BIT: \
                IEM_OPCODE_GET_NEXT_U64(&(a_GCPtrMemOff)); \
                break; \
            IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
        } \
    } while (0)
6434
/**
 * @opcode 0xa0
 *
 * 'mov AL,Ob' - load AL from the byte at seg:moffs.
 */
FNIEMOP_DEF(iemOp_mov_AL_Ob)
{
    /*
     * Get the offset.
     */
    IEMOP_MNEMONIC(mov_AL_Ob, "mov AL,Ob");
    RTGCPTR GCPtrMemOff;
    IEMOP_FETCH_MOFFS_XX(GCPtrMemOff);

    /*
     * Fetch AL.
     */
    IEM_MC_BEGIN(0, 1, 0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_LOCAL(uint8_t, u8Tmp);
    IEM_MC_FETCH_MEM_U8(u8Tmp, pVCpu->iem.s.iEffSeg, GCPtrMemOff);
    IEM_MC_STORE_GREG_U8(X86_GREG_xAX, u8Tmp);
    IEM_MC_ADVANCE_RIP_AND_FINISH();
    IEM_MC_END();
}
6458
6459
/**
 * @opcode 0xa1
 *
 * 'mov rAX,Ov' - load AX/EAX/RAX from seg:moffs according to the
 * effective operand size.
 */
FNIEMOP_DEF(iemOp_mov_rAX_Ov)
{
    /*
     * Get the offset.
     */
    IEMOP_MNEMONIC(mov_rAX_Ov, "mov rAX,Ov");
    RTGCPTR GCPtrMemOff;
    IEMOP_FETCH_MOFFS_XX(GCPtrMemOff);

    /*
     * Fetch rAX.
     */
    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT:
            IEM_MC_BEGIN(0, 1, 0, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_LOCAL(uint16_t, u16Tmp);
            IEM_MC_FETCH_MEM_U16(u16Tmp, pVCpu->iem.s.iEffSeg, GCPtrMemOff);
            IEM_MC_STORE_GREG_U16(X86_GREG_xAX, u16Tmp);
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
            break;

        case IEMMODE_32BIT:
            IEM_MC_BEGIN(0, 1, IEM_MC_F_MIN_386, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_LOCAL(uint32_t, u32Tmp);
            IEM_MC_FETCH_MEM_U32(u32Tmp, pVCpu->iem.s.iEffSeg, GCPtrMemOff);
            IEM_MC_STORE_GREG_U32(X86_GREG_xAX, u32Tmp);
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
            break;

        case IEMMODE_64BIT:
            IEM_MC_BEGIN(0, 1, IEM_MC_F_64BIT, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_LOCAL(uint64_t, u64Tmp);
            IEM_MC_FETCH_MEM_U64(u64Tmp, pVCpu->iem.s.iEffSeg, GCPtrMemOff);
            IEM_MC_STORE_GREG_U64(X86_GREG_xAX, u64Tmp);
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
            break;

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
6510
6511
/**
 * @opcode 0xa2
 *
 * 'mov Ob,AL' - store AL to the byte at seg:moffs.
 */
FNIEMOP_DEF(iemOp_mov_Ob_AL)
{
    /*
     * Get the offset.
     */
    IEMOP_MNEMONIC(mov_Ob_AL, "mov Ob,AL");
    RTGCPTR GCPtrMemOff;
    IEMOP_FETCH_MOFFS_XX(GCPtrMemOff);

    /*
     * Store AL.
     */
    IEM_MC_BEGIN(0, 1, 0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_LOCAL(uint8_t, u8Tmp);
    IEM_MC_FETCH_GREG_U8(u8Tmp, X86_GREG_xAX);
    IEM_MC_STORE_MEM_U8(pVCpu->iem.s.iEffSeg, GCPtrMemOff, u8Tmp);
    IEM_MC_ADVANCE_RIP_AND_FINISH();
    IEM_MC_END();
}
6535
6536
/**
 * @opcode 0xa3
 *
 * 'mov Ov,rAX' - store AX/EAX/RAX to seg:moffs according to the
 * effective operand size.
 */
FNIEMOP_DEF(iemOp_mov_Ov_rAX)
{
    /*
     * Get the offset.
     */
    IEMOP_MNEMONIC(mov_Ov_rAX, "mov Ov,rAX");
    RTGCPTR GCPtrMemOff;
    IEMOP_FETCH_MOFFS_XX(GCPtrMemOff);

    /*
     * Store rAX.
     */
    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT:
            IEM_MC_BEGIN(0, 1, 0, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_LOCAL(uint16_t, u16Tmp);
            IEM_MC_FETCH_GREG_U16(u16Tmp, X86_GREG_xAX);
            IEM_MC_STORE_MEM_U16(pVCpu->iem.s.iEffSeg, GCPtrMemOff, u16Tmp);
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
            break;

        case IEMMODE_32BIT:
            IEM_MC_BEGIN(0, 1, IEM_MC_F_MIN_386, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_LOCAL(uint32_t, u32Tmp);
            IEM_MC_FETCH_GREG_U32(u32Tmp, X86_GREG_xAX);
            IEM_MC_STORE_MEM_U32(pVCpu->iem.s.iEffSeg, GCPtrMemOff, u32Tmp);
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
            break;

        case IEMMODE_64BIT:
            IEM_MC_BEGIN(0, 1, IEM_MC_F_64BIT, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_LOCAL(uint64_t, u64Tmp);
            IEM_MC_FETCH_GREG_U64(u64Tmp, X86_GREG_xAX);
            IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrMemOff, u64Tmp);
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
            break;

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
6587
/** Macro used by iemOp_movsb_Xb_Yb and iemOp_movswd_Xv_Yv.
 *
 * Emits one non-rep MOVS variant: loads ValBits bits from iEffSeg:rSI,
 * stores them to ES:rDI, then advances (or, when EFLAGS.DF is set,
 * retreats) both rSI and rDI by the operand size in bytes. */
#define IEM_MOVS_CASE(ValBits, AddrBits, a_fMcFlags) \
        IEM_MC_BEGIN(0, 2, a_fMcFlags, 0); \
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
        IEM_MC_LOCAL(uint##ValBits##_t, uValue); \
        IEM_MC_LOCAL(RTGCPTR,           uAddr); \
        IEM_MC_FETCH_GREG_U##AddrBits##_ZX_U64(uAddr, X86_GREG_xSI); \
        IEM_MC_FETCH_MEM_U##ValBits(uValue, pVCpu->iem.s.iEffSeg, uAddr); \
        IEM_MC_FETCH_GREG_U##AddrBits##_ZX_U64(uAddr, X86_GREG_xDI); \
        IEM_MC_STORE_MEM_U##ValBits(X86_SREG_ES, uAddr, uValue); \
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_DF) { \
            IEM_MC_SUB_GREG_U##AddrBits(X86_GREG_xDI, ValBits / 8); \
            IEM_MC_SUB_GREG_U##AddrBits(X86_GREG_xSI, ValBits / 8); \
        } IEM_MC_ELSE() { \
            IEM_MC_ADD_GREG_U##AddrBits(X86_GREG_xDI, ValBits / 8); \
            IEM_MC_ADD_GREG_U##AddrBits(X86_GREG_xSI, ValBits / 8); \
        } IEM_MC_ENDIF(); \
        IEM_MC_ADVANCE_RIP_AND_FINISH(); \
        IEM_MC_END() \
6607
/**
 * @opcode 0xa4
 *
 * MOVSB.  With an F2 or F3 prefix (both act as REP for MOVS) the work is
 * deferred to the C implementation; otherwise a single byte is moved via
 * the shared IEM_MOVS_CASE body.
 */
FNIEMOP_DEF(iemOp_movsb_Xb_Yb)
{
    /*
     * Use the C implementation if a repeat prefix is encountered.
     */
    if (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
    {
        IEMOP_MNEMONIC(rep_movsb_Xb_Yb, "rep movsb Xb,Yb");
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        switch (pVCpu->iem.s.enmEffAddrMode)
        {
            case IEMMODE_16BIT: IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP, iemCImpl_rep_movs_op8_addr16, pVCpu->iem.s.iEffSeg);
            case IEMMODE_32BIT: IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP, iemCImpl_rep_movs_op8_addr32, pVCpu->iem.s.iEffSeg);
            case IEMMODE_64BIT: IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP, iemCImpl_rep_movs_op8_addr64, pVCpu->iem.s.iEffSeg);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }

    /*
     * Sharing case implementation with movs[wdq] below.
     */
    IEMOP_MNEMONIC(movsb_Xb_Yb, "movsb Xb,Yb");
    switch (pVCpu->iem.s.enmEffAddrMode)
    {
        case IEMMODE_16BIT: IEM_MOVS_CASE(8, 16, IEM_MC_F_NOT_64BIT); break;
        case IEMMODE_32BIT: IEM_MOVS_CASE(8, 32, IEM_MC_F_MIN_386); break;
        case IEMMODE_64BIT: IEM_MOVS_CASE(8, 64, IEM_MC_F_64BIT); break;
        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
6641
6642
/**
 * @opcode 0xa5
 *
 * MOVSW/MOVSD/MOVSQ.  With an F2 or F3 prefix (both act as REP for MOVS)
 * the work is deferred to the op-size/addr-size specific C implementation;
 * otherwise one element is moved via the shared IEM_MOVS_CASE body.
 */
FNIEMOP_DEF(iemOp_movswd_Xv_Yv)
{

    /*
     * Use the C implementation if a repeat prefix is encountered.
     */
    if (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
    {
        IEMOP_MNEMONIC(rep_movs_Xv_Yv, "rep movs Xv,Yv");
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                switch (pVCpu->iem.s.enmEffAddrMode)
                {
                    case IEMMODE_16BIT: IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP, iemCImpl_rep_movs_op16_addr16, pVCpu->iem.s.iEffSeg);
                    case IEMMODE_32BIT: IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP, iemCImpl_rep_movs_op16_addr32, pVCpu->iem.s.iEffSeg);
                    case IEMMODE_64BIT: IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP, iemCImpl_rep_movs_op16_addr64, pVCpu->iem.s.iEffSeg);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                break;
            case IEMMODE_32BIT:
                switch (pVCpu->iem.s.enmEffAddrMode)
                {
                    case IEMMODE_16BIT: IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP, iemCImpl_rep_movs_op32_addr16, pVCpu->iem.s.iEffSeg);
                    case IEMMODE_32BIT: IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP, iemCImpl_rep_movs_op32_addr32, pVCpu->iem.s.iEffSeg);
                    case IEMMODE_64BIT: IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP, iemCImpl_rep_movs_op32_addr64, pVCpu->iem.s.iEffSeg);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                /* no break: unreachable, every case above returns */
            case IEMMODE_64BIT:
                switch (pVCpu->iem.s.enmEffAddrMode)
                {
                    case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_6); /* 64-bit op + 16-bit addr cannot be encoded */
                    case IEMMODE_32BIT: IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP, iemCImpl_rep_movs_op64_addr32, pVCpu->iem.s.iEffSeg);
                    case IEMMODE_64BIT: IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP, iemCImpl_rep_movs_op64_addr64, pVCpu->iem.s.iEffSeg);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }

    /*
     * Annoying double switch here.
     * Using ugly macro for implementing the cases, sharing it with movsb.
     */
    IEMOP_MNEMONIC(movs_Xv_Yv, "movs Xv,Yv");
    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT:
            switch (pVCpu->iem.s.enmEffAddrMode)
            {
                case IEMMODE_16BIT: IEM_MOVS_CASE(16, 16, IEM_MC_F_NOT_64BIT); break;
                case IEMMODE_32BIT: IEM_MOVS_CASE(16, 32, IEM_MC_F_MIN_386); break;
                case IEMMODE_64BIT: IEM_MOVS_CASE(16, 64, IEM_MC_F_64BIT); break;
                IEM_NOT_REACHED_DEFAULT_CASE_RET();
            }
            break;

        case IEMMODE_32BIT:
            switch (pVCpu->iem.s.enmEffAddrMode)
            {
                case IEMMODE_16BIT: IEM_MOVS_CASE(32, 16, IEM_MC_F_MIN_386 | IEM_MC_F_NOT_64BIT); break;
                case IEMMODE_32BIT: IEM_MOVS_CASE(32, 32, IEM_MC_F_MIN_386); break;
                case IEMMODE_64BIT: IEM_MOVS_CASE(32, 64, IEM_MC_F_64BIT); break;
                IEM_NOT_REACHED_DEFAULT_CASE_RET();
            }
            break;

        case IEMMODE_64BIT:
            switch (pVCpu->iem.s.enmEffAddrMode)
            {
                case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_1); /* cannot be encoded */ break;
                case IEMMODE_32BIT: IEM_MOVS_CASE(64, 32, IEM_MC_F_64BIT); break;
                case IEMMODE_64BIT: IEM_MOVS_CASE(64, 64, IEM_MC_F_64BIT); break;
                IEM_NOT_REACHED_DEFAULT_CASE_RET();
            }
            break;
        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
6726
6727#undef IEM_MOVS_CASE
6728
/** Macro used by iemOp_cmpsb_Xb_Yb and iemOp_cmpswd_Xv_Yv.
 *
 * Emits one non-rep CMPS variant: compares the ValBits-bit value at
 * iEffSeg:rSI (first operand) with the one at ES:rDI (second operand) via
 * iemAImpl_cmp_uNN, updating EFLAGS, then advances (or retreats when
 * EFLAGS.DF is set) both rSI and rDI by the operand size in bytes. */
#define IEM_CMPS_CASE(ValBits, AddrBits, a_fMcFlags) \
        IEM_MC_BEGIN(3, 3, a_fMcFlags, 0); \
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
        IEM_MC_ARG(uint##ValBits##_t *, puValue1, 0); \
        IEM_MC_ARG(uint##ValBits##_t,   uValue2,  1); \
        IEM_MC_ARG(uint32_t *,          pEFlags,  2); \
        IEM_MC_LOCAL(uint##ValBits##_t, uValue1); \
        IEM_MC_LOCAL(RTGCPTR,           uAddr); \
        \
        IEM_MC_FETCH_GREG_U##AddrBits##_ZX_U64(uAddr, X86_GREG_xSI); \
        IEM_MC_FETCH_MEM_U##ValBits(uValue1, pVCpu->iem.s.iEffSeg, uAddr); \
        IEM_MC_FETCH_GREG_U##AddrBits##_ZX_U64(uAddr, X86_GREG_xDI); \
        IEM_MC_FETCH_MEM_U##ValBits(uValue2, X86_SREG_ES, uAddr); \
        IEM_MC_REF_LOCAL(puValue1, uValue1); \
        IEM_MC_REF_EFLAGS(pEFlags); \
        IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_cmp_u##ValBits, puValue1, uValue2, pEFlags); \
        \
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_DF) { \
            IEM_MC_SUB_GREG_U##AddrBits(X86_GREG_xDI, ValBits / 8); \
            IEM_MC_SUB_GREG_U##AddrBits(X86_GREG_xSI, ValBits / 8); \
        } IEM_MC_ELSE() { \
            IEM_MC_ADD_GREG_U##AddrBits(X86_GREG_xDI, ValBits / 8); \
            IEM_MC_ADD_GREG_U##AddrBits(X86_GREG_xSI, ValBits / 8); \
        } IEM_MC_ENDIF(); \
        IEM_MC_ADVANCE_RIP_AND_FINISH(); \
        IEM_MC_END() \
6756
/**
 * @opcode 0xa6
 *
 * CMPSB.  F3 (REPZ/REPE) and F2 (REPNZ/REPNE) prefixes select distinct C
 * implementations since the loop termination condition differs; without a
 * prefix one byte pair is compared via the shared IEM_CMPS_CASE body.
 */
FNIEMOP_DEF(iemOp_cmpsb_Xb_Yb)
{

    /*
     * Use the C implementation if a repeat prefix is encountered.
     */
    if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_REPZ)
    {
        IEMOP_MNEMONIC(repz_cmps_Xb_Yb, "repz cmps Xb,Yb");
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        switch (pVCpu->iem.s.enmEffAddrMode)
        {
            case IEMMODE_16BIT: IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS, iemCImpl_repe_cmps_op8_addr16, pVCpu->iem.s.iEffSeg);
            case IEMMODE_32BIT: IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS, iemCImpl_repe_cmps_op8_addr32, pVCpu->iem.s.iEffSeg);
            case IEMMODE_64BIT: IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS, iemCImpl_repe_cmps_op8_addr64, pVCpu->iem.s.iEffSeg);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_REPNZ)
    {
        IEMOP_MNEMONIC(repnz_cmps_Xb_Yb, "repnz cmps Xb,Yb");
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        switch (pVCpu->iem.s.enmEffAddrMode)
        {
            case IEMMODE_16BIT: IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS, iemCImpl_repne_cmps_op8_addr16, pVCpu->iem.s.iEffSeg);
            case IEMMODE_32BIT: IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS, iemCImpl_repne_cmps_op8_addr32, pVCpu->iem.s.iEffSeg);
            case IEMMODE_64BIT: IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS, iemCImpl_repne_cmps_op8_addr64, pVCpu->iem.s.iEffSeg);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }

    /*
     * Sharing case implementation with cmps[wdq] below.
     */
    IEMOP_MNEMONIC(cmps_Xb_Yb, "cmps Xb,Yb");
    switch (pVCpu->iem.s.enmEffAddrMode)
    {
        case IEMMODE_16BIT: IEM_CMPS_CASE(8, 16, IEM_MC_F_NOT_64BIT); break;
        case IEMMODE_32BIT: IEM_CMPS_CASE(8, 32, IEM_MC_F_MIN_386); break;
        case IEMMODE_64BIT: IEM_CMPS_CASE(8, 64, IEM_MC_F_64BIT); break;
        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
6803
6804
6805/**
6806 * @opcode 0xa7
6807 */
6808FNIEMOP_DEF(iemOp_cmpswd_Xv_Yv)
6809{
6810 /*
6811 * Use the C implementation if a repeat prefix is encountered.
6812 */
6813 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_REPZ)
6814 {
6815 IEMOP_MNEMONIC(repe_cmps_Xv_Yv, "repe cmps Xv,Yv");
6816 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6817 switch (pVCpu->iem.s.enmEffOpSize)
6818 {
6819 case IEMMODE_16BIT:
6820 switch (pVCpu->iem.s.enmEffAddrMode)
6821 {
6822 case IEMMODE_16BIT: IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS, iemCImpl_repe_cmps_op16_addr16, pVCpu->iem.s.iEffSeg);
6823 case IEMMODE_32BIT: IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS, iemCImpl_repe_cmps_op16_addr32, pVCpu->iem.s.iEffSeg);
6824 case IEMMODE_64BIT: IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS, iemCImpl_repe_cmps_op16_addr64, pVCpu->iem.s.iEffSeg);
6825 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6826 }
6827 break;
6828 case IEMMODE_32BIT:
6829 switch (pVCpu->iem.s.enmEffAddrMode)
6830 {
6831 case IEMMODE_16BIT: IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS, iemCImpl_repe_cmps_op32_addr16, pVCpu->iem.s.iEffSeg);
6832 case IEMMODE_32BIT: IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS, iemCImpl_repe_cmps_op32_addr32, pVCpu->iem.s.iEffSeg);
6833 case IEMMODE_64BIT: IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS, iemCImpl_repe_cmps_op32_addr64, pVCpu->iem.s.iEffSeg);
6834 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6835 }
6836 case IEMMODE_64BIT:
6837 switch (pVCpu->iem.s.enmEffAddrMode)
6838 {
6839 case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_4);
6840 case IEMMODE_32BIT: IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS, iemCImpl_repe_cmps_op64_addr32, pVCpu->iem.s.iEffSeg);
6841 case IEMMODE_64BIT: IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS, iemCImpl_repe_cmps_op64_addr64, pVCpu->iem.s.iEffSeg);
6842 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6843 }
6844 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6845 }
6846 }
6847
6848 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_REPNZ)
6849 {
6850 IEMOP_MNEMONIC(repne_cmps_Xv_Yv, "repne cmps Xv,Yv");
6851 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6852 switch (pVCpu->iem.s.enmEffOpSize)
6853 {
6854 case IEMMODE_16BIT:
6855 switch (pVCpu->iem.s.enmEffAddrMode)
6856 {
6857 case IEMMODE_16BIT: IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS, iemCImpl_repne_cmps_op16_addr16, pVCpu->iem.s.iEffSeg);
6858 case IEMMODE_32BIT: IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS, iemCImpl_repne_cmps_op16_addr32, pVCpu->iem.s.iEffSeg);
6859 case IEMMODE_64BIT: IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS, iemCImpl_repne_cmps_op16_addr64, pVCpu->iem.s.iEffSeg);
6860 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6861 }
6862 break;
6863 case IEMMODE_32BIT:
6864 switch (pVCpu->iem.s.enmEffAddrMode)
6865 {
6866 case IEMMODE_16BIT: IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS, iemCImpl_repne_cmps_op32_addr16, pVCpu->iem.s.iEffSeg);
6867 case IEMMODE_32BIT: IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS, iemCImpl_repne_cmps_op32_addr32, pVCpu->iem.s.iEffSeg);
6868 case IEMMODE_64BIT: IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS, iemCImpl_repne_cmps_op32_addr64, pVCpu->iem.s.iEffSeg);
6869 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6870 }
6871 case IEMMODE_64BIT:
6872 switch (pVCpu->iem.s.enmEffAddrMode)
6873 {
6874 case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_2);
6875 case IEMMODE_32BIT: IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS, iemCImpl_repne_cmps_op64_addr32, pVCpu->iem.s.iEffSeg);
6876 case IEMMODE_64BIT: IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS, iemCImpl_repne_cmps_op64_addr64, pVCpu->iem.s.iEffSeg);
6877 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6878 }
6879 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6880 }
6881 }
6882
6883 /*
6884 * Annoying double switch here.
6885 * Using ugly macro for implementing the cases, sharing it with cmpsb.
6886 */
6887 IEMOP_MNEMONIC(cmps_Xv_Yv, "cmps Xv,Yv");
6888 switch (pVCpu->iem.s.enmEffOpSize)
6889 {
6890 case IEMMODE_16BIT:
6891 switch (pVCpu->iem.s.enmEffAddrMode)
6892 {
6893 case IEMMODE_16BIT: IEM_CMPS_CASE(16, 16, IEM_MC_F_NOT_64BIT); break;
6894 case IEMMODE_32BIT: IEM_CMPS_CASE(16, 32, IEM_MC_F_MIN_386); break;
6895 case IEMMODE_64BIT: IEM_CMPS_CASE(16, 64, IEM_MC_F_64BIT); break;
6896 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6897 }
6898 break;
6899
6900 case IEMMODE_32BIT:
6901 switch (pVCpu->iem.s.enmEffAddrMode)
6902 {
6903 case IEMMODE_16BIT: IEM_CMPS_CASE(32, 16, IEM_MC_F_MIN_386 | IEM_MC_F_NOT_64BIT); break;
6904 case IEMMODE_32BIT: IEM_CMPS_CASE(32, 32, IEM_MC_F_MIN_386); break;
6905 case IEMMODE_64BIT: IEM_CMPS_CASE(32, 64, IEM_MC_F_64BIT); break;
6906 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6907 }
6908 break;
6909
6910 case IEMMODE_64BIT:
6911 switch (pVCpu->iem.s.enmEffAddrMode)
6912 {
6913 case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_1); /* cannot be encoded */ break;
6914 case IEMMODE_32BIT: IEM_CMPS_CASE(64, 32, IEM_MC_F_MIN_386); break;
6915 case IEMMODE_64BIT: IEM_CMPS_CASE(64, 64, IEM_MC_F_64BIT); break;
6916 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6917 }
6918 break;
6919 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6920 }
6921}
6922
6923#undef IEM_CMPS_CASE
6924
/**
 * @opcode 0xa8
 *
 * 'test al,Ib' - shares the AL,Ib binary-op body; AF is undefined.
 */
FNIEMOP_DEF(iemOp_test_AL_Ib)
{
    IEMOP_MNEMONIC(test_al_Ib, "test al,Ib");
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    IEMOP_BODY_BINARY_AL_Ib(iemAImpl_test_u8);
}
6934
6935
6936/**
6937 * @opcode 0xa9
6938 */
6939FNIEMOP_DEF(iemOp_test_eAX_Iz)
6940{
6941 IEMOP_MNEMONIC(test_rAX_Iz, "test rAX,Iz");
6942 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
6943 IEMOP_BODY_BINARY_rAX_Iz(iemAImpl_test_u16, iemAImpl_test_u32, iemAImpl_test_u64, 0);
6944}
6945
6946
/** Macro used by iemOp_stosb_Yb_AL and iemOp_stoswd_Yv_eAX to emit the
 *  microcode for one non-repeating STOS variant: stores the low ValBits of
 *  rAX at ES:xDI, then steps xDI by ValBits/8 - down when EFLAGS.DF is set,
 *  up otherwise. AddrBits selects the effective address width. */
#define IEM_STOS_CASE(ValBits, AddrBits, a_fMcFlags) \
    IEM_MC_BEGIN(0, 2, a_fMcFlags, 0); \
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
    IEM_MC_LOCAL(uint##ValBits##_t, uValue); \
    IEM_MC_LOCAL(RTGCPTR, uAddr); \
    IEM_MC_FETCH_GREG_U##ValBits(uValue, X86_GREG_xAX); \
    IEM_MC_FETCH_GREG_U##AddrBits##_ZX_U64(uAddr, X86_GREG_xDI); \
    IEM_MC_STORE_MEM_U##ValBits(X86_SREG_ES, uAddr, uValue); \
    IEM_MC_IF_EFL_BIT_SET(X86_EFL_DF) { \
        IEM_MC_SUB_GREG_U##AddrBits(X86_GREG_xDI, ValBits / 8); \
    } IEM_MC_ELSE() { \
        IEM_MC_ADD_GREG_U##AddrBits(X86_GREG_xDI, ValBits / 8); \
    } IEM_MC_ENDIF(); \
    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
    IEM_MC_END() \
6963
6964/**
6965 * @opcode 0xaa
6966 */
6967FNIEMOP_DEF(iemOp_stosb_Yb_AL)
6968{
6969 /*
6970 * Use the C implementation if a repeat prefix is encountered.
6971 */
6972 if (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
6973 {
6974 IEMOP_MNEMONIC(rep_stos_Yb_al, "rep stos Yb,al");
6975 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6976 switch (pVCpu->iem.s.enmEffAddrMode)
6977 {
6978 case IEMMODE_16BIT: IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP, iemCImpl_stos_al_m16);
6979 case IEMMODE_32BIT: IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP, iemCImpl_stos_al_m32);
6980 case IEMMODE_64BIT: IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP, iemCImpl_stos_al_m64);
6981 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6982 }
6983 }
6984
6985 /*
6986 * Sharing case implementation with stos[wdq] below.
6987 */
6988 IEMOP_MNEMONIC(stos_Yb_al, "stos Yb,al");
6989 switch (pVCpu->iem.s.enmEffAddrMode)
6990 {
6991 case IEMMODE_16BIT: IEM_STOS_CASE(8, 16, IEM_MC_F_NOT_64BIT); break;
6992 case IEMMODE_32BIT: IEM_STOS_CASE(8, 32, IEM_MC_F_MIN_386); break;
6993 case IEMMODE_64BIT: IEM_STOS_CASE(8, 64, IEM_MC_F_64BIT); break;
6994 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6995 }
6996}
6997
6998
6999/**
7000 * @opcode 0xab
7001 */
7002FNIEMOP_DEF(iemOp_stoswd_Yv_eAX)
7003{
7004 /*
7005 * Use the C implementation if a repeat prefix is encountered.
7006 */
7007 if (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
7008 {
7009 IEMOP_MNEMONIC(rep_stos_Yv_rAX, "rep stos Yv,rAX");
7010 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7011 switch (pVCpu->iem.s.enmEffOpSize)
7012 {
7013 case IEMMODE_16BIT:
7014 switch (pVCpu->iem.s.enmEffAddrMode)
7015 {
7016 case IEMMODE_16BIT: IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_REP, iemCImpl_stos_ax_m16);
7017 case IEMMODE_32BIT: IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_REP, iemCImpl_stos_ax_m32);
7018 case IEMMODE_64BIT: IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_REP, iemCImpl_stos_ax_m64);
7019 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7020 }
7021 break;
7022 case IEMMODE_32BIT:
7023 switch (pVCpu->iem.s.enmEffAddrMode)
7024 {
7025 case IEMMODE_16BIT: IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_REP, iemCImpl_stos_eax_m16);
7026 case IEMMODE_32BIT: IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_REP, iemCImpl_stos_eax_m32);
7027 case IEMMODE_64BIT: IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_REP, iemCImpl_stos_eax_m64);
7028 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7029 }
7030 case IEMMODE_64BIT:
7031 switch (pVCpu->iem.s.enmEffAddrMode)
7032 {
7033 case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_9);
7034 case IEMMODE_32BIT: IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_REP, iemCImpl_stos_rax_m32);
7035 case IEMMODE_64BIT: IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_REP, iemCImpl_stos_rax_m64);
7036 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7037 }
7038 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7039 }
7040 }
7041
7042 /*
7043 * Annoying double switch here.
7044 * Using ugly macro for implementing the cases, sharing it with stosb.
7045 */
7046 IEMOP_MNEMONIC(stos_Yv_rAX, "stos Yv,rAX");
7047 switch (pVCpu->iem.s.enmEffOpSize)
7048 {
7049 case IEMMODE_16BIT:
7050 switch (pVCpu->iem.s.enmEffAddrMode)
7051 {
7052 case IEMMODE_16BIT: IEM_STOS_CASE(16, 16, IEM_MC_F_NOT_64BIT); break;
7053 case IEMMODE_32BIT: IEM_STOS_CASE(16, 32, IEM_MC_F_MIN_386); break;
7054 case IEMMODE_64BIT: IEM_STOS_CASE(16, 64, IEM_MC_F_64BIT); break;
7055 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7056 }
7057 break;
7058
7059 case IEMMODE_32BIT:
7060 switch (pVCpu->iem.s.enmEffAddrMode)
7061 {
7062 case IEMMODE_16BIT: IEM_STOS_CASE(32, 16, IEM_MC_F_MIN_386 | IEM_MC_F_NOT_64BIT); break;
7063 case IEMMODE_32BIT: IEM_STOS_CASE(32, 32, IEM_MC_F_MIN_386); break;
7064 case IEMMODE_64BIT: IEM_STOS_CASE(32, 64, IEM_MC_F_64BIT); break;
7065 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7066 }
7067 break;
7068
7069 case IEMMODE_64BIT:
7070 switch (pVCpu->iem.s.enmEffAddrMode)
7071 {
7072 case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_1); /* cannot be encoded */ break;
7073 case IEMMODE_32BIT: IEM_STOS_CASE(64, 32, IEM_MC_F_64BIT); break;
7074 case IEMMODE_64BIT: IEM_STOS_CASE(64, 64, IEM_MC_F_64BIT); break;
7075 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7076 }
7077 break;
7078 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7079 }
7080}
7081
7082#undef IEM_STOS_CASE
7083
/** Macro used by iemOp_lodsb_AL_Xb and iemOp_lodswd_eAX_Xv to emit the
 *  microcode for one non-repeating LODS variant: loads ValBits from
 *  iEffSeg:xSI into the low part of rAX, then steps xSI by ValBits/8 -
 *  down when EFLAGS.DF is set, up otherwise. */
#define IEM_LODS_CASE(ValBits, AddrBits, a_fMcFlags) \
    IEM_MC_BEGIN(0, 2, a_fMcFlags, 0); \
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
    IEM_MC_LOCAL(uint##ValBits##_t, uValue); \
    IEM_MC_LOCAL(RTGCPTR, uAddr); \
    IEM_MC_FETCH_GREG_U##AddrBits##_ZX_U64(uAddr, X86_GREG_xSI); \
    IEM_MC_FETCH_MEM_U##ValBits(uValue, pVCpu->iem.s.iEffSeg, uAddr); \
    IEM_MC_STORE_GREG_U##ValBits(X86_GREG_xAX, uValue); \
    IEM_MC_IF_EFL_BIT_SET(X86_EFL_DF) { \
        IEM_MC_SUB_GREG_U##AddrBits(X86_GREG_xSI, ValBits / 8); \
    } IEM_MC_ELSE() { \
        IEM_MC_ADD_GREG_U##AddrBits(X86_GREG_xSI, ValBits / 8); \
    } IEM_MC_ENDIF(); \
    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
    IEM_MC_END() \
7100
7101/**
7102 * @opcode 0xac
7103 */
7104FNIEMOP_DEF(iemOp_lodsb_AL_Xb)
7105{
7106 /*
7107 * Use the C implementation if a repeat prefix is encountered.
7108 */
7109 if (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
7110 {
7111 IEMOP_MNEMONIC(rep_lodsb_AL_Xb, "rep lodsb AL,Xb");
7112 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7113 switch (pVCpu->iem.s.enmEffAddrMode)
7114 {
7115 case IEMMODE_16BIT: IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP, iemCImpl_lods_al_m16, pVCpu->iem.s.iEffSeg);
7116 case IEMMODE_32BIT: IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP, iemCImpl_lods_al_m32, pVCpu->iem.s.iEffSeg);
7117 case IEMMODE_64BIT: IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP, iemCImpl_lods_al_m64, pVCpu->iem.s.iEffSeg);
7118 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7119 }
7120 }
7121
7122 /*
7123 * Sharing case implementation with stos[wdq] below.
7124 */
7125 IEMOP_MNEMONIC(lodsb_AL_Xb, "lodsb AL,Xb");
7126 switch (pVCpu->iem.s.enmEffAddrMode)
7127 {
7128 case IEMMODE_16BIT: IEM_LODS_CASE(8, 16, IEM_MC_F_NOT_64BIT); break;
7129 case IEMMODE_32BIT: IEM_LODS_CASE(8, 32, IEM_MC_F_MIN_386); break;
7130 case IEMMODE_64BIT: IEM_LODS_CASE(8, 64, IEM_MC_F_64BIT); break;
7131 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7132 }
7133}
7134
7135
7136/**
7137 * @opcode 0xad
7138 */
7139FNIEMOP_DEF(iemOp_lodswd_eAX_Xv)
7140{
7141 /*
7142 * Use the C implementation if a repeat prefix is encountered.
7143 */
7144 if (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
7145 {
7146 IEMOP_MNEMONIC(rep_lods_rAX_Xv, "rep lods rAX,Xv");
7147 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7148 switch (pVCpu->iem.s.enmEffOpSize)
7149 {
7150 case IEMMODE_16BIT:
7151 switch (pVCpu->iem.s.enmEffAddrMode)
7152 {
7153 case IEMMODE_16BIT: IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP, iemCImpl_lods_ax_m16, pVCpu->iem.s.iEffSeg);
7154 case IEMMODE_32BIT: IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP, iemCImpl_lods_ax_m32, pVCpu->iem.s.iEffSeg);
7155 case IEMMODE_64BIT: IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP, iemCImpl_lods_ax_m64, pVCpu->iem.s.iEffSeg);
7156 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7157 }
7158 break;
7159 case IEMMODE_32BIT:
7160 switch (pVCpu->iem.s.enmEffAddrMode)
7161 {
7162 case IEMMODE_16BIT: IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP, iemCImpl_lods_eax_m16, pVCpu->iem.s.iEffSeg);
7163 case IEMMODE_32BIT: IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP, iemCImpl_lods_eax_m32, pVCpu->iem.s.iEffSeg);
7164 case IEMMODE_64BIT: IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP, iemCImpl_lods_eax_m64, pVCpu->iem.s.iEffSeg);
7165 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7166 }
7167 case IEMMODE_64BIT:
7168 switch (pVCpu->iem.s.enmEffAddrMode)
7169 {
7170 case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_7);
7171 case IEMMODE_32BIT: IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP, iemCImpl_lods_rax_m32, pVCpu->iem.s.iEffSeg);
7172 case IEMMODE_64BIT: IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP, iemCImpl_lods_rax_m64, pVCpu->iem.s.iEffSeg);
7173 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7174 }
7175 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7176 }
7177 }
7178
7179 /*
7180 * Annoying double switch here.
7181 * Using ugly macro for implementing the cases, sharing it with lodsb.
7182 */
7183 IEMOP_MNEMONIC(lods_rAX_Xv, "lods rAX,Xv");
7184 switch (pVCpu->iem.s.enmEffOpSize)
7185 {
7186 case IEMMODE_16BIT:
7187 switch (pVCpu->iem.s.enmEffAddrMode)
7188 {
7189 case IEMMODE_16BIT: IEM_LODS_CASE(16, 16, IEM_MC_F_NOT_64BIT); break;
7190 case IEMMODE_32BIT: IEM_LODS_CASE(16, 32, IEM_MC_F_MIN_386); break;
7191 case IEMMODE_64BIT: IEM_LODS_CASE(16, 64, IEM_MC_F_64BIT); break;
7192 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7193 }
7194 break;
7195
7196 case IEMMODE_32BIT:
7197 switch (pVCpu->iem.s.enmEffAddrMode)
7198 {
7199 case IEMMODE_16BIT: IEM_LODS_CASE(32, 16, IEM_MC_F_MIN_386 | IEM_MC_F_NOT_64BIT); break;
7200 case IEMMODE_32BIT: IEM_LODS_CASE(32, 32, IEM_MC_F_MIN_386); break;
7201 case IEMMODE_64BIT: IEM_LODS_CASE(32, 64, IEM_MC_F_64BIT); break;
7202 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7203 }
7204 break;
7205
7206 case IEMMODE_64BIT:
7207 switch (pVCpu->iem.s.enmEffAddrMode)
7208 {
7209 case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_1); /* cannot be encoded */ break;
7210 case IEMMODE_32BIT: IEM_LODS_CASE(64, 32, IEM_MC_F_64BIT); break;
7211 case IEMMODE_64BIT: IEM_LODS_CASE(64, 64, IEM_MC_F_64BIT); break;
7212 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7213 }
7214 break;
7215 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7216 }
7217}
7218
7219#undef IEM_LODS_CASE
7220
/** Macro used by iemOp_scasb_AL_Xb and iemOp_scaswd_eAX_Xv to emit the
 *  microcode for one non-repeating SCAS variant: compares the low ValBits
 *  of rAX against the value at ES:xDI via the CMP worker (flags only; rAX
 *  is passed by reference but CMP does not write its destination), then
 *  steps xDI by ValBits/8 according to EFLAGS.DF. */
#define IEM_SCAS_CASE(ValBits, AddrBits, a_fMcFlags) \
    IEM_MC_BEGIN(3, 2, a_fMcFlags, 0); \
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
    IEM_MC_ARG(uint##ValBits##_t *, puRax, 0); \
    IEM_MC_ARG(uint##ValBits##_t, uValue, 1); \
    IEM_MC_ARG(uint32_t *, pEFlags, 2); \
    IEM_MC_LOCAL(RTGCPTR, uAddr); \
    \
    IEM_MC_FETCH_GREG_U##AddrBits##_ZX_U64(uAddr, X86_GREG_xDI); \
    IEM_MC_FETCH_MEM_U##ValBits(uValue, X86_SREG_ES, uAddr); \
    IEM_MC_REF_GREG_U##ValBits(puRax, X86_GREG_xAX); \
    IEM_MC_REF_EFLAGS(pEFlags); \
    IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_cmp_u##ValBits, puRax, uValue, pEFlags); \
    \
    IEM_MC_IF_EFL_BIT_SET(X86_EFL_DF) { \
        IEM_MC_SUB_GREG_U##AddrBits(X86_GREG_xDI, ValBits / 8); \
    } IEM_MC_ELSE() { \
        IEM_MC_ADD_GREG_U##AddrBits(X86_GREG_xDI, ValBits / 8); \
    } IEM_MC_ENDIF(); \
    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
    IEM_MC_END();
7243
7244/**
7245 * @opcode 0xae
7246 */
7247FNIEMOP_DEF(iemOp_scasb_AL_Xb)
7248{
7249 /*
7250 * Use the C implementation if a repeat prefix is encountered.
7251 */
7252 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_REPZ)
7253 {
7254 IEMOP_MNEMONIC(repe_scasb_AL_Xb, "repe scasb AL,Xb");
7255 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7256 switch (pVCpu->iem.s.enmEffAddrMode)
7257 {
7258 case IEMMODE_16BIT: IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS, iemCImpl_repe_scas_al_m16);
7259 case IEMMODE_32BIT: IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS, iemCImpl_repe_scas_al_m32);
7260 case IEMMODE_64BIT: IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS, iemCImpl_repe_scas_al_m64);
7261 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7262 }
7263 }
7264 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_REPNZ)
7265 {
7266 IEMOP_MNEMONIC(repone_scasb_AL_Xb, "repne scasb AL,Xb");
7267 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7268 switch (pVCpu->iem.s.enmEffAddrMode)
7269 {
7270 case IEMMODE_16BIT: IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS, iemCImpl_repne_scas_al_m16);
7271 case IEMMODE_32BIT: IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS, iemCImpl_repne_scas_al_m32);
7272 case IEMMODE_64BIT: IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS, iemCImpl_repne_scas_al_m64);
7273 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7274 }
7275 }
7276
7277 /*
7278 * Sharing case implementation with stos[wdq] below.
7279 */
7280 IEMOP_MNEMONIC(scasb_AL_Xb, "scasb AL,Xb");
7281 switch (pVCpu->iem.s.enmEffAddrMode)
7282 {
7283 case IEMMODE_16BIT: IEM_SCAS_CASE(8, 16, IEM_MC_F_NOT_64BIT); break;
7284 case IEMMODE_32BIT: IEM_SCAS_CASE(8, 32, IEM_MC_F_MIN_386); break;
7285 case IEMMODE_64BIT: IEM_SCAS_CASE(8, 64, IEM_MC_F_64BIT); break;
7286 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7287 }
7288}
7289
7290
7291/**
7292 * @opcode 0xaf
7293 */
7294FNIEMOP_DEF(iemOp_scaswd_eAX_Xv)
7295{
7296 /*
7297 * Use the C implementation if a repeat prefix is encountered.
7298 */
7299 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_REPZ)
7300 {
7301 IEMOP_MNEMONIC(repe_scas_rAX_Xv, "repe scas rAX,Xv");
7302 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7303 switch (pVCpu->iem.s.enmEffOpSize)
7304 {
7305 case IEMMODE_16BIT:
7306 switch (pVCpu->iem.s.enmEffAddrMode)
7307 {
7308 case IEMMODE_16BIT: IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS, iemCImpl_repe_scas_ax_m16);
7309 case IEMMODE_32BIT: IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS, iemCImpl_repe_scas_ax_m32);
7310 case IEMMODE_64BIT: IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS, iemCImpl_repe_scas_ax_m64);
7311 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7312 }
7313 break;
7314 case IEMMODE_32BIT:
7315 switch (pVCpu->iem.s.enmEffAddrMode)
7316 {
7317 case IEMMODE_16BIT: IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS, iemCImpl_repe_scas_eax_m16);
7318 case IEMMODE_32BIT: IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS, iemCImpl_repe_scas_eax_m32);
7319 case IEMMODE_64BIT: IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS, iemCImpl_repe_scas_eax_m64);
7320 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7321 }
7322 case IEMMODE_64BIT:
7323 switch (pVCpu->iem.s.enmEffAddrMode)
7324 {
7325 case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_6); /** @todo It's this wrong, we can do 16-bit addressing in 64-bit mode, but not 32-bit. right? */
7326 case IEMMODE_32BIT: IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS, iemCImpl_repe_scas_rax_m32);
7327 case IEMMODE_64BIT: IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS, iemCImpl_repe_scas_rax_m64);
7328 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7329 }
7330 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7331 }
7332 }
7333 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_REPNZ)
7334 {
7335 IEMOP_MNEMONIC(repne_scas_rAX_Xv, "repne scas rAX,Xv");
7336 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7337 switch (pVCpu->iem.s.enmEffOpSize)
7338 {
7339 case IEMMODE_16BIT:
7340 switch (pVCpu->iem.s.enmEffAddrMode)
7341 {
7342 case IEMMODE_16BIT: IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS, iemCImpl_repne_scas_ax_m16);
7343 case IEMMODE_32BIT: IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS, iemCImpl_repne_scas_ax_m32);
7344 case IEMMODE_64BIT: IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS, iemCImpl_repne_scas_ax_m64);
7345 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7346 }
7347 break;
7348 case IEMMODE_32BIT:
7349 switch (pVCpu->iem.s.enmEffAddrMode)
7350 {
7351 case IEMMODE_16BIT: IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS, iemCImpl_repne_scas_eax_m16);
7352 case IEMMODE_32BIT: IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS, iemCImpl_repne_scas_eax_m32);
7353 case IEMMODE_64BIT: IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS, iemCImpl_repne_scas_eax_m64);
7354 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7355 }
7356 case IEMMODE_64BIT:
7357 switch (pVCpu->iem.s.enmEffAddrMode)
7358 {
7359 case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_5);
7360 case IEMMODE_32BIT: IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS, iemCImpl_repne_scas_rax_m32);
7361 case IEMMODE_64BIT: IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS, iemCImpl_repne_scas_rax_m64);
7362 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7363 }
7364 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7365 }
7366 }
7367
7368 /*
7369 * Annoying double switch here.
7370 * Using ugly macro for implementing the cases, sharing it with scasb.
7371 */
7372 IEMOP_MNEMONIC(scas_rAX_Xv, "scas rAX,Xv");
7373 switch (pVCpu->iem.s.enmEffOpSize)
7374 {
7375 case IEMMODE_16BIT:
7376 switch (pVCpu->iem.s.enmEffAddrMode)
7377 {
7378 case IEMMODE_16BIT: IEM_SCAS_CASE(16, 16, IEM_MC_F_NOT_64BIT); break;
7379 case IEMMODE_32BIT: IEM_SCAS_CASE(16, 32, IEM_MC_F_MIN_386); break;
7380 case IEMMODE_64BIT: IEM_SCAS_CASE(16, 64, IEM_MC_F_64BIT); break;
7381 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7382 }
7383 break;
7384
7385 case IEMMODE_32BIT:
7386 switch (pVCpu->iem.s.enmEffAddrMode)
7387 {
7388 case IEMMODE_16BIT: IEM_SCAS_CASE(32, 16, IEM_MC_F_MIN_386 | IEM_MC_F_NOT_64BIT); break;
7389 case IEMMODE_32BIT: IEM_SCAS_CASE(32, 32, IEM_MC_F_MIN_386); break;
7390 case IEMMODE_64BIT: IEM_SCAS_CASE(32, 64, IEM_MC_F_64BIT); break;
7391 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7392 }
7393 break;
7394
7395 case IEMMODE_64BIT:
7396 switch (pVCpu->iem.s.enmEffAddrMode)
7397 {
7398 case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_1); /* cannot be encoded */ break;
7399 case IEMMODE_32BIT: IEM_SCAS_CASE(64, 32, IEM_MC_F_64BIT); break;
7400 case IEMMODE_64BIT: IEM_SCAS_CASE(64, 64, IEM_MC_F_64BIT); break;
7401 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7402 }
7403 break;
7404 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7405 }
7406}
7407
7408#undef IEM_SCAS_CASE
7409
7410/**
7411 * Common 'mov r8, imm8' helper.
7412 */
7413FNIEMOP_DEF_1(iemOpCommonMov_r8_Ib, uint8_t, iFixedReg)
7414{
7415 uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
7416 IEM_MC_BEGIN(0, 0, 0, 0);
7417 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7418 IEM_MC_STORE_GREG_U8_CONST(iFixedReg, u8Imm);
7419 IEM_MC_ADVANCE_RIP_AND_FINISH();
7420 IEM_MC_END();
7421}
7422
7423
7424/**
7425 * @opcode 0xb0
7426 */
7427FNIEMOP_DEF(iemOp_mov_AL_Ib)
7428{
7429 IEMOP_MNEMONIC(mov_AL_Ib, "mov AL,Ib");
7430 return FNIEMOP_CALL_1(iemOpCommonMov_r8_Ib, X86_GREG_xAX | pVCpu->iem.s.uRexB);
7431}
7432
7433
7434/**
7435 * @opcode 0xb1
7436 */
7437FNIEMOP_DEF(iemOp_CL_Ib)
7438{
7439 IEMOP_MNEMONIC(mov_CL_Ib, "mov CL,Ib");
7440 return FNIEMOP_CALL_1(iemOpCommonMov_r8_Ib, X86_GREG_xCX | pVCpu->iem.s.uRexB);
7441}
7442
7443
7444/**
7445 * @opcode 0xb2
7446 */
7447FNIEMOP_DEF(iemOp_DL_Ib)
7448{
7449 IEMOP_MNEMONIC(mov_DL_Ib, "mov DL,Ib");
7450 return FNIEMOP_CALL_1(iemOpCommonMov_r8_Ib, X86_GREG_xDX | pVCpu->iem.s.uRexB);
7451}
7452
7453
7454/**
7455 * @opcode 0xb3
7456 */
7457FNIEMOP_DEF(iemOp_BL_Ib)
7458{
7459 IEMOP_MNEMONIC(mov_BL_Ib, "mov BL,Ib");
7460 return FNIEMOP_CALL_1(iemOpCommonMov_r8_Ib, X86_GREG_xBX | pVCpu->iem.s.uRexB);
7461}
7462
7463
7464/**
7465 * @opcode 0xb4
7466 */
7467FNIEMOP_DEF(iemOp_mov_AH_Ib)
7468{
7469 IEMOP_MNEMONIC(mov_AH_Ib, "mov AH,Ib");
7470 return FNIEMOP_CALL_1(iemOpCommonMov_r8_Ib, X86_GREG_xSP | pVCpu->iem.s.uRexB);
7471}
7472
7473
7474/**
7475 * @opcode 0xb5
7476 */
7477FNIEMOP_DEF(iemOp_CH_Ib)
7478{
7479 IEMOP_MNEMONIC(mov_CH_Ib, "mov CH,Ib");
7480 return FNIEMOP_CALL_1(iemOpCommonMov_r8_Ib, X86_GREG_xBP | pVCpu->iem.s.uRexB);
7481}
7482
7483
7484/**
7485 * @opcode 0xb6
7486 */
7487FNIEMOP_DEF(iemOp_DH_Ib)
7488{
7489 IEMOP_MNEMONIC(mov_DH_Ib, "mov DH,Ib");
7490 return FNIEMOP_CALL_1(iemOpCommonMov_r8_Ib, X86_GREG_xSI | pVCpu->iem.s.uRexB);
7491}
7492
7493
7494/**
7495 * @opcode 0xb7
7496 */
7497FNIEMOP_DEF(iemOp_BH_Ib)
7498{
7499 IEMOP_MNEMONIC(mov_BH_Ib, "mov BH,Ib");
7500 return FNIEMOP_CALL_1(iemOpCommonMov_r8_Ib, X86_GREG_xDI | pVCpu->iem.s.uRexB);
7501}
7502
7503
7504/**
7505 * Common 'mov regX,immX' helper.
7506 */
7507FNIEMOP_DEF_1(iemOpCommonMov_Rv_Iv, uint8_t, iFixedReg)
7508{
7509 switch (pVCpu->iem.s.enmEffOpSize)
7510 {
7511 case IEMMODE_16BIT:
7512 IEM_MC_BEGIN(0, 0, 0, 0);
7513 uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
7514 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7515 IEM_MC_STORE_GREG_U16_CONST(iFixedReg, u16Imm);
7516 IEM_MC_ADVANCE_RIP_AND_FINISH();
7517 IEM_MC_END();
7518 break;
7519
7520 case IEMMODE_32BIT:
7521 IEM_MC_BEGIN(0, 0, IEM_MC_F_MIN_386, 0);
7522 uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
7523 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7524 IEM_MC_STORE_GREG_U32_CONST(iFixedReg, u32Imm);
7525 IEM_MC_ADVANCE_RIP_AND_FINISH();
7526 IEM_MC_END();
7527 break;
7528
7529 case IEMMODE_64BIT:
7530 IEM_MC_BEGIN(0, 1, IEM_MC_F_64BIT, 0);
7531 uint64_t u64Imm; IEM_OPCODE_GET_NEXT_U64(&u64Imm); /* 64-bit immediate! */
7532 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7533 IEM_MC_STORE_GREG_U64_CONST(iFixedReg, u64Imm);
7534 IEM_MC_ADVANCE_RIP_AND_FINISH();
7535 IEM_MC_END();
7536 break;
7537 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7538 }
7539}
7540
7541
7542/**
7543 * @opcode 0xb8
7544 */
7545FNIEMOP_DEF(iemOp_eAX_Iv)
7546{
7547 IEMOP_MNEMONIC(mov_rAX_IV, "mov rAX,IV");
7548 return FNIEMOP_CALL_1(iemOpCommonMov_Rv_Iv, X86_GREG_xAX | pVCpu->iem.s.uRexB);
7549}
7550
7551
7552/**
7553 * @opcode 0xb9
7554 */
7555FNIEMOP_DEF(iemOp_eCX_Iv)
7556{
7557 IEMOP_MNEMONIC(mov_rCX_IV, "mov rCX,IV");
7558 return FNIEMOP_CALL_1(iemOpCommonMov_Rv_Iv, X86_GREG_xCX | pVCpu->iem.s.uRexB);
7559}
7560
7561
7562/**
7563 * @opcode 0xba
7564 */
7565FNIEMOP_DEF(iemOp_eDX_Iv)
7566{
7567 IEMOP_MNEMONIC(mov_rDX_IV, "mov rDX,IV");
7568 return FNIEMOP_CALL_1(iemOpCommonMov_Rv_Iv, X86_GREG_xDX | pVCpu->iem.s.uRexB);
7569}
7570
7571
7572/**
7573 * @opcode 0xbb
7574 */
7575FNIEMOP_DEF(iemOp_eBX_Iv)
7576{
7577 IEMOP_MNEMONIC(mov_rBX_IV, "mov rBX,IV");
7578 return FNIEMOP_CALL_1(iemOpCommonMov_Rv_Iv, X86_GREG_xBX | pVCpu->iem.s.uRexB);
7579}
7580
7581
7582/**
7583 * @opcode 0xbc
7584 */
7585FNIEMOP_DEF(iemOp_eSP_Iv)
7586{
7587 IEMOP_MNEMONIC(mov_rSP_IV, "mov rSP,IV");
7588 return FNIEMOP_CALL_1(iemOpCommonMov_Rv_Iv, X86_GREG_xSP | pVCpu->iem.s.uRexB);
7589}
7590
7591
7592/**
7593 * @opcode 0xbd
7594 */
7595FNIEMOP_DEF(iemOp_eBP_Iv)
7596{
7597 IEMOP_MNEMONIC(mov_rBP_IV, "mov rBP,IV");
7598 return FNIEMOP_CALL_1(iemOpCommonMov_Rv_Iv, X86_GREG_xBP | pVCpu->iem.s.uRexB);
7599}
7600
7601
7602/**
7603 * @opcode 0xbe
7604 */
7605FNIEMOP_DEF(iemOp_eSI_Iv)
7606{
7607 IEMOP_MNEMONIC(mov_rSI_IV, "mov rSI,IV");
7608 return FNIEMOP_CALL_1(iemOpCommonMov_Rv_Iv, X86_GREG_xSI | pVCpu->iem.s.uRexB);
7609}
7610
7611
7612/**
7613 * @opcode 0xbf
7614 */
7615FNIEMOP_DEF(iemOp_eDI_Iv)
7616{
7617 IEMOP_MNEMONIC(mov_rDI_IV, "mov rDI,IV");
7618 return FNIEMOP_CALL_1(iemOpCommonMov_Rv_Iv, X86_GREG_xDI | pVCpu->iem.s.uRexB);
7619}
7620
7621
7622/**
7623 * @opcode 0xc0
7624 */
7625FNIEMOP_DEF(iemOp_Grp2_Eb_Ib)
7626{
7627 IEMOP_HLP_MIN_186();
7628 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
7629 PCIEMOPSHIFTSIZES pImpl;
7630 switch (IEM_GET_MODRM_REG_8(bRm))
7631 {
7632 case 0: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_rol_eflags); IEMOP_MNEMONIC(rol_Eb_Ib, "rol Eb,Ib"); break;
7633 case 1: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_ror_eflags); IEMOP_MNEMONIC(ror_Eb_Ib, "ror Eb,Ib"); break;
7634 case 2: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_rcl_eflags); IEMOP_MNEMONIC(rcl_Eb_Ib, "rcl Eb,Ib"); break;
7635 case 3: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_rcr_eflags); IEMOP_MNEMONIC(rcr_Eb_Ib, "rcr Eb,Ib"); break;
7636 case 4: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_shl_eflags); IEMOP_MNEMONIC(shl_Eb_Ib, "shl Eb,Ib"); break;
7637 case 5: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_shr_eflags); IEMOP_MNEMONIC(shr_Eb_Ib, "shr Eb,Ib"); break;
7638 case 7: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_sar_eflags); IEMOP_MNEMONIC(sar_Eb_Ib, "sar Eb,Ib"); break;
7639 case 6: IEMOP_RAISE_INVALID_OPCODE_RET();
7640 IEM_NOT_REACHED_DEFAULT_CASE_RET(); /* gcc maybe stupid */
7641 }
7642 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_AF);
7643
7644 if (IEM_IS_MODRM_REG_MODE(bRm))
7645 {
7646 /* register */
7647 uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
7648 IEM_MC_BEGIN(3, 0, IEM_MC_F_MIN_186, 0);
7649 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7650 IEM_MC_ARG(uint8_t *, pu8Dst, 0);
7651 IEM_MC_ARG_CONST(uint8_t, cShiftArg, cShift, 1);
7652 IEM_MC_ARG(uint32_t *, pEFlags, 2);
7653 IEM_MC_REF_GREG_U8(pu8Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
7654 IEM_MC_REF_EFLAGS(pEFlags);
7655 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU8, pu8Dst, cShiftArg, pEFlags);
7656 IEM_MC_ADVANCE_RIP_AND_FINISH();
7657 IEM_MC_END();
7658 }
7659 else
7660 {
7661 /* memory */
7662 IEM_MC_BEGIN(3, 3, IEM_MC_F_MIN_186, 0);
7663 IEM_MC_ARG(uint8_t *, pu8Dst, 0);
7664 IEM_MC_ARG(uint8_t, cShiftArg, 1);
7665 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
7666 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
7667 IEM_MC_LOCAL(uint8_t, bUnmapInfo);
7668
7669 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
7670 uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
7671 IEM_MC_ASSIGN(cShiftArg, cShift);
7672 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7673 IEM_MC_MEM_MAP_U8_RW(pu8Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
7674 IEM_MC_FETCH_EFLAGS(EFlags);
7675 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU8, pu8Dst, cShiftArg, pEFlags);
7676
7677 IEM_MC_MEM_COMMIT_AND_UNMAP_RW(pu8Dst, bUnmapInfo);
7678 IEM_MC_COMMIT_EFLAGS(EFlags);
7679 IEM_MC_ADVANCE_RIP_AND_FINISH();
7680 IEM_MC_END();
7681 }
7682}
7683
7684
7685/**
7686 * @opcode 0xc1
7687 */
7688FNIEMOP_DEF(iemOp_Grp2_Ev_Ib)
7689{
7690 IEMOP_HLP_MIN_186();
7691 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
7692 PCIEMOPSHIFTSIZES pImpl;
7693 switch (IEM_GET_MODRM_REG_8(bRm))
7694 {
7695 case 0: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_rol_eflags); IEMOP_MNEMONIC(rol_Ev_Ib, "rol Ev,Ib"); break;
7696 case 1: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_ror_eflags); IEMOP_MNEMONIC(ror_Ev_Ib, "ror Ev,Ib"); break;
7697 case 2: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_rcl_eflags); IEMOP_MNEMONIC(rcl_Ev_Ib, "rcl Ev,Ib"); break;
7698 case 3: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_rcr_eflags); IEMOP_MNEMONIC(rcr_Ev_Ib, "rcr Ev,Ib"); break;
7699 case 4: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_shl_eflags); IEMOP_MNEMONIC(shl_Ev_Ib, "shl Ev,Ib"); break;
7700 case 5: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_shr_eflags); IEMOP_MNEMONIC(shr_Ev_Ib, "shr Ev,Ib"); break;
7701 case 7: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_sar_eflags); IEMOP_MNEMONIC(sar_Ev_Ib, "sar Ev,Ib"); break;
7702 case 6: IEMOP_RAISE_INVALID_OPCODE_RET();
7703 IEM_NOT_REACHED_DEFAULT_CASE_RET(); /* gcc maybe stupid */
7704 }
7705 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_AF);
7706
7707 if (IEM_IS_MODRM_REG_MODE(bRm))
7708 {
7709 /* register */
7710 uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
7711 switch (pVCpu->iem.s.enmEffOpSize)
7712 {
7713 case IEMMODE_16BIT:
7714 IEM_MC_BEGIN(3, 0, IEM_MC_F_MIN_186, 0);
7715 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7716 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
7717 IEM_MC_ARG_CONST(uint8_t, cShiftArg, cShift, 1);
7718 IEM_MC_ARG(uint32_t *, pEFlags, 2);
7719 IEM_MC_REF_GREG_U16(pu16Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
7720 IEM_MC_REF_EFLAGS(pEFlags);
7721 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, cShiftArg, pEFlags);
7722 IEM_MC_ADVANCE_RIP_AND_FINISH();
7723 IEM_MC_END();
7724 break;
7725
7726 case IEMMODE_32BIT:
7727 IEM_MC_BEGIN(3, 0, IEM_MC_F_MIN_386, 0);
7728 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7729 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
7730 IEM_MC_ARG_CONST(uint8_t, cShiftArg, cShift, 1);
7731 IEM_MC_ARG(uint32_t *, pEFlags, 2);
7732 IEM_MC_REF_GREG_U32(pu32Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
7733 IEM_MC_REF_EFLAGS(pEFlags);
7734 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, cShiftArg, pEFlags);
7735 IEM_MC_CLEAR_HIGH_GREG_U64(IEM_GET_MODRM_RM(pVCpu, bRm));
7736 IEM_MC_ADVANCE_RIP_AND_FINISH();
7737 IEM_MC_END();
7738 break;
7739
7740 case IEMMODE_64BIT:
7741 IEM_MC_BEGIN(3, 0, IEM_MC_F_64BIT, 0);
7742 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7743 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
7744 IEM_MC_ARG_CONST(uint8_t, cShiftArg, cShift, 1);
7745 IEM_MC_ARG(uint32_t *, pEFlags, 2);
7746 IEM_MC_REF_GREG_U64(pu64Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
7747 IEM_MC_REF_EFLAGS(pEFlags);
7748 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, cShiftArg, pEFlags);
7749 IEM_MC_ADVANCE_RIP_AND_FINISH();
7750 IEM_MC_END();
7751 break;
7752
7753 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7754 }
7755 }
7756 else
7757 {
7758 /* memory */
7759 switch (pVCpu->iem.s.enmEffOpSize)
7760 {
7761 case IEMMODE_16BIT:
7762 IEM_MC_BEGIN(3, 3, 0, 0);
7763 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
7764 IEM_MC_ARG(uint8_t, cShiftArg, 1);
7765 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
7766 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
7767 IEM_MC_LOCAL(uint8_t, bUnmapInfo);
7768
7769 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
7770 uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
7771 IEM_MC_ASSIGN(cShiftArg, cShift);
7772 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7773 IEM_MC_MEM_MAP_U16_RW(pu16Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
7774 IEM_MC_FETCH_EFLAGS(EFlags);
7775 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, cShiftArg, pEFlags);
7776
7777 IEM_MC_MEM_COMMIT_AND_UNMAP_RW(pu16Dst, bUnmapInfo);
7778 IEM_MC_COMMIT_EFLAGS(EFlags);
7779 IEM_MC_ADVANCE_RIP_AND_FINISH();
7780 IEM_MC_END();
7781 break;
7782
7783 case IEMMODE_32BIT:
7784 IEM_MC_BEGIN(3, 3, IEM_MC_F_MIN_386, 0);
7785 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
7786 IEM_MC_ARG(uint8_t, cShiftArg, 1);
7787 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
7788 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
7789 IEM_MC_LOCAL(uint8_t, bUnmapInfo);
7790
7791 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
7792 uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
7793 IEM_MC_ASSIGN(cShiftArg, cShift);
7794 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7795 IEM_MC_MEM_MAP_U32_RW(pu32Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
7796 IEM_MC_FETCH_EFLAGS(EFlags);
7797 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, cShiftArg, pEFlags);
7798
7799 IEM_MC_MEM_COMMIT_AND_UNMAP_RW(pu32Dst, bUnmapInfo);
7800 IEM_MC_COMMIT_EFLAGS(EFlags);
7801 IEM_MC_ADVANCE_RIP_AND_FINISH();
7802 IEM_MC_END();
7803 break;
7804
7805 case IEMMODE_64BIT:
7806 IEM_MC_BEGIN(3, 3, IEM_MC_F_64BIT, 0);
7807 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
7808 IEM_MC_ARG(uint8_t, cShiftArg, 1);
7809 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
7810 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
7811 IEM_MC_LOCAL(uint8_t, bUnmapInfo);
7812
7813 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
7814 uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
7815 IEM_MC_ASSIGN(cShiftArg, cShift);
7816 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7817 IEM_MC_MEM_MAP_U64_RW(pu64Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
7818 IEM_MC_FETCH_EFLAGS(EFlags);
7819 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, cShiftArg, pEFlags);
7820
7821 IEM_MC_MEM_COMMIT_AND_UNMAP_RW(pu64Dst, bUnmapInfo);
7822 IEM_MC_COMMIT_EFLAGS(EFlags);
7823 IEM_MC_ADVANCE_RIP_AND_FINISH();
7824 IEM_MC_END();
7825 break;
7826
7827 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7828 }
7829 }
7830}
7831
7832
/**
 * @opcode 0xc2
 *
 * Near return, popping an additional Iw bytes of arguments off the stack.
 * Decodes the immediate, then defers to the operand-size specific C
 * implementation (the IEM_MC_DEFER_TO_CIMPL_* macros return, so the switch
 * cases need no break).
 */
FNIEMOP_DEF(iemOp_retn_Iw)
{
    IEMOP_MNEMONIC(retn_Iw, "retn Iw");
    uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT:
            IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_BRANCH_INDIRECT, iemCImpl_retn_iw_16, u16Imm);
        case IEMMODE_32BIT:
            IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_BRANCH_INDIRECT, iemCImpl_retn_iw_32, u16Imm);
        case IEMMODE_64BIT:
            IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_BRANCH_INDIRECT, iemCImpl_retn_iw_64, u16Imm);
        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
7853
7854
/**
 * @opcode 0xc3
 *
 * Plain near return (no stack argument cleanup).  Defers to the operand-size
 * specific C implementation; the DEFER macros return, so no break statements
 * are needed in the switch.
 */
FNIEMOP_DEF(iemOp_retn)
{
    IEMOP_MNEMONIC(retn, "retn");
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT:
            IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_BRANCH_INDIRECT, iemCImpl_retn_16);
        case IEMMODE_32BIT:
            IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_BRANCH_INDIRECT, iemCImpl_retn_32);
        case IEMMODE_64BIT:
            IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_BRANCH_INDIRECT, iemCImpl_retn_64);
        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
7874
7875
/**
 * @opcode 0xc4
 *
 * Either LES Gv,Mp or the 3-byte VEX prefix, depending on mode and ModR/M.
 */
FNIEMOP_DEF(iemOp_les_Gv_Mp__vex3)
{
    /* The LES instruction is invalid in 64-bit mode.  In legacy and
       compatibility mode it is invalid with MOD=3.
       The use as a VEX prefix is made possible by assigning the inverted
       REX.R and REX.X to the two MOD bits, since the REX bits are ignored
       outside of 64-bit mode.  VEX is not available in real or v86 mode. */
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (   IEM_IS_64BIT_CODE(pVCpu)
        || IEM_IS_MODRM_REG_MODE(bRm) )
    {
        IEMOP_MNEMONIC(vex3_prefix, "vex3");
        if (IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fVex)
        {
            /* Note! The real mode, v8086 mode and invalid prefix checks are done once
                     the instruction is fully decoded. Even when XCR0=3 and CR4.OSXSAVE=0. */
            uint8_t bVex2;   IEM_OPCODE_GET_NEXT_U8(&bVex2);
            uint8_t bOpcode; IEM_OPCODE_GET_NEXT_U8(&bOpcode);
            pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_VEX;
            if ((bVex2 & 0x80 /* VEX.W */) && IEM_IS_64BIT_CODE(pVCpu))
                pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_SIZE_REX_W;
            /* The VEX payload bits are stored inverted; shift them into the
               REX-style prefix state fields. */
            pVCpu->iem.s.uRexReg    = (~bRm >> (7 - 3)) & 0x8;
            pVCpu->iem.s.uRexIndex  = (~bRm >> (6 - 3)) & 0x8;
            pVCpu->iem.s.uRexB      = (~bRm >> (5 - 3)) & 0x8;
            pVCpu->iem.s.uVex3rdReg = (~bVex2 >> 3) & 0xf;
            pVCpu->iem.s.uVexLength = (bVex2 >> 2) & 1;
            pVCpu->iem.s.idxPrefix  = bVex2 & 0x3;

            /* Low five bits of the first payload byte select the opcode map. */
            switch (bRm & 0x1f)
            {
                case 1: /* 0x0f lead opcode byte. */
#ifdef IEM_WITH_VEX
                    return FNIEMOP_CALL(g_apfnVexMap1[(uintptr_t)bOpcode * 4 + pVCpu->iem.s.idxPrefix]);
#else
                    IEMOP_BITCH_ABOUT_STUB();
                    return VERR_IEM_INSTR_NOT_IMPLEMENTED;
#endif

                case 2: /* 0x0f 0x38 lead opcode bytes. */
#ifdef IEM_WITH_VEX
                    return FNIEMOP_CALL(g_apfnVexMap2[(uintptr_t)bOpcode * 4 + pVCpu->iem.s.idxPrefix]);
#else
                    IEMOP_BITCH_ABOUT_STUB();
                    return VERR_IEM_INSTR_NOT_IMPLEMENTED;
#endif

                case 3: /* 0x0f 0x3a lead opcode bytes. */
#ifdef IEM_WITH_VEX
                    return FNIEMOP_CALL(g_apfnVexMap3[(uintptr_t)bOpcode * 4 + pVCpu->iem.s.idxPrefix]);
#else
                    IEMOP_BITCH_ABOUT_STUB();
                    return VERR_IEM_INSTR_NOT_IMPLEMENTED;
#endif

                default:
                    Log(("VEX3: Invalid vvvv value: %#x!\n", bRm & 0x1f));
                    IEMOP_RAISE_INVALID_OPCODE_RET();
            }
        }
        Log(("VEX3: VEX support disabled!\n"));
        IEMOP_RAISE_INVALID_OPCODE_RET();
    }

    /* Legacy decoding: LES loads ES and a general register from a far pointer. */
    IEMOP_MNEMONIC(les_Gv_Mp, "les Gv,Mp");
    return FNIEMOP_CALL_2(iemOpCommonLoadSRegAndGreg, X86_SREG_ES, bRm);
}
7945
7946
/**
 * @opcode 0xc5
 *
 * Either LDS Gv,Mp or the 2-byte VEX prefix, depending on mode and ModR/M.
 */
FNIEMOP_DEF(iemOp_lds_Gv_Mp__vex2)
{
    /* The LDS instruction is invalid in 64-bit mode.  In legacy and
       compatibility mode it is invalid with MOD=3.
       The use as a VEX prefix is made possible by assigning the inverted
       REX.R to the top MOD bit, and the top bit in the inverted register
       specifier to the bottom MOD bit, thereby effectively limiting 32-bit
       to accessing registers 0..7 in this VEX form. */
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (   IEM_IS_64BIT_CODE(pVCpu)
        || IEM_IS_MODRM_REG_MODE(bRm))
    {
        IEMOP_MNEMONIC(vex2_prefix, "vex2");
        if (IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fVex)
        {
            /* Note! The real mode, v8086 mode and invalid prefix checks are done once
                     the instruction is fully decoded. Even when XCR0=3 and CR4.OSXSAVE=0. */
            uint8_t bOpcode; IEM_OPCODE_GET_NEXT_U8(&bOpcode);
            pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_VEX;
            /* Single payload byte: inverted R, inverted vvvv, L, and pp. */
            pVCpu->iem.s.uRexReg    = (~bRm >> (7 - 3)) & 0x8;
            pVCpu->iem.s.uVex3rdReg = (~bRm >> 3) & 0xf;
            pVCpu->iem.s.uVexLength = (bRm >> 2) & 1;
            pVCpu->iem.s.idxPrefix  = bRm & 0x3;

            /* The 2-byte VEX form always implies the 0x0f opcode map. */
#ifdef IEM_WITH_VEX
            return FNIEMOP_CALL(g_apfnVexMap1[(uintptr_t)bOpcode * 4 + pVCpu->iem.s.idxPrefix]);
#else
            IEMOP_BITCH_ABOUT_STUB();
            return VERR_IEM_INSTR_NOT_IMPLEMENTED;
#endif
        }

        /** @todo does intel completely decode the sequence with SIB/disp before \#UD? */
        Log(("VEX2: VEX support disabled!\n"));
        IEMOP_RAISE_INVALID_OPCODE_RET();
    }

    /* Legacy decoding: LDS loads DS and a general register from a far pointer. */
    IEMOP_MNEMONIC(lds_Gv_Mp, "lds Gv,Mp");
    return FNIEMOP_CALL_2(iemOpCommonLoadSRegAndGreg, X86_SREG_DS, bRm);
}
7990
7991
/**
 * @opcode 0xc6
 *
 * Group 11: only /0 (mov Eb,Ib) is defined; all other /reg encodings raise
 * \#UD.  Stores an immediate byte into a register or memory operand.
 */
FNIEMOP_DEF(iemOp_Grp11_Eb_Ib)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_REG_MASK) != (0 << X86_MODRM_REG_SHIFT)) /* only mov Eb,Ib in this group. */
        IEMOP_RAISE_INVALID_OPCODE_RET();
    IEMOP_MNEMONIC(mov_Eb_Ib, "mov Eb,Ib");

    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /* register access */
        uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
        IEM_MC_BEGIN(0, 0, 0, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), u8Imm);
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /* memory access. */
        IEM_MC_BEGIN(0, 1, 0, 0);
        IEM_MC_LOCAL(RTGCPTR,  GCPtrEffDst);
        /* Effective address first; the trailing immediate (1 byte) follows
           the ModR/M bytes in the instruction stream. */
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
        uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_STORE_MEM_U8(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u8Imm);
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}
8025
8026
/**
 * @opcode 0xc7
 *
 * Group 11: only /0 (mov Ev,Iz) is defined; all other /reg encodings raise
 * \#UD.  Stores an immediate (word/dword, sign-extended dword for 64-bit)
 * into a register or memory operand.
 */
FNIEMOP_DEF(iemOp_Grp11_Ev_Iz)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_REG_MASK) != (0 << X86_MODRM_REG_SHIFT)) /* only mov Ev,Iz in this group. */
        IEMOP_RAISE_INVALID_OPCODE_RET();
    IEMOP_MNEMONIC(mov_Ev_Iz, "mov Ev,Iz");

    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /* register access */
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(0, 0, 0, 0);
                uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_STORE_GREG_U16_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), u16Imm);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(0, 0, IEM_MC_F_MIN_386, 0);
                uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_STORE_GREG_U32_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), u32Imm);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(0, 0, IEM_MC_F_64BIT, 0);
                /* 64-bit form takes a 32-bit immediate sign-extended to 64 bits. */
                uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_STORE_GREG_U64_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), u64Imm);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /* memory access. */
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(0, 1, 0, 0);
                IEM_MC_LOCAL(RTGCPTR,  GCPtrEffDst);
                /* 2 = number of trailing immediate bytes after ModR/M. */
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 2);
                uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_STORE_MEM_U16(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u16Imm);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(0, 1, IEM_MC_F_MIN_386, 0);
                IEM_MC_LOCAL(RTGCPTR,  GCPtrEffDst);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 4);
                uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_STORE_MEM_U32(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u32Imm);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(0, 1, IEM_MC_F_64BIT, 0);
                IEM_MC_LOCAL(RTGCPTR,  GCPtrEffDst);
                /* Still 4 immediate bytes: the dword is sign-extended to 64 bits. */
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 4);
                uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u64Imm);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
8114
8115
8116
8117
/**
 * @opcode 0xc8
 *
 * ENTER Iw,Ib - create a stack frame of Iw bytes with nesting level Ib.
 * 186+ instruction; the stack frame setup is handled by iemCImpl_enter.
 */
FNIEMOP_DEF(iemOp_enter_Iw_Ib)
{
    IEMOP_MNEMONIC(enter_Iw_Ib, "enter Iw,Ib");
    IEMOP_HLP_MIN_186();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
    uint16_t cbFrame;        IEM_OPCODE_GET_NEXT_U16(&cbFrame);
    uint8_t  u8NestingLevel; IEM_OPCODE_GET_NEXT_U8(&u8NestingLevel);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_DEFER_TO_CIMPL_3_RET(0, iemCImpl_enter, pVCpu->iem.s.enmEffOpSize, cbFrame, u8NestingLevel);
}
8131
8132
/**
 * @opcode 0xc9
 *
 * LEAVE - tear down the stack frame created by ENTER.  186+ instruction;
 * deferred to iemCImpl_leave with the effective operand size.
 */
FNIEMOP_DEF(iemOp_leave)
{
    IEMOP_MNEMONIC(leave, "leave");
    IEMOP_HLP_MIN_186();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_DEFER_TO_CIMPL_1_RET(0, iemCImpl_leave, pVCpu->iem.s.enmEffOpSize);
}
8144
8145
/**
 * @opcode 0xca
 *
 * Far return, popping an additional Iw bytes of arguments off the stack.
 * A far return can change CS and thus the execution mode, hence the
 * IEM_CIMPL_F_MODE flag.
 */
FNIEMOP_DEF(iemOp_retf_Iw)
{
    IEMOP_MNEMONIC(retf_Iw, "retf Iw");
    uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_BRANCH_INDIRECT | IEM_CIMPL_F_BRANCH_FAR | IEM_CIMPL_F_MODE,
                                iemCImpl_retf, pVCpu->iem.s.enmEffOpSize, u16Imm);
}
8157
8158
/**
 * @opcode 0xcb
 *
 * Plain far return; same C implementation as retf Iw with a zero byte count.
 */
FNIEMOP_DEF(iemOp_retf)
{
    IEMOP_MNEMONIC(retf, "retf");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_BRANCH_INDIRECT | IEM_CIMPL_F_BRANCH_FAR | IEM_CIMPL_F_MODE,
                                iemCImpl_retf, pVCpu->iem.s.enmEffOpSize, 0);
}
8169
8170
/**
 * @opcode 0xcc
 *
 * INT3 - breakpoint trap; raises \#BP via the common software-interrupt
 * C implementation (IEMINT_INT3 distinguishes it from "int 3").
 */
FNIEMOP_DEF(iemOp_int3)
{
    IEMOP_MNEMONIC(int3, "int3");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_BRANCH_INDIRECT | IEM_CIMPL_F_BRANCH_FAR
                                | IEM_CIMPL_F_MODE | IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_RFLAGS,
                                iemCImpl_int, X86_XCPT_BP, IEMINT_INT3);
}
8182
8183
/**
 * @opcode 0xcd
 *
 * INT Ib - software interrupt with an explicit vector byte.
 */
FNIEMOP_DEF(iemOp_int_Ib)
{
    IEMOP_MNEMONIC(int_Ib, "int Ib");
    uint8_t u8Int; IEM_OPCODE_GET_NEXT_U8(&u8Int);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_BRANCH_INDIRECT | IEM_CIMPL_F_BRANCH_FAR
                                | IEM_CIMPL_F_MODE | IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_RFLAGS,
                                iemCImpl_int, u8Int, IEMINT_INTN);
}
8196
8197
/**
 * @opcode 0xce
 *
 * INTO - raise \#OF if the overflow flag is set; invalid in 64-bit mode.
 * The conditional nature is flagged via IEM_CIMPL_F_BRANCH_CONDITIONAL.
 */
FNIEMOP_DEF(iemOp_into)
{
    IEMOP_MNEMONIC(into, "into");
    IEMOP_HLP_NO_64BIT();
    IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_BRANCH_INDIRECT | IEM_CIMPL_F_BRANCH_FAR | IEM_CIMPL_F_BRANCH_CONDITIONAL
                                | IEM_CIMPL_F_MODE | IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_RFLAGS,
                                iemCImpl_int, X86_XCPT_OF, IEMINT_INTO);
}
8209
8210
/**
 * @opcode 0xcf
 *
 * IRET - interrupt return.  May change mode and flags and must check for
 * pending interrupts before executing (IEM_CIMPL_F_CHECK_IRQ_BEFORE).
 */
FNIEMOP_DEF(iemOp_iret)
{
    IEMOP_MNEMONIC(iret, "iret");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_BRANCH_INDIRECT | IEM_CIMPL_F_BRANCH_FAR
                                | IEM_CIMPL_F_MODE | IEM_CIMPL_F_RFLAGS | IEM_CIMPL_F_CHECK_IRQ_BEFORE | IEM_CIMPL_F_VMEXIT,
                                iemCImpl_iret, pVCpu->iem.s.enmEffOpSize);
}
8222
8223
/**
 * @opcode 0xd0
 *
 * Group 2 shift/rotate of a byte operand by a constant count of 1:
 * rol/ror/rcl/rcr/shl/shr/sar Eb,1 selected by the ModR/M reg field
 * (/6 is undefined and raises \#UD).
 */
FNIEMOP_DEF(iemOp_Grp2_Eb_1)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    PCIEMOPSHIFTSIZES pImpl;
    switch (IEM_GET_MODRM_REG_8(bRm))
    {
        case 0: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_rol_eflags); IEMOP_MNEMONIC(rol_Eb_1, "rol Eb,1"); break;
        case 1: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_ror_eflags); IEMOP_MNEMONIC(ror_Eb_1, "ror Eb,1"); break;
        case 2: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_rcl_eflags); IEMOP_MNEMONIC(rcl_Eb_1, "rcl Eb,1"); break;
        case 3: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_rcr_eflags); IEMOP_MNEMONIC(rcr_Eb_1, "rcr Eb,1"); break;
        case 4: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_shl_eflags); IEMOP_MNEMONIC(shl_Eb_1, "shl Eb,1"); break;
        case 5: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_shr_eflags); IEMOP_MNEMONIC(shr_Eb_1, "shr Eb,1"); break;
        case 7: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_sar_eflags); IEMOP_MNEMONIC(sar_Eb_1, "sar Eb,1"); break;
        case 6: IEMOP_RAISE_INVALID_OPCODE_RET();
        IEM_NOT_REACHED_DEFAULT_CASE_RET(); /* gcc maybe, well... */
    }
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_AF);

    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /* register */
        IEM_MC_BEGIN(3, 0, 0, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_ARG(uint8_t *,       pu8Dst,             0);
        IEM_MC_ARG_CONST(uint8_t,   cShiftArg,/*=*/1,   1);
        IEM_MC_ARG(uint32_t *,      pEFlags,            2);
        IEM_MC_REF_GREG_U8(pu8Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_REF_EFLAGS(pEFlags);
        IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU8, pu8Dst, cShiftArg, pEFlags);
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /* memory */
        IEM_MC_BEGIN(3, 3, 0, 0);
        IEM_MC_ARG(uint8_t *,   pu8Dst,           0);
        IEM_MC_ARG_CONST(uint8_t, cShiftArg,/*=*/1, 1);
        IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags,  2);
        IEM_MC_LOCAL(RTGCPTR,   GCPtrEffDst);
        IEM_MC_LOCAL(uint8_t,   bUnmapInfo);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        /* Map the destination byte read/write, apply the shift in place,
           then commit both memory and EFLAGS. */
        IEM_MC_MEM_MAP_U8_RW(pu8Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
        IEM_MC_FETCH_EFLAGS(EFlags);
        IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU8, pu8Dst, cShiftArg, pEFlags);

        IEM_MC_MEM_COMMIT_AND_UNMAP_RW(pu8Dst, bUnmapInfo);
        IEM_MC_COMMIT_EFLAGS(EFlags);
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}
8281
8282
8283
/**
 * @opcode 0xd1
 *
 * Group 2 shift/rotate of a word/dword/qword operand by a constant count of
 * 1: rol/ror/rcl/rcr/shl/shr/sar Ev,1 selected by the ModR/M reg field
 * (/6 is undefined and raises \#UD).
 */
FNIEMOP_DEF(iemOp_Grp2_Ev_1)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    PCIEMOPSHIFTSIZES pImpl;
    switch (IEM_GET_MODRM_REG_8(bRm))
    {
        case 0: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_rol_eflags); IEMOP_MNEMONIC(rol_Ev_1, "rol Ev,1"); break;
        case 1: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_ror_eflags); IEMOP_MNEMONIC(ror_Ev_1, "ror Ev,1"); break;
        case 2: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_rcl_eflags); IEMOP_MNEMONIC(rcl_Ev_1, "rcl Ev,1"); break;
        case 3: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_rcr_eflags); IEMOP_MNEMONIC(rcr_Ev_1, "rcr Ev,1"); break;
        case 4: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_shl_eflags); IEMOP_MNEMONIC(shl_Ev_1, "shl Ev,1"); break;
        case 5: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_shr_eflags); IEMOP_MNEMONIC(shr_Ev_1, "shr Ev,1"); break;
        case 7: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_sar_eflags); IEMOP_MNEMONIC(sar_Ev_1, "sar Ev,1"); break;
        case 6: IEMOP_RAISE_INVALID_OPCODE_RET();
        IEM_NOT_REACHED_DEFAULT_CASE_RET(); /* gcc maybe, well... */
    }
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_AF);

    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /* register */
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(3, 0, 0, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_ARG(uint16_t *,      pu16Dst,           0);
                IEM_MC_ARG_CONST(uint8_t,   cShiftArg,/*=1*/1, 1);
                IEM_MC_ARG(uint32_t *,      pEFlags,           2);
                IEM_MC_REF_GREG_U16(pu16Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, cShiftArg, pEFlags);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(3, 0, IEM_MC_F_MIN_386, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_ARG(uint32_t *,      pu32Dst,           0);
                IEM_MC_ARG_CONST(uint8_t,   cShiftArg,/*=1*/1, 1);
                IEM_MC_ARG(uint32_t *,      pEFlags,           2);
                IEM_MC_REF_GREG_U32(pu32Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, cShiftArg, pEFlags);
                /* 32-bit GPR writes zero the upper half of the 64-bit register. */
                IEM_MC_CLEAR_HIGH_GREG_U64(IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(3, 0, IEM_MC_F_64BIT, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_ARG(uint64_t *,      pu64Dst,           0);
                IEM_MC_ARG_CONST(uint8_t,   cShiftArg,/*=1*/1, 1);
                IEM_MC_ARG(uint32_t *,      pEFlags,           2);
                IEM_MC_REF_GREG_U64(pu64Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, cShiftArg, pEFlags);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /* memory */
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(3, 3, 0, 0);
                IEM_MC_ARG(uint16_t *,      pu16Dst,           0);
                IEM_MC_ARG_CONST(uint8_t,   cShiftArg,/*=1*/1, 1);
                IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags,       2);
                IEM_MC_LOCAL(RTGCPTR,       GCPtrEffDst);
                IEM_MC_LOCAL(uint8_t,       bUnmapInfo);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                /* Read/write map, shift in place, then commit memory + EFLAGS. */
                IEM_MC_MEM_MAP_U16_RW(pu16Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, cShiftArg, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP_RW(pu16Dst, bUnmapInfo);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(3, 3, IEM_MC_F_MIN_386, 0);
                IEM_MC_ARG(uint32_t *,      pu32Dst,           0);
                IEM_MC_ARG_CONST(uint8_t,   cShiftArg,/*=1*/1, 1);
                IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags,       2);
                IEM_MC_LOCAL(RTGCPTR,       GCPtrEffDst);
                IEM_MC_LOCAL(uint8_t,       bUnmapInfo);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_MEM_MAP_U32_RW(pu32Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, cShiftArg, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP_RW(pu32Dst, bUnmapInfo);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(3, 3, IEM_MC_F_64BIT, 0);
                IEM_MC_ARG(uint64_t *,      pu64Dst,           0);
                IEM_MC_ARG_CONST(uint8_t,   cShiftArg,/*=1*/1, 1);
                IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags,       2);
                IEM_MC_LOCAL(RTGCPTR,       GCPtrEffDst);
                IEM_MC_LOCAL(uint8_t,       bUnmapInfo);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_MEM_MAP_U64_RW(pu64Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, cShiftArg, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP_RW(pu64Dst, bUnmapInfo);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
8422
8423
/**
 * @opcode 0xd2
 *
 * Group 2 shift/rotate of a byte operand by the count in CL:
 * rol/ror/rcl/rcr/shl/shr/sar Eb,CL selected by the ModR/M reg field
 * (/6 is undefined and raises \#UD).
 */
FNIEMOP_DEF(iemOp_Grp2_Eb_CL)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    PCIEMOPSHIFTSIZES pImpl;
    switch (IEM_GET_MODRM_REG_8(bRm))
    {
        case 0: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_rol_eflags); IEMOP_MNEMONIC(rol_Eb_CL, "rol Eb,CL"); break;
        case 1: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_ror_eflags); IEMOP_MNEMONIC(ror_Eb_CL, "ror Eb,CL"); break;
        case 2: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_rcl_eflags); IEMOP_MNEMONIC(rcl_Eb_CL, "rcl Eb,CL"); break;
        case 3: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_rcr_eflags); IEMOP_MNEMONIC(rcr_Eb_CL, "rcr Eb,CL"); break;
        case 4: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_shl_eflags); IEMOP_MNEMONIC(shl_Eb_CL, "shl Eb,CL"); break;
        case 5: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_shr_eflags); IEMOP_MNEMONIC(shr_Eb_CL, "shr Eb,CL"); break;
        case 7: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_sar_eflags); IEMOP_MNEMONIC(sar_Eb_CL, "sar Eb,CL"); break;
        case 6: IEMOP_RAISE_INVALID_OPCODE_RET();
        IEM_NOT_REACHED_DEFAULT_CASE_RET(); /* gcc, grr. */
    }
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_AF);

    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /* register */
        IEM_MC_BEGIN(3, 0, 0, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_ARG(uint8_t *,   pu8Dst,    0);
        IEM_MC_ARG(uint8_t,     cShiftArg, 1);
        IEM_MC_ARG(uint32_t *,  pEFlags,   2);
        IEM_MC_REF_GREG_U8(pu8Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
        /* Shift count comes from CL (low byte of rCX). */
        IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
        IEM_MC_REF_EFLAGS(pEFlags);
        IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU8, pu8Dst, cShiftArg, pEFlags);
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /* memory */
        IEM_MC_BEGIN(3, 3, 0, 0);
        IEM_MC_ARG(uint8_t *,   pu8Dst,          0);
        IEM_MC_ARG(uint8_t,     cShiftArg,       1);
        IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
        IEM_MC_LOCAL(RTGCPTR,   GCPtrEffDst);
        IEM_MC_LOCAL(uint8_t,   bUnmapInfo);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
        /* Read/write map, shift in place, then commit memory + EFLAGS. */
        IEM_MC_MEM_MAP_U8_RW(pu8Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
        IEM_MC_FETCH_EFLAGS(EFlags);
        IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU8, pu8Dst, cShiftArg, pEFlags);

        IEM_MC_MEM_COMMIT_AND_UNMAP_RW(pu8Dst, bUnmapInfo);
        IEM_MC_COMMIT_EFLAGS(EFlags);
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}
8483
8484
/**
 * @opcode 0xd3
 *
 * Group 2 shift/rotate of a word/dword/qword operand by the count in CL:
 * rol/ror/rcl/rcr/shl/shr/sar Ev,CL selected by the ModR/M reg field
 * (/6 is undefined and raises \#UD).
 */
FNIEMOP_DEF(iemOp_Grp2_Ev_CL)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    PCIEMOPSHIFTSIZES pImpl;
    switch (IEM_GET_MODRM_REG_8(bRm))
    {
        case 0: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_rol_eflags); IEMOP_MNEMONIC(rol_Ev_CL, "rol Ev,CL"); break;
        case 1: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_ror_eflags); IEMOP_MNEMONIC(ror_Ev_CL, "ror Ev,CL"); break;
        case 2: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_rcl_eflags); IEMOP_MNEMONIC(rcl_Ev_CL, "rcl Ev,CL"); break;
        case 3: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_rcr_eflags); IEMOP_MNEMONIC(rcr_Ev_CL, "rcr Ev,CL"); break;
        case 4: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_shl_eflags); IEMOP_MNEMONIC(shl_Ev_CL, "shl Ev,CL"); break;
        case 5: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_shr_eflags); IEMOP_MNEMONIC(shr_Ev_CL, "shr Ev,CL"); break;
        case 7: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_sar_eflags); IEMOP_MNEMONIC(sar_Ev_CL, "sar Ev,CL"); break;
        case 6: IEMOP_RAISE_INVALID_OPCODE_RET();
        IEM_NOT_REACHED_DEFAULT_CASE_RET(); /* gcc maybe stupid */
    }
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_AF);

    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /* register */
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(3, 0, 0, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_ARG(uint16_t *,  pu16Dst,    0);
                IEM_MC_ARG(uint8_t,     cShiftArg,  1);
                IEM_MC_ARG(uint32_t *,  pEFlags,    2);
                IEM_MC_REF_GREG_U16(pu16Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
                /* Shift count comes from CL (low byte of rCX). */
                IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, cShiftArg, pEFlags);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(3, 0, IEM_MC_F_MIN_386, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_ARG(uint32_t *,  pu32Dst,    0);
                IEM_MC_ARG(uint8_t,     cShiftArg,  1);
                IEM_MC_ARG(uint32_t *,  pEFlags,    2);
                IEM_MC_REF_GREG_U32(pu32Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, cShiftArg, pEFlags);
                /* 32-bit GPR writes zero the upper half of the 64-bit register. */
                IEM_MC_CLEAR_HIGH_GREG_U64(IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(3, 0, IEM_MC_F_64BIT, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_ARG(uint64_t *,  pu64Dst,    0);
                IEM_MC_ARG(uint8_t,     cShiftArg,  1);
                IEM_MC_ARG(uint32_t *,  pEFlags,    2);
                IEM_MC_REF_GREG_U64(pu64Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, cShiftArg, pEFlags);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /* memory */
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(3, 3, 0, 0);
                IEM_MC_ARG(uint16_t *,  pu16Dst,         0);
                IEM_MC_ARG(uint8_t,     cShiftArg,       1);
                IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
                IEM_MC_LOCAL(RTGCPTR,   GCPtrEffDst);
                IEM_MC_LOCAL(uint8_t,   bUnmapInfo);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
                /* Read/write map, shift in place, then commit memory + EFLAGS. */
                IEM_MC_MEM_MAP_U16_RW(pu16Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, cShiftArg, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP_RW(pu16Dst, bUnmapInfo);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(3, 3, IEM_MC_F_MIN_386, 0);
                IEM_MC_ARG(uint32_t *,  pu32Dst,         0);
                IEM_MC_ARG(uint8_t,     cShiftArg,       1);
                IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
                IEM_MC_LOCAL(RTGCPTR,   GCPtrEffDst);
                IEM_MC_LOCAL(uint8_t,   bUnmapInfo);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
                IEM_MC_MEM_MAP_U32_RW(pu32Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, cShiftArg, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP_RW(pu32Dst, bUnmapInfo);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(3, 3, IEM_MC_F_64BIT, 0);
                IEM_MC_ARG(uint64_t *,  pu64Dst,         0);
                IEM_MC_ARG(uint8_t,     cShiftArg,       1);
                IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
                IEM_MC_LOCAL(RTGCPTR,   GCPtrEffDst);
                IEM_MC_LOCAL(uint8_t,   bUnmapInfo);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
                IEM_MC_MEM_MAP_U64_RW(pu64Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, cShiftArg, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP_RW(pu64Dst, bUnmapInfo);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
8629
/**
 * @opcode 0xd4
 *
 * AAM Ib - ASCII adjust AX after multiply; invalid in 64-bit mode.
 * A zero immediate (the divisor) raises \#DE before deferring to the
 * C implementation.
 */
FNIEMOP_DEF(iemOp_aam_Ib)
{
    IEMOP_MNEMONIC(aam_Ib, "aam Ib");
    uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_HLP_NO_64BIT();
    if (!bImm)
        IEMOP_RAISE_DIVIDE_ERROR_RET();
    IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_STATUS_FLAGS, iemCImpl_aam, bImm);
}
8643
8644
/**
 * @opcode 0xd5
 *
 * AAD Ib - ASCII adjust AX before division; invalid in 64-bit mode.
 * Unlike AAM, a zero immediate needs no special-casing here.
 */
FNIEMOP_DEF(iemOp_aad_Ib)
{
    IEMOP_MNEMONIC(aad_Ib, "aad Ib");
    uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_HLP_NO_64BIT();
    IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_STATUS_FLAGS, iemCImpl_aad, bImm);
}
8656
8657
/**
 * @opcode 0xd6
 *
 * SALC (undocumented) - set AL from carry: AL = CF ? 0xff : 0x00.
 * Invalid in 64-bit mode.
 */
FNIEMOP_DEF(iemOp_salc)
{
    IEMOP_MNEMONIC(salc, "salc");
    IEMOP_HLP_NO_64BIT();

    IEM_MC_BEGIN(0, 0, 0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
        IEM_MC_STORE_GREG_U8_CONST(X86_GREG_xAX, 0xff);
    } IEM_MC_ELSE() {
        IEM_MC_STORE_GREG_U8_CONST(X86_GREG_xAX, 0x00);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();
    IEM_MC_END();
}
8676
8677
/**
 * @opcode 0xd7
 *
 * XLAT - table lookup: AL = [rBX + AL], using the effective segment.
 * Three variants according to the effective address size; the 16/32-bit
 * forms use address-size limited fetches (FETCH_MEM16/32).
 */
FNIEMOP_DEF(iemOp_xlat)
{
    IEMOP_MNEMONIC(xlat, "xlat");
    switch (pVCpu->iem.s.enmEffAddrMode)
    {
        case IEMMODE_16BIT:
            IEM_MC_BEGIN(2, 0, 0, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_LOCAL(uint8_t,  u8Tmp);
            IEM_MC_LOCAL(uint16_t, u16Addr);
            /* Address = BX + zero-extended AL, truncated to 16 bits. */
            IEM_MC_FETCH_GREG_U8_ZX_U16(u16Addr, X86_GREG_xAX);
            IEM_MC_ADD_GREG_U16_TO_LOCAL(u16Addr, X86_GREG_xBX);
            IEM_MC_FETCH_MEM16_U8(u8Tmp, pVCpu->iem.s.iEffSeg, u16Addr);
            IEM_MC_STORE_GREG_U8(X86_GREG_xAX, u8Tmp);
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
            break;

        case IEMMODE_32BIT:
            IEM_MC_BEGIN(2, 0, IEM_MC_F_MIN_386, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_LOCAL(uint8_t,  u8Tmp);
            IEM_MC_LOCAL(uint32_t, u32Addr);
            IEM_MC_FETCH_GREG_U8_ZX_U32(u32Addr, X86_GREG_xAX);
            IEM_MC_ADD_GREG_U32_TO_LOCAL(u32Addr, X86_GREG_xBX);
            IEM_MC_FETCH_MEM32_U8(u8Tmp, pVCpu->iem.s.iEffSeg, u32Addr);
            IEM_MC_STORE_GREG_U8(X86_GREG_xAX, u8Tmp);
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
            break;

        case IEMMODE_64BIT:
            IEM_MC_BEGIN(2, 0, IEM_MC_F_64BIT, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_LOCAL(uint8_t,  u8Tmp);
            IEM_MC_LOCAL(uint64_t, u64Addr);
            IEM_MC_FETCH_GREG_U8_ZX_U64(u64Addr, X86_GREG_xAX);
            IEM_MC_ADD_GREG_U64_TO_LOCAL(u64Addr, X86_GREG_xBX);
            IEM_MC_FETCH_MEM_U8(u8Tmp, pVCpu->iem.s.iEffSeg, u64Addr);
            IEM_MC_STORE_GREG_U8(X86_GREG_xAX, u8Tmp);
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
            break;

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
8728
8729
8730/**
8731 * Common worker for FPU instructions working on ST0 and STn, and storing the
8732 * result in ST0.
8733 *
8734 * @param bRm Mod R/M byte.
8735 * @param pfnAImpl Pointer to the instruction implementation (assembly).
8736 */
8737FNIEMOP_DEF_2(iemOpHlpFpu_st0_stN, uint8_t, bRm, PFNIEMAIMPLFPUR80, pfnAImpl)
8738{
8739 IEM_MC_BEGIN(3, 1, 0, 0);
8740 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8741 IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
8742 IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
8743 IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
8744 IEM_MC_ARG(PCRTFLOAT80U, pr80Value2, 2);
8745
8746 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
8747 IEM_MC_MAYBE_RAISE_FPU_XCPT();
8748 IEM_MC_PREPARE_FPU_USAGE();
8749 IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80(pr80Value1, 0, pr80Value2, IEM_GET_MODRM_RM_8(bRm)) {
8750 IEM_MC_CALL_FPU_AIMPL_3(pfnAImpl, pFpuRes, pr80Value1, pr80Value2);
8751 IEM_MC_STORE_FPU_RESULT(FpuRes, 0, pVCpu->iem.s.uFpuOpcode);
8752 } IEM_MC_ELSE() {
8753 IEM_MC_FPU_STACK_UNDERFLOW(0, pVCpu->iem.s.uFpuOpcode);
8754 } IEM_MC_ENDIF();
8755 IEM_MC_ADVANCE_RIP_AND_FINISH();
8756
8757 IEM_MC_END();
8758}
8759
8760
8761/**
8762 * Common worker for FPU instructions working on ST0 and STn, and only affecting
8763 * flags.
8764 *
8765 * @param bRm Mod R/M byte.
8766 * @param pfnAImpl Pointer to the instruction implementation (assembly).
8767 */
8768FNIEMOP_DEF_2(iemOpHlpFpuNoStore_st0_stN, uint8_t, bRm, PFNIEMAIMPLFPUR80FSW, pfnAImpl)
8769{
8770 IEM_MC_BEGIN(3, 1, 0, 0);
8771 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8772 IEM_MC_LOCAL(uint16_t, u16Fsw);
8773 IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
8774 IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
8775 IEM_MC_ARG(PCRTFLOAT80U, pr80Value2, 2);
8776
8777 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
8778 IEM_MC_MAYBE_RAISE_FPU_XCPT();
8779 IEM_MC_PREPARE_FPU_USAGE();
8780 IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80(pr80Value1, 0, pr80Value2, IEM_GET_MODRM_RM_8(bRm)) {
8781 IEM_MC_CALL_FPU_AIMPL_3(pfnAImpl, pu16Fsw, pr80Value1, pr80Value2);
8782 IEM_MC_UPDATE_FSW(u16Fsw, pVCpu->iem.s.uFpuOpcode);
8783 } IEM_MC_ELSE() {
8784 IEM_MC_FPU_STACK_UNDERFLOW(UINT8_MAX, pVCpu->iem.s.uFpuOpcode);
8785 } IEM_MC_ENDIF();
8786 IEM_MC_ADVANCE_RIP_AND_FINISH();
8787
8788 IEM_MC_END();
8789}
8790
8791
8792/**
8793 * Common worker for FPU instructions working on ST0 and STn, only affecting
8794 * flags, and popping when done.
8795 *
8796 * @param bRm Mod R/M byte.
8797 * @param pfnAImpl Pointer to the instruction implementation (assembly).
8798 */
8799FNIEMOP_DEF_2(iemOpHlpFpuNoStore_st0_stN_pop, uint8_t, bRm, PFNIEMAIMPLFPUR80FSW, pfnAImpl)
8800{
8801 IEM_MC_BEGIN(3, 1, 0, 0);
8802 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8803 IEM_MC_LOCAL(uint16_t, u16Fsw);
8804 IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
8805 IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
8806 IEM_MC_ARG(PCRTFLOAT80U, pr80Value2, 2);
8807
8808 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
8809 IEM_MC_MAYBE_RAISE_FPU_XCPT();
8810 IEM_MC_PREPARE_FPU_USAGE();
8811 IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80(pr80Value1, 0, pr80Value2, IEM_GET_MODRM_RM_8(bRm)) {
8812 IEM_MC_CALL_FPU_AIMPL_3(pfnAImpl, pu16Fsw, pr80Value1, pr80Value2);
8813 IEM_MC_UPDATE_FSW_THEN_POP(u16Fsw, pVCpu->iem.s.uFpuOpcode);
8814 } IEM_MC_ELSE() {
8815 IEM_MC_FPU_STACK_UNDERFLOW_THEN_POP(UINT8_MAX, pVCpu->iem.s.uFpuOpcode);
8816 } IEM_MC_ENDIF();
8817 IEM_MC_ADVANCE_RIP_AND_FINISH();
8818
8819 IEM_MC_END();
8820}
8821
8822
/** Opcode 0xd8 11/0. FADD ST0,STn: ST0 += STn. */
FNIEMOP_DEF_1(iemOp_fadd_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fadd_st0_stN, "fadd st0,stN");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_stN, bRm, iemAImpl_fadd_r80_by_r80);
}
8829
8830
/** Opcode 0xd8 11/1. FMUL ST0,STn: ST0 *= STn. */
FNIEMOP_DEF_1(iemOp_fmul_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fmul_st0_stN, "fmul st0,stN");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_stN, bRm, iemAImpl_fmul_r80_by_r80);
}
8837
8838
/** Opcode 0xd8 11/2. FCOM ST0,STn: compare, sets C0/C2/C3 only. */
FNIEMOP_DEF_1(iemOp_fcom_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fcom_st0_stN, "fcom st0,stN");
    return FNIEMOP_CALL_2(iemOpHlpFpuNoStore_st0_stN, bRm, iemAImpl_fcom_r80_by_r80);
}
8845
8846
/** Opcode 0xd8 11/3. FCOMP ST0,STn: compare like FCOM, then pop. */
FNIEMOP_DEF_1(iemOp_fcomp_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fcomp_st0_stN, "fcomp st0,stN");
    return FNIEMOP_CALL_2(iemOpHlpFpuNoStore_st0_stN_pop, bRm, iemAImpl_fcom_r80_by_r80);
}
8853
8854
/** Opcode 0xd8 11/4. FSUB ST0,STn: ST0 -= STn. */
FNIEMOP_DEF_1(iemOp_fsub_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fsub_st0_stN, "fsub st0,stN");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_stN, bRm, iemAImpl_fsub_r80_by_r80);
}
8861
8862
/** Opcode 0xd8 11/5. FSUBR ST0,STn: ST0 = STn - ST0. */
FNIEMOP_DEF_1(iemOp_fsubr_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fsubr_st0_stN, "fsubr st0,stN");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_stN, bRm, iemAImpl_fsubr_r80_by_r80);
}
8869
8870
/** Opcode 0xd8 11/6. FDIV ST0,STn: ST0 /= STn. */
FNIEMOP_DEF_1(iemOp_fdiv_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fdiv_st0_stN, "fdiv st0,stN");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_stN, bRm, iemAImpl_fdiv_r80_by_r80);
}
8877
8878
/** Opcode 0xd8 11/7. FDIVR ST0,STn: ST0 = STn / ST0. */
FNIEMOP_DEF_1(iemOp_fdivr_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fdivr_st0_stN, "fdivr st0,stN");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_stN, bRm, iemAImpl_fdivr_r80_by_r80);
}
8885
8886
8887/**
8888 * Common worker for FPU instructions working on ST0 and an m32r, and storing
8889 * the result in ST0.
8890 *
8891 * @param bRm Mod R/M byte.
8892 * @param pfnAImpl Pointer to the instruction implementation (assembly).
8893 */
8894FNIEMOP_DEF_2(iemOpHlpFpu_st0_m32r, uint8_t, bRm, PFNIEMAIMPLFPUR32, pfnAImpl)
8895{
8896 IEM_MC_BEGIN(3, 3, 0, 0);
8897 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
8898 IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
8899 IEM_MC_LOCAL(RTFLOAT32U, r32Val2);
8900 IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
8901 IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
8902 IEM_MC_ARG_LOCAL_REF(PCRTFLOAT32U, pr32Val2, r32Val2, 2);
8903
8904 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
8905 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8906
8907 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
8908 IEM_MC_MAYBE_RAISE_FPU_XCPT();
8909 IEM_MC_FETCH_MEM_R32(r32Val2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
8910
8911 IEM_MC_PREPARE_FPU_USAGE();
8912 IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0) {
8913 IEM_MC_CALL_FPU_AIMPL_3(pfnAImpl, pFpuRes, pr80Value1, pr32Val2);
8914 IEM_MC_STORE_FPU_RESULT(FpuRes, 0, pVCpu->iem.s.uFpuOpcode);
8915 } IEM_MC_ELSE() {
8916 IEM_MC_FPU_STACK_UNDERFLOW(0, pVCpu->iem.s.uFpuOpcode);
8917 } IEM_MC_ENDIF();
8918 IEM_MC_ADVANCE_RIP_AND_FINISH();
8919
8920 IEM_MC_END();
8921}
8922
8923
/** Opcode 0xd8 !11/0. FADD ST0,m32r: ST0 += [mem32real]. */
FNIEMOP_DEF_1(iemOp_fadd_m32r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fadd_st0_m32r, "fadd st0,m32r");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32r, bRm, iemAImpl_fadd_r80_by_r32);
}
8930
8931
/** Opcode 0xd8 !11/1. FMUL ST0,m32r: ST0 *= [mem32real]. */
FNIEMOP_DEF_1(iemOp_fmul_m32r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fmul_st0_m32r, "fmul st0,m32r");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32r, bRm, iemAImpl_fmul_r80_by_r32);
}
8938
8939
/** Opcode 0xd8 !11/2. FCOM ST0,m32r: compare ST0 with [mem32real]; FSW only. */
FNIEMOP_DEF_1(iemOp_fcom_m32r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fcom_st0_m32r, "fcom st0,m32r");

    IEM_MC_BEGIN(3, 3, 0, 0);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_LOCAL(RTFLOAT32U, r32Val2);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
    IEM_MC_ARG_LOCAL_REF(PCRTFLOAT32U, pr32Val2, r32Val2, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_R32(r32Val2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0) {
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fcom_r80_by_r32, pu16Fsw, pr80Value1, pr32Val2);
        /* Also records FPUDP/FPUDS since this has a memory operand. */
        IEM_MC_UPDATE_FSW_WITH_MEM_OP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
8971
8972
/** Opcode 0xd8 !11/3. FCOMP ST0,m32r: compare like FCOM m32r, then pop. */
FNIEMOP_DEF_1(iemOp_fcomp_m32r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fcomp_st0_m32r, "fcomp st0,m32r");

    IEM_MC_BEGIN(3, 3, 0, 0);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_LOCAL(RTFLOAT32U, r32Val2);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
    IEM_MC_ARG_LOCAL_REF(PCRTFLOAT32U, pr32Val2, r32Val2, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_R32(r32Val2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0) {
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fcom_r80_by_r32, pu16Fsw, pr80Value1, pr32Val2);
        /* Records FPUDP/FPUDS and pops the register stack. */
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
9004
9005
/** Opcode 0xd8 !11/4. FSUB ST0,m32r: ST0 -= [mem32real]. */
FNIEMOP_DEF_1(iemOp_fsub_m32r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fsub_st0_m32r, "fsub st0,m32r");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32r, bRm, iemAImpl_fsub_r80_by_r32);
}
9012
9013
/** Opcode 0xd8 !11/5. FSUBR ST0,m32r: ST0 = [mem32real] - ST0. */
FNIEMOP_DEF_1(iemOp_fsubr_m32r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fsubr_st0_m32r, "fsubr st0,m32r");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32r, bRm, iemAImpl_fsubr_r80_by_r32);
}
9020
9021
/** Opcode 0xd8 !11/6. FDIV ST0,m32r: ST0 /= [mem32real]. */
FNIEMOP_DEF_1(iemOp_fdiv_m32r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fdiv_st0_m32r, "fdiv st0,m32r");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32r, bRm, iemAImpl_fdiv_r80_by_r32);
}
9028
9029
/** Opcode 0xd8 !11/7. FDIVR ST0,m32r: ST0 = [mem32real] / ST0. */
FNIEMOP_DEF_1(iemOp_fdivr_m32r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fdivr_st0_m32r, "fdivr st0,m32r");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32r, bRm, iemAImpl_fdivr_r80_by_r32);
}
9036
9037
9038/**
9039 * @opcode 0xd8
9040 */
9041FNIEMOP_DEF(iemOp_EscF0)
9042{
9043 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
9044 pVCpu->iem.s.uFpuOpcode = RT_MAKE_U16(bRm, 0xd8 & 0x7);
9045
9046 if (IEM_IS_MODRM_REG_MODE(bRm))
9047 {
9048 switch (IEM_GET_MODRM_REG_8(bRm))
9049 {
9050 case 0: return FNIEMOP_CALL_1(iemOp_fadd_stN, bRm);
9051 case 1: return FNIEMOP_CALL_1(iemOp_fmul_stN, bRm);
9052 case 2: return FNIEMOP_CALL_1(iemOp_fcom_stN, bRm);
9053 case 3: return FNIEMOP_CALL_1(iemOp_fcomp_stN, bRm);
9054 case 4: return FNIEMOP_CALL_1(iemOp_fsub_stN, bRm);
9055 case 5: return FNIEMOP_CALL_1(iemOp_fsubr_stN, bRm);
9056 case 6: return FNIEMOP_CALL_1(iemOp_fdiv_stN, bRm);
9057 case 7: return FNIEMOP_CALL_1(iemOp_fdivr_stN, bRm);
9058 IEM_NOT_REACHED_DEFAULT_CASE_RET();
9059 }
9060 }
9061 else
9062 {
9063 switch (IEM_GET_MODRM_REG_8(bRm))
9064 {
9065 case 0: return FNIEMOP_CALL_1(iemOp_fadd_m32r, bRm);
9066 case 1: return FNIEMOP_CALL_1(iemOp_fmul_m32r, bRm);
9067 case 2: return FNIEMOP_CALL_1(iemOp_fcom_m32r, bRm);
9068 case 3: return FNIEMOP_CALL_1(iemOp_fcomp_m32r, bRm);
9069 case 4: return FNIEMOP_CALL_1(iemOp_fsub_m32r, bRm);
9070 case 5: return FNIEMOP_CALL_1(iemOp_fsubr_m32r, bRm);
9071 case 6: return FNIEMOP_CALL_1(iemOp_fdiv_m32r, bRm);
9072 case 7: return FNIEMOP_CALL_1(iemOp_fdivr_m32r, bRm);
9073 IEM_NOT_REACHED_DEFAULT_CASE_RET();
9074 }
9075 }
9076}
9077
9078
/** Opcode 0xd9 /0 mem32real
 * FLD m32r: push [mem32real], converted to 80-bit, onto the FPU stack.
 * @sa iemOp_fld_m64r */
FNIEMOP_DEF_1(iemOp_fld_m32r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fld_m32r, "fld m32r");

    IEM_MC_BEGIN(2, 3, 0, 0);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
    IEM_MC_LOCAL(RTFLOAT32U, r32Val);
    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
    IEM_MC_ARG_LOCAL_REF(PCRTFLOAT32U, pr32Val, r32Val, 1);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_R32(r32Val, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    IEM_MC_PREPARE_FPU_USAGE();
    /* Register 7 relative to TOP is the slot the push will land in. */
    IEM_MC_IF_FPUREG_IS_EMPTY(7) {
        IEM_MC_CALL_FPU_AIMPL_2(iemAImpl_fld_r80_from_r32, pFpuRes, pr32Val);
        IEM_MC_PUSH_FPU_RESULT_MEM_OP(FpuRes, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_PUSH_OVERFLOW_MEM_OP(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
9109
9110
/** Opcode 0xd9 !11/2 mem32real
 * FST m32r: store ST0 to [mem32real] (rounded per FCW).  The destination is
 * mapped for write up front; on stack underflow a negative QNaN is stored
 * instead when the invalid-operation exception is masked (FCW.IM). */
FNIEMOP_DEF_1(iemOp_fst_m32r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fst_m32r, "fst m32r");
    IEM_MC_BEGIN(3, 2, 0, 0);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(PRTFLOAT32U, pr32Dst, 1);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_MEM_MAP(pr32Dst, IEM_ACCESS_DATA_W, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1 /*arg*/);
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0) {
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fst_r80_to_r32, pu16Fsw, pr32Dst, pr80Value);
        /* Commit is conditional on FSW (no unmasked exception pending). */
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pr32Dst, IEM_ACCESS_DATA_W, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_IF_FCW_IM() {
            IEM_MC_STORE_MEM_NEG_QNAN_R32_BY_REF(pr32Dst);
            IEM_MC_MEM_COMMIT_AND_UNMAP(pr32Dst, IEM_ACCESS_DATA_W);
        } IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
9144
9145
/** Opcode 0xd9 !11/3
 * FSTP m32r: like FST m32r but pops the register stack afterwards. */
FNIEMOP_DEF_1(iemOp_fstp_m32r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fstp_m32r, "fstp m32r");
    IEM_MC_BEGIN(3, 2, 0, 0);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(PRTFLOAT32U, pr32Dst, 1);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_MEM_MAP(pr32Dst, IEM_ACCESS_DATA_W, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1 /*arg*/);
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0) {
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fst_r80_to_r32, pu16Fsw, pr32Dst, pr80Value);
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pr32Dst, IEM_ACCESS_DATA_W, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        /* Underflow: store -QNaN if IM is masked, then raise underflow + pop. */
        IEM_MC_IF_FCW_IM() {
            IEM_MC_STORE_MEM_NEG_QNAN_R32_BY_REF(pr32Dst);
            IEM_MC_MEM_COMMIT_AND_UNMAP(pr32Dst, IEM_ACCESS_DATA_W);
        } IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
9179
9180
/** Opcode 0xd9 !11/4
 * FLDENV m14/28byte: load the FPU environment; the 14 vs 28 byte layout is
 * selected by the effective operand size and handled in iemCImpl_fldenv. */
FNIEMOP_DEF_1(iemOp_fldenv, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fldenv, "fldenv m14/28byte");
    IEM_MC_BEGIN(3, 0, 0, 0);
    IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize, /*=*/ pVCpu->iem.s.enmEffOpSize, 0);
    IEM_MC_ARG(uint8_t, iEffSeg, 1);
    IEM_MC_ARG(RTGCPTR, GCPtrEffSrc, 2);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
    IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
    IEM_MC_CALL_CIMPL_3(IEM_CIMPL_F_FPU, iemCImpl_fldenv, enmEffOpSize, iEffSeg, GCPtrEffSrc);
    IEM_MC_END();
}
9197
9198
9199/** Opcode 0xd9 !11/5 */
9200FNIEMOP_DEF_1(iemOp_fldcw, uint8_t, bRm)
9201{
9202 IEMOP_MNEMONIC(fldcw_m2byte, "fldcw m2byte");
9203 IEM_MC_BEGIN(1, 1, 0, 0);
9204 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
9205 IEM_MC_ARG(uint16_t, u16Fsw, 0);
9206 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
9207 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9208 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
9209 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
9210 IEM_MC_FETCH_MEM_U16(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
9211 IEM_MC_CALL_CIMPL_1(IEM_CIMPL_F_FPU, iemCImpl_fldcw, u16Fsw);
9212 IEM_MC_END();
9213}
9214
9215
/** Opcode 0xd9 !11/6
 * FNSTENV m14/28byte: store the FPU environment (no pending-exception check,
 * hence the FN prefix); layout selected by effective operand size. */
FNIEMOP_DEF_1(iemOp_fnstenv, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fstenv, "fstenv m14/m28byte");
    IEM_MC_BEGIN(3, 0, 0, 0);
    IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize, /*=*/ pVCpu->iem.s.enmEffOpSize, 0);
    IEM_MC_ARG(uint8_t, iEffSeg, 1);
    IEM_MC_ARG(RTGCPTR, GCPtrEffDst, 2);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_ACTUALIZE_FPU_STATE_FOR_READ();
    IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
    IEM_MC_CALL_CIMPL_3(IEM_CIMPL_F_FPU, iemCImpl_fnstenv, enmEffOpSize, iEffSeg, GCPtrEffDst);
    IEM_MC_END();
}
9232
9233
/** Opcode 0xd9 !11/7
 * FNSTCW m2byte: store the FPU control word to memory. */
FNIEMOP_DEF_1(iemOp_fnstcw, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fnstcw_m2byte, "fnstcw m2byte");
    IEM_MC_BEGIN(2, 0, 0, 0);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
    IEM_MC_LOCAL(uint16_t, u16Fcw);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_ACTUALIZE_FPU_STATE_FOR_READ();
    IEM_MC_FETCH_FCW(u16Fcw);
    IEM_MC_STORE_MEM_U16(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u16Fcw);
    IEM_MC_ADVANCE_RIP_AND_FINISH(); /* C0-C3 are documented as undefined, we leave them unmodified. */
    IEM_MC_END();
}
9250
9251
/** Opcode 0xd9 0xd0, 0xd9 0xd8-0xdf, ++?.
 * FNOP: does nothing except the usual FPU bookkeeping (FPUIP/FOP update). */
FNIEMOP_DEF(iemOp_fnop)
{
    IEMOP_MNEMONIC(fnop, "fnop");
    IEM_MC_BEGIN(0, 0, 0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
    /** @todo Testcase: looks like FNOP leaves FOP alone but updates FPUIP. Could be
     *        intel optimizations. Investigate. */
    IEM_MC_UPDATE_FPU_OPCODE_IP(pVCpu->iem.s.uFpuOpcode);
    IEM_MC_ADVANCE_RIP_AND_FINISH(); /* C0-C3 are documented as undefined, we leave them unmodified. */
    IEM_MC_END();
}
9267
9268
/** Opcode 0xd9 11/0 stN
 * FLD STn: push a copy of STn onto the FPU stack. */
FNIEMOP_DEF_1(iemOp_fld_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fld_stN, "fld stN");
    /** @todo Testcase: Check if this raises \#MF? Intel mentioned it not. AMD
     *        indicates that it does. */
    IEM_MC_BEGIN(0, 2, 0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_LOCAL(PCRTFLOAT80U, pr80Value);
    IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, IEM_GET_MODRM_RM_8(bRm)) {
        IEM_MC_SET_FPU_RESULT(FpuRes, 0 /*FSW*/, pr80Value);  /* snapshot the source before pushing */
        IEM_MC_PUSH_FPU_RESULT(FpuRes, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_PUSH_UNDERFLOW(pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();

    IEM_MC_ADVANCE_RIP_AND_FINISH();
    IEM_MC_END();
}
9293
9294
/** Opcode 0xd9 11/3 stN
 * FXCH STn: exchange ST0 and STn.  The underflow case (either register
 * empty) is delegated to a C implementation. */
FNIEMOP_DEF_1(iemOp_fxch_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fxch_stN, "fxch stN");
    /** @todo Testcase: Check if this raises \#MF? Intel mentioned it not. AMD
     *        indicates that it does. */
    IEM_MC_BEGIN(2, 3, 0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_LOCAL(PCRTFLOAT80U, pr80Value1);
    IEM_MC_LOCAL(PCRTFLOAT80U, pr80Value2);
    IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
    IEM_MC_ARG_CONST(uint8_t, iStReg, /*=*/ IEM_GET_MODRM_RM_8(bRm), 0);
    IEM_MC_ARG_CONST(uint16_t, uFpuOpcode, /*=*/ pVCpu->iem.s.uFpuOpcode, 1);
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80(pr80Value1, 0, pr80Value2, IEM_GET_MODRM_RM_8(bRm)) {
        /* Swap via FpuRes: STn's old value -> ST0 (C1 cleared by result), ST0's old value -> STn. */
        IEM_MC_SET_FPU_RESULT(FpuRes, X86_FSW_C1, pr80Value2);
        IEM_MC_STORE_FPUREG_R80_SRC_REF(IEM_GET_MODRM_RM_8(bRm), pr80Value1);
        IEM_MC_STORE_FPU_RESULT(FpuRes, 0, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_CALL_CIMPL_2(IEM_CIMPL_F_FPU, iemCImpl_fxch_underflow, iStReg, uFpuOpcode);
    } IEM_MC_ENDIF();

    IEM_MC_ADVANCE_RIP_AND_FINISH();
    IEM_MC_END();
}
9323
9324
/** Opcode 0xd9 11/4, 0xdd 11/2.
 * FSTP STn: copy ST0 to STn, then pop.  The iDstReg == 0 case only needs to
 * validate ST0 and pop. */
FNIEMOP_DEF_1(iemOp_fstp_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fstp_st0_stN, "fstp st0,stN");

    /* fstp st0, st0 is frequently used as an official 'ffreep st0' sequence. */
    uint8_t const iDstReg = IEM_GET_MODRM_RM_8(bRm);
    if (!iDstReg)
    {
        IEM_MC_BEGIN(0, 1, 0, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_LOCAL_CONST(uint16_t, u16Fsw, /*=*/ 0);
        IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
        IEM_MC_MAYBE_RAISE_FPU_XCPT();

        IEM_MC_PREPARE_FPU_USAGE();
        IEM_MC_IF_FPUREG_NOT_EMPTY(0) {
            IEM_MC_UPDATE_FSW_THEN_POP(u16Fsw, pVCpu->iem.s.uFpuOpcode);   /* no data copy needed */
        } IEM_MC_ELSE() {
            IEM_MC_FPU_STACK_UNDERFLOW_THEN_POP(0, pVCpu->iem.s.uFpuOpcode);
        } IEM_MC_ENDIF();

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        IEM_MC_BEGIN(0, 2, 0, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_LOCAL(PCRTFLOAT80U, pr80Value);
        IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
        IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
        IEM_MC_MAYBE_RAISE_FPU_XCPT();

        IEM_MC_PREPARE_FPU_USAGE();
        IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0) {
            IEM_MC_SET_FPU_RESULT(FpuRes, 0 /*FSW*/, pr80Value);
            IEM_MC_STORE_FPU_RESULT_THEN_POP(FpuRes, iDstReg, pVCpu->iem.s.uFpuOpcode);
        } IEM_MC_ELSE() {
            IEM_MC_FPU_STACK_UNDERFLOW_THEN_POP(iDstReg, pVCpu->iem.s.uFpuOpcode);
        } IEM_MC_ENDIF();

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}
9371
9372
9373/**
9374 * Common worker for FPU instructions working on ST0 and replaces it with the
9375 * result, i.e. unary operators.
9376 *
9377 * @param pfnAImpl Pointer to the instruction implementation (assembly).
9378 */
9379FNIEMOP_DEF_1(iemOpHlpFpu_st0, PFNIEMAIMPLFPUR80UNARY, pfnAImpl)
9380{
9381 IEM_MC_BEGIN(2, 1, 0, 0);
9382 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9383 IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
9384 IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
9385 IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 1);
9386
9387 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
9388 IEM_MC_MAYBE_RAISE_FPU_XCPT();
9389 IEM_MC_PREPARE_FPU_USAGE();
9390 IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0) {
9391 IEM_MC_CALL_FPU_AIMPL_2(pfnAImpl, pFpuRes, pr80Value);
9392 IEM_MC_STORE_FPU_RESULT(FpuRes, 0, pVCpu->iem.s.uFpuOpcode);
9393 } IEM_MC_ELSE() {
9394 IEM_MC_FPU_STACK_UNDERFLOW(0, pVCpu->iem.s.uFpuOpcode);
9395 } IEM_MC_ENDIF();
9396 IEM_MC_ADVANCE_RIP_AND_FINISH();
9397
9398 IEM_MC_END();
9399}
9400
9401
/** Opcode 0xd9 0xe0. FCHS: ST0 = -ST0. */
FNIEMOP_DEF(iemOp_fchs)
{
    IEMOP_MNEMONIC(fchs_st0, "fchs st0");
    return FNIEMOP_CALL_1(iemOpHlpFpu_st0, iemAImpl_fchs_r80);
}
9408
9409
/** Opcode 0xd9 0xe1. FABS: ST0 = |ST0|. */
FNIEMOP_DEF(iemOp_fabs)
{
    IEMOP_MNEMONIC(fabs_st0, "fabs st0");
    return FNIEMOP_CALL_1(iemOpHlpFpu_st0, iemAImpl_fabs_r80);
}
9416
9417
/** Opcode 0xd9 0xe4. FTST: compare ST0 with +0.0, setting C0/C2/C3 only. */
FNIEMOP_DEF(iemOp_ftst)
{
    IEMOP_MNEMONIC(ftst_st0, "ftst st0");
    IEM_MC_BEGIN(2, 1, 0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 1);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0) {
        IEM_MC_CALL_FPU_AIMPL_2(iemAImpl_ftst_r80, pu16Fsw, pr80Value);
        IEM_MC_UPDATE_FSW(u16Fsw, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_UNDERFLOW(UINT8_MAX, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
9441
9442
/** Opcode 0xd9 0xe5. FXAM: classify ST0 into C0/C2/C3 (+ sign in C1).
 * Note: unlike the other workers this references the register unconditionally
 * (no empty check) - FXAM must classify empty registers too. */
FNIEMOP_DEF(iemOp_fxam)
{
    IEMOP_MNEMONIC(fxam_st0, "fxam st0");
    IEM_MC_BEGIN(2, 1, 0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 1);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_REF_FPUREG(pr80Value, 0);
    IEM_MC_CALL_FPU_AIMPL_2(iemAImpl_fxam_r80, pu16Fsw, pr80Value);
    IEM_MC_UPDATE_FSW(u16Fsw, pVCpu->iem.s.uFpuOpcode);
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
9463
9464
9465/**
9466 * Common worker for FPU instructions pushing a constant onto the FPU stack.
9467 *
9468 * @param pfnAImpl Pointer to the instruction implementation (assembly).
9469 */
9470FNIEMOP_DEF_1(iemOpHlpFpuPushConstant, PFNIEMAIMPLFPUR80LDCONST, pfnAImpl)
9471{
9472 IEM_MC_BEGIN(1, 1, 0, 0);
9473 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9474 IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
9475 IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
9476
9477 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
9478 IEM_MC_MAYBE_RAISE_FPU_XCPT();
9479 IEM_MC_PREPARE_FPU_USAGE();
9480 IEM_MC_IF_FPUREG_IS_EMPTY(7) {
9481 IEM_MC_CALL_FPU_AIMPL_1(pfnAImpl, pFpuRes);
9482 IEM_MC_PUSH_FPU_RESULT(FpuRes, pVCpu->iem.s.uFpuOpcode);
9483 } IEM_MC_ELSE() {
9484 IEM_MC_FPU_STACK_PUSH_OVERFLOW(pVCpu->iem.s.uFpuOpcode);
9485 } IEM_MC_ENDIF();
9486 IEM_MC_ADVANCE_RIP_AND_FINISH();
9487
9488 IEM_MC_END();
9489}
9490
9491
/** Opcode 0xd9 0xe8. FLD1: push +1.0. */
FNIEMOP_DEF(iemOp_fld1)
{
    IEMOP_MNEMONIC(fld1, "fld1");
    return FNIEMOP_CALL_1(iemOpHlpFpuPushConstant, iemAImpl_fld1);
}
9498
9499
/** Opcode 0xd9 0xe9. FLDL2T: push log2(10). */
FNIEMOP_DEF(iemOp_fldl2t)
{
    IEMOP_MNEMONIC(fldl2t, "fldl2t");
    return FNIEMOP_CALL_1(iemOpHlpFpuPushConstant, iemAImpl_fldl2t);
}
9506
9507
/** Opcode 0xd9 0xea. FLDL2E: push log2(e). */
FNIEMOP_DEF(iemOp_fldl2e)
{
    IEMOP_MNEMONIC(fldl2e, "fldl2e");
    return FNIEMOP_CALL_1(iemOpHlpFpuPushConstant, iemAImpl_fldl2e);
}
9514
/** Opcode 0xd9 0xeb. FLDPI: push pi. */
FNIEMOP_DEF(iemOp_fldpi)
{
    IEMOP_MNEMONIC(fldpi, "fldpi");
    return FNIEMOP_CALL_1(iemOpHlpFpuPushConstant, iemAImpl_fldpi);
}
9521
9522
/** Opcode 0xd9 0xec. FLDLG2: push log10(2). */
FNIEMOP_DEF(iemOp_fldlg2)
{
    IEMOP_MNEMONIC(fldlg2, "fldlg2");
    return FNIEMOP_CALL_1(iemOpHlpFpuPushConstant, iemAImpl_fldlg2);
}
9529
/** Opcode 0xd9 0xed. FLDLN2: push ln(2). */
FNIEMOP_DEF(iemOp_fldln2)
{
    IEMOP_MNEMONIC(fldln2, "fldln2");
    return FNIEMOP_CALL_1(iemOpHlpFpuPushConstant, iemAImpl_fldln2);
}
9536
9537
/** Opcode 0xd9 0xee. FLDZ: push +0.0. */
FNIEMOP_DEF(iemOp_fldz)
{
    IEMOP_MNEMONIC(fldz, "fldz");
    return FNIEMOP_CALL_1(iemOpHlpFpuPushConstant, iemAImpl_fldz);
}
9544
9545
/** Opcode 0xd9 0xf0.
 *
 * F2XM1: ST0 = 2^ST0 - 1.  The instruction works on values +1.0 thru -1.0,
 * currently (the range on 287 & 8087 was +0.5 thru 0.0 according to docs).
 * In addition it does appear to produce proper results for +Inf and -Inf.
 *
 * This is probably useful in the implementation of pow() and similar.
 */
FNIEMOP_DEF(iemOp_f2xm1)
{
    IEMOP_MNEMONIC(f2xm1_st0, "f2xm1 st0");
    return FNIEMOP_CALL_1(iemOpHlpFpu_st0, iemAImpl_f2xm1_r80);
}
9559
9560
9561/**
9562 * Common worker for FPU instructions working on STn and ST0, storing the result
9563 * in STn, and popping the stack unless IE, DE or ZE was raised.
9564 *
9565 * @param bRm Mod R/M byte.
9566 * @param pfnAImpl Pointer to the instruction implementation (assembly).
9567 */
9568FNIEMOP_DEF_2(iemOpHlpFpu_stN_st0_pop, uint8_t, bRm, PFNIEMAIMPLFPUR80, pfnAImpl)
9569{
9570 IEM_MC_BEGIN(3, 1, 0, 0);
9571 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9572 IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
9573 IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
9574 IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
9575 IEM_MC_ARG(PCRTFLOAT80U, pr80Value2, 2);
9576
9577 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
9578 IEM_MC_MAYBE_RAISE_FPU_XCPT();
9579
9580 IEM_MC_PREPARE_FPU_USAGE();
9581 IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80(pr80Value1, IEM_GET_MODRM_RM_8(bRm), pr80Value2, 0) {
9582 IEM_MC_CALL_FPU_AIMPL_3(pfnAImpl, pFpuRes, pr80Value1, pr80Value2);
9583 IEM_MC_STORE_FPU_RESULT_THEN_POP(FpuRes, IEM_GET_MODRM_RM_8(bRm), pVCpu->iem.s.uFpuOpcode);
9584 } IEM_MC_ELSE() {
9585 IEM_MC_FPU_STACK_UNDERFLOW_THEN_POP(IEM_GET_MODRM_RM_8(bRm), pVCpu->iem.s.uFpuOpcode);
9586 } IEM_MC_ENDIF();
9587 IEM_MC_ADVANCE_RIP_AND_FINISH();
9588
9589 IEM_MC_END();
9590}
9591
9592
/** Opcode 0xd9 0xf1. FYL2X: ST1 = ST1 * log2(ST0), then pop. */
FNIEMOP_DEF(iemOp_fyl2x)
{
    IEMOP_MNEMONIC(fyl2x_st0, "fyl2x st1,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0_pop, 1, iemAImpl_fyl2x_r80_by_r80);
}
9599
9600
9601/**
9602 * Common worker for FPU instructions working on ST0 and having two outputs, one
9603 * replacing ST0 and one pushed onto the stack.
9604 *
9605 * @param pfnAImpl Pointer to the instruction implementation (assembly).
9606 */
9607FNIEMOP_DEF_1(iemOpHlpFpuReplace_st0_push, PFNIEMAIMPLFPUR80UNARYTWO, pfnAImpl)
9608{
9609 IEM_MC_BEGIN(2, 1, 0, 0);
9610 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9611 IEM_MC_LOCAL(IEMFPURESULTTWO, FpuResTwo);
9612 IEM_MC_ARG_LOCAL_REF(PIEMFPURESULTTWO, pFpuResTwo, FpuResTwo, 0);
9613 IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 1);
9614
9615 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
9616 IEM_MC_MAYBE_RAISE_FPU_XCPT();
9617 IEM_MC_PREPARE_FPU_USAGE();
9618 IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0) {
9619 IEM_MC_CALL_FPU_AIMPL_2(pfnAImpl, pFpuResTwo, pr80Value);
9620 IEM_MC_PUSH_FPU_RESULT_TWO(FpuResTwo, pVCpu->iem.s.uFpuOpcode);
9621 } IEM_MC_ELSE() {
9622 IEM_MC_FPU_STACK_PUSH_UNDERFLOW_TWO(pVCpu->iem.s.uFpuOpcode);
9623 } IEM_MC_ENDIF();
9624 IEM_MC_ADVANCE_RIP_AND_FINISH();
9625
9626 IEM_MC_END();
9627}
9628
9629
/** Opcode 0xd9 0xf2. FPTAN: ST0 = tan(ST0), then push 1.0. */
FNIEMOP_DEF(iemOp_fptan)
{
    IEMOP_MNEMONIC(fptan_st0, "fptan st0");
    return FNIEMOP_CALL_1(iemOpHlpFpuReplace_st0_push, iemAImpl_fptan_r80_r80);
}
9636
9637
/** Opcode 0xd9 0xf3. FPATAN: ST1 = arctan(ST1/ST0), then pop. */
FNIEMOP_DEF(iemOp_fpatan)
{
    IEMOP_MNEMONIC(fpatan_st1_st0, "fpatan st1,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0_pop, 1, iemAImpl_fpatan_r80_by_r80);
}
9644
9645
/** Opcode 0xd9 0xf4. FXTRACT: split ST0 into exponent (ST1) and significand (ST0). */
FNIEMOP_DEF(iemOp_fxtract)
{
    IEMOP_MNEMONIC(fxtract_st0, "fxtract st0");
    return FNIEMOP_CALL_1(iemOpHlpFpuReplace_st0_push, iemAImpl_fxtract_r80_r80);
}
9652
9653
/** Opcode 0xd9 0xf5. FPREM1: ST0 = IEEE remainder of ST0 / ST1. */
FNIEMOP_DEF(iemOp_fprem1)
{
    IEMOP_MNEMONIC(fprem1_st0_st1, "fprem1 st0,st1");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_stN, 1, iemAImpl_fprem1_r80_by_r80);
}
9660
9661
/** Opcode 0xd9 0xf6 - fdecstp: decrements the FPU stack TOP pointer only. */
FNIEMOP_DEF(iemOp_fdecstp)
{
    IEMOP_MNEMONIC(fdecstp, "fdecstp");
    /* Note! C0, C2 and C3 are documented as undefined, we clear them. */
    /** @todo Testcase: Check whether FOP, FPUIP and FPUCS are affected by
     *        FINCSTP and FDECSTP. */
    IEM_MC_BEGIN(0, 0, 0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    /* No register contents change; only TOP moves and FSW is updated (the
       zero constant clears C0/C2/C3 per the note above). */
    IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
    IEM_MC_FPU_STACK_DEC_TOP();
    IEM_MC_UPDATE_FSW_CONST(0, pVCpu->iem.s.uFpuOpcode);

    IEM_MC_ADVANCE_RIP_AND_FINISH();
    IEM_MC_END();
}
9682
9683
/** Opcode 0xd9 0xf7 - fincstp: increments the FPU stack TOP pointer only. */
FNIEMOP_DEF(iemOp_fincstp)
{
    IEMOP_MNEMONIC(fincstp, "fincstp");
    /* Note! C0, C2 and C3 are documented as undefined, we clear them. */
    /** @todo Testcase: Check whether FOP, FPUIP and FPUCS are affected by
     *        FINCSTP and FDECSTP. */
    IEM_MC_BEGIN(0, 0, 0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    /* Mirror image of fdecstp: only TOP moves and FSW is updated. */
    IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
    IEM_MC_FPU_STACK_INC_TOP();
    IEM_MC_UPDATE_FSW_CONST(0, pVCpu->iem.s.uFpuOpcode);

    IEM_MC_ADVANCE_RIP_AND_FINISH();
    IEM_MC_END();
}
9704
9705
/** Opcode 0xd9 0xf8 - fprem: ST0 by ST1, result stored in ST0 (no pop). */
FNIEMOP_DEF(iemOp_fprem)
{
    IEMOP_MNEMONIC(fprem_st0_st1, "fprem st0,st1");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_stN, 1, iemAImpl_fprem_r80_by_r80);
}


/** Opcode 0xd9 0xf9 - fyl2xp1: ST1 by ST0, popping when done. */
FNIEMOP_DEF(iemOp_fyl2xp1)
{
    IEMOP_MNEMONIC(fyl2xp1_st1_st0, "fyl2xp1 st1,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0_pop, 1, iemAImpl_fyl2xp1_r80_by_r80);
}


/** Opcode 0xd9 0xfa - fsqrt: unary operation on ST0. */
FNIEMOP_DEF(iemOp_fsqrt)
{
    IEMOP_MNEMONIC(fsqrt_st0, "fsqrt st0");
    return FNIEMOP_CALL_1(iemOpHlpFpu_st0, iemAImpl_fsqrt_r80);
}


/** Opcode 0xd9 0xfb - fsincos: replaces ST0 and pushes a second result. */
FNIEMOP_DEF(iemOp_fsincos)
{
    IEMOP_MNEMONIC(fsincos_st0, "fsincos st0");
    return FNIEMOP_CALL_1(iemOpHlpFpuReplace_st0_push, iemAImpl_fsincos_r80_r80);
}


/** Opcode 0xd9 0xfc - frndint: unary operation on ST0. */
FNIEMOP_DEF(iemOp_frndint)
{
    IEMOP_MNEMONIC(frndint_st0, "frndint st0");
    return FNIEMOP_CALL_1(iemOpHlpFpu_st0, iemAImpl_frndint_r80);
}


/** Opcode 0xd9 0xfd - fscale: ST0 by ST1, result stored in ST0 (no pop). */
FNIEMOP_DEF(iemOp_fscale)
{
    IEMOP_MNEMONIC(fscale_st0_st1, "fscale st0,st1");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_stN, 1, iemAImpl_fscale_r80_by_r80);
}


/** Opcode 0xd9 0xfe - fsin: unary operation on ST0. */
FNIEMOP_DEF(iemOp_fsin)
{
    IEMOP_MNEMONIC(fsin_st0, "fsin st0");
    return FNIEMOP_CALL_1(iemOpHlpFpu_st0, iemAImpl_fsin_r80);
}


/** Opcode 0xd9 0xff - fcos: unary operation on ST0. */
FNIEMOP_DEF(iemOp_fcos)
{
    IEMOP_MNEMONIC(fcos_st0, "fcos st0");
    return FNIEMOP_CALL_1(iemOpHlpFpu_st0, iemAImpl_fcos_r80);
}
9768
9769
/** Used by iemOp_EscF1 to dispatch the register-form 0xd9 opcodes 0xe0..0xff.
 * Indexed by (bRm - 0xe0); invalid encodings map to iemOp_Invalid, so the
 * table has no NULL entries. */
IEM_STATIC const PFNIEMOP g_apfnEscF1_E0toFF[32] =
{
    /* 0xe0 */  iemOp_fchs,
    /* 0xe1 */  iemOp_fabs,
    /* 0xe2 */  iemOp_Invalid,
    /* 0xe3 */  iemOp_Invalid,
    /* 0xe4 */  iemOp_ftst,
    /* 0xe5 */  iemOp_fxam,
    /* 0xe6 */  iemOp_Invalid,
    /* 0xe7 */  iemOp_Invalid,
    /* 0xe8 */  iemOp_fld1,
    /* 0xe9 */  iemOp_fldl2t,
    /* 0xea */  iemOp_fldl2e,
    /* 0xeb */  iemOp_fldpi,
    /* 0xec */  iemOp_fldlg2,
    /* 0xed */  iemOp_fldln2,
    /* 0xee */  iemOp_fldz,
    /* 0xef */  iemOp_Invalid,
    /* 0xf0 */  iemOp_f2xm1,
    /* 0xf1 */  iemOp_fyl2x,
    /* 0xf2 */  iemOp_fptan,
    /* 0xf3 */  iemOp_fpatan,
    /* 0xf4 */  iemOp_fxtract,
    /* 0xf5 */  iemOp_fprem1,
    /* 0xf6 */  iemOp_fdecstp,
    /* 0xf7 */  iemOp_fincstp,
    /* 0xf8 */  iemOp_fprem,
    /* 0xf9 */  iemOp_fyl2xp1,
    /* 0xfa */  iemOp_fsqrt,
    /* 0xfb */  iemOp_fsincos,
    /* 0xfc */  iemOp_frndint,
    /* 0xfd */  iemOp_fscale,
    /* 0xfe */  iemOp_fsin,
    /* 0xff */  iemOp_fcos
};
9806
9807
/**
 * @opcode  0xd9
 *
 * Escape opcode 0xd9: dispatches on the ModR/M byte.  Register form is
 * dispatched on the reg field (with /4../7 going via g_apfnEscF1_E0toFF);
 * memory form handles m32r loads/stores and FPU environment/control words.
 */
FNIEMOP_DEF(iemOp_EscF1)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    /* Record the FPU opcode (low 3 bits of 0xd9 combined with the ModR/M byte). */
    pVCpu->iem.s.uFpuOpcode = RT_MAKE_U16(bRm, 0xd9 & 0x7);

    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        switch (IEM_GET_MODRM_REG_8(bRm))
        {
            case 0: return FNIEMOP_CALL_1(iemOp_fld_stN, bRm);
            case 1: return FNIEMOP_CALL_1(iemOp_fxch_stN, bRm);
            case 2:
                if (bRm == 0xd0)
                    return FNIEMOP_CALL(iemOp_fnop);
                IEMOP_RAISE_INVALID_OPCODE_RET();
            case 3: return FNIEMOP_CALL_1(iemOp_fstp_stN, bRm); /* Reserved. Intel behavior seems to be FSTP ST(i) though. */
            case 4:
            case 5:
            case 6:
            case 7:
                /* Register mode guarantees bRm >= 0xc0, and reg >= 4 implies bRm >= 0xe0. */
                Assert((unsigned)bRm - 0xe0U < RT_ELEMENTS(g_apfnEscF1_E0toFF));
                return FNIEMOP_CALL(g_apfnEscF1_E0toFF[bRm - 0xe0]);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        switch (IEM_GET_MODRM_REG_8(bRm))
        {
            case 0: return FNIEMOP_CALL_1(iemOp_fld_m32r, bRm);
            case 1: IEMOP_RAISE_INVALID_OPCODE_RET();
            case 2: return FNIEMOP_CALL_1(iemOp_fst_m32r, bRm);
            case 3: return FNIEMOP_CALL_1(iemOp_fstp_m32r, bRm);
            case 4: return FNIEMOP_CALL_1(iemOp_fldenv, bRm);
            case 5: return FNIEMOP_CALL_1(iemOp_fldcw, bRm);
            case 6: return FNIEMOP_CALL_1(iemOp_fnstenv, bRm);
            case 7: return FNIEMOP_CALL_1(iemOp_fnstcw, bRm);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
9852
9853
/** Opcode 0xda 11/0 - fcmovb: copies ST(i) to ST0 when CF is set. */
FNIEMOP_DEF_1(iemOp_fcmovb_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fcmovb_st0_stN, "fcmovb st0,stN");
    IEM_MC_BEGIN(0, 1, 0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_LOCAL(PCRTFLOAT80U, pr80ValueN);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_PREPARE_FPU_USAGE();
    /* Both ST(i) and ST0 must be non-empty; the move itself depends on EFLAGS.CF. */
    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80_FIRST(pr80ValueN, IEM_GET_MODRM_RM_8(bRm), 0) {
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
            IEM_MC_STORE_FPUREG_R80_SRC_REF(0, pr80ValueN);
        } IEM_MC_ENDIF();
        IEM_MC_UPDATE_FPU_OPCODE_IP(pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_UNDERFLOW(0, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
9878
9879
/** Opcode 0xda 11/1 - fcmove: copies ST(i) to ST0 when ZF is set. */
FNIEMOP_DEF_1(iemOp_fcmove_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fcmove_st0_stN, "fcmove st0,stN");
    IEM_MC_BEGIN(0, 1, 0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_LOCAL(PCRTFLOAT80U, pr80ValueN);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_PREPARE_FPU_USAGE();
    /* Same structure as fcmovb, keyed on EFLAGS.ZF instead. */
    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80_FIRST(pr80ValueN, IEM_GET_MODRM_RM_8(bRm), 0) {
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
            IEM_MC_STORE_FPUREG_R80_SRC_REF(0, pr80ValueN);
        } IEM_MC_ENDIF();
        IEM_MC_UPDATE_FPU_OPCODE_IP(pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_UNDERFLOW(0, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
9904
9905
/** Opcode 0xda 11/2 - fcmovbe: copies ST(i) to ST0 when CF or ZF is set. */
FNIEMOP_DEF_1(iemOp_fcmovbe_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fcmovbe_st0_stN, "fcmovbe st0,stN");
    IEM_MC_BEGIN(0, 1, 0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_LOCAL(PCRTFLOAT80U, pr80ValueN);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_PREPARE_FPU_USAGE();
    /* Same structure as fcmovb, keyed on CF|ZF (below-or-equal). */
    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80_FIRST(pr80ValueN, IEM_GET_MODRM_RM_8(bRm), 0) {
        IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
            IEM_MC_STORE_FPUREG_R80_SRC_REF(0, pr80ValueN);
        } IEM_MC_ENDIF();
        IEM_MC_UPDATE_FPU_OPCODE_IP(pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_UNDERFLOW(0, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
9930
9931
/** Opcode 0xda 11/3 - fcmovu: copies ST(i) to ST0 when PF is set. */
FNIEMOP_DEF_1(iemOp_fcmovu_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fcmovu_st0_stN, "fcmovu st0,stN");
    IEM_MC_BEGIN(0, 1, 0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_LOCAL(PCRTFLOAT80U, pr80ValueN);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_PREPARE_FPU_USAGE();
    /* Same structure as fcmovb, keyed on EFLAGS.PF (the 'unordered' condition). */
    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80_FIRST(pr80ValueN, IEM_GET_MODRM_RM_8(bRm), 0) {
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
            IEM_MC_STORE_FPUREG_R80_SRC_REF(0, pr80ValueN);
        } IEM_MC_ENDIF();
        IEM_MC_UPDATE_FPU_OPCODE_IP(pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_UNDERFLOW(0, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
9956
9957
/**
 * Common worker for FPU instructions working on ST0 and ST1, only affecting
 * flags, and popping twice when done.
 *
 * Used by FUCOMPP (0xda 0xe9); only FSW is updated, no register is written.
 *
 * @param   pfnAImpl    Pointer to the instruction implementation (assembly).
 */
FNIEMOP_DEF_1(iemOpHlpFpuNoStore_st0_st1_pop_pop, PFNIEMAIMPLFPUR80FSW, pfnAImpl)
{
    IEM_MC_BEGIN(3, 1, 0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value2, 2);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_PREPARE_FPU_USAGE();
    /* Both ST0 and ST1 must hold values; the stack is popped twice either way
       (underflow handling included). */
    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80(pr80Value1, 0, pr80Value2, 1) {
        IEM_MC_CALL_FPU_AIMPL_3(pfnAImpl, pu16Fsw, pr80Value1, pr80Value2);
        IEM_MC_UPDATE_FSW_THEN_POP_POP(u16Fsw, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_UNDERFLOW_THEN_POP_POP(pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
9987
9988
/** Opcode 0xda 0xe9 - fucompp: unordered compare ST0 with ST1, pop twice. */
FNIEMOP_DEF(iemOp_fucompp)
{
    IEMOP_MNEMONIC(fucompp, "fucompp");
    return FNIEMOP_CALL_1(iemOpHlpFpuNoStore_st0_st1_pop_pop, iemAImpl_fucom_r80_by_r80);
}
9995
9996
/**
 * Common worker for FPU instructions working on ST0 and an m32i, and storing
 * the result in ST0.
 *
 * Used by the 0xda memory-form arithmetic instructions (fiadd, fimul, fisub,
 * fisubr, fidiv, fidivr).
 *
 * @param   bRm         Mod R/M byte.
 * @param   pfnAImpl    Pointer to the instruction implementation (assembly).
 */
FNIEMOP_DEF_2(iemOpHlpFpu_st0_m32i, uint8_t, bRm, PFNIEMAIMPLFPUI32, pfnAImpl)
{
    IEM_MC_BEGIN(3, 3, 0, 0);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
    IEM_MC_LOCAL(int32_t, i32Val2);
    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
    IEM_MC_ARG_LOCAL_REF(int32_t const *, pi32Val2, i32Val2, 2);

    /* Effective address must be calculated before decoding completes. */
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_I32(i32Val2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

    IEM_MC_PREPARE_FPU_USAGE();
    /* ST0 must be non-empty; otherwise report stack underflow in ST0. */
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0) {
        IEM_MC_CALL_FPU_AIMPL_3(pfnAImpl, pFpuRes, pr80Value1, pi32Val2);
        IEM_MC_STORE_FPU_RESULT(FpuRes, 0, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_UNDERFLOW(0, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
10032
10033
/** Opcode 0xda !11/0 - fiadd: ST0 += m32i. */
FNIEMOP_DEF_1(iemOp_fiadd_m32i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fiadd_m32i, "fiadd m32i");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32i, bRm, iemAImpl_fiadd_r80_by_i32);
}


/** Opcode 0xda !11/1 - fimul: ST0 *= m32i. */
FNIEMOP_DEF_1(iemOp_fimul_m32i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fimul_m32i, "fimul m32i");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32i, bRm, iemAImpl_fimul_r80_by_i32);
}
10048
10049
/** Opcode 0xda !11/2 - ficom: compare ST0 with m32i, updating FSW only. */
FNIEMOP_DEF_1(iemOp_ficom_m32i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(ficom_st0_m32i, "ficom st0,m32i");

    IEM_MC_BEGIN(3, 3, 0, 0);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_LOCAL(int32_t, i32Val2);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
    IEM_MC_ARG_LOCAL_REF(int32_t const *, pi32Val2, i32Val2, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_I32(i32Val2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

    IEM_MC_PREPARE_FPU_USAGE();
    /* No register store - only FSW gets updated (with the memory operand
       recorded as FPU data pointer). */
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0) {
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_ficom_r80_by_i32, pu16Fsw, pr80Value1, pi32Val2);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
10081
10082
/** Opcode 0xda !11/3 - ficomp: like ficom (m32i) but pops ST0 afterwards. */
FNIEMOP_DEF_1(iemOp_ficomp_m32i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(ficomp_st0_m32i, "ficomp st0,m32i");

    IEM_MC_BEGIN(3, 3, 0, 0);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_LOCAL(int32_t, i32Val2);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
    IEM_MC_ARG_LOCAL_REF(int32_t const *, pi32Val2, i32Val2, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_I32(i32Val2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

    IEM_MC_PREPARE_FPU_USAGE();
    /* Identical to ficom except the FSW update / underflow also pops. */
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0) {
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_ficom_r80_by_i32, pu16Fsw, pr80Value1, pi32Val2);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
10114
10115
/** Opcode 0xda !11/4 - fisub: ST0 -= m32i. */
FNIEMOP_DEF_1(iemOp_fisub_m32i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fisub_m32i, "fisub m32i");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32i, bRm, iemAImpl_fisub_r80_by_i32);
}


/** Opcode 0xda !11/5 - fisubr: ST0 = m32i - ST0 (reversed subtract). */
FNIEMOP_DEF_1(iemOp_fisubr_m32i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fisubr_m32i, "fisubr m32i");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32i, bRm, iemAImpl_fisubr_r80_by_i32);
}


/** Opcode 0xda !11/6 - fidiv: ST0 /= m32i. */
FNIEMOP_DEF_1(iemOp_fidiv_m32i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fidiv_m32i, "fidiv m32i");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32i, bRm, iemAImpl_fidiv_r80_by_i32);
}


/** Opcode 0xda !11/7 - fidivr: ST0 = m32i / ST0 (reversed divide). */
FNIEMOP_DEF_1(iemOp_fidivr_m32i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fidivr_m32i, "fidivr m32i");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32i, bRm, iemAImpl_fidivr_r80_by_i32);
}
10146
10147
/**
 * @opcode  0xda
 *
 * Escape opcode 0xda: register form covers FCMOVB/E/BE/U and FUCOMPP;
 * memory form covers the m32i integer arithmetic instructions.
 */
FNIEMOP_DEF(iemOp_EscF2)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    /* Record the FPU opcode (low 3 bits of 0xda combined with the ModR/M byte). */
    pVCpu->iem.s.uFpuOpcode = RT_MAKE_U16(bRm, 0xda & 0x7);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        switch (IEM_GET_MODRM_REG_8(bRm))
        {
            case 0: return FNIEMOP_CALL_1(iemOp_fcmovb_stN, bRm);
            case 1: return FNIEMOP_CALL_1(iemOp_fcmove_stN, bRm);
            case 2: return FNIEMOP_CALL_1(iemOp_fcmovbe_stN, bRm);
            case 3: return FNIEMOP_CALL_1(iemOp_fcmovu_stN, bRm);
            case 4: IEMOP_RAISE_INVALID_OPCODE_RET();
            case 5:
                /* Only the single 0xe9 encoding of /5 is valid (fucompp). */
                if (bRm == 0xe9)
                    return FNIEMOP_CALL(iemOp_fucompp);
                IEMOP_RAISE_INVALID_OPCODE_RET();
            case 6: IEMOP_RAISE_INVALID_OPCODE_RET();
            case 7: IEMOP_RAISE_INVALID_OPCODE_RET();
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        switch (IEM_GET_MODRM_REG_8(bRm))
        {
            case 0: return FNIEMOP_CALL_1(iemOp_fiadd_m32i, bRm);
            case 1: return FNIEMOP_CALL_1(iemOp_fimul_m32i, bRm);
            case 2: return FNIEMOP_CALL_1(iemOp_ficom_m32i, bRm);
            case 3: return FNIEMOP_CALL_1(iemOp_ficomp_m32i, bRm);
            case 4: return FNIEMOP_CALL_1(iemOp_fisub_m32i, bRm);
            case 5: return FNIEMOP_CALL_1(iemOp_fisubr_m32i, bRm);
            case 6: return FNIEMOP_CALL_1(iemOp_fidiv_m32i, bRm);
            case 7: return FNIEMOP_CALL_1(iemOp_fidivr_m32i, bRm);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
10189
10190
/** Opcode 0xdb !11/0 - fild: load m32i and push it onto the FPU stack. */
FNIEMOP_DEF_1(iemOp_fild_m32i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fild_m32i, "fild m32i");

    IEM_MC_BEGIN(2, 3, 0, 0);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
    IEM_MC_LOCAL(int32_t, i32Val);
    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
    IEM_MC_ARG_LOCAL_REF(int32_t const *, pi32Val, i32Val, 1);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_I32(i32Val, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

    IEM_MC_PREPARE_FPU_USAGE();
    /* A push requires ST7 (the register below TOP) to be free, otherwise
       it is a stack (push) overflow. */
    IEM_MC_IF_FPUREG_IS_EMPTY(7) {
        IEM_MC_CALL_FPU_AIMPL_2(iemAImpl_fild_r80_from_i32, pFpuRes, pi32Val);
        IEM_MC_PUSH_FPU_RESULT_MEM_OP(FpuRes, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_PUSH_OVERFLOW_MEM_OP(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
10221
10222
/** Opcode 0xdb !11/1 - fisttp: store ST0 to m32i with truncation, then pop. */
FNIEMOP_DEF_1(iemOp_fisttp_m32i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fisttp_m32i, "fisttp m32i");
    IEM_MC_BEGIN(3, 2, 0, 0);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(int32_t *, pi32Dst, 1);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    /* Map the destination for writing before touching the FPU state. */
    IEM_MC_MEM_MAP(pi32Dst, IEM_ACCESS_DATA_W, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1 /*arg*/);
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0) {
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fistt_r80_to_i32, pu16Fsw, pi32Dst, pr80Value);
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pi32Dst, IEM_ACCESS_DATA_W, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        /* ST0 empty: with IM masked, store the integer-indefinite value. */
        IEM_MC_IF_FCW_IM() {
            IEM_MC_STORE_MEM_I32_CONST_BY_REF(pi32Dst, INT32_MIN /* (integer indefinite) */);
            IEM_MC_MEM_COMMIT_AND_UNMAP(pi32Dst, IEM_ACCESS_DATA_W);
        } IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
10256
10257
/** Opcode 0xdb !11/2 - fist: store ST0 to m32i (rounded per FCW), no pop. */
FNIEMOP_DEF_1(iemOp_fist_m32i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fist_m32i, "fist m32i");
    IEM_MC_BEGIN(3, 2, 0, 0);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(int32_t *, pi32Dst, 1);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_MEM_MAP(pi32Dst, IEM_ACCESS_DATA_W, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1 /*arg*/);
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0) {
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fist_r80_to_i32, pu16Fsw, pi32Dst, pr80Value);
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pi32Dst, IEM_ACCESS_DATA_W, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        /* ST0 empty: with IM masked, store the integer-indefinite value. */
        IEM_MC_IF_FCW_IM() {
            IEM_MC_STORE_MEM_I32_CONST_BY_REF(pi32Dst, INT32_MIN /* (integer indefinite) */);
            IEM_MC_MEM_COMMIT_AND_UNMAP(pi32Dst, IEM_ACCESS_DATA_W);
        } IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
10291
10292
/** Opcode 0xdb !11/3 - fistp: like fist (m32i) but pops ST0 afterwards. */
FNIEMOP_DEF_1(iemOp_fistp_m32i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fistp_m32i, "fistp m32i");
    IEM_MC_BEGIN(3, 2, 0, 0);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(int32_t *, pi32Dst, 1);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_MEM_MAP(pi32Dst, IEM_ACCESS_DATA_W, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1 /*arg*/);
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0) {
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fist_r80_to_i32, pu16Fsw, pi32Dst, pr80Value);
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pi32Dst, IEM_ACCESS_DATA_W, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        /* ST0 empty: with IM masked, store the integer-indefinite value. */
        IEM_MC_IF_FCW_IM() {
            IEM_MC_STORE_MEM_I32_CONST_BY_REF(pi32Dst, INT32_MIN /* (integer indefinite) */);
            IEM_MC_MEM_COMMIT_AND_UNMAP(pi32Dst, IEM_ACCESS_DATA_W);
        } IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
10326
10327
/** Opcode 0xdb !11/5 - fld: load m80r and push it onto the FPU stack. */
FNIEMOP_DEF_1(iemOp_fld_m80r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fld_m80r, "fld m80r");

    IEM_MC_BEGIN(2, 3, 0, 0);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
    IEM_MC_LOCAL(RTFLOAT80U, r80Val);
    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
    IEM_MC_ARG_LOCAL_REF(PCRTFLOAT80U, pr80Val, r80Val, 1);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_R80(r80Val, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

    IEM_MC_PREPARE_FPU_USAGE();
    /* A push requires ST7 to be free, otherwise it is a stack (push) overflow. */
    IEM_MC_IF_FPUREG_IS_EMPTY(7) {
        IEM_MC_CALL_FPU_AIMPL_2(iemAImpl_fld_r80_from_r80, pFpuRes, pr80Val);
        IEM_MC_PUSH_FPU_RESULT_MEM_OP(FpuRes, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_PUSH_OVERFLOW_MEM_OP(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
10358
10359
/** Opcode 0xdb !11/7 - fstp: store ST0 to m80r, then pop. */
FNIEMOP_DEF_1(iemOp_fstp_m80r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fstp_m80r, "fstp m80r");
    IEM_MC_BEGIN(3, 2, 0, 0);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(PRTFLOAT80U, pr80Dst, 1);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    /* Uses the _EX map variant to pass an explicit size/alignment for the
       10-byte destination (cbAlign=7 per the MEM_MAP_EX contract). */
    IEM_MC_MEM_MAP_EX(pr80Dst, IEM_ACCESS_DATA_W, sizeof(*pr80Dst), pVCpu->iem.s.iEffSeg, GCPtrEffDst, 7 /*cbAlign*/, 1 /*arg*/);
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0) {
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fst_r80_to_r80, pu16Fsw, pr80Dst, pr80Value);
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pr80Dst, IEM_ACCESS_DATA_W, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        /* ST0 empty: with IM masked, store the negative QNaN (real indefinite). */
        IEM_MC_IF_FCW_IM() {
            IEM_MC_STORE_MEM_NEG_QNAN_R80_BY_REF(pr80Dst);
            IEM_MC_MEM_COMMIT_AND_UNMAP(pr80Dst, IEM_ACCESS_DATA_W);
        } IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
10393
10394
/** Opcode 0xdb 11/0 - fcmovnb: copies ST(i) to ST0 when CF is clear. */
FNIEMOP_DEF_1(iemOp_fcmovnb_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fcmovnb_st0_stN, "fcmovnb st0,stN");
    IEM_MC_BEGIN(0, 1, 0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_LOCAL(PCRTFLOAT80U, pr80ValueN);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_PREPARE_FPU_USAGE();
    /* Inverse condition of fcmovb (0xda 11/0). */
    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80_FIRST(pr80ValueN, IEM_GET_MODRM_RM_8(bRm), 0) {
        IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_CF) {
            IEM_MC_STORE_FPUREG_R80_SRC_REF(0, pr80ValueN);
        } IEM_MC_ENDIF();
        IEM_MC_UPDATE_FPU_OPCODE_IP(pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_UNDERFLOW(0, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
10419
10420
/** Opcode 0xdb 11/1 - fcmovne: copies ST(i) to ST0 when ZF is clear. */
FNIEMOP_DEF_1(iemOp_fcmovne_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fcmovne_st0_stN, "fcmovne st0,stN");
    IEM_MC_BEGIN(0, 1, 0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_LOCAL(PCRTFLOAT80U, pr80ValueN);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_PREPARE_FPU_USAGE();
    /* Inverse condition of fcmove (0xda 11/1). */
    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80_FIRST(pr80ValueN, IEM_GET_MODRM_RM_8(bRm), 0) {
        IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_ZF) {
            IEM_MC_STORE_FPUREG_R80_SRC_REF(0, pr80ValueN);
        } IEM_MC_ENDIF();
        IEM_MC_UPDATE_FPU_OPCODE_IP(pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_UNDERFLOW(0, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
10445
10446
/** Opcode 0xdb 11/2 - fcmovnbe: copies ST(i) to ST0 when both CF and ZF are clear. */
FNIEMOP_DEF_1(iemOp_fcmovnbe_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fcmovnbe_st0_stN, "fcmovnbe st0,stN");
    IEM_MC_BEGIN(0, 1, 0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_LOCAL(PCRTFLOAT80U, pr80ValueN);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_PREPARE_FPU_USAGE();
    /* Inverse condition of fcmovbe (0xda 11/2). */
    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80_FIRST(pr80ValueN, IEM_GET_MODRM_RM_8(bRm), 0) {
        IEM_MC_IF_EFL_NO_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
            IEM_MC_STORE_FPUREG_R80_SRC_REF(0, pr80ValueN);
        } IEM_MC_ENDIF();
        IEM_MC_UPDATE_FPU_OPCODE_IP(pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_UNDERFLOW(0, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
10471
10472
/** Opcode 0xdb 11/3 - fcmovnu: copies ST(i) to ST0 when PF is clear.
 * NOTE(review): the function/mnemonic identifier spells "fcmovnnu" with a
 * doubled 'n'; presumably the intended instruction name is FCMOVNU.  The
 * identifiers are left untouched since the dispatcher and stats refer to them. */
FNIEMOP_DEF_1(iemOp_fcmovnnu_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fcmovnnu_st0_stN, "fcmovnnu st0,stN");
    IEM_MC_BEGIN(0, 1, 0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_LOCAL(PCRTFLOAT80U, pr80ValueN);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_PREPARE_FPU_USAGE();
    /* Inverse condition of fcmovu (0xda 11/3). */
    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80_FIRST(pr80ValueN, IEM_GET_MODRM_RM_8(bRm), 0) {
        IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_PF) {
            IEM_MC_STORE_FPUREG_R80_SRC_REF(0, pr80ValueN);
        } IEM_MC_ENDIF();
        IEM_MC_UPDATE_FPU_OPCODE_IP(pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_UNDERFLOW(0, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
10497
10498
/** Opcode 0xdb 0xe0 - fneni: 8087-only interrupt enable; treated as a NOP
 * (only the device-not-available check is performed). */
FNIEMOP_DEF(iemOp_fneni)
{
    IEMOP_MNEMONIC(fneni, "fneni (8087/ign)");
    IEM_MC_BEGIN(0, 0, 0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_ADVANCE_RIP_AND_FINISH();
    IEM_MC_END();
}


/** Opcode 0xdb 0xe1 - fndisi: 8087-only interrupt disable; treated as a NOP. */
FNIEMOP_DEF(iemOp_fndisi)
{
    IEMOP_MNEMONIC(fndisi, "fndisi (8087/ign)");
    IEM_MC_BEGIN(0, 0, 0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_ADVANCE_RIP_AND_FINISH();
    IEM_MC_END();
}
10521
10522
/** Opcode 0xdb 0xe2 - fnclex: clears the FPU exception bits in FSW. */
FNIEMOP_DEF(iemOp_fnclex)
{
    IEMOP_MNEMONIC(fnclex, "fnclex");
    IEM_MC_BEGIN(0, 0, 0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
    IEM_MC_CLEAR_FSW_EX();
    IEM_MC_ADVANCE_RIP_AND_FINISH();
    IEM_MC_END();
}
10535
10536
/** Opcode 0xdb 0xe3 - fninit: reinitializes the FPU without checking for
 * pending exceptions first (deferred to the C implementation). */
FNIEMOP_DEF(iemOp_fninit)
{
    IEMOP_MNEMONIC(fninit, "fninit");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_FPU, iemCImpl_finit, false /*fCheckXcpts*/);
}
10544
10545
/** Opcode 0xdb 0xe4 - fnsetpm: 80287-only; treated as a NOP. */
FNIEMOP_DEF(iemOp_fnsetpm)
{
    IEMOP_MNEMONIC(fnsetpm, "fnsetpm (80287/ign)");   /* set protected mode on fpu. */
    IEM_MC_BEGIN(0, 0, 0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_ADVANCE_RIP_AND_FINISH();
    IEM_MC_END();
}


/** Opcode 0xdb 0xe5 - frstpm: 80287XL-only; raises \#UD here (the NOP
 * variant for old CPUs is kept in the disabled branch). */
FNIEMOP_DEF(iemOp_frstpm)
{
    IEMOP_MNEMONIC(frstpm, "frstpm (80287XL/ign)"); /* reset pm, back to real mode. */
#if 0 /* #UDs on newer CPUs */
    IEM_MC_BEGIN(0, 0, 0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_ADVANCE_RIP_AND_FINISH();
    IEM_MC_END();
    return VINF_SUCCESS;
#else
    IEMOP_RAISE_INVALID_OPCODE_RET();
#endif
}
10573
10574
/** Opcode 0xdb 11/5 - fucomi: unordered compare ST0 with ST(i), setting EFLAGS.
 * The third CIMPL argument packs the pop flag together with the FPU opcode. */
FNIEMOP_DEF_1(iemOp_fucomi_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fucomi_st0_stN, "fucomi st0,stN");
    IEM_MC_DEFER_TO_CIMPL_3_RET(IEM_CIMPL_F_FPU | IEM_CIMPL_F_STATUS_FLAGS,
                                iemCImpl_fcomi_fucomi, IEM_GET_MODRM_RM_8(bRm), true /*fUCmp*/,
                                0 /*fPop*/ | pVCpu->iem.s.uFpuOpcode);
}
10583
10584
10585/** Opcode 0xdb 11/6. */
10586FNIEMOP_DEF_1(iemOp_fcomi_stN, uint8_t, bRm)
10587{
10588 IEMOP_MNEMONIC(fcomi_st0_stN, "fcomi st0,stN");
10589 IEM_MC_DEFER_TO_CIMPL_3_RET(IEM_CIMPL_F_FPU | IEM_CIMPL_F_STATUS_FLAGS,
10590 iemCImpl_fcomi_fucomi, IEM_GET_MODRM_RM_8(bRm), false /*fUCmp*/,
10591 false /*fPop*/ | pVCpu->iem.s.uFpuOpcode);
10592}
10593
10594
/**
 * @opcode  0xdb
 *
 * Escape opcode 0xdb: register form covers FCMOVNB/NE/NBE/NU, the 0xe0..0xe7
 * control encodings and FUCOMI/FCOMI; memory form covers m32i loads/stores
 * and m80r load/store.
 */
FNIEMOP_DEF(iemOp_EscF3)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    /* Record the FPU opcode (low 3 bits of 0xdb combined with the ModR/M byte). */
    pVCpu->iem.s.uFpuOpcode = RT_MAKE_U16(bRm, 0xdb & 0x7);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        switch (IEM_GET_MODRM_REG_8(bRm))
        {
            case 0: return FNIEMOP_CALL_1(iemOp_fcmovnb_stN,  bRm);
            case 1: return FNIEMOP_CALL_1(iemOp_fcmovne_stN,  bRm);
            case 2: return FNIEMOP_CALL_1(iemOp_fcmovnbe_stN, bRm);
            case 3: return FNIEMOP_CALL_1(iemOp_fcmovnnu_stN, bRm);
            case 4:
                /* /4 fans out on the full ModR/M byte (0xe0..0xe7 in register mode). */
                switch (bRm)
                {
                    case 0xe0:  return FNIEMOP_CALL(iemOp_fneni);
                    case 0xe1:  return FNIEMOP_CALL(iemOp_fndisi);
                    case 0xe2:  return FNIEMOP_CALL(iemOp_fnclex);
                    case 0xe3:  return FNIEMOP_CALL(iemOp_fninit);
                    case 0xe4:  return FNIEMOP_CALL(iemOp_fnsetpm);
                    case 0xe5:  return FNIEMOP_CALL(iemOp_frstpm);
                    case 0xe6:  IEMOP_RAISE_INVALID_OPCODE_RET();
                    case 0xe7:  IEMOP_RAISE_INVALID_OPCODE_RET();
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                break; /* not reached - every inner case returns */
            case 5: return FNIEMOP_CALL_1(iemOp_fucomi_stN, bRm);
            case 6: return FNIEMOP_CALL_1(iemOp_fcomi_stN, bRm);
            case 7: IEMOP_RAISE_INVALID_OPCODE_RET();
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        switch (IEM_GET_MODRM_REG_8(bRm))
        {
            case 0: return FNIEMOP_CALL_1(iemOp_fild_m32i,  bRm);
            case 1: return FNIEMOP_CALL_1(iemOp_fisttp_m32i,bRm);
            case 2: return FNIEMOP_CALL_1(iemOp_fist_m32i,  bRm);
            case 3: return FNIEMOP_CALL_1(iemOp_fistp_m32i, bRm);
            case 4: IEMOP_RAISE_INVALID_OPCODE_RET();
            case 5: return FNIEMOP_CALL_1(iemOp_fld_m80r,   bRm);
            case 6: IEMOP_RAISE_INVALID_OPCODE_RET();
            case 7: return FNIEMOP_CALL_1(iemOp_fstp_m80r,  bRm);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
10646
10647
10648/**
10649 * Common worker for FPU instructions working on STn and ST0, and storing the
10650 * result in STn unless IE, DE or ZE was raised.
10651 *
10652 * @param bRm Mod R/M byte.
10653 * @param pfnAImpl Pointer to the instruction implementation (assembly).
10654 */
10655FNIEMOP_DEF_2(iemOpHlpFpu_stN_st0, uint8_t, bRm, PFNIEMAIMPLFPUR80, pfnAImpl)
10656{
10657 IEM_MC_BEGIN(3, 1, 0, 0);
10658 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10659 IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
10660 IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
10661 IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
10662 IEM_MC_ARG(PCRTFLOAT80U, pr80Value2, 2);
10663
10664 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
10665 IEM_MC_MAYBE_RAISE_FPU_XCPT();
10666
10667 IEM_MC_PREPARE_FPU_USAGE();
10668 IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80(pr80Value1, IEM_GET_MODRM_RM_8(bRm), pr80Value2, 0) {
10669 IEM_MC_CALL_FPU_AIMPL_3(pfnAImpl, pFpuRes, pr80Value1, pr80Value2);
10670 IEM_MC_STORE_FPU_RESULT(FpuRes, IEM_GET_MODRM_RM_8(bRm), pVCpu->iem.s.uFpuOpcode);
10671 } IEM_MC_ELSE() {
10672 IEM_MC_FPU_STACK_UNDERFLOW(IEM_GET_MODRM_RM_8(bRm), pVCpu->iem.s.uFpuOpcode);
10673 } IEM_MC_ENDIF();
10674 IEM_MC_ADVANCE_RIP_AND_FINISH();
10675
10676 IEM_MC_END();
10677}
10678
10679
/** Opcode 0xdc 11/0.
 * FADD ST(i),ST0 - result is stored in ST(i). */
FNIEMOP_DEF_1(iemOp_fadd_stN_st0, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fadd_stN_st0, "fadd stN,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0, bRm, iemAImpl_fadd_r80_by_r80);
}
10686
10687
/** Opcode 0xdc 11/1.
 * FMUL ST(i),ST0 - result is stored in ST(i). */
FNIEMOP_DEF_1(iemOp_fmul_stN_st0, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fmul_stN_st0, "fmul stN,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0, bRm, iemAImpl_fmul_r80_by_r80);
}
10694
10695
/** Opcode 0xdc 11/4.
 * FSUBR ST(i),ST0 - reversed subtract, result is stored in ST(i). */
FNIEMOP_DEF_1(iemOp_fsubr_stN_st0, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fsubr_stN_st0, "fsubr stN,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0, bRm, iemAImpl_fsubr_r80_by_r80);
}
10702
10703
/** Opcode 0xdc 11/5.
 * FSUB ST(i),ST0 - result is stored in ST(i). */
FNIEMOP_DEF_1(iemOp_fsub_stN_st0, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fsub_stN_st0, "fsub stN,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0, bRm, iemAImpl_fsub_r80_by_r80);
}
10710
10711
/** Opcode 0xdc 11/6.
 * FDIVR ST(i),ST0 - reversed divide, result is stored in ST(i). */
FNIEMOP_DEF_1(iemOp_fdivr_stN_st0, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fdivr_stN_st0, "fdivr stN,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0, bRm, iemAImpl_fdivr_r80_by_r80);
}
10718
10719
/** Opcode 0xdc 11/7.
 * FDIV ST(i),ST0 - result is stored in ST(i). */
FNIEMOP_DEF_1(iemOp_fdiv_stN_st0, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fdiv_stN_st0, "fdiv stN,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0, bRm, iemAImpl_fdiv_r80_by_r80);
}
10726
10727
10728/**
10729 * Common worker for FPU instructions working on ST0 and a 64-bit floating point
10730 * memory operand, and storing the result in ST0.
10731 *
10732 * @param bRm Mod R/M byte.
10733 * @param pfnImpl Pointer to the instruction implementation (assembly).
10734 */
10735FNIEMOP_DEF_2(iemOpHlpFpu_ST0_m64r, uint8_t, bRm, PFNIEMAIMPLFPUR64, pfnImpl)
10736{
10737 IEM_MC_BEGIN(3, 3, 0, 0);
10738 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
10739 IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
10740 IEM_MC_LOCAL(RTFLOAT64U, r64Factor2);
10741 IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
10742 IEM_MC_ARG(PCRTFLOAT80U, pr80Factor1, 1);
10743 IEM_MC_ARG_LOCAL_REF(PRTFLOAT64U, pr64Factor2, r64Factor2, 2);
10744
10745 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
10746 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10747 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
10748 IEM_MC_MAYBE_RAISE_FPU_XCPT();
10749
10750 IEM_MC_FETCH_MEM_R64(r64Factor2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
10751 IEM_MC_PREPARE_FPU_USAGE();
10752 IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Factor1, 0) {
10753 IEM_MC_CALL_FPU_AIMPL_3(pfnImpl, pFpuRes, pr80Factor1, pr64Factor2);
10754 IEM_MC_STORE_FPU_RESULT_MEM_OP(FpuRes, 0, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
10755 } IEM_MC_ELSE() {
10756 IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP(0, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
10757 } IEM_MC_ENDIF();
10758 IEM_MC_ADVANCE_RIP_AND_FINISH();
10759
10760 IEM_MC_END();
10761}
10762
10763
/** Opcode 0xdc !11/0.
 * FADD ST0,m64r - add a 64-bit real memory operand to ST0. */
FNIEMOP_DEF_1(iemOp_fadd_m64r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fadd_m64r, "fadd m64r");
    return FNIEMOP_CALL_2(iemOpHlpFpu_ST0_m64r, bRm, iemAImpl_fadd_r80_by_r64);
}
10770
10771
/** Opcode 0xdc !11/1.
 * FMUL ST0,m64r - multiply ST0 by a 64-bit real memory operand. */
FNIEMOP_DEF_1(iemOp_fmul_m64r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fmul_m64r, "fmul m64r");
    return FNIEMOP_CALL_2(iemOpHlpFpu_ST0_m64r, bRm, iemAImpl_fmul_r80_by_r64);
}
10778
10779
/** Opcode 0xdc !11/2.
 * FCOM ST0,m64r - compare ST0 with a 64-bit real memory operand, updating
 * only FSW (C0/C2/C3); no result is stored and the stack is not popped. */
FNIEMOP_DEF_1(iemOp_fcom_m64r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fcom_st0_m64r, "fcom st0,m64r");

    IEM_MC_BEGIN(3, 3, 0, 0);
    IEM_MC_LOCAL(RTGCPTR,               GCPtrEffSrc);
    IEM_MC_LOCAL(uint16_t,              u16Fsw);
    IEM_MC_LOCAL(RTFLOAT64U,            r64Val2);
    IEM_MC_ARG_LOCAL_REF(uint16_t *,    pu16Fsw,    u16Fsw,  0);
    IEM_MC_ARG(PCRTFLOAT80U,            pr80Value1,          1);
    IEM_MC_ARG_LOCAL_REF(PCRTFLOAT64U,  pr64Val2,   r64Val2, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_R64(r64Val2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0) {
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fcom_r80_by_r64, pu16Fsw, pr80Value1, pr64Val2);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        /* UINT8_MAX: no stack register to mark, memory operand only. */
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
10811
10812
/** Opcode 0xdc !11/3.
 * FCOMP ST0,m64r - same as FCOM m64r but pops the register stack afterwards
 * (note the _THEN_POP variants of the FSW update / underflow handling). */
FNIEMOP_DEF_1(iemOp_fcomp_m64r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fcomp_st0_m64r, "fcomp st0,m64r");

    IEM_MC_BEGIN(3, 3, 0, 0);
    IEM_MC_LOCAL(RTGCPTR,               GCPtrEffSrc);
    IEM_MC_LOCAL(uint16_t,              u16Fsw);
    IEM_MC_LOCAL(RTFLOAT64U,            r64Val2);
    IEM_MC_ARG_LOCAL_REF(uint16_t *,    pu16Fsw,    u16Fsw,  0);
    IEM_MC_ARG(PCRTFLOAT80U,            pr80Value1,          1);
    IEM_MC_ARG_LOCAL_REF(PCRTFLOAT64U,  pr64Val2,   r64Val2, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_R64(r64Val2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0) {
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fcom_r80_by_r64, pu16Fsw, pr80Value1, pr64Val2);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
10844
10845
/** Opcode 0xdc !11/4.
 * FSUB ST0,m64r - subtract a 64-bit real memory operand from ST0. */
FNIEMOP_DEF_1(iemOp_fsub_m64r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fsub_m64r, "fsub m64r");
    return FNIEMOP_CALL_2(iemOpHlpFpu_ST0_m64r, bRm, iemAImpl_fsub_r80_by_r64);
}
10852
10853
/** Opcode 0xdc !11/5.
 * FSUBR ST0,m64r - reversed subtract (m64r - ST0), result in ST0. */
FNIEMOP_DEF_1(iemOp_fsubr_m64r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fsubr_m64r, "fsubr m64r");
    return FNIEMOP_CALL_2(iemOpHlpFpu_ST0_m64r, bRm, iemAImpl_fsubr_r80_by_r64);
}
10860
10861
/** Opcode 0xdc !11/6.
 * FDIV ST0,m64r - divide ST0 by a 64-bit real memory operand. */
FNIEMOP_DEF_1(iemOp_fdiv_m64r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fdiv_m64r, "fdiv m64r");
    return FNIEMOP_CALL_2(iemOpHlpFpu_ST0_m64r, bRm, iemAImpl_fdiv_r80_by_r64);
}
10868
10869
/** Opcode 0xdc !11/7.
 * FDIVR ST0,m64r - reversed divide (m64r / ST0), result in ST0. */
FNIEMOP_DEF_1(iemOp_fdivr_m64r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fdivr_m64r, "fdivr m64r");
    return FNIEMOP_CALL_2(iemOpHlpFpu_ST0_m64r, bRm, iemAImpl_fdivr_r80_by_r64);
}
10876
10877
10878/**
10879 * @opcode 0xdc
10880 */
10881FNIEMOP_DEF(iemOp_EscF4)
10882{
10883 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
10884 pVCpu->iem.s.uFpuOpcode = RT_MAKE_U16(bRm, 0xdc & 0x7);
10885 if (IEM_IS_MODRM_REG_MODE(bRm))
10886 {
10887 switch (IEM_GET_MODRM_REG_8(bRm))
10888 {
10889 case 0: return FNIEMOP_CALL_1(iemOp_fadd_stN_st0, bRm);
10890 case 1: return FNIEMOP_CALL_1(iemOp_fmul_stN_st0, bRm);
10891 case 2: return FNIEMOP_CALL_1(iemOp_fcom_stN, bRm); /* Marked reserved, intel behavior is that of FCOM ST(i). */
10892 case 3: return FNIEMOP_CALL_1(iemOp_fcomp_stN, bRm); /* Marked reserved, intel behavior is that of FCOMP ST(i). */
10893 case 4: return FNIEMOP_CALL_1(iemOp_fsubr_stN_st0, bRm);
10894 case 5: return FNIEMOP_CALL_1(iemOp_fsub_stN_st0, bRm);
10895 case 6: return FNIEMOP_CALL_1(iemOp_fdivr_stN_st0, bRm);
10896 case 7: return FNIEMOP_CALL_1(iemOp_fdiv_stN_st0, bRm);
10897 IEM_NOT_REACHED_DEFAULT_CASE_RET();
10898 }
10899 }
10900 else
10901 {
10902 switch (IEM_GET_MODRM_REG_8(bRm))
10903 {
10904 case 0: return FNIEMOP_CALL_1(iemOp_fadd_m64r, bRm);
10905 case 1: return FNIEMOP_CALL_1(iemOp_fmul_m64r, bRm);
10906 case 2: return FNIEMOP_CALL_1(iemOp_fcom_m64r, bRm);
10907 case 3: return FNIEMOP_CALL_1(iemOp_fcomp_m64r, bRm);
10908 case 4: return FNIEMOP_CALL_1(iemOp_fsub_m64r, bRm);
10909 case 5: return FNIEMOP_CALL_1(iemOp_fsubr_m64r, bRm);
10910 case 6: return FNIEMOP_CALL_1(iemOp_fdiv_m64r, bRm);
10911 case 7: return FNIEMOP_CALL_1(iemOp_fdivr_m64r, bRm);
10912 IEM_NOT_REACHED_DEFAULT_CASE_RET();
10913 }
10914 }
10915}
10916
10917
/** Opcode 0xdd !11/0.
 * FLD m64r - push a 64-bit real memory operand onto the FPU stack.
 * The push requires ST7 (the register that will become the new top) to be
 * free, hence IEM_MC_IF_FPUREG_IS_EMPTY(7); otherwise push-overflow handling.
 * @sa iemOp_fld_m32r */
FNIEMOP_DEF_1(iemOp_fld_m64r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fld_m64r, "fld m64r");

    IEM_MC_BEGIN(2, 3, 0, 0);
    IEM_MC_LOCAL(RTGCPTR,               GCPtrEffSrc);
    IEM_MC_LOCAL(IEMFPURESULT,          FpuRes);
    IEM_MC_LOCAL(RTFLOAT64U,            r64Val);
    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
    IEM_MC_ARG_LOCAL_REF(PCRTFLOAT64U,  pr64Val, r64Val, 1);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_FETCH_MEM_R64(r64Val, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_IS_EMPTY(7) {
        IEM_MC_CALL_FPU_AIMPL_2(iemAImpl_fld_r80_from_r64, pFpuRes, pr64Val);
        IEM_MC_PUSH_FPU_RESULT_MEM_OP(FpuRes, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_PUSH_OVERFLOW_MEM_OP(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
10948
10949
/** Opcode 0xdd !11/1.
 * FISTTP m64i - store ST0 to memory as a 64-bit integer with truncation,
 * then pop. The destination is mapped for writing before the conversion;
 * on a masked-IM underflow the 64-bit integer indefinite (INT64_MIN) is
 * stored instead. */
FNIEMOP_DEF_1(iemOp_fisttp_m64i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fisttp_m64i, "fisttp m64i");
    IEM_MC_BEGIN(3, 2, 0, 0);
    IEM_MC_LOCAL(RTGCPTR,               GCPtrEffDst);
    IEM_MC_LOCAL(uint16_t,              u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *,    pu16Fsw,    u16Fsw, 0);
    IEM_MC_ARG(int64_t *,               pi64Dst,            1);
    IEM_MC_ARG(PCRTFLOAT80U,            pr80Value,          2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_MEM_MAP(pi64Dst, IEM_ACCESS_DATA_W, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1 /*arg*/);
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0) {
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fistt_r80_to_i64, pu16Fsw, pi64Dst, pr80Value);
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pi64Dst, IEM_ACCESS_DATA_W, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        /* Stack underflow: with IM masked, store the integer indefinite value. */
        IEM_MC_IF_FCW_IM() {
            IEM_MC_STORE_MEM_I64_CONST_BY_REF(pi64Dst, INT64_MIN /* (integer indefinite) */);
            IEM_MC_MEM_COMMIT_AND_UNMAP(pi64Dst, IEM_ACCESS_DATA_W);
        } IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
10983
10984
/** Opcode 0xdd !11/2.
 * FST m64r - store ST0 to memory as a 64-bit real, no pop. On a masked-IM
 * underflow a negative QNaN is stored instead. */
FNIEMOP_DEF_1(iemOp_fst_m64r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fst_m64r, "fst m64r");
    IEM_MC_BEGIN(3, 2, 0, 0);
    IEM_MC_LOCAL(RTGCPTR,               GCPtrEffDst);
    IEM_MC_LOCAL(uint16_t,              u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *,    pu16Fsw,    u16Fsw, 0);
    IEM_MC_ARG(PRTFLOAT64U,             pr64Dst,            1);
    IEM_MC_ARG(PCRTFLOAT80U,            pr80Value,          2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_MEM_MAP(pr64Dst, IEM_ACCESS_DATA_W, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1 /*arg*/);
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0) {
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fst_r80_to_r64, pu16Fsw, pr64Dst, pr80Value);
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pr64Dst, IEM_ACCESS_DATA_W, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        /* Stack underflow: with IM masked, store the real indefinite (neg QNaN). */
        IEM_MC_IF_FCW_IM() {
            IEM_MC_STORE_MEM_NEG_QNAN_R64_BY_REF(pr64Dst);
            IEM_MC_MEM_COMMIT_AND_UNMAP(pr64Dst, IEM_ACCESS_DATA_W);
        } IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
11018
11019
11020
11021
/** Opcode 0xdd !11/3.
 * FSTP m64r - store ST0 to memory as a 64-bit real and pop; identical to
 * FST m64r except for the _THEN_POP FSW update / underflow handling. */
FNIEMOP_DEF_1(iemOp_fstp_m64r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fstp_m64r, "fstp m64r");
    IEM_MC_BEGIN(3, 2, 0, 0);
    IEM_MC_LOCAL(RTGCPTR,               GCPtrEffDst);
    IEM_MC_LOCAL(uint16_t,              u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *,    pu16Fsw,    u16Fsw, 0);
    IEM_MC_ARG(PRTFLOAT64U,             pr64Dst,            1);
    IEM_MC_ARG(PCRTFLOAT80U,            pr80Value,          2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_MEM_MAP(pr64Dst, IEM_ACCESS_DATA_W, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1 /*arg*/);
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0) {
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fst_r80_to_r64, pu16Fsw, pr64Dst, pr80Value);
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pr64Dst, IEM_ACCESS_DATA_W, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        /* Stack underflow: with IM masked, store the real indefinite (neg QNaN). */
        IEM_MC_IF_FCW_IM() {
            IEM_MC_STORE_MEM_NEG_QNAN_R64_BY_REF(pr64Dst);
            IEM_MC_MEM_COMMIT_AND_UNMAP(pr64Dst, IEM_ACCESS_DATA_W);
        } IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
11055
11056
/** Opcode 0xdd !11/4.
 * FRSTOR m94/108byte - restore the complete FPU state from memory; defers to
 * the C implementation (layout depends on the effective operand size). */
FNIEMOP_DEF_1(iemOp_frstor, uint8_t, bRm)
{
    IEMOP_MNEMONIC(frstor, "frstor m94/108byte");
    IEM_MC_BEGIN(3, 0, 0, 0);
    IEM_MC_ARG_CONST(IEMMODE,           enmEffOpSize, /*=*/ pVCpu->iem.s.enmEffOpSize,  0);
    IEM_MC_ARG(uint8_t,                 iEffSeg,                                        1);
    IEM_MC_ARG(RTGCPTR,                 GCPtrEffSrc,                                    2);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
    IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
    IEM_MC_CALL_CIMPL_3(IEM_CIMPL_F_FPU, iemCImpl_frstor, enmEffOpSize, iEffSeg, GCPtrEffSrc);
    IEM_MC_END();
}
11073
11074
/** Opcode 0xdd !11/6.
 * FNSAVE m94/108byte - save the complete FPU state to memory; defers to the
 * C implementation. */
FNIEMOP_DEF_1(iemOp_fnsave, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fnsave, "fnsave m94/108byte");
    IEM_MC_BEGIN(3, 0, 0, 0);
    IEM_MC_ARG_CONST(IEMMODE,           enmEffOpSize, /*=*/ pVCpu->iem.s.enmEffOpSize,  0);
    IEM_MC_ARG(uint8_t,                 iEffSeg,                                        1);
    IEM_MC_ARG(RTGCPTR,                 GCPtrEffDst,                                    2);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE(); /* Note! Implicit fninit after the save, do not use FOR_READ here! */
    IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
    IEM_MC_CALL_CIMPL_3(IEM_CIMPL_F_FPU, iemCImpl_fnsave, enmEffOpSize, iEffSeg, GCPtrEffDst);
    IEM_MC_END();
}
11091
/** Opcode 0xdd !11/7.
 * FNSTSW m16 - store the FPU status word to a 16-bit memory location without
 * checking for pending unmasked exceptions first. */
FNIEMOP_DEF_1(iemOp_fnstsw, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fnstsw_m16, "fnstsw m16");

    IEM_MC_BEGIN(0, 2, 0, 0);
    IEM_MC_LOCAL(uint16_t, u16Tmp);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();

    IEM_MC_ACTUALIZE_FPU_STATE_FOR_READ();
    IEM_MC_FETCH_FSW(u16Tmp);
    IEM_MC_STORE_MEM_U16(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u16Tmp);
    IEM_MC_ADVANCE_RIP_AND_FINISH();

/** @todo Debug / drop a hint to the verifier that things may differ
 * from REM. Seen 0x4020 (iem) vs 0x4000 (rem) at 0008:801c6b88 booting
 * NT4SP1. (X86_FSW_PE) */
    IEM_MC_END();
}
11115
11116
/** Opcode 0xdd 11/0.
 * FFREE ST(i) - mark the register's tag as empty without changing TOP. */
FNIEMOP_DEF_1(iemOp_ffree_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(ffree_stN, "ffree stN");
    /* Note! C0, C1, C2 and C3 are documented as undefined, we leave them
             unmodified. */
    IEM_MC_BEGIN(0, 0, 0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
    IEM_MC_FPU_STACK_FREE(IEM_GET_MODRM_RM_8(bRm));
    IEM_MC_UPDATE_FPU_OPCODE_IP(pVCpu->iem.s.uFpuOpcode);

    IEM_MC_ADVANCE_RIP_AND_FINISH();
    IEM_MC_END();
}
11136
11137
/** Opcode 0xdd 11/2 (dispatched from EscF5 reg case 2).
 * FST ST(i) - copy ST0 into ST(i) without popping. */
FNIEMOP_DEF_1(iemOp_fst_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fst_st0_stN, "fst st0,stN");
    IEM_MC_BEGIN(0, 2, 0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_LOCAL(PCRTFLOAT80U,          pr80Value);
    IEM_MC_LOCAL(IEMFPURESULT,          FpuRes);
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0) {
        IEM_MC_SET_FPU_RESULT(FpuRes, 0 /*FSW*/, pr80Value); /* Wrap ST0's value as the result. */
        IEM_MC_STORE_FPU_RESULT(FpuRes, IEM_GET_MODRM_RM_8(bRm), pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_UNDERFLOW(IEM_GET_MODRM_RM_8(bRm), pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();

    IEM_MC_ADVANCE_RIP_AND_FINISH();
    IEM_MC_END();
}
11160
11161
/** Opcode 0xdd 11/4 (dispatched from EscF5 reg case 4).
 * FUCOM ST0,ST(i) - unordered compare, updates FSW only, no pop. */
FNIEMOP_DEF_1(iemOp_fucom_stN_st0, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fucom_st0_stN, "fucom st0,stN");
    return FNIEMOP_CALL_2(iemOpHlpFpuNoStore_st0_stN, bRm, iemAImpl_fucom_r80_by_r80);
}
11168
11169
/** Opcode 0xdd 11/5 (dispatched from EscF5 reg case 5).
 * FUCOMP ST0,ST(i) - unordered compare, updates FSW, pops once. */
FNIEMOP_DEF_1(iemOp_fucomp_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fucomp_st0_stN, "fucomp st0,stN");
    return FNIEMOP_CALL_2(iemOpHlpFpuNoStore_st0_stN_pop, bRm, iemAImpl_fucom_r80_by_r80);
}
11176
11177
11178/**
11179 * @opcode 0xdd
11180 */
11181FNIEMOP_DEF(iemOp_EscF5)
11182{
11183 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
11184 pVCpu->iem.s.uFpuOpcode = RT_MAKE_U16(bRm, 0xdd & 0x7);
11185 if (IEM_IS_MODRM_REG_MODE(bRm))
11186 {
11187 switch (IEM_GET_MODRM_REG_8(bRm))
11188 {
11189 case 0: return FNIEMOP_CALL_1(iemOp_ffree_stN, bRm);
11190 case 1: return FNIEMOP_CALL_1(iemOp_fxch_stN, bRm); /* Reserved, intel behavior is that of XCHG ST(i). */
11191 case 2: return FNIEMOP_CALL_1(iemOp_fst_stN, bRm);
11192 case 3: return FNIEMOP_CALL_1(iemOp_fstp_stN, bRm);
11193 case 4: return FNIEMOP_CALL_1(iemOp_fucom_stN_st0,bRm);
11194 case 5: return FNIEMOP_CALL_1(iemOp_fucomp_stN, bRm);
11195 case 6: IEMOP_RAISE_INVALID_OPCODE_RET();
11196 case 7: IEMOP_RAISE_INVALID_OPCODE_RET();
11197 IEM_NOT_REACHED_DEFAULT_CASE_RET();
11198 }
11199 }
11200 else
11201 {
11202 switch (IEM_GET_MODRM_REG_8(bRm))
11203 {
11204 case 0: return FNIEMOP_CALL_1(iemOp_fld_m64r, bRm);
11205 case 1: return FNIEMOP_CALL_1(iemOp_fisttp_m64i, bRm);
11206 case 2: return FNIEMOP_CALL_1(iemOp_fst_m64r, bRm);
11207 case 3: return FNIEMOP_CALL_1(iemOp_fstp_m64r, bRm);
11208 case 4: return FNIEMOP_CALL_1(iemOp_frstor, bRm);
11209 case 5: IEMOP_RAISE_INVALID_OPCODE_RET();
11210 case 6: return FNIEMOP_CALL_1(iemOp_fnsave, bRm);
11211 case 7: return FNIEMOP_CALL_1(iemOp_fnstsw, bRm);
11212 IEM_NOT_REACHED_DEFAULT_CASE_RET();
11213 }
11214 }
11215}
11216
11217
/** Opcode 0xde 11/0.
 * FADDP ST(i),ST0 - add, store in ST(i), then pop. */
FNIEMOP_DEF_1(iemOp_faddp_stN_st0, uint8_t, bRm)
{
    IEMOP_MNEMONIC(faddp_stN_st0, "faddp stN,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0_pop, bRm, iemAImpl_fadd_r80_by_r80);
}
11224
11225
/** Opcode 0xde 11/1 (dispatched from EscF6 reg case 1).
 * FMULP ST(i),ST0 - multiply, store in ST(i), then pop. */
FNIEMOP_DEF_1(iemOp_fmulp_stN_st0, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fmulp_stN_st0, "fmulp stN,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0_pop, bRm, iemAImpl_fmul_r80_by_r80);
}
11232
11233
/** Opcode 0xde 0xd9.
 * FCOMPP - compare ST0 with ST1 and pop the stack twice. */
FNIEMOP_DEF(iemOp_fcompp)
{
    IEMOP_MNEMONIC(fcompp, "fcompp");
    return FNIEMOP_CALL_1(iemOpHlpFpuNoStore_st0_st1_pop_pop, iemAImpl_fcom_r80_by_r80);
}
11240
11241
/** Opcode 0xde 11/4.
 * FSUBRP ST(i),ST0 - reversed subtract, store in ST(i), then pop. */
FNIEMOP_DEF_1(iemOp_fsubrp_stN_st0, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fsubrp_stN_st0, "fsubrp stN,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0_pop, bRm, iemAImpl_fsubr_r80_by_r80);
}
11248
11249
/** Opcode 0xde 11/5.
 * FSUBP ST(i),ST0 - subtract, store in ST(i), then pop. */
FNIEMOP_DEF_1(iemOp_fsubp_stN_st0, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fsubp_stN_st0, "fsubp stN,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0_pop, bRm, iemAImpl_fsub_r80_by_r80);
}
11256
11257
/** Opcode 0xde 11/6.
 * FDIVRP ST(i),ST0 - reversed divide, store in ST(i), then pop. */
FNIEMOP_DEF_1(iemOp_fdivrp_stN_st0, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fdivrp_stN_st0, "fdivrp stN,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0_pop, bRm, iemAImpl_fdivr_r80_by_r80);
}
11264
11265
/** Opcode 0xde 11/7.
 * FDIVP ST(i),ST0 - divide, store in ST(i), then pop. */
FNIEMOP_DEF_1(iemOp_fdivp_stN_st0, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fdivp_stN_st0, "fdivp stN,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0_pop, bRm, iemAImpl_fdiv_r80_by_r80);
}
11272
11273
11274/**
11275 * Common worker for FPU instructions working on ST0 and an m16i, and storing
11276 * the result in ST0.
11277 *
11278 * @param bRm Mod R/M byte.
11279 * @param pfnAImpl Pointer to the instruction implementation (assembly).
11280 */
11281FNIEMOP_DEF_2(iemOpHlpFpu_st0_m16i, uint8_t, bRm, PFNIEMAIMPLFPUI16, pfnAImpl)
11282{
11283 IEM_MC_BEGIN(3, 3, 0, 0);
11284 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
11285 IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
11286 IEM_MC_LOCAL(int16_t, i16Val2);
11287 IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
11288 IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
11289 IEM_MC_ARG_LOCAL_REF(int16_t const *, pi16Val2, i16Val2, 2);
11290
11291 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
11292 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11293
11294 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
11295 IEM_MC_MAYBE_RAISE_FPU_XCPT();
11296 IEM_MC_FETCH_MEM_I16(i16Val2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
11297
11298 IEM_MC_PREPARE_FPU_USAGE();
11299 IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0) {
11300 IEM_MC_CALL_FPU_AIMPL_3(pfnAImpl, pFpuRes, pr80Value1, pi16Val2);
11301 IEM_MC_STORE_FPU_RESULT(FpuRes, 0, pVCpu->iem.s.uFpuOpcode);
11302 } IEM_MC_ELSE() {
11303 IEM_MC_FPU_STACK_UNDERFLOW(0, pVCpu->iem.s.uFpuOpcode);
11304 } IEM_MC_ENDIF();
11305 IEM_MC_ADVANCE_RIP_AND_FINISH();
11306
11307 IEM_MC_END();
11308}
11309
11310
/** Opcode 0xde !11/0.
 * FIADD m16i - add a 16-bit signed integer memory operand to ST0. */
FNIEMOP_DEF_1(iemOp_fiadd_m16i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fiadd_m16i, "fiadd m16i");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m16i, bRm, iemAImpl_fiadd_r80_by_i16);
}
11317
11318
/** Opcode 0xde !11/1.
 * FIMUL m16i - multiply ST0 by a 16-bit signed integer memory operand. */
FNIEMOP_DEF_1(iemOp_fimul_m16i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fimul_m16i, "fimul m16i");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m16i, bRm, iemAImpl_fimul_r80_by_i16);
}
11325
11326
/** Opcode 0xde !11/2.
 * FICOM ST0,m16i - compare ST0 with a 16-bit signed integer memory operand,
 * updating FSW only; no pop. */
FNIEMOP_DEF_1(iemOp_ficom_m16i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(ficom_st0_m16i, "ficom st0,m16i");

    IEM_MC_BEGIN(3, 3, 0, 0);
    IEM_MC_LOCAL(RTGCPTR,               GCPtrEffSrc);
    IEM_MC_LOCAL(uint16_t,              u16Fsw);
    IEM_MC_LOCAL(int16_t,               i16Val2);
    IEM_MC_ARG_LOCAL_REF(uint16_t *,    pu16Fsw,    u16Fsw,  0);
    IEM_MC_ARG(PCRTFLOAT80U,            pr80Value1,          1);
    IEM_MC_ARG_LOCAL_REF(int16_t const *, pi16Val2, i16Val2, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_I16(i16Val2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0) {
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_ficom_r80_by_i16, pu16Fsw, pr80Value1, pi16Val2);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
11358
11359
/** Opcode 0xde !11/3.
 * FICOMP ST0,m16i - same as FICOM m16i but pops the stack afterwards
 * (note the _THEN_POP variants). */
FNIEMOP_DEF_1(iemOp_ficomp_m16i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(ficomp_st0_m16i, "ficomp st0,m16i");

    IEM_MC_BEGIN(3, 3, 0, 0);
    IEM_MC_LOCAL(RTGCPTR,               GCPtrEffSrc);
    IEM_MC_LOCAL(uint16_t,              u16Fsw);
    IEM_MC_LOCAL(int16_t,               i16Val2);
    IEM_MC_ARG_LOCAL_REF(uint16_t *,    pu16Fsw,    u16Fsw,  0);
    IEM_MC_ARG(PCRTFLOAT80U,            pr80Value1,          1);
    IEM_MC_ARG_LOCAL_REF(int16_t const *, pi16Val2, i16Val2, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_I16(i16Val2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0) {
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_ficom_r80_by_i16, pu16Fsw, pr80Value1, pi16Val2);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
11391
11392
/** Opcode 0xde !11/4.
 * FISUB m16i - subtract a 16-bit signed integer memory operand from ST0. */
FNIEMOP_DEF_1(iemOp_fisub_m16i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fisub_m16i, "fisub m16i");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m16i, bRm, iemAImpl_fisub_r80_by_i16);
}
11399
11400
/** Opcode 0xde !11/5.
 * FISUBR m16i - reversed subtract (m16i - ST0), result in ST0. */
FNIEMOP_DEF_1(iemOp_fisubr_m16i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fisubr_m16i, "fisubr m16i");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m16i, bRm, iemAImpl_fisubr_r80_by_i16);
}
11407
11408
/** Opcode 0xde !11/6.
 * FIDIV m16i - divide ST0 by a 16-bit signed integer memory operand. */
FNIEMOP_DEF_1(iemOp_fidiv_m16i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fidiv_m16i, "fidiv m16i");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m16i, bRm, iemAImpl_fidiv_r80_by_i16);
}
11415
11416
/** Opcode 0xde !11/7.
 * FIDIVR m16i - reversed divide (m16i / ST0), result in ST0. */
FNIEMOP_DEF_1(iemOp_fidivr_m16i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fidivr_m16i, "fidivr m16i");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m16i, bRm, iemAImpl_fidivr_r80_by_i16);
}
11423
11424
11425/**
11426 * @opcode 0xde
11427 */
11428FNIEMOP_DEF(iemOp_EscF6)
11429{
11430 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
11431 pVCpu->iem.s.uFpuOpcode = RT_MAKE_U16(bRm, 0xde & 0x7);
11432 if (IEM_IS_MODRM_REG_MODE(bRm))
11433 {
11434 switch (IEM_GET_MODRM_REG_8(bRm))
11435 {
11436 case 0: return FNIEMOP_CALL_1(iemOp_faddp_stN_st0, bRm);
11437 case 1: return FNIEMOP_CALL_1(iemOp_fmulp_stN_st0, bRm);
11438 case 2: return FNIEMOP_CALL_1(iemOp_fcomp_stN, bRm);
11439 case 3: if (bRm == 0xd9)
11440 return FNIEMOP_CALL(iemOp_fcompp);
11441 IEMOP_RAISE_INVALID_OPCODE_RET();
11442 case 4: return FNIEMOP_CALL_1(iemOp_fsubrp_stN_st0, bRm);
11443 case 5: return FNIEMOP_CALL_1(iemOp_fsubp_stN_st0, bRm);
11444 case 6: return FNIEMOP_CALL_1(iemOp_fdivrp_stN_st0, bRm);
11445 case 7: return FNIEMOP_CALL_1(iemOp_fdivp_stN_st0, bRm);
11446 IEM_NOT_REACHED_DEFAULT_CASE_RET();
11447 }
11448 }
11449 else
11450 {
11451 switch (IEM_GET_MODRM_REG_8(bRm))
11452 {
11453 case 0: return FNIEMOP_CALL_1(iemOp_fiadd_m16i, bRm);
11454 case 1: return FNIEMOP_CALL_1(iemOp_fimul_m16i, bRm);
11455 case 2: return FNIEMOP_CALL_1(iemOp_ficom_m16i, bRm);
11456 case 3: return FNIEMOP_CALL_1(iemOp_ficomp_m16i, bRm);
11457 case 4: return FNIEMOP_CALL_1(iemOp_fisub_m16i, bRm);
11458 case 5: return FNIEMOP_CALL_1(iemOp_fisubr_m16i, bRm);
11459 case 6: return FNIEMOP_CALL_1(iemOp_fidiv_m16i, bRm);
11460 case 7: return FNIEMOP_CALL_1(iemOp_fidivr_m16i, bRm);
11461 IEM_NOT_REACHED_DEFAULT_CASE_RET();
11462 }
11463 }
11464}
11465
11466
/** Opcode 0xdf 11/0.
 * FFREEP ST(i) - undocumented instruction, assumed to work like
 * FFREE + FINCSTP (free the register tag, then increment TOP). */
FNIEMOP_DEF_1(iemOp_ffreep_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(ffreep_stN, "ffreep stN");
    IEM_MC_BEGIN(0, 0, 0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
    IEM_MC_FPU_STACK_FREE(IEM_GET_MODRM_RM_8(bRm));
    IEM_MC_FPU_STACK_INC_TOP();
    IEM_MC_UPDATE_FPU_OPCODE_IP(pVCpu->iem.s.uFpuOpcode);

    IEM_MC_ADVANCE_RIP_AND_FINISH();
    IEM_MC_END();
}
11486
11487
11488/** Opcode 0xdf 0xe0. */
/** Stores the FPU status word (FSW) into AX without checking for pending
 *  x87 exceptions (the 'n' = no-wait form). */
11489FNIEMOP_DEF(iemOp_fnstsw_ax)
11490{
11491 IEMOP_MNEMONIC(fnstsw_ax, "fnstsw ax");
11492 IEM_MC_BEGIN(0, 1, 0, 0);
11493 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11494 IEM_MC_LOCAL(uint16_t, u16Tmp);
11495 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
 /* Read-only access: FSW is copied out, no FPU state is modified. */
11496 IEM_MC_ACTUALIZE_FPU_STATE_FOR_READ();
11497 IEM_MC_FETCH_FSW(u16Tmp);
11498 IEM_MC_STORE_GREG_U16(X86_GREG_xAX, u16Tmp);
11499 IEM_MC_ADVANCE_RIP_AND_FINISH();
11500 IEM_MC_END();
11501}
11502
11503
11504/** Opcode 0xdf 11/5.
 * Unordered compare ST0 with ST(i), set EFLAGS, then pop.
 *
 * Defers to iemCImpl_fcomi_fucomi with fUCmp=true: FUCOMIP is the
 * *unordered* variant, so QNaN operands must not raise #IA (only SNaNs
 * do).  The previous code passed false here, which made this encoding
 * behave exactly like FCOMIP (0xdf 11/6) below and raise #IA on QNaNs.
 * Bit 31 of the last argument requests the stack pop. */
11505FNIEMOP_DEF_1(iemOp_fucomip_st0_stN, uint8_t, bRm)
11506{
11507 IEMOP_MNEMONIC(fucomip_st0_stN, "fucomip st0,stN");
11508 IEM_MC_DEFER_TO_CIMPL_3_RET(IEM_CIMPL_F_FPU | IEM_CIMPL_F_STATUS_FLAGS,
11509 iemCImpl_fcomi_fucomi, IEM_GET_MODRM_RM_8(bRm), true /*fUCmp*/,
11510 RT_BIT_32(31) /*fPop*/ | pVCpu->iem.s.uFpuOpcode);
11511}
11512
11513
11512/** Opcode 0xdf 11/6.
 * Ordered compare ST0 with ST(i), set EFLAGS, then pop.  fUCmp=false is
 * correct here: the ordered FCOMIP raises #IA on both QNaN and SNaN
 * operands.  Bit 31 of the last argument requests the stack pop. */
11513FNIEMOP_DEF_1(iemOp_fcomip_st0_stN, uint8_t, bRm)
11514{
11515 IEMOP_MNEMONIC(fcomip_st0_stN, "fcomip st0,stN");
11516 IEM_MC_DEFER_TO_CIMPL_3_RET(IEM_CIMPL_F_FPU | IEM_CIMPL_F_STATUS_FLAGS,
11517 iemCImpl_fcomi_fucomi, IEM_GET_MODRM_RM_8(bRm), false /*fUCmp*/,
11518 RT_BIT_32(31) /*fPop*/ | pVCpu->iem.s.uFpuOpcode);
11519}
11522
11523
11524/** Opcode 0xdf !11/0.
 * Loads a 16-bit signed integer from memory, converts it to an 80-bit
 * real and pushes it onto the FPU register stack. */
11525FNIEMOP_DEF_1(iemOp_fild_m16i, uint8_t, bRm)
11526{
11527 IEMOP_MNEMONIC(fild_m16i, "fild m16i");
11528
11529 IEM_MC_BEGIN(2, 3, 0, 0);
11530 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
11531 IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
11532 IEM_MC_LOCAL(int16_t, i16Val);
11533 IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
11534 IEM_MC_ARG_LOCAL_REF(int16_t const *, pi16Val, i16Val, 1);
11535
11536 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
11537 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11538
11539 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
11540 IEM_MC_MAYBE_RAISE_FPU_XCPT();
11541 IEM_MC_FETCH_MEM_I16(i16Val, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
11542
11543 IEM_MC_PREPARE_FPU_USAGE();
 /* Register 7 relative to TOP is the slot the push will land in; if it is
    occupied this is a stack overflow (C1 set, indefinite pushed if masked). */
11544 IEM_MC_IF_FPUREG_IS_EMPTY(7) {
11545 IEM_MC_CALL_FPU_AIMPL_2(iemAImpl_fild_r80_from_i16, pFpuRes, pi16Val);
11546 IEM_MC_PUSH_FPU_RESULT_MEM_OP(FpuRes, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
11547 } IEM_MC_ELSE() {
11548 IEM_MC_FPU_STACK_PUSH_OVERFLOW_MEM_OP(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
11549 } IEM_MC_ENDIF();
11550 IEM_MC_ADVANCE_RIP_AND_FINISH();
11551
11552 IEM_MC_END();
11553}
11554
11555
11556/** Opcode 0xdf !11/1.
 * Store ST0 to memory as a 16-bit integer using truncation (SSE3), then
 * pop the FPU stack.  On stack underflow with IM masked, the integer
 * indefinite value (INT16_MIN) is stored instead. */
11557FNIEMOP_DEF_1(iemOp_fisttp_m16i, uint8_t, bRm)
11558{
11559 IEMOP_MNEMONIC(fisttp_m16i, "fisttp m16i");
11560 IEM_MC_BEGIN(3, 2, 0, 0);
11561 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
11562 IEM_MC_LOCAL(uint16_t, u16Fsw);
11563 IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
11564 IEM_MC_ARG(int16_t *, pi16Dst, 1);
11565 IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);
11566
11567 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
11568 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11569 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
11570 IEM_MC_MAYBE_RAISE_FPU_XCPT();
11571
 /* Map the destination writable up front so memory faults precede FPU work. */
11572 IEM_MC_MEM_MAP(pi16Dst, IEM_ACCESS_DATA_W, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1 /*arg*/);
11573 IEM_MC_PREPARE_FPU_USAGE();
11574 IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0) {
11575 IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fistt_r80_to_i16, pu16Fsw, pi16Dst, pr80Value);
11576 IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pi16Dst, IEM_ACCESS_DATA_W, u16Fsw);
11577 IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst, pVCpu->iem.s.uFpuOpcode);
11578 } IEM_MC_ELSE() {
 /* Stack underflow: store integer indefinite only if the IM mask allows. */
11579 IEM_MC_IF_FCW_IM() {
11580 IEM_MC_STORE_MEM_I16_CONST_BY_REF(pi16Dst, INT16_MIN /* (integer indefinite) */);
11581 IEM_MC_MEM_COMMIT_AND_UNMAP(pi16Dst, IEM_ACCESS_DATA_W);
11582 } IEM_MC_ENDIF();
11583 IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst, pVCpu->iem.s.uFpuOpcode);
11584 } IEM_MC_ENDIF();
11585 IEM_MC_ADVANCE_RIP_AND_FINISH();
11586
11587 IEM_MC_END();
11588}
11589
11590
11591/** Opcode 0xdf !11/2.
 * Store ST0 to memory as a 16-bit integer (rounding per FCW.RC), without
 * popping.  On stack underflow with IM masked, the integer indefinite
 * value (INT16_MIN) is stored instead. */
11592FNIEMOP_DEF_1(iemOp_fist_m16i, uint8_t, bRm)
11593{
11594 IEMOP_MNEMONIC(fist_m16i, "fist m16i");
11595 IEM_MC_BEGIN(3, 2, 0, 0);
11596 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
11597 IEM_MC_LOCAL(uint16_t, u16Fsw);
11598 IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
11599 IEM_MC_ARG(int16_t *, pi16Dst, 1);
11600 IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);
11601
11602 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
11603 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11604 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
11605 IEM_MC_MAYBE_RAISE_FPU_XCPT();
11606
 /* Map the destination writable up front so memory faults precede FPU work. */
11607 IEM_MC_MEM_MAP(pi16Dst, IEM_ACCESS_DATA_W, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1 /*arg*/);
11608 IEM_MC_PREPARE_FPU_USAGE();
11609 IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0) {
11610 IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fist_r80_to_i16, pu16Fsw, pi16Dst, pr80Value);
11611 IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pi16Dst, IEM_ACCESS_DATA_W, u16Fsw);
 /* No _THEN_POP here - fist leaves the stack unchanged. */
11612 IEM_MC_UPDATE_FSW_WITH_MEM_OP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst, pVCpu->iem.s.uFpuOpcode);
11613 } IEM_MC_ELSE() {
11614 IEM_MC_IF_FCW_IM() {
11615 IEM_MC_STORE_MEM_I16_CONST_BY_REF(pi16Dst, INT16_MIN /* (integer indefinite) */);
11616 IEM_MC_MEM_COMMIT_AND_UNMAP(pi16Dst, IEM_ACCESS_DATA_W);
11617 } IEM_MC_ENDIF();
11618 IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst, pVCpu->iem.s.uFpuOpcode);
11619 } IEM_MC_ENDIF();
11620 IEM_MC_ADVANCE_RIP_AND_FINISH();
11621
11622 IEM_MC_END();
11623}
11624
11625
11626/** Opcode 0xdf !11/3.
 * Store ST0 to memory as a 16-bit integer (rounding per FCW.RC), then pop
 * the FPU stack.  Same as fist m16i except for the pop. */
11627FNIEMOP_DEF_1(iemOp_fistp_m16i, uint8_t, bRm)
11628{
11629 IEMOP_MNEMONIC(fistp_m16i, "fistp m16i");
11630 IEM_MC_BEGIN(3, 2, 0, 0);
11631 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
11632 IEM_MC_LOCAL(uint16_t, u16Fsw);
11633 IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
11634 IEM_MC_ARG(int16_t *, pi16Dst, 1);
11635 IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);
11636
11637 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
11638 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11639 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
11640 IEM_MC_MAYBE_RAISE_FPU_XCPT();
11641
11642 IEM_MC_MEM_MAP(pi16Dst, IEM_ACCESS_DATA_W, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1 /*arg*/);
11643 IEM_MC_PREPARE_FPU_USAGE();
11644 IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0) {
11645 IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fist_r80_to_i16, pu16Fsw, pi16Dst, pr80Value);
11646 IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pi16Dst, IEM_ACCESS_DATA_W, u16Fsw);
11647 IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst, pVCpu->iem.s.uFpuOpcode);
11648 } IEM_MC_ELSE() {
 /* Stack underflow: store integer indefinite only if the IM mask allows. */
11649 IEM_MC_IF_FCW_IM() {
11650 IEM_MC_STORE_MEM_I16_CONST_BY_REF(pi16Dst, INT16_MIN /* (integer indefinite) */);
11651 IEM_MC_MEM_COMMIT_AND_UNMAP(pi16Dst, IEM_ACCESS_DATA_W);
11652 } IEM_MC_ENDIF();
11653 IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst, pVCpu->iem.s.uFpuOpcode);
11654 } IEM_MC_ENDIF();
11655 IEM_MC_ADVANCE_RIP_AND_FINISH();
11656
11657 IEM_MC_END();
11658}
11659
11660
11661/** Opcode 0xdf !11/4.
 * Load an 80-bit packed BCD value from memory, convert it to 80-bit real
 * and push it onto the FPU register stack. */
11662FNIEMOP_DEF_1(iemOp_fbld_m80d, uint8_t, bRm)
11663{
11664 IEMOP_MNEMONIC(fbld_m80d, "fbld m80d");
11665
11666 IEM_MC_BEGIN(2, 3, 0, 0);
11667 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
11668 IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
11669 IEM_MC_LOCAL(RTPBCD80U, d80Val);
11670 IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
11671 IEM_MC_ARG_LOCAL_REF(PCRTPBCD80U, pd80Val, d80Val, 1);
11672
11673 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
11674 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11675
11676 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
11677 IEM_MC_MAYBE_RAISE_FPU_XCPT();
11678 IEM_MC_FETCH_MEM_D80(d80Val, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
11679
11680 IEM_MC_PREPARE_FPU_USAGE();
 /* The push target (reg 7 relative to TOP) must be empty, else stack overflow. */
11681 IEM_MC_IF_FPUREG_IS_EMPTY(7) {
11682 IEM_MC_CALL_FPU_AIMPL_2(iemAImpl_fld_r80_from_d80, pFpuRes, pd80Val);
11683 IEM_MC_PUSH_FPU_RESULT_MEM_OP(FpuRes, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
11684 } IEM_MC_ELSE() {
11685 IEM_MC_FPU_STACK_PUSH_OVERFLOW_MEM_OP(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
11686 } IEM_MC_ENDIF();
11687 IEM_MC_ADVANCE_RIP_AND_FINISH();
11688
11689 IEM_MC_END();
11690}
11691
11692
11693/** Opcode 0xdf !11/5.
 * Loads a 64-bit signed integer from memory, converts it to an 80-bit
 * real and pushes it onto the FPU register stack. */
11694FNIEMOP_DEF_1(iemOp_fild_m64i, uint8_t, bRm)
11695{
11696 IEMOP_MNEMONIC(fild_m64i, "fild m64i");
11697
11698 IEM_MC_BEGIN(2, 3, 0, 0);
11699 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
11700 IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
11701 IEM_MC_LOCAL(int64_t, i64Val);
11702 IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
11703 IEM_MC_ARG_LOCAL_REF(int64_t const *, pi64Val, i64Val, 1);
11704
11705 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
11706 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11707
11708 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
11709 IEM_MC_MAYBE_RAISE_FPU_XCPT();
11710 IEM_MC_FETCH_MEM_I64(i64Val, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
11711
11712 IEM_MC_PREPARE_FPU_USAGE();
 /* The push target (reg 7 relative to TOP) must be empty, else stack overflow. */
11713 IEM_MC_IF_FPUREG_IS_EMPTY(7) {
11714 IEM_MC_CALL_FPU_AIMPL_2(iemAImpl_fild_r80_from_i64, pFpuRes, pi64Val);
11715 IEM_MC_PUSH_FPU_RESULT_MEM_OP(FpuRes, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
11716 } IEM_MC_ELSE() {
11717 IEM_MC_FPU_STACK_PUSH_OVERFLOW_MEM_OP(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
11718 } IEM_MC_ENDIF();
11719 IEM_MC_ADVANCE_RIP_AND_FINISH();
11720
11721 IEM_MC_END();
11722}
11723
11724
11725/** Opcode 0xdf !11/6.
 * Store ST0 to memory as an 80-bit packed BCD value, then pop the FPU
 * stack.  On stack underflow with IM masked, the BCD indefinite value is
 * stored instead. */
11726FNIEMOP_DEF_1(iemOp_fbstp_m80d, uint8_t, bRm)
11727{
11728 IEMOP_MNEMONIC(fbstp_m80d, "fbstp m80d");
11729 IEM_MC_BEGIN(3, 2, 0, 0);
11730 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
11731 IEM_MC_LOCAL(uint16_t, u16Fsw);
11732 IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
11733 IEM_MC_ARG(PRTPBCD80U, pd80Dst, 1);
11734 IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);
11735
11736 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
11737 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11738 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
11739 IEM_MC_MAYBE_RAISE_FPU_XCPT();
11740
 /* 10-byte operand, hence the explicit _EX mapping with a 7-byte alignment mask. */
11741 IEM_MC_MEM_MAP_EX(pd80Dst, IEM_ACCESS_DATA_W, sizeof(*pd80Dst), pVCpu->iem.s.iEffSeg, GCPtrEffDst, 7 /*cbAlign*/, 1 /*arg*/);
11742 IEM_MC_PREPARE_FPU_USAGE();
11743 IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0) {
11744 IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fst_r80_to_d80, pu16Fsw, pd80Dst, pr80Value);
11745 IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pd80Dst, IEM_ACCESS_DATA_W, u16Fsw);
11746 IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst, pVCpu->iem.s.uFpuOpcode);
11747 } IEM_MC_ELSE() {
 /* Stack underflow: store packed-BCD indefinite only if the IM mask allows. */
11748 IEM_MC_IF_FCW_IM() {
11749 IEM_MC_STORE_MEM_INDEF_D80_BY_REF(pd80Dst);
11750 IEM_MC_MEM_COMMIT_AND_UNMAP(pd80Dst, IEM_ACCESS_DATA_W);
11751 } IEM_MC_ENDIF();
11752 IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst, pVCpu->iem.s.uFpuOpcode);
11753 } IEM_MC_ENDIF();
11754 IEM_MC_ADVANCE_RIP_AND_FINISH();
11755
11756 IEM_MC_END();
11757}
11758
11759
11760/** Opcode 0xdf !11/7.
 * Store ST0 to memory as a 64-bit integer (rounding per FCW.RC), then pop
 * the FPU stack.  On stack underflow with IM masked, the integer
 * indefinite value (INT64_MIN) is stored instead. */
11761FNIEMOP_DEF_1(iemOp_fistp_m64i, uint8_t, bRm)
11762{
11763 IEMOP_MNEMONIC(fistp_m64i, "fistp m64i");
11764 IEM_MC_BEGIN(3, 2, 0, 0);
11765 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
11766 IEM_MC_LOCAL(uint16_t, u16Fsw);
11767 IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
11768 IEM_MC_ARG(int64_t *, pi64Dst, 1);
11769 IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);
11770
11771 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
11772 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11773 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
11774 IEM_MC_MAYBE_RAISE_FPU_XCPT();
11775
11776 IEM_MC_MEM_MAP(pi64Dst, IEM_ACCESS_DATA_W, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1 /*arg*/);
11777 IEM_MC_PREPARE_FPU_USAGE();
11778 IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0) {
11779 IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fist_r80_to_i64, pu16Fsw, pi64Dst, pr80Value);
11780 IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pi64Dst, IEM_ACCESS_DATA_W, u16Fsw);
11781 IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst, pVCpu->iem.s.uFpuOpcode);
11782 } IEM_MC_ELSE() {
 /* Stack underflow: store integer indefinite only if the IM mask allows. */
11783 IEM_MC_IF_FCW_IM() {
11784 IEM_MC_STORE_MEM_I64_CONST_BY_REF(pi64Dst, INT64_MIN /* (integer indefinite) */);
11785 IEM_MC_MEM_COMMIT_AND_UNMAP(pi64Dst, IEM_ACCESS_DATA_W);
11786 } IEM_MC_ENDIF();
11787 IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst, pVCpu->iem.s.uFpuOpcode);
11788 } IEM_MC_ENDIF();
11789 IEM_MC_ADVANCE_RIP_AND_FINISH();
11790
11791 IEM_MC_END();
11792}
11793
11794
11795/**
11796 * @opcode 0xdf
 *
 * Escape opcode 0xdf dispatcher. Register forms cover ffreep, the
 * reserved fxch/fstp aliases, fnstsw ax and the compare-and-pop EFLAGS
 * instructions; memory forms cover 16-bit/64-bit integer and packed BCD
 * load/store.
11797 */
11798FNIEMOP_DEF(iemOp_EscF7)
11799{
11800 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
 /* Record the x87 opcode word (modrm + low 3 bits of 0xdf) for later FOP updating. */
11801 pVCpu->iem.s.uFpuOpcode = RT_MAKE_U16(bRm, 0xdf & 0x7);
11802 if (IEM_IS_MODRM_REG_MODE(bRm))
11803 {
11804 switch (IEM_GET_MODRM_REG_8(bRm))
11805 {
11806 case 0: return FNIEMOP_CALL_1(iemOp_ffreep_stN, bRm); /* ffree + pop afterwards, since forever according to AMD. */
11807 case 1: return FNIEMOP_CALL_1(iemOp_fxch_stN, bRm); /* Reserved, behaves like FXCH ST(i) on intel. */
11808 case 2: return FNIEMOP_CALL_1(iemOp_fstp_stN, bRm); /* Reserved, behaves like FSTP ST(i) on intel. */
11809 case 3: return FNIEMOP_CALL_1(iemOp_fstp_stN, bRm); /* Reserved, behaves like FSTP ST(i) on intel. */
 /* /4 is only defined for the single 0xdf 0xe0 encoding (fnstsw ax); the rest is #UD. */
11810 case 4: if (bRm == 0xe0)
11811 return FNIEMOP_CALL(iemOp_fnstsw_ax);
11812 IEMOP_RAISE_INVALID_OPCODE_RET();
11813 case 5: return FNIEMOP_CALL_1(iemOp_fucomip_st0_stN, bRm);
11814 case 6: return FNIEMOP_CALL_1(iemOp_fcomip_st0_stN, bRm);
11815 case 7: IEMOP_RAISE_INVALID_OPCODE_RET();
11816 IEM_NOT_REACHED_DEFAULT_CASE_RET();
11817 }
11818 }
11819 else
11820 {
11821 switch (IEM_GET_MODRM_REG_8(bRm))
11822 {
11823 case 0: return FNIEMOP_CALL_1(iemOp_fild_m16i, bRm);
11824 case 1: return FNIEMOP_CALL_1(iemOp_fisttp_m16i, bRm);
11825 case 2: return FNIEMOP_CALL_1(iemOp_fist_m16i, bRm);
11826 case 3: return FNIEMOP_CALL_1(iemOp_fistp_m16i, bRm);
11827 case 4: return FNIEMOP_CALL_1(iemOp_fbld_m80d, bRm);
11828 case 5: return FNIEMOP_CALL_1(iemOp_fild_m64i, bRm);
11829 case 6: return FNIEMOP_CALL_1(iemOp_fbstp_m80d, bRm);
11830 case 7: return FNIEMOP_CALL_1(iemOp_fistp_m64i, bRm);
11831 IEM_NOT_REACHED_DEFAULT_CASE_RET();
11832 }
11833 }
11834}
11835
11836
11837/**
11838 * @opcode 0xe0
 *
 * LOOPNE/LOOPNZ rel8: decrements CX/ECX/RCX (selected by the effective
 * *address* size) and takes the short branch while the counter is
 * non-zero and ZF is clear.  EFLAGS are read but never modified.
11839 */
11840FNIEMOP_DEF(iemOp_loopne_Jb)
11841{
11842 IEMOP_MNEMONIC(loopne_Jb, "loopne Jb");
11843 int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
11844 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
11845
 /* The address-size prefix selects which counter register is used. */
11846 switch (pVCpu->iem.s.enmEffAddrMode)
11847 {
11848 case IEMMODE_16BIT:
11849 IEM_MC_BEGIN(0, 0, IEM_MC_F_NOT_64BIT, 0);
11850 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11851 IEM_MC_SUB_GREG_U16(X86_GREG_xCX, 1);
11852 IEM_MC_IF_CX_IS_NZ_AND_EFL_BIT_NOT_SET(X86_EFL_ZF) {
11853 IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
11854 } IEM_MC_ELSE() {
11855 IEM_MC_ADVANCE_RIP_AND_FINISH();
11856 } IEM_MC_ENDIF();
11857 IEM_MC_END();
11858 break;
11859
11860 case IEMMODE_32BIT:
11861 IEM_MC_BEGIN(0, 0, IEM_MC_F_MIN_386, 0);
11862 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11863 IEM_MC_SUB_GREG_U32(X86_GREG_xCX, 1);
11864 IEM_MC_IF_ECX_IS_NZ_AND_EFL_BIT_NOT_SET(X86_EFL_ZF) {
11865 IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
11866 } IEM_MC_ELSE() {
11867 IEM_MC_ADVANCE_RIP_AND_FINISH();
11868 } IEM_MC_ENDIF();
11869 IEM_MC_END();
11870 break;
11871
11872 case IEMMODE_64BIT:
11873 IEM_MC_BEGIN(0, 0, IEM_MC_F_64BIT, 0);
11874 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11875 IEM_MC_SUB_GREG_U64(X86_GREG_xCX, 1);
11876 IEM_MC_IF_RCX_IS_NZ_AND_EFL_BIT_NOT_SET(X86_EFL_ZF) {
11877 IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
11878 } IEM_MC_ELSE() {
11879 IEM_MC_ADVANCE_RIP_AND_FINISH();
11880 } IEM_MC_ENDIF();
11881 IEM_MC_END();
11882 break;
11883
11884 IEM_NOT_REACHED_DEFAULT_CASE_RET();
11885 }
11886}
11887
11888
11889/**
11890 * @opcode 0xe1
 *
 * LOOPE/LOOPZ rel8: decrements CX/ECX/RCX (selected by the effective
 * *address* size) and takes the short branch while the counter is
 * non-zero and ZF is set.  EFLAGS are read but never modified.
11891 */
11892FNIEMOP_DEF(iemOp_loope_Jb)
11893{
11894 IEMOP_MNEMONIC(loope_Jb, "loope Jb");
11895 int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
11896 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
11897
 /* The address-size prefix selects which counter register is used. */
11898 switch (pVCpu->iem.s.enmEffAddrMode)
11899 {
11900 case IEMMODE_16BIT:
11901 IEM_MC_BEGIN(0, 0, IEM_MC_F_NOT_64BIT, 0);
11902 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11903 IEM_MC_SUB_GREG_U16(X86_GREG_xCX, 1);
11904 IEM_MC_IF_CX_IS_NZ_AND_EFL_BIT_SET(X86_EFL_ZF) {
11905 IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
11906 } IEM_MC_ELSE() {
11907 IEM_MC_ADVANCE_RIP_AND_FINISH();
11908 } IEM_MC_ENDIF();
11909 IEM_MC_END();
11910 break;
11911
11912 case IEMMODE_32BIT:
11913 IEM_MC_BEGIN(0, 0, IEM_MC_F_MIN_386, 0);
11914 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11915 IEM_MC_SUB_GREG_U32(X86_GREG_xCX, 1);
11916 IEM_MC_IF_ECX_IS_NZ_AND_EFL_BIT_SET(X86_EFL_ZF) {
11917 IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
11918 } IEM_MC_ELSE() {
11919 IEM_MC_ADVANCE_RIP_AND_FINISH();
11920 } IEM_MC_ENDIF();
11921 IEM_MC_END();
11922 break;
11923
11924 case IEMMODE_64BIT:
11925 IEM_MC_BEGIN(0, 0, IEM_MC_F_64BIT, 0);
11926 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11927 IEM_MC_SUB_GREG_U64(X86_GREG_xCX, 1);
11928 IEM_MC_IF_RCX_IS_NZ_AND_EFL_BIT_SET(X86_EFL_ZF) {
11929 IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
11930 } IEM_MC_ELSE() {
11931 IEM_MC_ADVANCE_RIP_AND_FINISH();
11932 } IEM_MC_ENDIF();
11933 IEM_MC_END();
11934 break;
11935
11936 IEM_NOT_REACHED_DEFAULT_CASE_RET();
11937 }
11938}
11939
11940
11941/**
11942 * @opcode 0xe2
 *
 * LOOP rel8: decrements CX/ECX/RCX (selected by the effective *address*
 * size) and takes the short branch while the counter is non-zero.
 * EFLAGS are not consulted or modified.  Includes a log-only shortcut
 * that collapses single-instruction busy loops (LOOP $-2).
11943 */
11944FNIEMOP_DEF(iemOp_loop_Jb)
11945{
11946 IEMOP_MNEMONIC(loop_Jb, "loop Jb");
11947 int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
11948 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
11949
11950 /** @todo Check out the \#GP case if EIP < CS.Base or EIP > CS.Limit when
11951 * using the 32-bit operand size override. How can that be restarted? See
11952 * weird pseudo code in intel manual. */
11953
11954 /* NB: At least Windows for Workgroups 3.11 (NDIS.386) and Windows 95 (NDIS.VXD, IOS)
11955 * use LOOP $-2 to implement NdisStallExecution and other CPU stall APIs. Shortcutting
11956 * the loop causes guest crashes, but when logging it's nice to skip a few million
11957 * lines of useless output. */
11958#if defined(LOG_ENABLED)
 /* Detect LOOP jumping to its own start (i8Imm == -instruction length) and
    simply zero the counter instead of iterating - log builds only. */
11959 if ((LogIs3Enabled() || LogIs4Enabled()) && -(int8_t)IEM_GET_INSTR_LEN(pVCpu) == i8Imm)
11960 switch (pVCpu->iem.s.enmEffAddrMode)
11961 {
11962 case IEMMODE_16BIT:
11963 IEM_MC_BEGIN(0, 0, IEM_MC_F_NOT_64BIT, 0);
11964 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11965 IEM_MC_STORE_GREG_U16_CONST(X86_GREG_xCX, 0);
11966 IEM_MC_ADVANCE_RIP_AND_FINISH();
11967 IEM_MC_END();
11968 break;
11969
11970 case IEMMODE_32BIT:
11971 IEM_MC_BEGIN(0, 0, IEM_MC_F_MIN_386, 0);
11972 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11973 IEM_MC_STORE_GREG_U32_CONST(X86_GREG_xCX, 0);
11974 IEM_MC_ADVANCE_RIP_AND_FINISH();
11975 IEM_MC_END();
11976 break;
11977
11978 case IEMMODE_64BIT:
11979 IEM_MC_BEGIN(0, 0, IEM_MC_F_64BIT, 0);
11980 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11981 IEM_MC_STORE_GREG_U64_CONST(X86_GREG_xCX, 0);
11982 IEM_MC_ADVANCE_RIP_AND_FINISH();
11983 IEM_MC_END();
11984 break;
11985
11986 IEM_NOT_REACHED_DEFAULT_CASE_RET();
11987 }
11988#endif
11989
 /* Normal path: decrement the counter and branch while it is non-zero. */
11990 switch (pVCpu->iem.s.enmEffAddrMode)
11991 {
11992 case IEMMODE_16BIT:
11993 IEM_MC_BEGIN(0, 0, IEM_MC_F_NOT_64BIT, 0);
11994 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11995 IEM_MC_SUB_GREG_U16(X86_GREG_xCX, 1);
11996 IEM_MC_IF_CX_IS_NZ() {
11997 IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
11998 } IEM_MC_ELSE() {
11999 IEM_MC_ADVANCE_RIP_AND_FINISH();
12000 } IEM_MC_ENDIF();
12001 IEM_MC_END();
12002 break;
12003
12004 case IEMMODE_32BIT:
12005 IEM_MC_BEGIN(0, 0, IEM_MC_F_MIN_386, 0);
12006 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
12007 IEM_MC_SUB_GREG_U32(X86_GREG_xCX, 1);
12008 IEM_MC_IF_ECX_IS_NZ() {
12009 IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
12010 } IEM_MC_ELSE() {
12011 IEM_MC_ADVANCE_RIP_AND_FINISH();
12012 } IEM_MC_ENDIF();
12013 IEM_MC_END();
12014 break;
12015
12016 case IEMMODE_64BIT:
12017 IEM_MC_BEGIN(0, 0, IEM_MC_F_64BIT, 0);
12018 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
12019 IEM_MC_SUB_GREG_U64(X86_GREG_xCX, 1);
12020 IEM_MC_IF_RCX_IS_NZ() {
12021 IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
12022 } IEM_MC_ELSE() {
12023 IEM_MC_ADVANCE_RIP_AND_FINISH();
12024 } IEM_MC_ENDIF();
12025 IEM_MC_END();
12026 break;
12027
12028 IEM_NOT_REACHED_DEFAULT_CASE_RET();
12029 }
12030}
12031
12032
12033/**
12034 * @opcode 0xe3
 *
 * JCXZ/JECXZ/JRCXZ rel8: takes the short branch when CX/ECX/RCX
 * (selected by the effective address size) is zero.  The counter is only
 * tested, never modified; EFLAGS are untouched.
12035 */
12036FNIEMOP_DEF(iemOp_jecxz_Jb)
12037{
12038 IEMOP_MNEMONIC(jecxz_Jb, "jecxz Jb");
12039 int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
12040 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
12041
12042 switch (pVCpu->iem.s.enmEffAddrMode)
12043 {
12044 case IEMMODE_16BIT:
12045 IEM_MC_BEGIN(0, 0, IEM_MC_F_NOT_64BIT, 0);
12046 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
 /* Note the inverted branch structure: non-zero falls through, zero jumps. */
12047 IEM_MC_IF_CX_IS_NZ() {
12048 IEM_MC_ADVANCE_RIP_AND_FINISH();
12049 } IEM_MC_ELSE() {
12050 IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
12051 } IEM_MC_ENDIF();
12052 IEM_MC_END();
12053 break;
12054
12055 case IEMMODE_32BIT:
12056 IEM_MC_BEGIN(0, 0, IEM_MC_F_MIN_386, 0);
12057 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
12058 IEM_MC_IF_ECX_IS_NZ() {
12059 IEM_MC_ADVANCE_RIP_AND_FINISH();
12060 } IEM_MC_ELSE() {
12061 IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
12062 } IEM_MC_ENDIF();
12063 IEM_MC_END();
12064 break;
12065
12066 case IEMMODE_64BIT:
12067 IEM_MC_BEGIN(0, 0, IEM_MC_F_64BIT, 0);
12068 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
12069 IEM_MC_IF_RCX_IS_NZ() {
12070 IEM_MC_ADVANCE_RIP_AND_FINISH();
12071 } IEM_MC_ELSE() {
12072 IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
12073 } IEM_MC_ENDIF();
12074 IEM_MC_END();
12075 break;
12076
12077 IEM_NOT_REACHED_DEFAULT_CASE_RET();
12078 }
12079}
12080
12081
12082/** Opcode 0xe4 */
/** IN AL,imm8 - read one byte from the immediate I/O port into AL.
 *  Deferred to the C implementation (I/O, possible VM-exit). */
12083FNIEMOP_DEF(iemOp_in_AL_Ib)
12084{
12085 IEMOP_MNEMONIC(in_AL_Ib, "in AL,Ib");
12086 uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
12087 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
 /* 0x80 flags the port as coming from an immediate; low bits carry the address mode. */
12088 IEM_MC_DEFER_TO_CIMPL_3_RET(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
12089 iemCImpl_in, u8Imm, 1, 0x80 /* fImm */ | pVCpu->iem.s.enmEffAddrMode);
12090}
12091
12092
12093/** Opcode 0xe5 */
/** IN eAX,imm8 - read a word/dword (per operand size) from the immediate
 *  I/O port into AX/EAX.  Deferred to the C implementation. */
12094FNIEMOP_DEF(iemOp_in_eAX_Ib)
12095{
12096 IEMOP_MNEMONIC(in_eAX_Ib, "in eAX,Ib");
12097 uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
12098 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
12099 IEM_MC_DEFER_TO_CIMPL_3_RET(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
12100 iemCImpl_in, u8Imm, pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT ? 2 : 4,
12101 0x80 /* fImm */ | pVCpu->iem.s.enmEffAddrMode);
12102}
12103
12104
12105/** Opcode 0xe6 */
/** OUT imm8,AL - write AL to the immediate I/O port.
 *  Deferred to the C implementation (I/O, possible VM-exit). */
12106FNIEMOP_DEF(iemOp_out_Ib_AL)
12107{
12108 IEMOP_MNEMONIC(out_Ib_AL, "out Ib,AL");
12109 uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
12110 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
12111 IEM_MC_DEFER_TO_CIMPL_3_RET(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
12112 iemCImpl_out, u8Imm, 1, 0x80 /* fImm */ | pVCpu->iem.s.enmEffAddrMode);
12113}
12114
12115
12116/** Opcode 0xe7 */
/** OUT imm8,eAX - write AX/EAX (per operand size) to the immediate I/O
 *  port.  Deferred to the C implementation. */
12117FNIEMOP_DEF(iemOp_out_Ib_eAX)
12118{
12119 IEMOP_MNEMONIC(out_Ib_eAX, "out Ib,eAX");
12120 uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
12121 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
12122 IEM_MC_DEFER_TO_CIMPL_3_RET(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
12123 iemCImpl_out, u8Imm, pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT ? 2 : 4,
12124 0x80 /* fImm */ | pVCpu->iem.s.enmEffAddrMode);
12125}
12126
12127
12128/**
12129 * @opcode 0xe8
 *
 * CALL rel16/rel32: near relative call.  The immediate is sign-extended
 * per the effective operand size (64-bit mode uses a sign-extended
 * 32-bit displacement); the push/branch is done by the C implementation.
12130 */
12131FNIEMOP_DEF(iemOp_call_Jv)
12132{
12133 IEMOP_MNEMONIC(call_Jv, "call Jv");
12134 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();
12135 switch (pVCpu->iem.s.enmEffOpSize)
12136 {
12137 case IEMMODE_16BIT:
12138 {
12139 uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
12140 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_BRANCH_RELATIVE, iemCImpl_call_rel_16, (int16_t)u16Imm);
12141 }
12142
12143 case IEMMODE_32BIT:
12144 {
12145 uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
12146 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_BRANCH_RELATIVE, iemCImpl_call_rel_32, (int32_t)u32Imm);
12147 }
12148
12149 case IEMMODE_64BIT:
12150 {
 /* 64-bit mode still encodes a 32-bit displacement, sign-extended here. */
12151 uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm);
12152 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_BRANCH_RELATIVE, iemCImpl_call_rel_64, u64Imm);
12153 }
12154
12155 IEM_NOT_REACHED_DEFAULT_CASE_RET();
12156 }
12157}
12158
12159
12160/**
12161 * @opcode 0xe9
 *
 * JMP rel16/rel32: near relative jump.  32-bit and 64-bit effective
 * operand sizes share the rel32 decode path.
12162 */
12163FNIEMOP_DEF(iemOp_jmp_Jv)
12164{
12165 IEMOP_MNEMONIC(jmp_Jv, "jmp Jv");
12166 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();
12167 switch (pVCpu->iem.s.enmEffOpSize)
12168 {
12169 case IEMMODE_16BIT:
12170 IEM_MC_BEGIN(0, 0, 0, 0);
12171 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
12172 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
12173 IEM_MC_REL_JMP_S16_AND_FINISH(i16Imm);
12174 IEM_MC_END();
12175 break;
12176
 /* 64-bit mode uses the same sign-extended 32-bit displacement as 32-bit. */
12177 case IEMMODE_64BIT:
12178 case IEMMODE_32BIT:
12179 IEM_MC_BEGIN(0, 0, IEM_MC_F_MIN_386, 0);
12180 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
12181 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
12182 IEM_MC_REL_JMP_S32_AND_FINISH(i32Imm);
12183 IEM_MC_END();
12184 break;
12185
12186 IEM_NOT_REACHED_DEFAULT_CASE_RET();
12187 }
12188}
12189
12190
12191/**
12192 * @opcode 0xea
 *
 * JMP ptr16:16 / ptr16:32: direct far jump with an inline far pointer.
 * Invalid in 64-bit mode.  The selector:offset pair is decoded here and
 * handed to the far-jump C implementation.
12193 */
12194FNIEMOP_DEF(iemOp_jmp_Ap)
12195{
12196 IEMOP_MNEMONIC(jmp_Ap, "jmp Ap");
12197 IEMOP_HLP_NO_64BIT();
12198
12199 /* Decode the far pointer address and pass it on to the far call C implementation. */
12200 uint32_t off32Seg;
 /* Offset size follows the operand size; the selector is always 16 bits. */
12201 if (pVCpu->iem.s.enmEffOpSize != IEMMODE_16BIT)
12202 IEM_OPCODE_GET_NEXT_U32(&off32Seg);
12203 else
12204 IEM_OPCODE_GET_NEXT_U16_ZX_U32(&off32Seg);
12205 uint16_t u16Sel; IEM_OPCODE_GET_NEXT_U16(&u16Sel);
12206 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
12207 IEM_MC_DEFER_TO_CIMPL_3_RET(IEM_CIMPL_F_BRANCH_DIRECT | IEM_CIMPL_F_BRANCH_FAR
12208 | IEM_CIMPL_F_MODE | IEM_CIMPL_F_RFLAGS | IEM_CIMPL_F_VMEXIT,
12209 iemCImpl_FarJmp, u16Sel, off32Seg, pVCpu->iem.s.enmEffOpSize);
12210}
12211
12212
12213/**
12214 * @opcode 0xeb
 *
 * JMP rel8: short relative jump, unconditional.
12215 */
12216FNIEMOP_DEF(iemOp_jmp_Jb)
12217{
12218 IEMOP_MNEMONIC(jmp_Jb, "jmp Jb");
12219 int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
12220 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();
12221
12222 IEM_MC_BEGIN(0, 0, 0, 0);
12223 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
12224 IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
12225 IEM_MC_END();
12226}
12227
12228
12229/** Opcode 0xec */
/** IN AL,DX - read one byte from the I/O port in DX into AL. */
12230FNIEMOP_DEF(iemOp_in_AL_DX)
12231{
12232 IEMOP_MNEMONIC(in_AL_DX, "in AL,DX");
12233 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
12234 IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
12235 iemCImpl_in_eAX_DX, 1, pVCpu->iem.s.enmEffAddrMode);
12236}
12237
12238
12238/** Opcode 0xed */
/** IN eAX,DX - read a word/dword (per operand size) from the I/O port in
 *  DX into AX/EAX. */
12239FNIEMOP_DEF(iemOp_in_eAX_DX)
12240{
12241 IEMOP_MNEMONIC(in_eAX_DX, "in eAX,DX");
12242 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
12243 IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
12244 iemCImpl_in_eAX_DX, pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT ? 2 : 4,
12245 pVCpu->iem.s.enmEffAddrMode);
12246}
12248
12249
12250/** Opcode 0xee */
/** OUT DX,AL - write AL to the I/O port in DX. */
12251FNIEMOP_DEF(iemOp_out_DX_AL)
12252{
12253 IEMOP_MNEMONIC(out_DX_AL, "out DX,AL");
12254 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
12255 IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
12256 iemCImpl_out_DX_eAX, 1, pVCpu->iem.s.enmEffAddrMode);
12257}
12258
12259
12260/** Opcode 0xef */
/** OUT DX,eAX - write AX/EAX (per operand size) to the I/O port in DX. */
12261FNIEMOP_DEF(iemOp_out_DX_eAX)
12262{
12263 IEMOP_MNEMONIC(out_DX_eAX, "out DX,eAX");
12264 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
12265 IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
12266 iemCImpl_out_DX_eAX, pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT ? 2 : 4,
12267 pVCpu->iem.s.enmEffAddrMode);
12268}
12269
12270
12271/**
12272 * @opcode 0xf0
 *
 * LOCK prefix: records the prefix (unless the VM is configured to
 * disregard LOCK) and recursively decodes the following opcode byte.
12273 */
12274FNIEMOP_DEF(iemOp_lock)
12275{
12276 IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("lock");
 /* IEM_F_X86_DISREGARD_LOCK lets the caller ignore LOCK (e.g. UP guests). */
12277 if (!(pVCpu->iem.s.fExec & IEM_F_X86_DISREGARD_LOCK))
12278 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_LOCK;
12279
 /* Continue decoding the instruction proper. */
12280 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
12281 return FNIEMOP_CALL(g_apfnOneByteMap[b]);
12282}
12283
12284
12285/**
12286 * @opcode 0xf1
 *
 * INT1/ICEBP: raises a \#DB via the generic int C implementation.
12287 */
12288FNIEMOP_DEF(iemOp_int1)
12289{
12290 IEMOP_MNEMONIC(int1, "int1"); /* icebp */
12291 /** @todo Does not generate \#UD on 286, or so they say... Was allegedly a
12292 * prefix byte on 8086 and/or/maybe 80286 without meaning according to the 286
12293 * LOADALL memo. Needs some testing. */
12294 IEMOP_HLP_MIN_386();
12295 /** @todo testcase! */
12296 IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_BRANCH_INDIRECT | IEM_CIMPL_F_BRANCH_FAR
12297 | IEM_CIMPL_F_MODE | IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_RFLAGS,
12298 iemCImpl_int, X86_XCPT_DB, IEMINT_INT1);
12299}
12300
12301
12302/**
12303 * @opcode 0xf2
 *
 * REPNE/REPNZ prefix: records the prefix (replacing any earlier REPE)
 * and recursively decodes the following opcode byte.
12304 */
12305FNIEMOP_DEF(iemOp_repne)
12306{
12307 /* This overrides any previous REPE prefix. */
12308 pVCpu->iem.s.fPrefixes &= ~IEM_OP_PRF_REPZ;
12309 IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("repne");
12310 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REPNZ;
12311
12312 /* For the 4 entry opcode tables, REPNZ overrides any previous
12313 REPZ and operand size prefixes. */
12314 pVCpu->iem.s.idxPrefix = 3;
12315
12316 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
12317 return FNIEMOP_CALL(g_apfnOneByteMap[b]);
12318}
12319
12320
12321/**
12322 * @opcode 0xf3
 *
 * REPE/REPZ prefix: records the prefix (replacing any earlier REPNE)
 * and recursively decodes the following opcode byte.
12323 */
12324FNIEMOP_DEF(iemOp_repe)
12325{
12326 /* This overrides any previous REPNE prefix. */
12327 pVCpu->iem.s.fPrefixes &= ~IEM_OP_PRF_REPNZ;
12328 IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("repe");
12329 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REPZ;
12330
12331 /* For the 4 entry opcode tables, REPZ overrides any previous
12332 REPNZ and operand size prefixes. */
12333 pVCpu->iem.s.idxPrefix = 2;
12334
12335 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
12336 return FNIEMOP_CALL(g_apfnOneByteMap[b]);
12337}
12338
12339
12340/**
12341 * @opcode 0xf4
 *
 * HLT: halts the CPU.  Deferred to the C implementation; ends the
 * current translation block and may cause a VM-exit.  Privilege and
 * mode checks are done by the C implementation, not here.
12342 */
12343FNIEMOP_DEF(iemOp_hlt)
12344{
12345 IEMOP_MNEMONIC(hlt, "hlt");
12346 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
12347 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_END_TB | IEM_CIMPL_F_VMEXIT, iemCImpl_hlt);
12348}
12349
12350
12351/**
12352 * @opcode 0xf5
 *
 * CMC: complements the carry flag; no other flags are affected.
12353 */
12354FNIEMOP_DEF(iemOp_cmc)
12355{
12356 IEMOP_MNEMONIC(cmc, "cmc");
12357 IEM_MC_BEGIN(0, 0, 0, 0);
12358 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
12359 IEM_MC_FLIP_EFL_BIT(X86_EFL_CF);
12360 IEM_MC_ADVANCE_RIP_AND_FINISH();
12361 IEM_MC_END();
12362}
12363
12364
12365/**
12366 * Body of 'inc/dec/not/neg Eb'.
 *
 * @param a_bRm        The ModR/M byte.
 * @param a_fnNormalU8 Assembly worker for the plain (non-locked) case.
 * @param a_fnLockedU8 Assembly worker for the LOCK-prefixed memory case.
 *
 * Register operands dispatch directly; memory operands are mapped
 * read-write and committed after the worker updates value and EFLAGS.
12367 */
12368#define IEMOP_BODY_UNARY_Eb(a_bRm, a_fnNormalU8, a_fnLockedU8) \
12369 if (IEM_IS_MODRM_REG_MODE(a_bRm)) \
12370 { \
12371 /* register access */ \
12372 IEM_MC_BEGIN(2, 0, 0, 0); \
12373 IEMOP_HLP_DONE_DECODING(); \
12374 IEM_MC_ARG(uint8_t *, pu8Dst, 0); \
12375 IEM_MC_ARG(uint32_t *, pEFlags, 1); \
12376 IEM_MC_REF_GREG_U8(pu8Dst, IEM_GET_MODRM_RM(pVCpu, a_bRm)); \
12377 IEM_MC_REF_EFLAGS(pEFlags); \
12378 IEM_MC_CALL_VOID_AIMPL_2(a_fnNormalU8, pu8Dst, pEFlags); \
12379 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
12380 IEM_MC_END(); \
12381 } \
12382 else \
12383 { \
12384 /* memory access. */ \
12385 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK)) \
12386 { \
12387 IEM_MC_BEGIN(2, 2, 0, 0); \
12388 IEM_MC_ARG(uint8_t *, pu8Dst, 0); \
12389 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 1); \
12390 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
12391 IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
12392 \
12393 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, a_bRm, 0); \
12394 IEMOP_HLP_DONE_DECODING(); \
12395 IEM_MC_MEM_MAP_U8_RW(pu8Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
12396 IEM_MC_FETCH_EFLAGS(EFlags); \
12397 IEM_MC_CALL_VOID_AIMPL_2(a_fnNormalU8, pu8Dst, pEFlags); \
12398 \
12399 IEM_MC_MEM_COMMIT_AND_UNMAP_RW(pu8Dst, bUnmapInfo); \
12400 IEM_MC_COMMIT_EFLAGS(EFlags); \
12401 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
12402 IEM_MC_END(); \
12403 } \
12404 else \
12405 { \
 /* LOCK prefix: same mapping, but the locked worker is invoked. \
    NOTE(review): atomicity rests on a_fnLockedU8 - the mapping itself \
    is a plain RW mapping. */ \
12406 IEM_MC_BEGIN(2, 2, 0, 0); \
12407 IEM_MC_ARG(uint8_t *, pu8Dst, 0); \
12408 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 1); \
12409 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
12410 IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
12411 \
12412 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, a_bRm, 0); \
12413 IEMOP_HLP_DONE_DECODING(); \
12414 IEM_MC_MEM_MAP_U8_RW(pu8Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
12415 IEM_MC_FETCH_EFLAGS(EFlags); \
12416 IEM_MC_CALL_VOID_AIMPL_2(a_fnLockedU8, pu8Dst, pEFlags); \
12417 \
12418 IEM_MC_MEM_COMMIT_AND_UNMAP_RW(pu8Dst, bUnmapInfo); \
12419 IEM_MC_COMMIT_EFLAGS(EFlags); \
12420 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
12421 IEM_MC_END(); \
12422 } \
12423 } \
12424 (void)0
12425
12426
/**
 * Body for 'inc/dec/not/neg Ev' (groups 3 and 5).
 *
 * Emits the register forms and the unlocked memory forms for all three
 * operand sizes.
 *
 * NOTE: This macro deliberately ends inside an unterminated 'else {' block;
 * it MUST be followed by IEMOP_BODY_UNARY_Ev_LOCKED, which supplies the
 * LOCK-prefixed memory forms and closes the braces.
 *
 * @param   a_fnNormalU16   16-bit assembly worker (register / unlocked memory).
 * @param   a_fnNormalU32   32-bit assembly worker (register / unlocked memory).
 * @param   a_fnNormalU64   64-bit assembly worker (register / unlocked memory).
 */
#define IEMOP_BODY_UNARY_Ev(a_fnNormalU16, a_fnNormalU32, a_fnNormalU64) \
    if (IEM_IS_MODRM_REG_MODE(bRm)) \
    { \
        /* \
         * Register target \
         */ \
        switch (pVCpu->iem.s.enmEffOpSize) \
        { \
            case IEMMODE_16BIT: \
                IEM_MC_BEGIN(2, 0, 0, 0); \
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
                IEM_MC_ARG(uint16_t *,      pu16Dst, 0); \
                IEM_MC_ARG(uint32_t *,      pEFlags, 1); \
                IEM_MC_REF_GREG_U16(pu16Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
                IEM_MC_REF_EFLAGS(pEFlags); \
                IEM_MC_CALL_VOID_AIMPL_2(a_fnNormalU16, pu16Dst, pEFlags); \
                IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                IEM_MC_END(); \
                break; \
            \
            case IEMMODE_32BIT: \
                IEM_MC_BEGIN(2, 0, IEM_MC_F_MIN_386, 0); \
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
                IEM_MC_ARG(uint32_t *,      pu32Dst, 0); \
                IEM_MC_ARG(uint32_t *,      pEFlags, 1); \
                IEM_MC_REF_GREG_U32(pu32Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
                IEM_MC_REF_EFLAGS(pEFlags); \
                IEM_MC_CALL_VOID_AIMPL_2(a_fnNormalU32, pu32Dst, pEFlags); \
                /* 32-bit register writes zero bits 63:32 in 64-bit mode. */ \
                IEM_MC_CLEAR_HIGH_GREG_U64(IEM_GET_MODRM_RM(pVCpu, bRm)); \
                IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                IEM_MC_END(); \
                break; \
            \
            case IEMMODE_64BIT: \
                IEM_MC_BEGIN(2, 0, IEM_MC_F_64BIT, 0); \
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
                IEM_MC_ARG(uint64_t *,      pu64Dst, 0); \
                IEM_MC_ARG(uint32_t *,      pEFlags, 1); \
                IEM_MC_REF_GREG_U64(pu64Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
                IEM_MC_REF_EFLAGS(pEFlags); \
                IEM_MC_CALL_VOID_AIMPL_2(a_fnNormalU64, pu64Dst, pEFlags); \
                IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                IEM_MC_END(); \
                break; \
            \
            IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
        } \
    } \
    else \
    { \
        /* \
         * Memory target. \
         */ \
        if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK)) \
        { \
            switch (pVCpu->iem.s.enmEffOpSize) \
            { \
                case IEMMODE_16BIT: \
                    IEM_MC_BEGIN(2, 3, 0, 0); \
                    IEM_MC_ARG(uint16_t *,      pu16Dst,         0); \
                    IEM_MC_ARG_LOCAL_EFLAGS(    pEFlags, EFlags, 1); \
                    IEM_MC_LOCAL(RTGCPTR,       GCPtrEffDst); \
                    IEM_MC_LOCAL(uint8_t,       bUnmapInfo); \
                    \
                    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
                    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
                    IEM_MC_MEM_MAP_U16_RW(pu16Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
                    IEM_MC_FETCH_EFLAGS(EFlags); \
                    IEM_MC_CALL_VOID_AIMPL_2(a_fnNormalU16, pu16Dst, pEFlags); \
                    \
                    IEM_MC_MEM_COMMIT_AND_UNMAP_RW(pu16Dst, bUnmapInfo); \
                    IEM_MC_COMMIT_EFLAGS(EFlags); \
                    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                    IEM_MC_END(); \
                    break; \
                \
                case IEMMODE_32BIT: \
                    IEM_MC_BEGIN(2, 3, IEM_MC_F_MIN_386, 0); \
                    IEM_MC_ARG(uint32_t *,      pu32Dst,         0); \
                    IEM_MC_ARG_LOCAL_EFLAGS(    pEFlags, EFlags, 1); \
                    IEM_MC_LOCAL(RTGCPTR,       GCPtrEffDst); \
                    IEM_MC_LOCAL(uint8_t,       bUnmapInfo); \
                    \
                    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
                    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
                    IEM_MC_MEM_MAP_U32_RW(pu32Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
                    IEM_MC_FETCH_EFLAGS(EFlags); \
                    IEM_MC_CALL_VOID_AIMPL_2(a_fnNormalU32, pu32Dst, pEFlags); \
                    \
                    IEM_MC_MEM_COMMIT_AND_UNMAP_RW(pu32Dst, bUnmapInfo); \
                    IEM_MC_COMMIT_EFLAGS(EFlags); \
                    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                    IEM_MC_END(); \
                    break; \
                \
                case IEMMODE_64BIT: \
                    IEM_MC_BEGIN(2, 3, IEM_MC_F_64BIT, 0); \
                    IEM_MC_ARG(uint64_t *,      pu64Dst,         0); \
                    IEM_MC_ARG_LOCAL_EFLAGS(    pEFlags, EFlags, 1); \
                    IEM_MC_LOCAL(RTGCPTR,       GCPtrEffDst); \
                    IEM_MC_LOCAL(uint8_t,       bUnmapInfo); \
                    \
                    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
                    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
                    IEM_MC_MEM_MAP_U64_RW(pu64Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
                    IEM_MC_FETCH_EFLAGS(EFlags); \
                    IEM_MC_CALL_VOID_AIMPL_2(a_fnNormalU64, pu64Dst, pEFlags); \
                    \
                    IEM_MC_MEM_COMMIT_AND_UNMAP_RW(pu64Dst, bUnmapInfo); \
                    IEM_MC_COMMIT_EFLAGS(EFlags); \
                    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                    IEM_MC_END(); \
                    break; \
                \
                IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
            } \
        } \
        else \
        { \
            (void)0
12550
/**
 * LOCK-prefixed memory forms for 'inc/dec/not/neg Ev'.
 *
 * NOTE: Continuation of IEMOP_BODY_UNARY_Ev - this macro opens inside the
 * 'else {' block that IEMOP_BODY_UNARY_Ev left unterminated and closes the
 * remaining braces at the end.  The two must always be used as a pair.
 *
 * @param   a_fnLockedU16   16-bit locked assembly worker.
 * @param   a_fnLockedU32   32-bit locked assembly worker.
 * @param   a_fnLockedU64   64-bit locked assembly worker.
 */
#define IEMOP_BODY_UNARY_Ev_LOCKED(a_fnLockedU16, a_fnLockedU32, a_fnLockedU64) \
            switch (pVCpu->iem.s.enmEffOpSize) \
            { \
                case IEMMODE_16BIT: \
                    IEM_MC_BEGIN(2, 3, 0, 0); \
                    IEM_MC_ARG(uint16_t *,      pu16Dst,         0); \
                    IEM_MC_ARG_LOCAL_EFLAGS(    pEFlags, EFlags, 1); \
                    IEM_MC_LOCAL(RTGCPTR,       GCPtrEffDst); \
                    IEM_MC_LOCAL(uint8_t,       bUnmapInfo); \
                    \
                    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
                    IEMOP_HLP_DONE_DECODING(); \
                    IEM_MC_MEM_MAP_U16_RW(pu16Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
                    IEM_MC_FETCH_EFLAGS(EFlags); \
                    IEM_MC_CALL_VOID_AIMPL_2(a_fnLockedU16, pu16Dst, pEFlags); \
                    \
                    IEM_MC_MEM_COMMIT_AND_UNMAP_RW(pu16Dst, bUnmapInfo); \
                    IEM_MC_COMMIT_EFLAGS(EFlags); \
                    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                    IEM_MC_END(); \
                    break; \
                \
                case IEMMODE_32BIT: \
                    IEM_MC_BEGIN(2, 3, IEM_MC_F_MIN_386, 0); \
                    IEM_MC_ARG(uint32_t *,      pu32Dst,         0); \
                    IEM_MC_ARG_LOCAL_EFLAGS(    pEFlags, EFlags, 1); \
                    IEM_MC_LOCAL(RTGCPTR,       GCPtrEffDst); \
                    IEM_MC_LOCAL(uint8_t,       bUnmapInfo); \
                    \
                    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
                    IEMOP_HLP_DONE_DECODING(); \
                    IEM_MC_MEM_MAP_U32_RW(pu32Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
                    IEM_MC_FETCH_EFLAGS(EFlags); \
                    IEM_MC_CALL_VOID_AIMPL_2(a_fnLockedU32, pu32Dst, pEFlags); \
                    \
                    IEM_MC_MEM_COMMIT_AND_UNMAP_RW(pu32Dst, bUnmapInfo); \
                    IEM_MC_COMMIT_EFLAGS(EFlags); \
                    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                    IEM_MC_END(); \
                    break; \
                \
                case IEMMODE_64BIT: \
                    IEM_MC_BEGIN(2, 3, IEM_MC_F_64BIT, 0); \
                    IEM_MC_ARG(uint64_t *,      pu64Dst,         0); \
                    IEM_MC_ARG_LOCAL_EFLAGS(    pEFlags, EFlags, 1); \
                    IEM_MC_LOCAL(RTGCPTR,       GCPtrEffDst); \
                    IEM_MC_LOCAL(uint8_t,       bUnmapInfo); \
                    \
                    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
                    IEMOP_HLP_DONE_DECODING(); \
                    IEM_MC_MEM_MAP_U64_RW(pu64Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
                    IEM_MC_FETCH_EFLAGS(EFlags); \
                    IEM_MC_CALL_VOID_AIMPL_2(a_fnLockedU64, pu64Dst, pEFlags); \
                    \
                    IEM_MC_MEM_COMMIT_AND_UNMAP_RW(pu64Dst, bUnmapInfo); \
                    IEM_MC_COMMIT_EFLAGS(EFlags); \
                    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                    IEM_MC_END(); \
                    break; \
                \
                IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
            } \
        } \
    } \
    (void)0
12616
12617
/**
 * TEST Eb,Ib - byte-sized test against an immediate.
 *
 * Reads the destination (never written back; the memory form maps it
 * read-only) and sets flags from 'dst AND imm8'.
 *
 * @opmaps grp3_f6
 * @opcode /0
 * @todo also /1
 */
FNIEMOP_DEF_1(iemOp_grp3_test_Eb, uint8_t, bRm)
{
    IEMOP_MNEMONIC(test_Eb_Ib, "test Eb,Ib");
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);

    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /* register access */
        uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
        IEM_MC_BEGIN(3, 0, 0, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_ARG(uint8_t *,       pu8Dst,             0);
        IEM_MC_ARG_CONST(uint8_t,   u8Src,/*=*/u8Imm,   1);
        IEM_MC_ARG(uint32_t *,      pEFlags,            2);
        IEM_MC_REF_GREG_U8(pu8Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_REF_EFLAGS(pEFlags);
        IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_test_u8, pu8Dst, u8Src, pEFlags);
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /* memory access. */
        IEM_MC_BEGIN(3, 3, 0, 0);
        IEM_MC_ARG(uint8_t const *, pu8Dst,          0);
        IEM_MC_ARG(uint8_t,         u8Src,           1);
        IEM_MC_ARG_LOCAL_EFLAGS(    pEFlags, EFlags, 2);
        IEM_MC_LOCAL(RTGCPTR,       GCPtrEffDst);
        IEM_MC_LOCAL(uint8_t,       bUnmapInfo);

        /* cbImm=1: one immediate byte follows the ModR/M operand bytes. */
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
        uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
        IEM_MC_ASSIGN(u8Src, u8Imm);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MEM_MAP_U8_RO(pu8Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
        IEM_MC_FETCH_EFLAGS(EFlags);
        IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_test_u8, pu8Dst, u8Src, pEFlags);

        IEM_MC_MEM_COMMIT_AND_UNMAP_RO(pu8Dst, bUnmapInfo);
        IEM_MC_COMMIT_EFLAGS(EFlags);
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}
12667
12668
/**
 * Opcode 0xf6 /4, /5, /6 and /7 - common worker for byte-sized
 * MUL/IMUL/DIV/IDIV.  The operand is multiplied into / divided out of AX.
 *
 * @param   bRm     The ModR/M byte.
 * @param   pfnU8   The 8-bit multiply/divide assembly worker; returns
 *                  non-zero (checked via rc) to request a \#DE.
 */
FNIEMOP_DEF_2(iemOpCommonGrp3MulDivEb, uint8_t, bRm, PFNIEMAIMPLMULDIVU8, pfnU8)
{
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /* register access */
        IEM_MC_BEGIN(3, 1, 0, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_ARG(uint16_t *,      pu16AX,  0);
        IEM_MC_ARG(uint8_t,         u8Value, 1);
        IEM_MC_ARG(uint32_t *,      pEFlags, 2);
        IEM_MC_LOCAL(int32_t,       rc);

        IEM_MC_FETCH_GREG_U8(u8Value, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_REF_GREG_U16(pu16AX, X86_GREG_xAX);
        IEM_MC_REF_EFLAGS(pEFlags);
        IEM_MC_CALL_AIMPL_3(rc, pfnU8, pu16AX, u8Value, pEFlags);
        /* Non-zero rc from the worker signals a divide error (#DE). */
        IEM_MC_IF_LOCAL_IS_Z(rc) {
            IEM_MC_ADVANCE_RIP_AND_FINISH();
        } IEM_MC_ELSE() {
            IEM_MC_RAISE_DIVIDE_ERROR();
        } IEM_MC_ENDIF();

        IEM_MC_END();
    }
    else
    {
        /* memory access. */
        IEM_MC_BEGIN(3, 2, 0, 0);
        IEM_MC_ARG(uint16_t *,      pu16AX,  0);
        IEM_MC_ARG(uint8_t,         u8Value, 1);
        IEM_MC_ARG(uint32_t *,      pEFlags, 2);
        IEM_MC_LOCAL(RTGCPTR,       GCPtrEffDst);
        IEM_MC_LOCAL(int32_t,       rc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_FETCH_MEM_U8(u8Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
        IEM_MC_REF_GREG_U16(pu16AX, X86_GREG_xAX);
        IEM_MC_REF_EFLAGS(pEFlags);
        IEM_MC_CALL_AIMPL_3(rc, pfnU8, pu16AX, u8Value, pEFlags);
        IEM_MC_IF_LOCAL_IS_Z(rc) {
            IEM_MC_ADVANCE_RIP_AND_FINISH();
        } IEM_MC_ELSE() {
            IEM_MC_RAISE_DIVIDE_ERROR();
        } IEM_MC_ENDIF();

        IEM_MC_END();
    }
}
12719
12720
/**
 * Opcode 0xf7 /4, /5, /6 and /7 - common worker for word/dword/qword
 * MUL/IMUL/DIV/IDIV operating on the {e/r}DX:{e/r}AX register pair.
 *
 * @param   bRm     The ModR/M byte.
 * @param   pImpl   Table of the per-operand-size assembly workers; each
 *                  returns non-zero (checked via rc) to request a \#DE.
 */
FNIEMOP_DEF_2(iemOpCommonGrp3MulDivEv, uint8_t, bRm, PCIEMOPMULDIVSIZES, pImpl)
{
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);

    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /* register access */
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(4, 1, 0, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_ARG(uint16_t *,      pu16AX,   0);
                IEM_MC_ARG(uint16_t *,      pu16DX,   1);
                IEM_MC_ARG(uint16_t,        u16Value, 2);
                IEM_MC_ARG(uint32_t *,      pEFlags,  3);
                IEM_MC_LOCAL(int32_t,       rc);

                IEM_MC_FETCH_GREG_U16(u16Value, IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_REF_GREG_U16(pu16AX, X86_GREG_xAX);
                IEM_MC_REF_GREG_U16(pu16DX, X86_GREG_xDX);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_AIMPL_4(rc, pImpl->pfnU16, pu16AX, pu16DX, u16Value, pEFlags);
                /* Non-zero rc from the worker signals a divide error (#DE). */
                IEM_MC_IF_LOCAL_IS_Z(rc) {
                    IEM_MC_ADVANCE_RIP_AND_FINISH();
                } IEM_MC_ELSE() {
                    IEM_MC_RAISE_DIVIDE_ERROR();
                } IEM_MC_ENDIF();

                IEM_MC_END();
                break;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(4, 1, IEM_MC_F_MIN_386, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_ARG(uint32_t *,      pu32AX,   0);
                IEM_MC_ARG(uint32_t *,      pu32DX,   1);
                IEM_MC_ARG(uint32_t,        u32Value, 2);
                IEM_MC_ARG(uint32_t *,      pEFlags,  3);
                IEM_MC_LOCAL(int32_t,       rc);

                IEM_MC_FETCH_GREG_U32(u32Value, IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_REF_GREG_U32(pu32AX, X86_GREG_xAX);
                IEM_MC_REF_GREG_U32(pu32DX, X86_GREG_xDX);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_AIMPL_4(rc, pImpl->pfnU32, pu32AX, pu32DX, u32Value, pEFlags);
                IEM_MC_IF_LOCAL_IS_Z(rc) {
                    /* Both EAX and EDX were written as 32-bit regs, so zero
                       their high halves in 64-bit mode (success path only). */
                    IEM_MC_CLEAR_HIGH_GREG_U64(X86_GREG_xAX);
                    IEM_MC_CLEAR_HIGH_GREG_U64(X86_GREG_xDX);
                    IEM_MC_ADVANCE_RIP_AND_FINISH();
                } IEM_MC_ELSE() {
                    IEM_MC_RAISE_DIVIDE_ERROR();
                } IEM_MC_ENDIF();

                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(4, 1, IEM_MC_F_64BIT, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_ARG(uint64_t *,      pu64AX,   0);
                IEM_MC_ARG(uint64_t *,      pu64DX,   1);
                IEM_MC_ARG(uint64_t,        u64Value, 2);
                IEM_MC_ARG(uint32_t *,      pEFlags,  3);
                IEM_MC_LOCAL(int32_t,       rc);

                IEM_MC_FETCH_GREG_U64(u64Value, IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_REF_GREG_U64(pu64AX, X86_GREG_xAX);
                IEM_MC_REF_GREG_U64(pu64DX, X86_GREG_xDX);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_AIMPL_4(rc, pImpl->pfnU64, pu64AX, pu64DX, u64Value, pEFlags);
                IEM_MC_IF_LOCAL_IS_Z(rc) {
                    IEM_MC_ADVANCE_RIP_AND_FINISH();
                } IEM_MC_ELSE() {
                    IEM_MC_RAISE_DIVIDE_ERROR();
                } IEM_MC_ENDIF();

                IEM_MC_END();
                break;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /* memory access. */
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(4, 2, 0, 0);
                IEM_MC_ARG(uint16_t *,      pu16AX,   0);
                IEM_MC_ARG(uint16_t *,      pu16DX,   1);
                IEM_MC_ARG(uint16_t,        u16Value, 2);
                IEM_MC_ARG(uint32_t *,      pEFlags,  3);
                IEM_MC_LOCAL(RTGCPTR,       GCPtrEffDst);
                IEM_MC_LOCAL(int32_t,       rc);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_MEM_U16(u16Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
                IEM_MC_REF_GREG_U16(pu16AX, X86_GREG_xAX);
                IEM_MC_REF_GREG_U16(pu16DX, X86_GREG_xDX);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_AIMPL_4(rc, pImpl->pfnU16, pu16AX, pu16DX, u16Value, pEFlags);
                IEM_MC_IF_LOCAL_IS_Z(rc) {
                    IEM_MC_ADVANCE_RIP_AND_FINISH();
                } IEM_MC_ELSE() {
                    IEM_MC_RAISE_DIVIDE_ERROR();
                } IEM_MC_ENDIF();

                IEM_MC_END();
                break;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(4, 2, IEM_MC_F_MIN_386, 0);
                IEM_MC_ARG(uint32_t *,      pu32AX,   0);
                IEM_MC_ARG(uint32_t *,      pu32DX,   1);
                IEM_MC_ARG(uint32_t,        u32Value, 2);
                IEM_MC_ARG(uint32_t *,      pEFlags,  3);
                IEM_MC_LOCAL(RTGCPTR,       GCPtrEffDst);
                IEM_MC_LOCAL(int32_t,       rc);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_MEM_U32(u32Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
                IEM_MC_REF_GREG_U32(pu32AX, X86_GREG_xAX);
                IEM_MC_REF_GREG_U32(pu32DX, X86_GREG_xDX);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_AIMPL_4(rc, pImpl->pfnU32, pu32AX, pu32DX, u32Value, pEFlags);
                IEM_MC_IF_LOCAL_IS_Z(rc) {
                    /* Zero the high halves of EAX/EDX in 64-bit mode. */
                    IEM_MC_CLEAR_HIGH_GREG_U64(X86_GREG_xAX);
                    IEM_MC_CLEAR_HIGH_GREG_U64(X86_GREG_xDX);
                    IEM_MC_ADVANCE_RIP_AND_FINISH();
                } IEM_MC_ELSE() {
                    IEM_MC_RAISE_DIVIDE_ERROR();
                } IEM_MC_ENDIF();

                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(4, 2, IEM_MC_F_64BIT, 0);
                IEM_MC_ARG(uint64_t *,      pu64AX,   0);
                IEM_MC_ARG(uint64_t *,      pu64DX,   1);
                IEM_MC_ARG(uint64_t,        u64Value, 2);
                IEM_MC_ARG(uint32_t *,      pEFlags,  3);
                IEM_MC_LOCAL(RTGCPTR,       GCPtrEffDst);
                IEM_MC_LOCAL(int32_t,       rc);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_MEM_U64(u64Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
                IEM_MC_REF_GREG_U64(pu64AX, X86_GREG_xAX);
                IEM_MC_REF_GREG_U64(pu64DX, X86_GREG_xDX);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_AIMPL_4(rc, pImpl->pfnU64, pu64AX, pu64DX, u64Value, pEFlags);
                IEM_MC_IF_LOCAL_IS_Z(rc) {
                    IEM_MC_ADVANCE_RIP_AND_FINISH();
                } IEM_MC_ELSE() {
                    IEM_MC_RAISE_DIVIDE_ERROR();
                } IEM_MC_ENDIF();

                IEM_MC_END();
                break;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
12891
12892
/**
 * NOT Eb - one's complement of a byte operand; no flags affected.
 *
 * @opmaps grp3_f6
 * @opcode /2
 */
FNIEMOP_DEF_1(iemOp_grp3_not_Eb, uint8_t, bRm)
{
    IEMOP_MNEMONIC(not_Eb, "not Eb");
    IEMOP_BODY_UNARY_Eb(bRm, iemAImpl_not_u8, iemAImpl_not_u8_locked);
}
12902
12903
12904/**
12905 * @opmaps grp3_f6
12906 * @opcode /3
12907 */
12908FNIEMOP_DEF_1(iemOp_grp3_neg_Eb, uint8_t, bRm)
12909{
12910 IEMOP_MNEMONIC(net_Eb, "neg Eb");
12911 IEMOP_BODY_UNARY_Eb(bRm, iemAImpl_neg_u8, iemAImpl_neg_u8_locked);
12912}
12913
12914
/**
 * Group 3 byte dispatcher - routes on the ModR/M reg field.
 *
 * @opcode 0xf6
 */
FNIEMOP_DEF(iemOp_Grp3_Eb)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    switch (IEM_GET_MODRM_REG_8(bRm))
    {
        case 0: return FNIEMOP_CALL_1(iemOp_grp3_test_Eb, bRm);
        /* /1 is routed to TEST as well - see the @todo on iemOp_grp3_test_Eb;
           presumably the undocumented alias encoding - confirm per CPU. */
        case 1: return FNIEMOP_CALL_1(iemOp_grp3_test_Eb, bRm);
        case 2: return FNIEMOP_CALL_1(iemOp_grp3_not_Eb, bRm);
        case 3: return FNIEMOP_CALL_1(iemOp_grp3_neg_Eb, bRm);
        case 4:
            IEMOP_MNEMONIC(mul_Eb, "mul Eb");
            IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);
            return FNIEMOP_CALL_2(iemOpCommonGrp3MulDivEb, bRm, IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_mul_u8_eflags));
        case 5:
            IEMOP_MNEMONIC(imul_Eb, "imul Eb");
            IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);
            return FNIEMOP_CALL_2(iemOpCommonGrp3MulDivEb, bRm, IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_imul_u8_eflags));
        case 6:
            IEMOP_MNEMONIC(div_Eb, "div Eb");
            IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF | X86_EFL_OF | X86_EFL_CF);
            return FNIEMOP_CALL_2(iemOpCommonGrp3MulDivEb, bRm, IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_div_u8_eflags));
        case 7:
            IEMOP_MNEMONIC(idiv_Eb, "idiv Eb");
            IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF | X86_EFL_OF | X86_EFL_CF);
            return FNIEMOP_CALL_2(iemOpCommonGrp3MulDivEb, bRm, IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_idiv_u8_eflags));
        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
12946
12947
/**
 * Opcode 0xf7 /0 - TEST Ev,Iv.
 *
 * Sets flags from 'dst AND imm'; the destination is never written back (the
 * memory forms map it read-only).  In 64-bit mode the immediate is a sign-
 * extended 32-bit value.
 */
FNIEMOP_DEF_1(iemOp_grp3_test_Ev, uint8_t, bRm)
{
    IEMOP_MNEMONIC(test_Ev_Iv, "test Ev,Iv");
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);

    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /* register access */
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(3, 0, 0, 0);
                uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_ARG(uint16_t *,      pu16Dst,                0);
                IEM_MC_ARG_CONST(uint16_t,  u16Src,/*=*/u16Imm,     1);
                IEM_MC_ARG(uint32_t *,      pEFlags,                2);
                IEM_MC_REF_GREG_U16(pu16Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_test_u16, pu16Dst, u16Src, pEFlags);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(3, 0, IEM_MC_F_MIN_386, 0);
                uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_ARG(uint32_t *,      pu32Dst,                0);
                IEM_MC_ARG_CONST(uint32_t,  u32Src,/*=*/u32Imm,     1);
                IEM_MC_ARG(uint32_t *,      pEFlags,                2);
                IEM_MC_REF_GREG_U32(pu32Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_test_u32, pu32Dst, u32Src, pEFlags);
                /* No clearing the high dword here - test doesn't write back the result. */
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(3, 0, IEM_MC_F_64BIT, 0);
                /* imm32 sign-extended to 64 bits, per the instruction format. */
                uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_ARG(uint64_t *,      pu64Dst,                0);
                IEM_MC_ARG_CONST(uint64_t,  u64Src,/*=*/u64Imm,     1);
                IEM_MC_ARG(uint32_t *,      pEFlags,                2);
                IEM_MC_REF_GREG_U64(pu64Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_test_u64, pu64Dst, u64Src, pEFlags);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /* memory access. */
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(3, 3, 0, 0);
                IEM_MC_ARG(uint16_t const *, pu16Dst,        0);
                IEM_MC_ARG(uint16_t,        u16Src,          1);
                IEM_MC_ARG_LOCAL_EFLAGS(    pEFlags, EFlags, 2);
                IEM_MC_LOCAL(RTGCPTR,       GCPtrEffDst);
                IEM_MC_LOCAL(uint8_t,       bUnmapInfo);

                /* cbImm=2: two immediate bytes follow the operand bytes. */
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 2);
                uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
                IEM_MC_ASSIGN(u16Src, u16Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_MEM_MAP_U16_RO(pu16Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_test_u16, pu16Dst, u16Src, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP_RO(pu16Dst, bUnmapInfo);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(3, 3, IEM_MC_F_MIN_386, 0);
                IEM_MC_ARG(uint32_t const *, pu32Dst,        0);
                IEM_MC_ARG(uint32_t,        u32Src,          1);
                IEM_MC_ARG_LOCAL_EFLAGS(    pEFlags, EFlags, 2);
                IEM_MC_LOCAL(RTGCPTR,       GCPtrEffDst);
                IEM_MC_LOCAL(uint8_t,       bUnmapInfo);

                /* cbImm=4: four immediate bytes follow the operand bytes. */
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 4);
                uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
                IEM_MC_ASSIGN(u32Src, u32Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_MEM_MAP_U32_RO(pu32Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_test_u32, pu32Dst, u32Src, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP_RO(pu32Dst, bUnmapInfo);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(3, 3, IEM_MC_F_64BIT, 0);
                IEM_MC_ARG(uint64_t const *, pu64Dst,        0);
                IEM_MC_ARG(uint64_t,        u64Src,          1);
                IEM_MC_ARG_LOCAL_EFLAGS(    pEFlags, EFlags, 2);
                IEM_MC_LOCAL(RTGCPTR,       GCPtrEffDst);
                IEM_MC_LOCAL(uint8_t,       bUnmapInfo);

                /* cbImm=4: the immediate stays 32-bit (sign-extended below). */
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 4);
                uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm);
                IEM_MC_ASSIGN(u64Src, u64Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_MEM_MAP_U64_RO(pu64Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_test_u64, pu64Dst, u64Src, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP_RO(pu64Dst, bUnmapInfo);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
13080
13081
/** Opcode 0xf7 /2 - NOT Ev (one's complement, no flags affected).
 *  The two body macros form one statement pair: normal forms + LOCK forms. */
FNIEMOP_DEF_1(iemOp_grp3_not_Ev, uint8_t, bRm)
{
    IEMOP_MNEMONIC(not_Ev, "not Ev");
    IEMOP_BODY_UNARY_Ev(       iemAImpl_not_u16,        iemAImpl_not_u32,        iemAImpl_not_u64);
    IEMOP_BODY_UNARY_Ev_LOCKED(iemAImpl_not_u16_locked, iemAImpl_not_u32_locked, iemAImpl_not_u64_locked);
}
13089
13090
/** Opcode 0xf7 /3 - NEG Ev (two's complement negation).
 *  The two body macros form one statement pair: normal forms + LOCK forms. */
FNIEMOP_DEF_1(iemOp_grp3_neg_Ev, uint8_t, bRm)
{
    IEMOP_MNEMONIC(neg_Ev, "neg Ev");
    IEMOP_BODY_UNARY_Ev(       iemAImpl_neg_u16,        iemAImpl_neg_u32,        iemAImpl_neg_u64);
    IEMOP_BODY_UNARY_Ev_LOCKED(iemAImpl_neg_u16_locked, iemAImpl_neg_u32_locked, iemAImpl_neg_u64_locked);
}
13098
13099
/**
 * Group 3 word/dword/qword dispatcher - routes on the ModR/M reg field.
 *
 * @opcode 0xf7
 */
FNIEMOP_DEF(iemOp_Grp3_Ev)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    switch (IEM_GET_MODRM_REG_8(bRm))
    {
        case 0: return FNIEMOP_CALL_1(iemOp_grp3_test_Ev, bRm);
        /* /1 aliases /0 (TEST) - mirrors the 0xf6 byte dispatcher. */
        case 1: return FNIEMOP_CALL_1(iemOp_grp3_test_Ev, bRm);
        case 2: return FNIEMOP_CALL_1(iemOp_grp3_not_Ev, bRm);
        case 3: return FNIEMOP_CALL_1(iemOp_grp3_neg_Ev, bRm);
        case 4:
            IEMOP_MNEMONIC(mul_Ev, "mul Ev");
            IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);
            return FNIEMOP_CALL_2(iemOpCommonGrp3MulDivEv, bRm, IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_mul_eflags));
        case 5:
            IEMOP_MNEMONIC(imul_Ev, "imul Ev");
            IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);
            return FNIEMOP_CALL_2(iemOpCommonGrp3MulDivEv, bRm, IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_imul_eflags));
        case 6:
            IEMOP_MNEMONIC(div_Ev, "div Ev");
            IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF | X86_EFL_OF | X86_EFL_CF);
            return FNIEMOP_CALL_2(iemOpCommonGrp3MulDivEv, bRm, IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_div_eflags));
        case 7:
            IEMOP_MNEMONIC(idiv_Ev, "idiv Ev");
            IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF | X86_EFL_OF | X86_EFL_CF);
            return FNIEMOP_CALL_2(iemOpCommonGrp3MulDivEv, bRm, IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_idiv_eflags));
        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
13131
13132
/**
 * CLC - clear the carry flag.
 *
 * @opcode 0xf8
 */
FNIEMOP_DEF(iemOp_clc)
{
    IEMOP_MNEMONIC(clc, "clc");
    IEM_MC_BEGIN(0, 0, 0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_CLEAR_EFL_BIT(X86_EFL_CF);
    IEM_MC_ADVANCE_RIP_AND_FINISH();
    IEM_MC_END();
}
13145
13146
/**
 * STC - set the carry flag.
 *
 * @opcode 0xf9
 */
FNIEMOP_DEF(iemOp_stc)
{
    IEMOP_MNEMONIC(stc, "stc");
    IEM_MC_BEGIN(0, 0, 0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_SET_EFL_BIT(X86_EFL_CF);
    IEM_MC_ADVANCE_RIP_AND_FINISH();
    IEM_MC_END();
}
13159
13160
/**
 * CLI - clear the interrupt flag; privilege checks and VM-exit handling are
 * done in the C implementation, hence the deferral.
 *
 * @opcode 0xfa
 */
FNIEMOP_DEF(iemOp_cli)
{
    IEMOP_MNEMONIC(cli, "cli");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_RFLAGS | IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_CHECK_IRQ_BEFORE, iemCImpl_cli);
}
13170
13171
/**
 * STI - set the interrupt flag; defers to the C implementation, which also
 * sets up the one-instruction interrupt shadow (IEM_CIMPL_F_INHIBIT_SHADOW).
 *
 * @opcode 0xfb
 */
FNIEMOP_DEF(iemOp_sti)
{
    IEMOP_MNEMONIC(sti, "sti");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_DEFER_TO_CIMPL_0_RET(  IEM_CIMPL_F_RFLAGS | IEM_CIMPL_F_CHECK_IRQ_AFTER
                                | IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_INHIBIT_SHADOW, iemCImpl_sti);
}
13179
13180
/**
 * CLD - clear the direction flag.
 *
 * @opcode 0xfc
 */
FNIEMOP_DEF(iemOp_cld)
{
    IEMOP_MNEMONIC(cld, "cld");
    IEM_MC_BEGIN(0, 0, 0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_CLEAR_EFL_BIT(X86_EFL_DF);
    IEM_MC_ADVANCE_RIP_AND_FINISH();
    IEM_MC_END();
}
13193
13194
/**
 * STD - set the direction flag.
 *
 * @opcode 0xfd
 */
FNIEMOP_DEF(iemOp_std)
{
    IEMOP_MNEMONIC(std, "std");
    IEM_MC_BEGIN(0, 0, 0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_SET_EFL_BIT(X86_EFL_DF);
    IEM_MC_ADVANCE_RIP_AND_FINISH();
    IEM_MC_END();
}
13207
13208
/**
 * INC Eb - byte increment (CF is not affected, unlike ADD).
 *
 * @opmaps grp4
 * @opcode /0
 */
FNIEMOP_DEF_1(iemOp_Grp4_inc_Eb, uint8_t, bRm)
{
    IEMOP_MNEMONIC(inc_Eb, "inc Eb");
    IEMOP_BODY_UNARY_Eb(bRm, iemAImpl_inc_u8, iemAImpl_inc_u8_locked);
}
13218
13219
/**
 * DEC Eb - byte decrement (CF is not affected, unlike SUB).
 *
 * @opmaps grp4
 * @opcode /1
 */
FNIEMOP_DEF_1(iemOp_Grp4_dec_Eb, uint8_t, bRm)
{
    IEMOP_MNEMONIC(dec_Eb, "dec Eb");
    IEMOP_BODY_UNARY_Eb(bRm, iemAImpl_dec_u8, iemAImpl_dec_u8_locked);
}
13229
13230
/**
 * Group 4 dispatcher - only /0 (INC Eb) and /1 (DEC Eb) are defined;
 * the remaining encodings raise \#UD.
 *
 * @opcode 0xfe
 */
FNIEMOP_DEF(iemOp_Grp4)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    switch (IEM_GET_MODRM_REG_8(bRm))
    {
        case 0: return FNIEMOP_CALL_1(iemOp_Grp4_inc_Eb, bRm);
        case 1: return FNIEMOP_CALL_1(iemOp_Grp4_dec_Eb, bRm);
        default:
            /** @todo is the eff-addr decoded? */
            IEMOP_MNEMONIC(grp4_ud, "grp4-ud");
            IEMOP_RAISE_INVALID_OPCODE_RET();
    }
}
13247
/** Opcode 0xff /0 - INC Ev (CF is not affected, unlike ADD).
 *  The two body macros form one statement pair: normal forms + LOCK forms. */
FNIEMOP_DEF_1(iemOp_Grp5_inc_Ev, uint8_t, bRm)
{
    IEMOP_MNEMONIC(inc_Ev, "inc Ev");
    IEMOP_BODY_UNARY_Ev(       iemAImpl_inc_u16,        iemAImpl_inc_u32,        iemAImpl_inc_u64);
    IEMOP_BODY_UNARY_Ev_LOCKED(iemAImpl_inc_u16_locked, iemAImpl_inc_u32_locked, iemAImpl_inc_u64_locked);
}
13255
13256
/** Opcode 0xff /1 - DEC Ev (CF is not affected, unlike SUB).
 *  The two body macros form one statement pair: normal forms + LOCK forms. */
FNIEMOP_DEF_1(iemOp_Grp5_dec_Ev, uint8_t, bRm)
{
    IEMOP_MNEMONIC(dec_Ev, "dec Ev");
    IEMOP_BODY_UNARY_Ev(       iemAImpl_dec_u16,        iemAImpl_dec_u32,        iemAImpl_dec_u64);
    IEMOP_BODY_UNARY_Ev_LOCKED(iemAImpl_dec_u16_locked, iemAImpl_dec_u32_locked, iemAImpl_dec_u64_locked);
}
13264
13265
/**
 * Opcode 0xff /2 - CALL Ev, near indirect call through register or memory.
 *
 * In 64-bit mode the operand size defaults to 64-bit (Intel ignores a 0x66
 * prefix here, see the HLP macro below).
 *
 * @param   bRm     The RM byte.
 */
FNIEMOP_DEF_1(iemOp_Grp5_calln_Ev, uint8_t, bRm)
{
    IEMOP_MNEMONIC(calln_Ev, "calln Ev");
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();

    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /* The new RIP is taken from a register. */
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(1, 0, 0, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_ARG(uint16_t, u16Target, 0);
                IEM_MC_FETCH_GREG_U16(u16Target, IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_CALL_CIMPL_1(IEM_CIMPL_F_BRANCH_INDIRECT, iemCImpl_call_16, u16Target);
                IEM_MC_END();
                break;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(1, 0, IEM_MC_F_MIN_386, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_ARG(uint32_t, u32Target, 0);
                IEM_MC_FETCH_GREG_U32(u32Target, IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_CALL_CIMPL_1(IEM_CIMPL_F_BRANCH_INDIRECT, iemCImpl_call_32, u32Target);
                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(1, 0, IEM_MC_F_64BIT, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_ARG(uint64_t, u64Target, 0);
                IEM_MC_FETCH_GREG_U64(u64Target, IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_CALL_CIMPL_1(IEM_CIMPL_F_BRANCH_INDIRECT, iemCImpl_call_64, u64Target);
                IEM_MC_END();
                break;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /* The new RIP is fetched from memory. */
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(1, 1, 0, 0);
                IEM_MC_ARG(uint16_t, u16Target, 0);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_MEM_U16(u16Target, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
                IEM_MC_CALL_CIMPL_1(IEM_CIMPL_F_BRANCH_INDIRECT, iemCImpl_call_16, u16Target);
                IEM_MC_END();
                break;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(1, 1, IEM_MC_F_MIN_386, 0);
                IEM_MC_ARG(uint32_t, u32Target, 0);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_MEM_U32(u32Target, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
                IEM_MC_CALL_CIMPL_1(IEM_CIMPL_F_BRANCH_INDIRECT, iemCImpl_call_32, u32Target);
                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(1, 1, IEM_MC_F_64BIT, 0);
                IEM_MC_ARG(uint64_t, u64Target, 0);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_MEM_U64(u64Target, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
                IEM_MC_CALL_CIMPL_1(IEM_CIMPL_F_BRANCH_INDIRECT, iemCImpl_call_64, u64Target);
                IEM_MC_END();
                break;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
13352
/**
 * Common body for 'callf Ep' (0xff /3) and 'jmpf Ep' (0xff /5).
 *
 * Loads a far pointer - a 16-bit selector following a 16/32/64-bit offset -
 * from memory and hands it to @a a_fnCImpl.  Register operands are invalid
 * for these encodings and raise \#UD.  In 64-bit mode the default operand
 * size is 32-bit; only Intel CPUs respect a REX.W prefix here (the effective
 * operand size is forced back to 32-bit for everyone else).
 *
 * @param a_bRm      The ModR/M byte (must select a memory operand).
 * @param a_fnCImpl  The C implementation worker to branch with, taking
 *                   (u16Sel, offSeg, enmEffOpSize) - e.g. iemCImpl_callf or
 *                   iemCImpl_FarJmp.
 */
#define IEMOP_BODY_GRP5_FAR_EP(a_bRm, a_fnCImpl) \
    /* Registers? How?? */ \
    if (RT_LIKELY(IEM_IS_MODRM_MEM_MODE(a_bRm))) \
    { /* likely */ } \
    else \
        IEMOP_RAISE_INVALID_OPCODE_RET(); /* callf eax is not legal */ \
    \
    /* 64-bit mode: Default is 32-bit, but only intel respects a REX.W prefix. */ \
    /** @todo what does VIA do? */ \
    if (!IEM_IS_64BIT_CODE(pVCpu) || pVCpu->iem.s.enmEffOpSize != IEMMODE_64BIT || IEM_IS_GUEST_CPU_INTEL(pVCpu)) \
    { /* likely */ } \
    else \
        pVCpu->iem.s.enmEffOpSize = IEMMODE_32BIT; \
    \
    /* Far pointer loaded from memory. */ \
    switch (pVCpu->iem.s.enmEffOpSize) \
    { \
        case IEMMODE_16BIT: \
            IEM_MC_BEGIN(3, 1, 0, 0); \
            IEM_MC_ARG(uint16_t, u16Sel, 0); \
            IEM_MC_ARG(uint16_t, offSeg, 1); \
            IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize, IEMMODE_16BIT, 2); \
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc); \
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, a_bRm, 0); \
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
            IEM_MC_FETCH_MEM_U16(offSeg, pVCpu->iem.s.iEffSeg, GCPtrEffSrc); \
            /* The selector immediately follows the 16-bit offset. */ \
            IEM_MC_FETCH_MEM_U16_DISP(u16Sel, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, 2); \
            IEM_MC_CALL_CIMPL_3(IEM_CIMPL_F_BRANCH_INDIRECT | IEM_CIMPL_F_BRANCH_FAR \
                                | IEM_CIMPL_F_MODE | IEM_CIMPL_F_RFLAGS | IEM_CIMPL_F_VMEXIT, \
                                a_fnCImpl, u16Sel, offSeg, enmEffOpSize); \
            IEM_MC_END(); \
            break; \
        \
        case IEMMODE_32BIT: \
            IEM_MC_BEGIN(3, 1, IEM_MC_F_MIN_386, 0); \
            IEM_MC_ARG(uint16_t, u16Sel, 0); \
            IEM_MC_ARG(uint32_t, offSeg, 1); \
            IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize, IEMMODE_32BIT, 2); \
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc); \
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, a_bRm, 0); \
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
            IEM_MC_FETCH_MEM_U32(offSeg, pVCpu->iem.s.iEffSeg, GCPtrEffSrc); \
            /* The selector immediately follows the 32-bit offset. */ \
            IEM_MC_FETCH_MEM_U16_DISP(u16Sel, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, 4); \
            IEM_MC_CALL_CIMPL_3(IEM_CIMPL_F_BRANCH_INDIRECT | IEM_CIMPL_F_BRANCH_FAR \
                                | IEM_CIMPL_F_MODE | IEM_CIMPL_F_RFLAGS | IEM_CIMPL_F_VMEXIT, \
                                a_fnCImpl, u16Sel, offSeg, enmEffOpSize); \
            IEM_MC_END(); \
            break; \
        \
        case IEMMODE_64BIT: \
            /* Only reachable on Intel - see the REX.W handling above. */ \
            Assert(!IEM_IS_GUEST_CPU_AMD(pVCpu)); \
            IEM_MC_BEGIN(3, 1, IEM_MC_F_64BIT, 0); \
            IEM_MC_ARG(uint16_t, u16Sel, 0); \
            IEM_MC_ARG(uint64_t, offSeg, 1); \
            IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize, IEMMODE_64BIT, 2); \
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc); \
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, a_bRm, 0); \
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
            IEM_MC_FETCH_MEM_U64(offSeg, pVCpu->iem.s.iEffSeg, GCPtrEffSrc); \
            /* The selector immediately follows the 64-bit offset. */ \
            IEM_MC_FETCH_MEM_U16_DISP(u16Sel, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, 8); \
            IEM_MC_CALL_CIMPL_3(IEM_CIMPL_F_BRANCH_INDIRECT | IEM_CIMPL_F_BRANCH_FAR | IEM_CIMPL_F_MODE /* no gates */, \
                                a_fnCImpl, u16Sel, offSeg, enmEffOpSize); \
            IEM_MC_END(); \
            break; \
        \
        IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
    } do {} while (0)
13420
13421
/**
 * Opcode 0xff /3.
 *
 * Far indirect call, 'callf Ep' - the selector:offset pair is always loaded
 * from memory; the shared IEMOP_BODY_GRP5_FAR_EP worker does the decoding
 * and raises \#UD for register operands.
 *
 * @param bRm The RM byte.
 */
FNIEMOP_DEF_1(iemOp_Grp5_callf_Ep, uint8_t, bRm)
{
    IEMOP_MNEMONIC(callf_Ep, "callf Ep");
    IEMOP_BODY_GRP5_FAR_EP(bRm, iemCImpl_callf);
}
13431
13432
/**
 * Opcode 0xff /4.
 *
 * Near indirect jump, 'jmpn Ev' - the new IP/EIP/RIP is taken either from a
 * general register or from memory, depending on the ModR/M mode.  The default
 * operand size is 64-bit in long mode (Intel additionally ignores an operand
 * size prefix there - see the helper macro below).
 *
 * @param bRm The RM byte.
 */
FNIEMOP_DEF_1(iemOp_Grp5_jmpn_Ev, uint8_t, bRm)
{
    IEMOP_MNEMONIC(jmpn_Ev, "jmpn Ev");
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();

    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /* The new RIP is taken from a register. */
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(0, 1, 0, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_LOCAL(uint16_t, u16Target);
                IEM_MC_FETCH_GREG_U16(u16Target, IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_SET_RIP_U16_AND_FINISH(u16Target);
                IEM_MC_END();
                break;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(0, 1, IEM_MC_F_MIN_386, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_LOCAL(uint32_t, u32Target);
                IEM_MC_FETCH_GREG_U32(u32Target, IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_SET_RIP_U32_AND_FINISH(u32Target);
                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(0, 1, IEM_MC_F_64BIT, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_LOCAL(uint64_t, u64Target);
                IEM_MC_FETCH_GREG_U64(u64Target, IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_SET_RIP_U64_AND_FINISH(u64Target);
                IEM_MC_END();
                break;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /* The new RIP is taken from a memory location. */
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(0, 2, 0, 0);
                IEM_MC_LOCAL(uint16_t, u16Target);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_MEM_U16(u16Target, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
                IEM_MC_SET_RIP_U16_AND_FINISH(u16Target);
                IEM_MC_END();
                break;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(0, 2, IEM_MC_F_MIN_386, 0);
                IEM_MC_LOCAL(uint32_t, u32Target);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_MEM_U32(u32Target, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
                IEM_MC_SET_RIP_U32_AND_FINISH(u32Target);
                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(0, 2, IEM_MC_F_64BIT, 0);
                IEM_MC_LOCAL(uint64_t, u64Target);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_MEM_U64(u64Target, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
                IEM_MC_SET_RIP_U64_AND_FINISH(u64Target);
                IEM_MC_END();
                break;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
13519
13520
/**
 * Opcode 0xff /5.
 *
 * Far indirect jump, 'jmpf Ep' - the selector:offset pair is always loaded
 * from memory; the shared IEMOP_BODY_GRP5_FAR_EP worker does the decoding
 * and raises \#UD for register operands.
 *
 * @param bRm The RM byte.
 */
FNIEMOP_DEF_1(iemOp_Grp5_jmpf_Ep, uint8_t, bRm)
{
    IEMOP_MNEMONIC(jmpf_Ep, "jmpf Ep");
    IEMOP_BODY_GRP5_FAR_EP(bRm, iemCImpl_FarJmp);
}
13530
13531
/**
 * Opcode 0xff /6.
 *
 * 'push Ev' - register operands are delegated to the common
 * iemOpCommonPushGReg worker; memory operands are fetched and pushed here.
 * The default operand size is 64-bit in long mode, and the 32-bit variant is
 * flagged IEM_MC_F_NOT_64BIT (no 32-bit pushes in 64-bit mode).
 *
 * @param bRm The RM byte.
 */
FNIEMOP_DEF_1(iemOp_Grp5_push_Ev, uint8_t, bRm)
{
    IEMOP_MNEMONIC(push_Ev, "push Ev");

    /* Registers are handled by a common worker. */
    if (IEM_IS_MODRM_REG_MODE(bRm))
        return FNIEMOP_CALL_1(iemOpCommonPushGReg, IEM_GET_MODRM_RM(pVCpu, bRm));

    /* Memory we do here. */
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT:
            IEM_MC_BEGIN(0, 2, 0, 0);
            IEM_MC_LOCAL(uint16_t, u16Src);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_FETCH_MEM_U16(u16Src, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
            IEM_MC_PUSH_U16(u16Src);
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
            break;

        case IEMMODE_32BIT:
            IEM_MC_BEGIN(0, 2, IEM_MC_F_MIN_386 | IEM_MC_F_NOT_64BIT, 0);
            IEM_MC_LOCAL(uint32_t, u32Src);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_FETCH_MEM_U32(u32Src, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
            IEM_MC_PUSH_U32(u32Src);
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
            break;

        case IEMMODE_64BIT:
            IEM_MC_BEGIN(0, 2, IEM_MC_F_64BIT, 0);
            IEM_MC_LOCAL(uint64_t, u64Src);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_FETCH_MEM_U64(u64Src, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
            IEM_MC_PUSH_U64(u64Src);
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
            break;

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
13587
13588
13589/**
13590 * @opcode 0xff
13591 */
13592FNIEMOP_DEF(iemOp_Grp5)
13593{
13594 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
13595 switch (IEM_GET_MODRM_REG_8(bRm))
13596 {
13597 case 0:
13598 return FNIEMOP_CALL_1(iemOp_Grp5_inc_Ev, bRm);
13599 case 1:
13600 return FNIEMOP_CALL_1(iemOp_Grp5_dec_Ev, bRm);
13601 case 2:
13602 return FNIEMOP_CALL_1(iemOp_Grp5_calln_Ev, bRm);
13603 case 3:
13604 return FNIEMOP_CALL_1(iemOp_Grp5_callf_Ep, bRm);
13605 case 4:
13606 return FNIEMOP_CALL_1(iemOp_Grp5_jmpn_Ev, bRm);
13607 case 5:
13608 return FNIEMOP_CALL_1(iemOp_Grp5_jmpf_Ep, bRm);
13609 case 6:
13610 return FNIEMOP_CALL_1(iemOp_Grp5_push_Ev, bRm);
13611 case 7:
13612 IEMOP_MNEMONIC(grp5_ud, "grp5-ud");
13613 IEMOP_RAISE_INVALID_OPCODE_RET();
13614 }
13615 AssertFailedReturn(VERR_IEM_IPE_3);
13616}
13617
13618
13619
/**
 * The one-byte opcode dispatch table.
 *
 * Indexed directly by the opcode byte; each entry is the decoder function for
 * that opcode.  Prefix bytes (segment overrides, operand/address size, lock,
 * rep) and the 0x0f two-byte escape have handler entries of their own.
 * Declared extern at the top of this file.
 */
const PFNIEMOP g_apfnOneByteMap[256] =
{
    /* 0x00 */ iemOp_add_Eb_Gb, iemOp_add_Ev_Gv, iemOp_add_Gb_Eb, iemOp_add_Gv_Ev,
    /* 0x04 */ iemOp_add_Al_Ib, iemOp_add_eAX_Iz, iemOp_push_ES, iemOp_pop_ES,
    /* 0x08 */ iemOp_or_Eb_Gb, iemOp_or_Ev_Gv, iemOp_or_Gb_Eb, iemOp_or_Gv_Ev,
    /* 0x0c */ iemOp_or_Al_Ib, iemOp_or_eAX_Iz, iemOp_push_CS, iemOp_2byteEscape,
    /* 0x10 */ iemOp_adc_Eb_Gb, iemOp_adc_Ev_Gv, iemOp_adc_Gb_Eb, iemOp_adc_Gv_Ev,
    /* 0x14 */ iemOp_adc_Al_Ib, iemOp_adc_eAX_Iz, iemOp_push_SS, iemOp_pop_SS,
    /* 0x18 */ iemOp_sbb_Eb_Gb, iemOp_sbb_Ev_Gv, iemOp_sbb_Gb_Eb, iemOp_sbb_Gv_Ev,
    /* 0x1c */ iemOp_sbb_Al_Ib, iemOp_sbb_eAX_Iz, iemOp_push_DS, iemOp_pop_DS,
    /* 0x20 */ iemOp_and_Eb_Gb, iemOp_and_Ev_Gv, iemOp_and_Gb_Eb, iemOp_and_Gv_Ev,
    /* 0x24 */ iemOp_and_Al_Ib, iemOp_and_eAX_Iz, iemOp_seg_ES, iemOp_daa,
    /* 0x28 */ iemOp_sub_Eb_Gb, iemOp_sub_Ev_Gv, iemOp_sub_Gb_Eb, iemOp_sub_Gv_Ev,
    /* 0x2c */ iemOp_sub_Al_Ib, iemOp_sub_eAX_Iz, iemOp_seg_CS, iemOp_das,
    /* 0x30 */ iemOp_xor_Eb_Gb, iemOp_xor_Ev_Gv, iemOp_xor_Gb_Eb, iemOp_xor_Gv_Ev,
    /* 0x34 */ iemOp_xor_Al_Ib, iemOp_xor_eAX_Iz, iemOp_seg_SS, iemOp_aaa,
    /* 0x38 */ iemOp_cmp_Eb_Gb, iemOp_cmp_Ev_Gv, iemOp_cmp_Gb_Eb, iemOp_cmp_Gv_Ev,
    /* 0x3c */ iemOp_cmp_Al_Ib, iemOp_cmp_eAX_Iz, iemOp_seg_DS, iemOp_aas,
    /* 0x40 */ iemOp_inc_eAX, iemOp_inc_eCX, iemOp_inc_eDX, iemOp_inc_eBX,
    /* 0x44 */ iemOp_inc_eSP, iemOp_inc_eBP, iemOp_inc_eSI, iemOp_inc_eDI,
    /* 0x48 */ iemOp_dec_eAX, iemOp_dec_eCX, iemOp_dec_eDX, iemOp_dec_eBX,
    /* 0x4c */ iemOp_dec_eSP, iemOp_dec_eBP, iemOp_dec_eSI, iemOp_dec_eDI,
    /* 0x50 */ iemOp_push_eAX, iemOp_push_eCX, iemOp_push_eDX, iemOp_push_eBX,
    /* 0x54 */ iemOp_push_eSP, iemOp_push_eBP, iemOp_push_eSI, iemOp_push_eDI,
    /* 0x58 */ iemOp_pop_eAX, iemOp_pop_eCX, iemOp_pop_eDX, iemOp_pop_eBX,
    /* 0x5c */ iemOp_pop_eSP, iemOp_pop_eBP, iemOp_pop_eSI, iemOp_pop_eDI,
    /* 0x60 */ iemOp_pusha, iemOp_popa__mvex, iemOp_bound_Gv_Ma__evex, iemOp_arpl_Ew_Gw_movsx_Gv_Ev,
    /* 0x64 */ iemOp_seg_FS, iemOp_seg_GS, iemOp_op_size, iemOp_addr_size,
    /* 0x68 */ iemOp_push_Iz, iemOp_imul_Gv_Ev_Iz, iemOp_push_Ib, iemOp_imul_Gv_Ev_Ib,
    /* 0x6c */ iemOp_insb_Yb_DX, iemOp_inswd_Yv_DX, iemOp_outsb_Yb_DX, iemOp_outswd_Yv_DX,
    /* 0x70 */ iemOp_jo_Jb, iemOp_jno_Jb, iemOp_jc_Jb, iemOp_jnc_Jb,
    /* 0x74 */ iemOp_je_Jb, iemOp_jne_Jb, iemOp_jbe_Jb, iemOp_jnbe_Jb,
    /* 0x78 */ iemOp_js_Jb, iemOp_jns_Jb, iemOp_jp_Jb, iemOp_jnp_Jb,
    /* 0x7c */ iemOp_jl_Jb, iemOp_jnl_Jb, iemOp_jle_Jb, iemOp_jnle_Jb,
    /* 0x80 */ iemOp_Grp1_Eb_Ib_80, iemOp_Grp1_Ev_Iz, iemOp_Grp1_Eb_Ib_82, iemOp_Grp1_Ev_Ib,
    /* 0x84 */ iemOp_test_Eb_Gb, iemOp_test_Ev_Gv, iemOp_xchg_Eb_Gb, iemOp_xchg_Ev_Gv,
    /* 0x88 */ iemOp_mov_Eb_Gb, iemOp_mov_Ev_Gv, iemOp_mov_Gb_Eb, iemOp_mov_Gv_Ev,
    /* 0x8c */ iemOp_mov_Ev_Sw, iemOp_lea_Gv_M, iemOp_mov_Sw_Ev, iemOp_Grp1A__xop,
    /* 0x90 */ iemOp_nop, iemOp_xchg_eCX_eAX, iemOp_xchg_eDX_eAX, iemOp_xchg_eBX_eAX,
    /* 0x94 */ iemOp_xchg_eSP_eAX, iemOp_xchg_eBP_eAX, iemOp_xchg_eSI_eAX, iemOp_xchg_eDI_eAX,
    /* 0x98 */ iemOp_cbw, iemOp_cwd, iemOp_call_Ap, iemOp_wait,
    /* 0x9c */ iemOp_pushf_Fv, iemOp_popf_Fv, iemOp_sahf, iemOp_lahf,
    /* 0xa0 */ iemOp_mov_AL_Ob, iemOp_mov_rAX_Ov, iemOp_mov_Ob_AL, iemOp_mov_Ov_rAX,
    /* 0xa4 */ iemOp_movsb_Xb_Yb, iemOp_movswd_Xv_Yv, iemOp_cmpsb_Xb_Yb, iemOp_cmpswd_Xv_Yv,
    /* 0xa8 */ iemOp_test_AL_Ib, iemOp_test_eAX_Iz, iemOp_stosb_Yb_AL, iemOp_stoswd_Yv_eAX,
    /* 0xac */ iemOp_lodsb_AL_Xb, iemOp_lodswd_eAX_Xv, iemOp_scasb_AL_Xb, iemOp_scaswd_eAX_Xv,
    /* 0xb0 */ iemOp_mov_AL_Ib, iemOp_CL_Ib, iemOp_DL_Ib, iemOp_BL_Ib,
    /* 0xb4 */ iemOp_mov_AH_Ib, iemOp_CH_Ib, iemOp_DH_Ib, iemOp_BH_Ib,
    /* 0xb8 */ iemOp_eAX_Iv, iemOp_eCX_Iv, iemOp_eDX_Iv, iemOp_eBX_Iv,
    /* 0xbc */ iemOp_eSP_Iv, iemOp_eBP_Iv, iemOp_eSI_Iv, iemOp_eDI_Iv,
    /* 0xc0 */ iemOp_Grp2_Eb_Ib, iemOp_Grp2_Ev_Ib, iemOp_retn_Iw, iemOp_retn,
    /* 0xc4 */ iemOp_les_Gv_Mp__vex3, iemOp_lds_Gv_Mp__vex2, iemOp_Grp11_Eb_Ib, iemOp_Grp11_Ev_Iz,
    /* 0xc8 */ iemOp_enter_Iw_Ib, iemOp_leave, iemOp_retf_Iw, iemOp_retf,
    /* 0xcc */ iemOp_int3, iemOp_int_Ib, iemOp_into, iemOp_iret,
    /* 0xd0 */ iemOp_Grp2_Eb_1, iemOp_Grp2_Ev_1, iemOp_Grp2_Eb_CL, iemOp_Grp2_Ev_CL,
    /* 0xd4 */ iemOp_aam_Ib, iemOp_aad_Ib, iemOp_salc, iemOp_xlat,
    /* 0xd8 */ iemOp_EscF0, iemOp_EscF1, iemOp_EscF2, iemOp_EscF3,
    /* 0xdc */ iemOp_EscF4, iemOp_EscF5, iemOp_EscF6, iemOp_EscF7,
    /* 0xe0 */ iemOp_loopne_Jb, iemOp_loope_Jb, iemOp_loop_Jb, iemOp_jecxz_Jb,
    /* 0xe4 */ iemOp_in_AL_Ib, iemOp_in_eAX_Ib, iemOp_out_Ib_AL, iemOp_out_Ib_eAX,
    /* 0xe8 */ iemOp_call_Jv, iemOp_jmp_Jv, iemOp_jmp_Ap, iemOp_jmp_Jb,
    /* 0xec */ iemOp_in_AL_DX, iemOp_in_eAX_DX, iemOp_out_DX_AL, iemOp_out_DX_eAX,
    /* 0xf0 */ iemOp_lock, iemOp_int1, iemOp_repne, iemOp_repe,
    /* 0xf4 */ iemOp_hlt, iemOp_cmc, iemOp_Grp3_Eb, iemOp_Grp3_Ev,
    /* 0xf8 */ iemOp_clc, iemOp_stc, iemOp_cli, iemOp_sti,
    /* 0xfc */ iemOp_cld, iemOp_std, iemOp_Grp4, iemOp_Grp5,
};
13687
13688
13689/** @} */
13690
Note: See TracBrowser for help on using the repository browser.

© 2024 Oracle Support Privacy / Do Not Sell My Info Terms of Use Trademark Policy Automated Access Etiquette