VirtualBox

source: vbox/trunk/src/VBox/VMM/VMMAll/IEMAllInstructions.cpp.h@ 46951

Last change on this file since 46951 was 46951, checked in by vboxsync, 11 years ago

Fixed first bunch of bugs found by the testcase.

  • Property svn:eol-style set to native
  • Property svn:keywords set to Author Date Id Revision
File size: 529.7 KB
Line 
1/* $Id: IEMAllInstructions.cpp.h 46951 2013-07-03 19:36:50Z vboxsync $ */
2/** @file
3 * IEM - Instruction Decoding and Emulation.
4 */
5
6/*
7 * Copyright (C) 2011-2012 Oracle Corporation
8 *
9 * This file is part of VirtualBox Open Source Edition (OSE), as
10 * available from http://www.virtualbox.org. This file is free software;
11 * you can redistribute it and/or modify it under the terms of the GNU
12 * General Public License (GPL) as published by the Free Software
13 * Foundation, in version 2 as it comes in the "COPYING" file of the
14 * VirtualBox OSE distribution. VirtualBox OSE is distributed in the
15 * hope that it will be useful, but WITHOUT ANY WARRANTY of any kind.
16 */
17
18
19/*******************************************************************************
20* Global Variables *
21*******************************************************************************/
22extern const PFNIEMOP g_apfnOneByteMap[256]; /* not static since we need to forward declare it. */
23
24
/**
 * Common worker for instructions like ADD, AND, OR, ++ with a byte
 * memory/register as the destination.
 *
 * Decodes the ModR/M byte itself.  The register form uses the normal
 * (unlocked) implementation; the memory form maps the destination and
 * honours a LOCK prefix by calling the locked implementation instead.
 *
 * @param pImpl Pointer to the instruction implementation (assembly).
 */
FNIEMOP_DEF_1(iemOpHlpBinaryOperator_rm_r8, PCIEMOPBINSIZES, pImpl)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    /*
     * If rm is denoting a register, no more instruction bytes.
     */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        IEMOP_HLP_NO_LOCK_PREFIX(); /* LOCK is only valid with a memory destination. */

        IEM_MC_BEGIN(3, 0);
        IEM_MC_ARG(uint8_t *,  pu8Dst,  0);
        IEM_MC_ARG(uint8_t,    u8Src,   1);
        IEM_MC_ARG(uint32_t *, pEFlags, 2);

        /* Source is the reg field (with REX.R), destination the r/m field (with REX.B). */
        IEM_MC_FETCH_GREG_U8(u8Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
        IEM_MC_REF_GREG_U8(pu8Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
        IEM_MC_REF_EFLAGS(pEFlags);
        IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU8, pu8Dst, u8Src, pEFlags);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * We're accessing memory.
         * Note! We're putting the eflags on the stack here so we can commit them
         *       after the memory.
         */
        /* CMP and TEST only read the destination, so they map it read-only; they
           also have no locked variant (pfnLockedU8 is NULL). */
        uint32_t const fAccess = pImpl->pfnLockedU8 ? IEM_ACCESS_DATA_RW : IEM_ACCESS_DATA_R; /* CMP,TEST */
        IEM_MC_BEGIN(3, 2);
        IEM_MC_ARG(uint8_t *,  pu8Dst,           0);
        IEM_MC_ARG(uint8_t,    u8Src,            1);
        IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm);
        IEM_MC_MEM_MAP(pu8Dst, fAccess, pIemCpu->iEffSeg, GCPtrEffDst, 0 /*arg*/);
        IEM_MC_FETCH_GREG_U8(u8Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
        IEM_MC_FETCH_EFLAGS(EFlags);
        if (!(pIemCpu->fPrefixes & IEM_OP_PRF_LOCK))
            IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU8, pu8Dst, u8Src, pEFlags);
        else
            IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU8, pu8Dst, u8Src, pEFlags);

        IEM_MC_MEM_COMMIT_AND_UNMAP(pu8Dst, fAccess);
        IEM_MC_COMMIT_EFLAGS(EFlags);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
85
86
87/**
88 * Common worker for word/dword/qword instructions like ADD, AND, OR, ++ with
89 * memory/register as the destination.
90 *
91 * @param pImpl Pointer to the instruction implementation (assembly).
92 */
93FNIEMOP_DEF_1(iemOpHlpBinaryOperator_rm_rv, PCIEMOPBINSIZES, pImpl)
94{
95 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
96
97 /*
98 * If rm is denoting a register, no more instruction bytes.
99 */
100 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
101 {
102 IEMOP_HLP_NO_LOCK_PREFIX();
103
104 switch (pIemCpu->enmEffOpSize)
105 {
106 case IEMMODE_16BIT:
107 IEM_MC_BEGIN(3, 0);
108 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
109 IEM_MC_ARG(uint16_t, u16Src, 1);
110 IEM_MC_ARG(uint32_t *, pEFlags, 2);
111
112 IEM_MC_FETCH_GREG_U16(u16Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
113 IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
114 IEM_MC_REF_EFLAGS(pEFlags);
115 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, u16Src, pEFlags);
116
117 IEM_MC_ADVANCE_RIP();
118 IEM_MC_END();
119 break;
120
121 case IEMMODE_32BIT:
122 IEM_MC_BEGIN(3, 0);
123 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
124 IEM_MC_ARG(uint32_t, u32Src, 1);
125 IEM_MC_ARG(uint32_t *, pEFlags, 2);
126
127 IEM_MC_FETCH_GREG_U32(u32Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
128 IEM_MC_REF_GREG_U32(pu32Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
129 IEM_MC_REF_EFLAGS(pEFlags);
130 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, u32Src, pEFlags);
131
132 IEM_MC_ADVANCE_RIP();
133 IEM_MC_END();
134 break;
135
136 case IEMMODE_64BIT:
137 IEM_MC_BEGIN(3, 0);
138 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
139 IEM_MC_ARG(uint64_t, u64Src, 1);
140 IEM_MC_ARG(uint32_t *, pEFlags, 2);
141
142 IEM_MC_FETCH_GREG_U64(u64Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
143 IEM_MC_REF_GREG_U64(pu64Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
144 IEM_MC_REF_EFLAGS(pEFlags);
145 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, u64Src, pEFlags);
146
147 IEM_MC_ADVANCE_RIP();
148 IEM_MC_END();
149 break;
150 }
151 }
152 else
153 {
154 /*
155 * We're accessing memory.
156 * Note! We're putting the eflags on the stack here so we can commit them
157 * after the memory.
158 */
159 uint32_t const fAccess = pImpl->pfnLockedU8 ? IEM_ACCESS_DATA_RW : IEM_ACCESS_DATA_R /* CMP,TEST */;
160 switch (pIemCpu->enmEffOpSize)
161 {
162 case IEMMODE_16BIT:
163 IEM_MC_BEGIN(3, 2);
164 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
165 IEM_MC_ARG(uint16_t, u16Src, 1);
166 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
167 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
168
169 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm);
170 IEM_MC_MEM_MAP(pu16Dst, fAccess, pIemCpu->iEffSeg, GCPtrEffDst, 0 /*arg*/);
171 IEM_MC_FETCH_GREG_U16(u16Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
172 IEM_MC_FETCH_EFLAGS(EFlags);
173 if (!(pIemCpu->fPrefixes & IEM_OP_PRF_LOCK))
174 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, u16Src, pEFlags);
175 else
176 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU16, pu16Dst, u16Src, pEFlags);
177
178 IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, fAccess);
179 IEM_MC_COMMIT_EFLAGS(EFlags);
180 IEM_MC_ADVANCE_RIP();
181 IEM_MC_END();
182 break;
183
184 case IEMMODE_32BIT:
185 IEM_MC_BEGIN(3, 2);
186 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
187 IEM_MC_ARG(uint32_t, u32Src, 1);
188 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
189 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
190
191 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm);
192 IEM_MC_MEM_MAP(pu32Dst, fAccess, pIemCpu->iEffSeg, GCPtrEffDst, 0 /*arg*/);
193 IEM_MC_FETCH_GREG_U32(u32Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
194 IEM_MC_FETCH_EFLAGS(EFlags);
195 if (!(pIemCpu->fPrefixes & IEM_OP_PRF_LOCK))
196 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, u32Src, pEFlags);
197 else
198 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU32, pu32Dst, u32Src, pEFlags);
199
200 IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, fAccess);
201 IEM_MC_COMMIT_EFLAGS(EFlags);
202 IEM_MC_ADVANCE_RIP();
203 IEM_MC_END();
204 break;
205
206 case IEMMODE_64BIT:
207 IEM_MC_BEGIN(3, 2);
208 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
209 IEM_MC_ARG(uint64_t, u64Src, 1);
210 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
211 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
212
213 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm);
214 IEM_MC_MEM_MAP(pu64Dst, fAccess, pIemCpu->iEffSeg, GCPtrEffDst, 0 /*arg*/);
215 IEM_MC_FETCH_GREG_U64(u64Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
216 IEM_MC_FETCH_EFLAGS(EFlags);
217 if (!(pIemCpu->fPrefixes & IEM_OP_PRF_LOCK))
218 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, u64Src, pEFlags);
219 else
220 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU64, pu64Dst, u64Src, pEFlags);
221
222 IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, fAccess);
223 IEM_MC_COMMIT_EFLAGS(EFlags);
224 IEM_MC_ADVANCE_RIP();
225 IEM_MC_END();
226 break;
227 }
228 }
229 return VINF_SUCCESS;
230}
231
232
/**
 * Common worker for byte instructions like ADD, AND, OR, ++ with a register as
 * the destination.
 *
 * Decodes the ModR/M byte itself.  Since the destination is always a
 * register, a LOCK prefix is rejected and only the normal (unlocked)
 * implementation is used.
 *
 * @param pImpl Pointer to the instruction implementation (assembly).
 */
FNIEMOP_DEF_1(iemOpHlpBinaryOperator_r8_rm, PCIEMOPBINSIZES, pImpl)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo should probably not be raised until we've fetched all the opcode bytes? */

    /*
     * If rm is denoting a register, no more instruction bytes.
     */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        IEM_MC_BEGIN(3, 0);
        IEM_MC_ARG(uint8_t *,  pu8Dst,  0);
        IEM_MC_ARG(uint8_t,    u8Src,   1);
        IEM_MC_ARG(uint32_t *, pEFlags, 2);

        /* Note the reversed roles vs. the rm_r8 worker: here r/m is the source
           and the reg field the destination. */
        IEM_MC_FETCH_GREG_U8(u8Src, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
        IEM_MC_REF_GREG_U8(pu8Dst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
        IEM_MC_REF_EFLAGS(pEFlags);
        IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU8, pu8Dst, u8Src, pEFlags);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * We're accessing memory.
         */
        IEM_MC_BEGIN(3, 1);
        IEM_MC_ARG(uint8_t *,  pu8Dst,  0);
        IEM_MC_ARG(uint8_t,    u8Src,   1);
        IEM_MC_ARG(uint32_t *, pEFlags, 2);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm);
        IEM_MC_FETCH_MEM_U8(u8Src, pIemCpu->iEffSeg, GCPtrEffDst);
        IEM_MC_REF_GREG_U8(pu8Dst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
        IEM_MC_REF_EFLAGS(pEFlags);
        IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU8, pu8Dst, u8Src, pEFlags);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
284
285
286/**
287 * Common worker for word/dword/qword instructions like ADD, AND, OR, ++ with a
288 * register as the destination.
289 *
290 * @param pImpl Pointer to the instruction implementation (assembly).
291 */
292FNIEMOP_DEF_1(iemOpHlpBinaryOperator_rv_rm, PCIEMOPBINSIZES, pImpl)
293{
294 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
295 IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo should probably not be raised until we've fetched all the opcode bytes? */
296
297 /*
298 * If rm is denoting a register, no more instruction bytes.
299 */
300 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
301 {
302 switch (pIemCpu->enmEffOpSize)
303 {
304 case IEMMODE_16BIT:
305 IEM_MC_BEGIN(3, 0);
306 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
307 IEM_MC_ARG(uint16_t, u16Src, 1);
308 IEM_MC_ARG(uint32_t *, pEFlags, 2);
309
310 IEM_MC_FETCH_GREG_U16(u16Src, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
311 IEM_MC_REF_GREG_U16(pu16Dst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
312 IEM_MC_REF_EFLAGS(pEFlags);
313 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, u16Src, pEFlags);
314
315 IEM_MC_ADVANCE_RIP();
316 IEM_MC_END();
317 break;
318
319 case IEMMODE_32BIT:
320 IEM_MC_BEGIN(3, 0);
321 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
322 IEM_MC_ARG(uint32_t, u32Src, 1);
323 IEM_MC_ARG(uint32_t *, pEFlags, 2);
324
325 IEM_MC_FETCH_GREG_U32(u32Src, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
326 IEM_MC_REF_GREG_U32(pu32Dst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
327 IEM_MC_REF_EFLAGS(pEFlags);
328 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, u32Src, pEFlags);
329
330 IEM_MC_ADVANCE_RIP();
331 IEM_MC_END();
332 break;
333
334 case IEMMODE_64BIT:
335 IEM_MC_BEGIN(3, 0);
336 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
337 IEM_MC_ARG(uint64_t, u64Src, 1);
338 IEM_MC_ARG(uint32_t *, pEFlags, 2);
339
340 IEM_MC_FETCH_GREG_U64(u64Src, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
341 IEM_MC_REF_GREG_U64(pu64Dst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
342 IEM_MC_REF_EFLAGS(pEFlags);
343 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, u64Src, pEFlags);
344
345 IEM_MC_ADVANCE_RIP();
346 IEM_MC_END();
347 break;
348 }
349 }
350 else
351 {
352 /*
353 * We're accessing memory.
354 */
355 switch (pIemCpu->enmEffOpSize)
356 {
357 case IEMMODE_16BIT:
358 IEM_MC_BEGIN(3, 1);
359 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
360 IEM_MC_ARG(uint16_t, u16Src, 1);
361 IEM_MC_ARG(uint32_t *, pEFlags, 2);
362 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
363
364 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm);
365 IEM_MC_FETCH_MEM_U16(u16Src, pIemCpu->iEffSeg, GCPtrEffDst);
366 IEM_MC_REF_GREG_U16(pu16Dst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
367 IEM_MC_REF_EFLAGS(pEFlags);
368 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, u16Src, pEFlags);
369
370 IEM_MC_ADVANCE_RIP();
371 IEM_MC_END();
372 break;
373
374 case IEMMODE_32BIT:
375 IEM_MC_BEGIN(3, 1);
376 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
377 IEM_MC_ARG(uint32_t, u32Src, 1);
378 IEM_MC_ARG(uint32_t *, pEFlags, 2);
379 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
380
381 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm);
382 IEM_MC_FETCH_MEM_U32(u32Src, pIemCpu->iEffSeg, GCPtrEffDst);
383 IEM_MC_REF_GREG_U32(pu32Dst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
384 IEM_MC_REF_EFLAGS(pEFlags);
385 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, u32Src, pEFlags);
386
387 IEM_MC_ADVANCE_RIP();
388 IEM_MC_END();
389 break;
390
391 case IEMMODE_64BIT:
392 IEM_MC_BEGIN(3, 1);
393 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
394 IEM_MC_ARG(uint64_t, u64Src, 1);
395 IEM_MC_ARG(uint32_t *, pEFlags, 2);
396 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
397
398 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm);
399 IEM_MC_FETCH_MEM_U64(u64Src, pIemCpu->iEffSeg, GCPtrEffDst);
400 IEM_MC_REF_GREG_U64(pu64Dst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
401 IEM_MC_REF_EFLAGS(pEFlags);
402 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, u64Src, pEFlags);
403
404 IEM_MC_ADVANCE_RIP();
405 IEM_MC_END();
406 break;
407 }
408 }
409 return VINF_SUCCESS;
410}
411
412
/**
 * Common worker for instructions like ADD, AND, OR, ++ with working on AL with
 * a byte immediate.
 *
 * Fetches the immediate itself; a LOCK prefix is rejected since the
 * destination is a register.
 *
 * @param pImpl Pointer to the instruction implementation (assembly).
 */
FNIEMOP_DEF_1(iemOpHlpBinaryOperator_AL_Ib, PCIEMOPBINSIZES, pImpl)
{
    uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
    IEMOP_HLP_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(3, 0);
    IEM_MC_ARG(uint8_t *,       pu8Dst,             0);
    IEM_MC_ARG_CONST(uint8_t,   u8Src,/*=*/ u8Imm,  1);
    IEM_MC_ARG(uint32_t *,      pEFlags,            2);

    IEM_MC_REF_GREG_U8(pu8Dst, X86_GREG_xAX); /* AL is the low byte of rAX. */
    IEM_MC_REF_EFLAGS(pEFlags);
    IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU8, pu8Dst, u8Src, pEFlags);

    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
437
438
/**
 * Common worker for instructions like ADD, AND, OR, ++ with working on
 * AX/EAX/RAX with a word/dword immediate.
 *
 * Fetches the immediate itself (sign-extending a dword to qword in 64-bit
 * mode) and dispatches on the effective operand size.
 *
 * @param pImpl Pointer to the instruction implementation (assembly).
 */
FNIEMOP_DEF_1(iemOpHlpBinaryOperator_rAX_Iz, PCIEMOPBINSIZES, pImpl)
{
    switch (pIemCpu->enmEffOpSize)
    {
        case IEMMODE_16BIT:
        {
            uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
            IEMOP_HLP_NO_LOCK_PREFIX();

            IEM_MC_BEGIN(3, 0);
            IEM_MC_ARG(uint16_t *,      pu16Dst,                0);
            IEM_MC_ARG_CONST(uint16_t,  u16Src,/*=*/ u16Imm,    1);
            IEM_MC_ARG(uint32_t *,      pEFlags,                2);

            IEM_MC_REF_GREG_U16(pu16Dst, X86_GREG_xAX);
            IEM_MC_REF_EFLAGS(pEFlags);
            IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, u16Src, pEFlags);

            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;
        }

        case IEMMODE_32BIT:
        {
            uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
            IEMOP_HLP_NO_LOCK_PREFIX();

            IEM_MC_BEGIN(3, 0);
            IEM_MC_ARG(uint32_t *,      pu32Dst,                0);
            IEM_MC_ARG_CONST(uint32_t,  u32Src,/*=*/ u32Imm,    1);
            IEM_MC_ARG(uint32_t *,      pEFlags,                2);

            IEM_MC_REF_GREG_U32(pu32Dst, X86_GREG_xAX);
            IEM_MC_REF_EFLAGS(pEFlags);
            IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, u32Src, pEFlags);

            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;
        }

        case IEMMODE_64BIT:
        {
            /* Iz is at most a dword; sign-extend it to 64 bits. */
            uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm);
            IEMOP_HLP_NO_LOCK_PREFIX();

            IEM_MC_BEGIN(3, 0);
            IEM_MC_ARG(uint64_t *,      pu64Dst,                0);
            IEM_MC_ARG_CONST(uint64_t,  u64Src,/*=*/ u64Imm,    1);
            IEM_MC_ARG(uint32_t *,      pEFlags,                2);

            IEM_MC_REF_GREG_U64(pu64Dst, X86_GREG_xAX);
            IEM_MC_REF_EFLAGS(pEFlags);
            IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, u64Src, pEFlags);

            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;
        }

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
509
510
/** Opcodes 0xf1, 0xd6.
 *  Shared handler for opcodes this emulation treats as invalid: raises \#UD. */
FNIEMOP_DEF(iemOp_Invalid)
{
    IEMOP_MNEMONIC("Invalid");
    return IEMOP_RAISE_INVALID_OPCODE();
}
517
518
519
520/** @name ..... opcodes.
521 *
522 * @{
523 */
524
525/** @} */
526
527
528/** @name Two byte opcodes (first byte 0x0f).
529 *
530 * @{
531 */
532
/** Opcode 0x0f 0x00 /0.  SLDT - store the LDTR selector. */
FNIEMOP_DEF_1(iemOp_Grp6_sldt, uint8_t, bRm)
{
    IEMOP_MNEMONIC("sldt Rv/Mw");
    IEMOP_HLP_NO_REAL_OR_V86_MODE();

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* Register destination: the store width follows the operand size. */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint16_t, u16Ldtr);
                IEM_MC_FETCH_LDTR_U16(u16Ldtr);
                IEM_MC_STORE_GREG_U16((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, u16Ldtr);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint32_t, u32Ldtr);
                IEM_MC_FETCH_LDTR_U32(u32Ldtr);
                IEM_MC_STORE_GREG_U32((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, u32Ldtr);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint64_t, u64Ldtr);
                IEM_MC_FETCH_LDTR_U64(u64Ldtr);
                IEM_MC_STORE_GREG_U64((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, u64Ldtr);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /* Memory destination: always a 16-bit store, whatever the operand size. */
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(uint16_t, u16Ldtr);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_FETCH_LDTR_U16(u16Ldtr);
        IEM_MC_STORE_MEM_U16(pIemCpu->iEffSeg, GCPtrEffDst, u16Ldtr);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
588
589
/** Opcode 0x0f 0x00 /1.  STR - store the task register selector. */
FNIEMOP_DEF_1(iemOp_Grp6_str, uint8_t, bRm)
{
    IEMOP_MNEMONIC("str Rv/Mw");
    IEMOP_HLP_NO_REAL_OR_V86_MODE();

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* Register destination: the store width follows the operand size. */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint16_t, u16Tr);
                IEM_MC_FETCH_TR_U16(u16Tr);
                IEM_MC_STORE_GREG_U16((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, u16Tr);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint32_t, u32Tr);
                IEM_MC_FETCH_TR_U32(u32Tr);
                IEM_MC_STORE_GREG_U32((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, u32Tr);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint64_t, u64Tr);
                IEM_MC_FETCH_TR_U64(u64Tr);
                IEM_MC_STORE_GREG_U64((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, u64Tr);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /* Memory destination: always a 16-bit store, whatever the operand size. */
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(uint16_t, u16Tr);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_FETCH_TR_U16(u16Tr);
        IEM_MC_STORE_MEM_U16(pIemCpu->iEffSeg, GCPtrEffDst, u16Tr);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
645
646
/** Opcode 0x0f 0x00 /2.  LLDT - load the LDTR selector. */
FNIEMOP_DEF_1(iemOp_Grp6_lldt, uint8_t, bRm)
{
    IEMOP_MNEMONIC("lldt Ew");
    IEMOP_HLP_NO_REAL_OR_V86_MODE();

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* Register source.  NOTE(review): no explicit CPL check here — presumably
           iemCImpl_lldt performs it; the memory form below checks CPL first. */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(1, 0);
        IEM_MC_ARG(uint16_t, u16Sel, 0);
        IEM_MC_FETCH_GREG_U16(u16Sel, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
        IEM_MC_CALL_CIMPL_1(iemCImpl_lldt, u16Sel);
        IEM_MC_END();
    }
    else
    {
        /* Memory source: raise #GP(0) for CPL != 0 before touching memory. */
        IEM_MC_BEGIN(1, 1);
        IEM_MC_ARG(uint16_t, u16Sel, 0);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
        IEM_MC_RAISE_GP0_IF_CPL_NOT_ZERO();
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_FETCH_MEM_U16(u16Sel, pIemCpu->iEffSeg, GCPtrEffSrc);
        IEM_MC_CALL_CIMPL_1(iemCImpl_lldt, u16Sel);
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
676
677
/** Opcode 0x0f 0x00 /3.  LTR - load the task register selector. */
FNIEMOP_DEF_1(iemOp_Grp6_ltr, uint8_t, bRm)
{
    IEMOP_MNEMONIC("ltr Ew");
    IEMOP_HLP_NO_REAL_OR_V86_MODE();

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* Register source.  NOTE(review): no explicit CPL check here — presumably
           iemCImpl_ltr performs it; the memory form below checks CPL first. */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(1, 0);
        IEM_MC_ARG(uint16_t, u16Sel, 0);
        IEM_MC_FETCH_GREG_U16(u16Sel, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
        IEM_MC_CALL_CIMPL_1(iemCImpl_ltr, u16Sel);
        IEM_MC_END();
    }
    else
    {
        /* Memory source: raise #GP(0) for CPL != 0 before touching memory. */
        IEM_MC_BEGIN(1, 1);
        IEM_MC_ARG(uint16_t, u16Sel, 0);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
        IEM_MC_RAISE_GP0_IF_CPL_NOT_ZERO();
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_FETCH_MEM_U16(u16Sel, pIemCpu->iEffSeg, GCPtrEffSrc);
        IEM_MC_CALL_CIMPL_1(iemCImpl_ltr, u16Sel);
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
707
708
/** Opcode 0x0f 0x00 /4.  VERR - not implemented yet (stub). */
FNIEMOP_STUB_1(iemOp_Grp6_verr, uint8_t, bRm);


/** Opcode 0x0f 0x00 /5.  VERW - not implemented yet (stub). */
FNIEMOP_STUB_1(iemOp_Grp6_verw, uint8_t, bRm);
715
716
717/** Opcode 0x0f 0x00. */
718FNIEMOP_DEF(iemOp_Grp6)
719{
720 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
721 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
722 {
723 case 0: return FNIEMOP_CALL_1(iemOp_Grp6_sldt, bRm);
724 case 1: return FNIEMOP_CALL_1(iemOp_Grp6_str, bRm);
725 case 2: return FNIEMOP_CALL_1(iemOp_Grp6_lldt, bRm);
726 case 3: return FNIEMOP_CALL_1(iemOp_Grp6_ltr, bRm);
727 case 4: return FNIEMOP_CALL_1(iemOp_Grp6_verr, bRm);
728 case 5: return FNIEMOP_CALL_1(iemOp_Grp6_verw, bRm);
729 case 6: return IEMOP_RAISE_INVALID_OPCODE();
730 case 7: return IEMOP_RAISE_INVALID_OPCODE();
731 IEM_NOT_REACHED_DEFAULT_CASE_RET();
732 }
733
734}
735
736
/** Opcode 0x0f 0x01 /0.  SGDT - store the GDT register (memory form only). */
FNIEMOP_DEF_1(iemOp_Grp7_sgdt, uint8_t, bRm)
{
    IEMOP_MNEMONIC("sgdt Ms");
    IEMOP_HLP_64BIT_OP_SIZE(); /* Operand size is forced to 64-bit in long mode. */
    IEM_MC_BEGIN(3, 1);
    IEM_MC_ARG_CONST(uint8_t, iEffSeg, /*=*/pIemCpu->iEffSeg, 0);
    IEM_MC_ARG(RTGCPTR, GCPtrEffSrc, 1);
    IEM_MC_ARG_CONST(IEMMODE, enmEffOpSizeArg,/*=*/pIemCpu->enmEffOpSize, 2);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_CALL_CIMPL_3(iemCImpl_sgdt, iEffSeg, GCPtrEffSrc, enmEffOpSizeArg);
    IEM_MC_END();
    return VINF_SUCCESS;
}
752
753
/** Opcode 0x0f 0x01 /0, mod=3, rm=1 (VMCALL, i.e. 0x0f 0x01 0xc1) - stub, raises \#UD. */
FNIEMOP_DEF(iemOp_Grp7_vmcall)
{
    IEMOP_BITCH_ABOUT_STUB();
    return IEMOP_RAISE_INVALID_OPCODE();
}
760
761
/** Opcode 0x0f 0x01 /0, mod=3, rm=2 (VMLAUNCH, i.e. 0x0f 0x01 0xc2) - stub, raises \#UD. */
FNIEMOP_DEF(iemOp_Grp7_vmlaunch)
{
    IEMOP_BITCH_ABOUT_STUB();
    return IEMOP_RAISE_INVALID_OPCODE();
}
768
769
/** Opcode 0x0f 0x01 /0, mod=3, rm=3 (VMRESUME, i.e. 0x0f 0x01 0xc3) - stub, raises \#UD. */
FNIEMOP_DEF(iemOp_Grp7_vmresume)
{
    IEMOP_BITCH_ABOUT_STUB();
    return IEMOP_RAISE_INVALID_OPCODE();
}
776
777
/** Opcode 0x0f 0x01 /0, mod=3, rm=4 (VMXOFF, i.e. 0x0f 0x01 0xc4) - stub, raises \#UD. */
FNIEMOP_DEF(iemOp_Grp7_vmxoff)
{
    IEMOP_BITCH_ABOUT_STUB();
    return IEMOP_RAISE_INVALID_OPCODE();
}
784
785
/** Opcode 0x0f 0x01 /1.  SIDT - store the IDT register (memory form only). */
FNIEMOP_DEF_1(iemOp_Grp7_sidt, uint8_t, bRm)
{
    IEMOP_MNEMONIC("sidt Ms");
    IEMOP_HLP_64BIT_OP_SIZE(); /* Operand size is forced to 64-bit in long mode. */
    IEM_MC_BEGIN(3, 1);
    IEM_MC_ARG_CONST(uint8_t, iEffSeg, /*=*/pIemCpu->iEffSeg, 0);
    IEM_MC_ARG(RTGCPTR, GCPtrEffSrc, 1);
    IEM_MC_ARG_CONST(IEMMODE, enmEffOpSizeArg,/*=*/pIemCpu->enmEffOpSize, 2);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_CALL_CIMPL_3(iemCImpl_sidt, iEffSeg, GCPtrEffSrc, enmEffOpSizeArg);
    IEM_MC_END();
    return VINF_SUCCESS;
}
801
802
/** Opcode 0x0f 0x01 /1, mod=3, rm=0 (MONITOR) - not implemented yet. */
FNIEMOP_DEF(iemOp_Grp7_monitor)
{
    NOREF(pIemCpu);
    IEMOP_BITCH_ABOUT_STUB();
    return VERR_IEM_INSTR_NOT_IMPLEMENTED;
}
810
811
/** Opcode 0x0f 0x01 /1, mod=3, rm=1 (MWAIT) - not implemented yet. */
FNIEMOP_DEF(iemOp_Grp7_mwait)
{
    NOREF(pIemCpu);
    IEMOP_BITCH_ABOUT_STUB();
    return VERR_IEM_INSTR_NOT_IMPLEMENTED;
}
819
820
821/** Opcode 0x0f 0x01 /2. */
822FNIEMOP_DEF_1(iemOp_Grp7_lgdt, uint8_t, bRm)
823{
824 IEMOP_HLP_NO_LOCK_PREFIX();
825
826 IEMOP_HLP_64BIT_OP_SIZE();
827 IEM_MC_BEGIN(3, 1);
828 IEM_MC_ARG_CONST(uint8_t, iEffSeg, /*=*/pIemCpu->iEffSeg, 0);
829 IEM_MC_ARG(RTGCPTR, GCPtrEffSrc, 1);
830 IEM_MC_ARG_CONST(IEMMODE, enmEffOpSizeArg,/*=*/pIemCpu->enmEffOpSize, 2);
831 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm);
832 IEM_MC_CALL_CIMPL_3(iemCImpl_lgdt, iEffSeg, GCPtrEffSrc, enmEffOpSizeArg);
833 IEM_MC_END();
834 return VINF_SUCCESS;
835}
836
837
/** Opcode 0x0f 0x01 /2, mod=3, rm=0 (XGETBV) - asserts in debug builds, then raises \#UD. */
FNIEMOP_DEF(iemOp_Grp7_xgetbv)
{
    AssertFailed();
    return IEMOP_RAISE_INVALID_OPCODE();
}
844
845
/** Opcode 0x0f 0x01 /2, mod=3, rm=1 (XSETBV) - asserts in debug builds, then raises \#UD. */
FNIEMOP_DEF(iemOp_Grp7_xsetbv)
{
    AssertFailed();
    return IEMOP_RAISE_INVALID_OPCODE();
}
852
853
854/** Opcode 0x0f 0x01 /3. */
855FNIEMOP_DEF_1(iemOp_Grp7_lidt, uint8_t, bRm)
856{
857 IEMOP_HLP_NO_LOCK_PREFIX();
858
859 IEMMODE enmEffOpSize = pIemCpu->enmCpuMode == IEMMODE_64BIT
860 ? IEMMODE_64BIT
861 : pIemCpu->enmEffOpSize;
862 IEM_MC_BEGIN(3, 1);
863 IEM_MC_ARG_CONST(uint8_t, iEffSeg, /*=*/pIemCpu->iEffSeg, 0);
864 IEM_MC_ARG(RTGCPTR, GCPtrEffSrc, 1);
865 IEM_MC_ARG_CONST(IEMMODE, enmEffOpSizeArg,/*=*/enmEffOpSize, 2);
866 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm);
867 IEM_MC_CALL_CIMPL_3(iemCImpl_lidt, iEffSeg, GCPtrEffSrc, enmEffOpSizeArg);
868 IEM_MC_END();
869 return VINF_SUCCESS;
870}
871
872
/* Grp7, mod=3, reg=3: AMD SVM instructions - all stubbed (the UD_STUB macro
   presumably raises #UD; confirm against the macro definition). */

/** Opcode 0x0f 0x01 0xd8.  VMRUN. */
FNIEMOP_UD_STUB(iemOp_Grp7_Amd_vmrun);

/** Opcode 0x0f 0x01 0xd9.  VMMCALL. */
FNIEMOP_UD_STUB(iemOp_Grp7_Amd_vmmcall);

/** Opcode 0x0f 0x01 0xda.  VMLOAD. */
FNIEMOP_UD_STUB(iemOp_Grp7_Amd_vmload);

/** Opcode 0x0f 0x01 0xdb.  VMSAVE. */
FNIEMOP_UD_STUB(iemOp_Grp7_Amd_vmsave);

/** Opcode 0x0f 0x01 0xdc.  STGI. */
FNIEMOP_UD_STUB(iemOp_Grp7_Amd_stgi);

/** Opcode 0x0f 0x01 0xdd.  CLGI. */
FNIEMOP_UD_STUB(iemOp_Grp7_Amd_clgi);

/** Opcode 0x0f 0x01 0xde.  SKINIT. */
FNIEMOP_UD_STUB(iemOp_Grp7_Amd_skinit);

/** Opcode 0x0f 0x01 0xdf.  INVLPGA. */
FNIEMOP_UD_STUB(iemOp_Grp7_Amd_invlpga);
896
/** Opcode 0x0f 0x01 /4.  SMSW - store the machine status word (CR0 low bits). */
FNIEMOP_DEF_1(iemOp_Grp7_smsw, uint8_t, bRm)
{
    IEMOP_HLP_NO_LOCK_PREFIX();
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* Register destination: the store width follows the operand size. */
        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint16_t, u16Tmp);
                IEM_MC_FETCH_CR0_U16(u16Tmp);
                IEM_MC_STORE_GREG_U16((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, u16Tmp);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint32_t, u32Tmp);
                IEM_MC_FETCH_CR0_U32(u32Tmp);
                IEM_MC_STORE_GREG_U32((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, u32Tmp);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint64_t, u64Tmp);
                IEM_MC_FETCH_CR0_U64(u64Tmp);
                IEM_MC_STORE_GREG_U64((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, u64Tmp);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /* Ignore operand size here, memory refs are always 16-bit. */
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(uint16_t, u16Tmp);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm);
        IEM_MC_FETCH_CR0_U16(u16Tmp);
        IEM_MC_STORE_MEM_U16(pIemCpu->iEffSeg, GCPtrEffDst, u16Tmp);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
        return VINF_SUCCESS;
    }
}
949
950
/** Opcode 0x0f 0x01 /6.  LMSW - load the machine status word into CR0. */
FNIEMOP_DEF_1(iemOp_Grp7_lmsw, uint8_t, bRm)
{
    /* The operand size is effectively ignored, all is 16-bit and only the
       lower 4 bits (CR0.PE/MP/EM/TS per the Intel SDM) are used. */
    IEMOP_HLP_NO_LOCK_PREFIX();
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* Register source. */
        IEM_MC_BEGIN(1, 0);
        IEM_MC_ARG(uint16_t, u16Tmp, 0);
        IEM_MC_FETCH_GREG_U16(u16Tmp, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
        IEM_MC_CALL_CIMPL_1(iemCImpl_lmsw, u16Tmp);
        IEM_MC_END();
    }
    else
    {
        /* Memory source. */
        IEM_MC_BEGIN(1, 1);
        IEM_MC_ARG(uint16_t, u16Tmp, 0);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm);
        IEM_MC_FETCH_MEM_U16(u16Tmp, pIemCpu->iEffSeg, GCPtrEffDst);
        IEM_MC_CALL_CIMPL_1(iemCImpl_lmsw, u16Tmp);
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
977
978
/** Opcode 0x0f 0x01 /7.  INVLPG - invalidate the TLB entry for an address (memory form only). */
FNIEMOP_DEF_1(iemOp_Grp7_invlpg, uint8_t, bRm)
{
    IEMOP_HLP_NO_LOCK_PREFIX();
    IEM_MC_BEGIN(1, 1);
    IEM_MC_ARG(RTGCPTR, GCPtrEffDst, 0);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm);
    IEM_MC_CALL_CIMPL_1(iemCImpl_invlpg, GCPtrEffDst);
    IEM_MC_END();
    return VINF_SUCCESS;
}
990
991
/** Opcode 0x0f 0x01 /7, mod=3, rm=0 (SWAPGS) - not implemented yet. */
FNIEMOP_DEF(iemOp_Grp7_swapgs)
{
    NOREF(pIemCpu);
    IEMOP_BITCH_ABOUT_STUB();
    return VERR_IEM_INSTR_NOT_IMPLEMENTED;
}
999
1000
/** Opcode 0x0f 0x01 /7, mod=3, rm=1 (RDTSCP) - not implemented yet. */
FNIEMOP_DEF(iemOp_Grp7_rdtscp)
{
    NOREF(pIemCpu);
    IEMOP_BITCH_ABOUT_STUB();
    return VERR_IEM_INSTR_NOT_IMPLEMENTED;
}
1008
1009
/** Opcode 0x0f 0x01.
 *  Group 7 dispatcher: routes on the ModR/M reg field; several reg values
 *  additionally distinguish the memory form from mod=3 rm-encoded variants. */
FNIEMOP_DEF(iemOp_Grp7)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
    {
        case 0: /* SGDT (memory) or VMX instructions (mod=3). */
            if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
                return FNIEMOP_CALL_1(iemOp_Grp7_sgdt, bRm);
            switch (bRm & X86_MODRM_RM_MASK)
            {
                case 1: return FNIEMOP_CALL(iemOp_Grp7_vmcall);
                case 2: return FNIEMOP_CALL(iemOp_Grp7_vmlaunch);
                case 3: return FNIEMOP_CALL(iemOp_Grp7_vmresume);
                case 4: return FNIEMOP_CALL(iemOp_Grp7_vmxoff);
            }
            return IEMOP_RAISE_INVALID_OPCODE();

        case 1: /* SIDT (memory) or MONITOR/MWAIT (mod=3). */
            if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
                return FNIEMOP_CALL_1(iemOp_Grp7_sidt, bRm);
            switch (bRm & X86_MODRM_RM_MASK)
            {
                case 0: return FNIEMOP_CALL(iemOp_Grp7_monitor);
                case 1: return FNIEMOP_CALL(iemOp_Grp7_mwait);
            }
            return IEMOP_RAISE_INVALID_OPCODE();

        case 2: /* LGDT (memory) or XGETBV/XSETBV (mod=3). */
            if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
                return FNIEMOP_CALL_1(iemOp_Grp7_lgdt, bRm);
            switch (bRm & X86_MODRM_RM_MASK)
            {
                case 0: return FNIEMOP_CALL(iemOp_Grp7_xgetbv);
                case 1: return FNIEMOP_CALL(iemOp_Grp7_xsetbv);
            }
            return IEMOP_RAISE_INVALID_OPCODE();

        case 3: /* LIDT (memory) or the AMD SVM instructions (mod=3). */
            if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
                return FNIEMOP_CALL_1(iemOp_Grp7_lidt, bRm);
            switch (bRm & X86_MODRM_RM_MASK)
            {
                case 0: return FNIEMOP_CALL(iemOp_Grp7_Amd_vmrun);
                case 1: return FNIEMOP_CALL(iemOp_Grp7_Amd_vmmcall);
                case 2: return FNIEMOP_CALL(iemOp_Grp7_Amd_vmload);
                case 3: return FNIEMOP_CALL(iemOp_Grp7_Amd_vmsave);
                case 4: return FNIEMOP_CALL(iemOp_Grp7_Amd_stgi);
                case 5: return FNIEMOP_CALL(iemOp_Grp7_Amd_clgi);
                case 6: return FNIEMOP_CALL(iemOp_Grp7_Amd_skinit);
                case 7: return FNIEMOP_CALL(iemOp_Grp7_Amd_invlpga);
                IEM_NOT_REACHED_DEFAULT_CASE_RET();
            }

        case 4: /* SMSW - both register and memory forms. */
            return FNIEMOP_CALL_1(iemOp_Grp7_smsw, bRm);

        case 5: /* Undefined in group 7. */
            return IEMOP_RAISE_INVALID_OPCODE();

        case 6: /* LMSW - both register and memory forms. */
            return FNIEMOP_CALL_1(iemOp_Grp7_lmsw, bRm);

        case 7: /* INVLPG (memory) or SWAPGS/RDTSCP (mod=3). */
            if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
                return FNIEMOP_CALL_1(iemOp_Grp7_invlpg, bRm);
            switch (bRm & X86_MODRM_RM_MASK)
            {
                case 0: return FNIEMOP_CALL(iemOp_Grp7_swapgs);
                case 1: return FNIEMOP_CALL(iemOp_Grp7_rdtscp);
            }
            return IEMOP_RAISE_INVALID_OPCODE();

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
1086
1087
/** Opcode 0x0f 0x02. */
FNIEMOP_STUB(iemOp_lar_Gv_Ew);
/** Opcode 0x0f 0x03. */
FNIEMOP_STUB(iemOp_lsl_Gv_Ew);
/** Opcode 0x0f 0x05. */
FNIEMOP_STUB(iemOp_syscall);
1094
1095
/** Opcode 0x0f 0x06. */
FNIEMOP_DEF(iemOp_clts)
{
    IEMOP_MNEMONIC("clts");
    IEMOP_HLP_NO_LOCK_PREFIX();
    /* All the work (CR0.TS clearing, privilege check) lives in the C implementation. */
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_clts);
}
1103
1104
/** Opcode 0x0f 0x07. */
FNIEMOP_STUB(iemOp_sysret);
/** Opcode 0x0f 0x08. */
FNIEMOP_STUB(iemOp_invd);
1109
1110
/** Opcode 0x0f 0x09. */
FNIEMOP_DEF(iemOp_wbinvd)
{
    IEMOP_MNEMONIC("wbinvd");
    IEMOP_HLP_NO_LOCK_PREFIX();
    /* Only the CPL-0 privilege check is emulated; the actual cache
       write-back/invalidate is treated as a NOP. */
    IEM_MC_BEGIN(0, 0);
    IEM_MC_RAISE_GP0_IF_CPL_NOT_ZERO();
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS; /* ignore for now */
}
1122
1123
/** Opcode 0x0f 0x0b (UD2 - guaranteed invalid opcode). */
FNIEMOP_STUB(iemOp_ud2);
1126
/** Opcode 0x0f 0x0d. */
FNIEMOP_DEF(iemOp_nop_Ev_GrpP)
{
    /* AMD prefetch group, Intel implements this as NOP Ev (and so do we). */
    /* Requires long mode, 3DNow! or the 3DNow!-prefetch CPUID bit; #UD otherwise. */
    if (!IEM_IS_AMD_CPUID_FEATURES_ANY_PRESENT(X86_CPUID_EXT_FEATURE_EDX_LONG_MODE | X86_CPUID_AMD_FEATURE_EDX_3DNOW,
                                               X86_CPUID_AMD_FEATURE_ECX_3DNOWPRF))
    {
        IEMOP_MNEMONIC("GrpP");
        return IEMOP_RAISE_INVALID_OPCODE();
    }

    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    /* Register form is invalid - prefetch hints only take memory operands. */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        IEMOP_MNEMONIC("GrpP");
        return IEMOP_RAISE_INVALID_OPCODE();
    }

    IEMOP_HLP_NO_LOCK_PREFIX();
    switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
    {
        case 2: /* Aliased to /0 for the time being. */
        case 4: /* Aliased to /0 for the time being. */
        case 5: /* Aliased to /0 for the time being. */
        case 6: /* Aliased to /0 for the time being. */
        case 7: /* Aliased to /0 for the time being. */
        case 0: IEMOP_MNEMONIC("prefetch"); break;
        /* NOTE(review): /1 has a trailing space and duplicates the /3
           mnemonic - presumably /1 should differ; confirm against AMD docs. */
        case 1: IEMOP_MNEMONIC("prefetchw "); break;
        case 3: IEMOP_MNEMONIC("prefetchw"); break;
        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }

    /* Decode the effective address for its side effects, then treat as NOP. */
    IEM_MC_BEGIN(0, 1);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm);
    /* Currently a NOP. */
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
1167
1168
/** Opcode 0x0f 0x0e. */
FNIEMOP_STUB(iemOp_femms);


/*
 * AMD 3DNow! instructions (0x0f 0x0f <imm8>) - all unimplemented stubs,
 * dispatched by the immediate byte in iemOp_3Dnow below.
 */

/** Opcode 0x0f 0x0f 0x0c. */
FNIEMOP_STUB(iemOp_3Dnow_pi2fw_Pq_Qq);

/** Opcode 0x0f 0x0f 0x0d. */
FNIEMOP_STUB(iemOp_3Dnow_pi2fd_Pq_Qq);

/** Opcode 0x0f 0x0f 0x1c. */
FNIEMOP_STUB(iemOp_3Dnow_pf2fw_Pq_Qq);

/** Opcode 0x0f 0x0f 0x1d. */
FNIEMOP_STUB(iemOp_3Dnow_pf2fd_Pq_Qq);

/** Opcode 0x0f 0x0f 0x8a. */
FNIEMOP_STUB(iemOp_3Dnow_pfnacc_Pq_Qq);

/** Opcode 0x0f 0x0f 0x8e. */
FNIEMOP_STUB(iemOp_3Dnow_pfpnacc_Pq_Qq);

/** Opcode 0x0f 0x0f 0x90. */
FNIEMOP_STUB(iemOp_3Dnow_pfcmpge_Pq_Qq);

/** Opcode 0x0f 0x0f 0x94. */
FNIEMOP_STUB(iemOp_3Dnow_pfmin_Pq_Qq);

/** Opcode 0x0f 0x0f 0x96. */
FNIEMOP_STUB(iemOp_3Dnow_pfrcp_Pq_Qq);

/** Opcode 0x0f 0x0f 0x97. */
FNIEMOP_STUB(iemOp_3Dnow_pfrsqrt_Pq_Qq);

/** Opcode 0x0f 0x0f 0x9a. */
FNIEMOP_STUB(iemOp_3Dnow_pfsub_Pq_Qq);

/** Opcode 0x0f 0x0f 0x9e. */
FNIEMOP_STUB(iemOp_3Dnow_pfadd_PQ_Qq);

/** Opcode 0x0f 0x0f 0xa0. */
FNIEMOP_STUB(iemOp_3Dnow_pfcmpgt_Pq_Qq);

/** Opcode 0x0f 0x0f 0xa4. */
FNIEMOP_STUB(iemOp_3Dnow_pfmax_Pq_Qq);

/** Opcode 0x0f 0x0f 0xa6. */
FNIEMOP_STUB(iemOp_3Dnow_pfrcpit1_Pq_Qq);

/** Opcode 0x0f 0x0f 0xa7. */
FNIEMOP_STUB(iemOp_3Dnow_pfrsqit1_Pq_Qq);

/** Opcode 0x0f 0x0f 0xaa. */
FNIEMOP_STUB(iemOp_3Dnow_pfsubr_Pq_Qq);

/** Opcode 0x0f 0x0f 0xae. */
FNIEMOP_STUB(iemOp_3Dnow_pfacc_PQ_Qq);

/** Opcode 0x0f 0x0f 0xb0. */
FNIEMOP_STUB(iemOp_3Dnow_pfcmpeq_Pq_Qq);

/** Opcode 0x0f 0x0f 0xb4. */
FNIEMOP_STUB(iemOp_3Dnow_pfmul_Pq_Qq);

/** Opcode 0x0f 0x0f 0xb6. */
FNIEMOP_STUB(iemOp_3Dnow_pfrcpit2_Pq_Qq);

/** Opcode 0x0f 0x0f 0xb7. */
FNIEMOP_STUB(iemOp_3Dnow_pmulhrw_Pq_Qq);

/** Opcode 0x0f 0x0f 0xbb. */
FNIEMOP_STUB(iemOp_3Dnow_pswapd_Pq_Qq);

/** Opcode 0x0f 0x0f 0xbf. */
FNIEMOP_STUB(iemOp_3Dnow_pavgusb_PQ_Qq);
1244
1245
/** Opcode 0x0f 0x0f. */
FNIEMOP_DEF(iemOp_3Dnow)
{
    /* 3DNow! requires the AMD CPUID feature bit; #UD without it. */
    if (!IEM_IS_AMD_CPUID_FEATURE_PRESENT_EDX(X86_CPUID_AMD_FEATURE_EDX_3DNOW))
    {
        IEMOP_MNEMONIC("3Dnow");
        return IEMOP_RAISE_INVALID_OPCODE();
    }

    /* This is pretty sparse, use switch instead of table. */
    /* The sub-opcode is the immediate byte following the ModR/M operand. */
    uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
    switch (b)
    {
        case 0x0c: return FNIEMOP_CALL(iemOp_3Dnow_pi2fw_Pq_Qq);
        case 0x0d: return FNIEMOP_CALL(iemOp_3Dnow_pi2fd_Pq_Qq);
        case 0x1c: return FNIEMOP_CALL(iemOp_3Dnow_pf2fw_Pq_Qq);
        case 0x1d: return FNIEMOP_CALL(iemOp_3Dnow_pf2fd_Pq_Qq);
        case 0x8a: return FNIEMOP_CALL(iemOp_3Dnow_pfnacc_Pq_Qq);
        case 0x8e: return FNIEMOP_CALL(iemOp_3Dnow_pfpnacc_Pq_Qq);
        case 0x90: return FNIEMOP_CALL(iemOp_3Dnow_pfcmpge_Pq_Qq);
        case 0x94: return FNIEMOP_CALL(iemOp_3Dnow_pfmin_Pq_Qq);
        case 0x96: return FNIEMOP_CALL(iemOp_3Dnow_pfrcp_Pq_Qq);
        case 0x97: return FNIEMOP_CALL(iemOp_3Dnow_pfrsqrt_Pq_Qq);
        case 0x9a: return FNIEMOP_CALL(iemOp_3Dnow_pfsub_Pq_Qq);
        case 0x9e: return FNIEMOP_CALL(iemOp_3Dnow_pfadd_PQ_Qq);
        case 0xa0: return FNIEMOP_CALL(iemOp_3Dnow_pfcmpgt_Pq_Qq);
        case 0xa4: return FNIEMOP_CALL(iemOp_3Dnow_pfmax_Pq_Qq);
        case 0xa6: return FNIEMOP_CALL(iemOp_3Dnow_pfrcpit1_Pq_Qq);
        case 0xa7: return FNIEMOP_CALL(iemOp_3Dnow_pfrsqit1_Pq_Qq);
        case 0xaa: return FNIEMOP_CALL(iemOp_3Dnow_pfsubr_Pq_Qq);
        case 0xae: return FNIEMOP_CALL(iemOp_3Dnow_pfacc_PQ_Qq);
        case 0xb0: return FNIEMOP_CALL(iemOp_3Dnow_pfcmpeq_Pq_Qq);
        case 0xb4: return FNIEMOP_CALL(iemOp_3Dnow_pfmul_Pq_Qq);
        case 0xb6: return FNIEMOP_CALL(iemOp_3Dnow_pfrcpit2_Pq_Qq);
        case 0xb7: return FNIEMOP_CALL(iemOp_3Dnow_pmulhrw_Pq_Qq);
        case 0xbb: return FNIEMOP_CALL(iemOp_3Dnow_pswapd_Pq_Qq);
        case 0xbf: return FNIEMOP_CALL(iemOp_3Dnow_pavgusb_PQ_Qq);
        default:
            return IEMOP_RAISE_INVALID_OPCODE();
    }
}
1287
1288
/*
 * 0x0f 0x10..0x17: SSE/SSE2/SSE3 move and unpack instructions - unimplemented
 * stubs.  Each name lists the variants selected by the operand-size/repeat
 * prefixes.
 */

/** Opcode 0x0f 0x10. */
FNIEMOP_STUB(iemOp_movups_Vps_Wps__movupd_Vpd_Wpd__movss_Vss_Wss__movsd_Vsd_Wsd);
/** Opcode 0x0f 0x11. */
FNIEMOP_STUB(iemOp_movups_Wps_Vps__movupd_Wpd_Vpd__movss_Wss_Vss__movsd_Vsd_Wsd);
/** Opcode 0x0f 0x12. */
FNIEMOP_STUB(iemOp_movlps_Vq_Mq__movhlps_Vq_Uq__movlpd_Vq_Mq__movsldup_Vq_Wq__movddup_Vq_Wq);
/** Opcode 0x0f 0x13. */
FNIEMOP_STUB(iemOp_movlps_Mq_Vq__movlpd_Mq_Vq);
/** Opcode 0x0f 0x14. */
FNIEMOP_STUB(iemOp_unpckhlps_Vps_Wq__unpcklpd_Vpd_Wq);
/** Opcode 0x0f 0x15. */
FNIEMOP_STUB(iemOp_unpckhps_Vps_Wq__unpckhpd_Vpd_Wq);
/** Opcode 0x0f 0x16. */
FNIEMOP_STUB(iemOp_movhps_Vq_Mq__movlhps_Vq_Uq__movhpd_Vq_Mq__movshdup_Vq_Wq);
/** Opcode 0x0f 0x17. */
FNIEMOP_STUB(iemOp_movhps_Mq_Vq__movhpd_Mq_Vq);
1305
1306
/** Opcode 0x0f 0x18. */
FNIEMOP_DEF(iemOp_prefetch_Grp16)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    /* Only the memory form is valid; prefetch hints need an address. */
    if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
    {
        IEMOP_HLP_NO_LOCK_PREFIX();
        switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
        {
            case 4: /* Aliased to /0 for the time being according to AMD. */
            case 5: /* Aliased to /0 for the time being according to AMD. */
            case 6: /* Aliased to /0 for the time being according to AMD. */
            case 7: /* Aliased to /0 for the time being according to AMD. */
            case 0: IEMOP_MNEMONIC("prefetchNTA m8"); break;
            case 1: IEMOP_MNEMONIC("prefetchT0  m8"); break;
            case 2: IEMOP_MNEMONIC("prefetchT1  m8"); break;
            case 3: IEMOP_MNEMONIC("prefetchT2  m8"); break;
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }

        /* Decode the effective address for its side effects, then treat as NOP. */
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(RTGCPTR,  GCPtrEffSrc);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm);
        /* Currently a NOP. */
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
        return VINF_SUCCESS;
    }

    return IEMOP_RAISE_INVALID_OPCODE();
}
1338
1339
/** Opcode 0x0f 0x19..0x1f. */
FNIEMOP_DEF(iemOp_nop_Ev)
{
    /* Multi-byte NOP: consume the ModR/M operand (including any effective
       address calculation) and advance RIP without further side effects. */
    IEMOP_HLP_NO_LOCK_PREFIX();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        IEM_MC_BEGIN(0, 0);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm);
        /* Currently a NOP. */
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
1362
1363
/** Opcode 0x0f 0x20. */
FNIEMOP_DEF(iemOp_mov_Rd_Cd)
{
    /* mod is ignored, as is operand size overrides. */
    IEMOP_MNEMONIC("mov Rd,Cd");
    /* Operand size is forced: 64-bit in long mode, 32-bit otherwise. */
    if (pIemCpu->enmCpuMode == IEMMODE_64BIT)
        pIemCpu->enmEffOpSize = pIemCpu->enmDefOpSize = IEMMODE_64BIT;
    else
        pIemCpu->enmEffOpSize = pIemCpu->enmDefOpSize = IEMMODE_32BIT;

    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    uint8_t iCrReg = ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg;
    if (pIemCpu->fPrefixes & IEM_OP_PRF_LOCK)
    {
        /* The lock prefix can be used to encode CR8 accesses on some CPUs. */
        if (!IEM_IS_AMD_CPUID_FEATURE_PRESENT_ECX(X86_CPUID_AMD_FEATURE_ECX_CR8L))
            return IEMOP_RAISE_INVALID_LOCK_PREFIX(); /* #UD takes precedence over #GP(), see test. */
        iCrReg |= 8;
    }
    /* Only CR0, CR2, CR3, CR4 and CR8 are accepted; anything else is #UD. */
    switch (iCrReg)
    {
        case 0: case 2: case 3: case 4: case 8:
            break;
        default:
            return IEMOP_RAISE_INVALID_OPCODE();
    }
    IEMOP_HLP_DONE_DECODING();

    return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_mov_Rd_Cd, (X86_MODRM_RM_MASK & bRm) | pIemCpu->uRexB, iCrReg);
}
1394
1395
1396/** Opcode 0x0f 0x21. */
1397FNIEMOP_DEF(iemOp_mov_Rd_Dd)
1398{
1399 IEMOP_MNEMONIC("mov Rd,Dd");
1400 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1401 IEMOP_HLP_NO_LOCK_PREFIX();
1402 if (pIemCpu->fPrefixes & IEM_OP_PRF_REX_R)
1403 return IEMOP_RAISE_INVALID_OPCODE();
1404 return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_mov_Rd_Dd,
1405 (X86_MODRM_RM_MASK & bRm) | pIemCpu->uRexB,
1406 ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK));
1407}
1408
1409
/** Opcode 0x0f 0x22. */
FNIEMOP_DEF(iemOp_mov_Cd_Rd)
{
    /* mod is ignored, as is operand size overrides. */
    IEMOP_MNEMONIC("mov Cd,Rd");
    /* Operand size is forced: 64-bit in long mode, 32-bit otherwise. */
    if (pIemCpu->enmCpuMode == IEMMODE_64BIT)
        pIemCpu->enmEffOpSize = pIemCpu->enmDefOpSize = IEMMODE_64BIT;
    else
        pIemCpu->enmEffOpSize = pIemCpu->enmDefOpSize = IEMMODE_32BIT;

    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    uint8_t iCrReg = ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg;
    if (pIemCpu->fPrefixes & IEM_OP_PRF_LOCK)
    {
        /* The lock prefix can be used to encode CR8 accesses on some CPUs. */
        if (!IEM_IS_AMD_CPUID_FEATURE_PRESENT_ECX(X86_CPUID_AMD_FEATURE_ECX_CR8L))
            return IEMOP_RAISE_INVALID_LOCK_PREFIX(); /* #UD takes precedence over #GP(), see test. */
        iCrReg |= 8;
    }
    /* Only CR0, CR2, CR3, CR4 and CR8 are accepted; anything else is #UD. */
    switch (iCrReg)
    {
        case 0: case 2: case 3: case 4: case 8:
            break;
        default:
            return IEMOP_RAISE_INVALID_OPCODE();
    }
    IEMOP_HLP_DONE_DECODING();

    return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_mov_Cd_Rd, iCrReg, (X86_MODRM_RM_MASK & bRm) | pIemCpu->uRexB);
}
1440
1441
/** Opcode 0x0f 0x23. */
FNIEMOP_DEF(iemOp_mov_Dd_Rd)
{
    /* MOV Dd,Rd - write a general register into a debug register.
       REX.R must be clear since there are only eight debug registers. */
    IEMOP_MNEMONIC("mov Dd,Rd");
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    if (pIemCpu->fPrefixes & IEM_OP_PRF_REX_R)
        return IEMOP_RAISE_INVALID_OPCODE();
    return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_mov_Dd_Rd,
                                   ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK),
                                   (X86_MODRM_RM_MASK & bRm) | pIemCpu->uRexB);
}
1454
1455
/** Opcode 0x0f 0x24. */
FNIEMOP_DEF(iemOp_mov_Rd_Td)
{
    /* Test-register moves are not supported; always #UD. */
    IEMOP_MNEMONIC("mov Rd,Td");
    /* The RM byte is not considered, see testcase. */
    return IEMOP_RAISE_INVALID_OPCODE();
}
1463
1464
/** Opcode 0x0f 0x26. */
FNIEMOP_DEF(iemOp_mov_Td_Rd)
{
    /* Test-register moves are not supported; always #UD. */
    IEMOP_MNEMONIC("mov Td,Rd");
    /* The RM byte is not considered, see testcase. */
    return IEMOP_RAISE_INVALID_OPCODE();
}
1472
1473
/*
 * 0x0f 0x28..0x2f: SSE/SSE2 aligned moves, conversions and compares -
 * unimplemented stubs.
 */

/** Opcode 0x0f 0x28. */
FNIEMOP_STUB(iemOp_movaps_Vps_Wps__movapd_Vpd_Wpd);
/** Opcode 0x0f 0x29. */
FNIEMOP_STUB(iemOp_movaps_Wps_Vps__movapd_Wpd_Vpd);
/** Opcode 0x0f 0x2a. */
FNIEMOP_STUB(iemOp_cvtpi2ps_Vps_Qpi__cvtpi2pd_Vpd_Qpi__cvtsi2ss_Vss_Ey__cvtsi2sd_Vsd_Ey);
/** Opcode 0x0f 0x2b. */
FNIEMOP_STUB(iemOp_movntps_Mps_Vps__movntpd_Mpd_Vpd);
/** Opcode 0x0f 0x2c. */
FNIEMOP_STUB(iemOp_cvttps2pi_Ppi_Wps__cvttpd2pi_Ppi_Wpd__cvttss2si_Gy_Wss__cvttsd2si_Yu_Wsd);
/** Opcode 0x0f 0x2d. */
FNIEMOP_STUB(iemOp_cvtps2pi_Ppi_Wps__cvtpd2pi_QpiWpd__cvtss2si_Gy_Wss__cvtsd2si_Gy_Wsd);
/** Opcode 0x0f 0x2e. */
FNIEMOP_STUB(iemOp_ucomiss_Vss_Wss__ucomisd_Vsd_Wsd);
/** Opcode 0x0f 0x2f. */
FNIEMOP_STUB(iemOp_comiss_Vss_Wss__comisd_Vsd_Wsd);
1490
1491
/** Opcode 0x0f 0x30. */
FNIEMOP_DEF(iemOp_wrmsr)
{
    IEMOP_MNEMONIC("wrmsr");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    /* MSR access and privilege checks are done in the C implementation. */
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_wrmsr);
}
1499
1500
/** Opcode 0x0f 0x31. */
FNIEMOP_DEF(iemOp_rdtsc)
{
    IEMOP_MNEMONIC("rdtsc");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    /* TSC read and CR4.TSD/CPL checks are done in the C implementation. */
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_rdtsc);
}
1508
1509
/** Opcode 0x0f 0x32. */
FNIEMOP_DEF(iemOp_rdmsr)
{
    IEMOP_MNEMONIC("rdmsr");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    /* MSR access and privilege checks are done in the C implementation. */
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_rdmsr);
}
1517
1518
/** Opcode 0x0f 0x33. */
FNIEMOP_STUB(iemOp_rdpmc);
/** Opcode 0x0f 0x34. */
FNIEMOP_STUB(iemOp_sysenter);
/** Opcode 0x0f 0x35. */
FNIEMOP_STUB(iemOp_sysexit);
/** Opcode 0x0f 0x37. */
FNIEMOP_STUB(iemOp_getsec);
/** Opcode 0x0f 0x38. */
FNIEMOP_UD_STUB(iemOp_3byte_Esc_A4); /* Here there be dragons... */
/** Opcode 0x0f 0x3a. */
FNIEMOP_UD_STUB(iemOp_3byte_Esc_A5); /* Here there be dragons... */
/** Opcode 0x0f 0x3c (?). */
FNIEMOP_STUB(iemOp_movnti_Gv_Ev);
1533
/**
 * Implements a conditional move (CMOVcc Gv,Ev).
 *
 * The register form reads the source GPR inside the condition; the memory
 * form always fetches the source operand, taken or not.  For the 32-bit
 * operand size the destination's high 64-bit half is explicitly cleared
 * even when the condition is false (see the IEM_MC_ELSE branches).
 *
 * Wish there was an obvious way to do this where we could share and reduce
 * code bloat.
 *
 * @param a_Cnd The conditional "microcode" operation.
 */
#define CMOV_X(a_Cnd) \
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm); \
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT)) \
    { \
        switch (pIemCpu->enmEffOpSize) \
        { \
            case IEMMODE_16BIT: \
                IEM_MC_BEGIN(0, 1); \
                IEM_MC_LOCAL(uint16_t, u16Tmp); \
                a_Cnd { \
                    IEM_MC_FETCH_GREG_U16(u16Tmp, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB); \
                    IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u16Tmp); \
                } IEM_MC_ENDIF(); \
                IEM_MC_ADVANCE_RIP(); \
                IEM_MC_END(); \
                return VINF_SUCCESS; \
 \
            case IEMMODE_32BIT: \
                IEM_MC_BEGIN(0, 1); \
                IEM_MC_LOCAL(uint32_t, u32Tmp); \
                a_Cnd { \
                    IEM_MC_FETCH_GREG_U32(u32Tmp, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB); \
                    IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u32Tmp); \
                } IEM_MC_ELSE() { \
                    IEM_MC_CLEAR_HIGH_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg); \
                } IEM_MC_ENDIF(); \
                IEM_MC_ADVANCE_RIP(); \
                IEM_MC_END(); \
                return VINF_SUCCESS; \
 \
            case IEMMODE_64BIT: \
                IEM_MC_BEGIN(0, 1); \
                IEM_MC_LOCAL(uint64_t, u64Tmp); \
                a_Cnd { \
                    IEM_MC_FETCH_GREG_U64(u64Tmp, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB); \
                    IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u64Tmp); \
                } IEM_MC_ENDIF(); \
                IEM_MC_ADVANCE_RIP(); \
                IEM_MC_END(); \
                return VINF_SUCCESS; \
 \
            IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
        } \
    } \
    else \
    { \
        switch (pIemCpu->enmEffOpSize) \
        { \
            case IEMMODE_16BIT: \
                IEM_MC_BEGIN(0, 2); \
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc); \
                IEM_MC_LOCAL(uint16_t, u16Tmp); \
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm); \
                IEM_MC_FETCH_MEM_U16(u16Tmp, pIemCpu->iEffSeg, GCPtrEffSrc); \
                a_Cnd { \
                    IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u16Tmp); \
                } IEM_MC_ENDIF(); \
                IEM_MC_ADVANCE_RIP(); \
                IEM_MC_END(); \
                return VINF_SUCCESS; \
 \
            case IEMMODE_32BIT: \
                IEM_MC_BEGIN(0, 2); \
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc); \
                IEM_MC_LOCAL(uint32_t, u32Tmp); \
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm); \
                IEM_MC_FETCH_MEM_U32(u32Tmp, pIemCpu->iEffSeg, GCPtrEffSrc); \
                a_Cnd { \
                    IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u32Tmp); \
                } IEM_MC_ELSE() { \
                    IEM_MC_CLEAR_HIGH_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg); \
                } IEM_MC_ENDIF(); \
                IEM_MC_ADVANCE_RIP(); \
                IEM_MC_END(); \
                return VINF_SUCCESS; \
 \
            case IEMMODE_64BIT: \
                IEM_MC_BEGIN(0, 2); \
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc); \
                IEM_MC_LOCAL(uint64_t, u64Tmp); \
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm); \
                IEM_MC_FETCH_MEM_U64(u64Tmp, pIemCpu->iEffSeg, GCPtrEffSrc); \
                a_Cnd { \
                    IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u64Tmp); \
                } IEM_MC_ENDIF(); \
                IEM_MC_ADVANCE_RIP(); \
                IEM_MC_END(); \
                return VINF_SUCCESS; \
 \
            IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
        } \
    } do {} while (0)
1634
1635
1636
/*
 * CMOVcc Gv,Ev - opcodes 0x0f 0x40..0x4b.  Each expands the CMOV_X worker
 * above with the matching EFLAGS condition.
 */

/** Opcode 0x0f 0x40. */
FNIEMOP_DEF(iemOp_cmovo_Gv_Ev)
{
    IEMOP_MNEMONIC("cmovo Gv,Ev");
    CMOV_X(IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF));
}


/** Opcode 0x0f 0x41. */
FNIEMOP_DEF(iemOp_cmovno_Gv_Ev)
{
    IEMOP_MNEMONIC("cmovno Gv,Ev");
    CMOV_X(IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_OF));
}


/** Opcode 0x0f 0x42. */
FNIEMOP_DEF(iemOp_cmovc_Gv_Ev)
{
    IEMOP_MNEMONIC("cmovc Gv,Ev");
    CMOV_X(IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF));
}


/** Opcode 0x0f 0x43. */
FNIEMOP_DEF(iemOp_cmovnc_Gv_Ev)
{
    IEMOP_MNEMONIC("cmovnc Gv,Ev");
    CMOV_X(IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_CF));
}


/** Opcode 0x0f 0x44. */
FNIEMOP_DEF(iemOp_cmove_Gv_Ev)
{
    IEMOP_MNEMONIC("cmove Gv,Ev");
    CMOV_X(IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF));
}


/** Opcode 0x0f 0x45. */
FNIEMOP_DEF(iemOp_cmovne_Gv_Ev)
{
    IEMOP_MNEMONIC("cmovne Gv,Ev");
    CMOV_X(IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_ZF));
}


/** Opcode 0x0f 0x46. */
FNIEMOP_DEF(iemOp_cmovbe_Gv_Ev)
{
    IEMOP_MNEMONIC("cmovbe Gv,Ev");
    CMOV_X(IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF));
}


/** Opcode 0x0f 0x47. */
FNIEMOP_DEF(iemOp_cmovnbe_Gv_Ev)
{
    IEMOP_MNEMONIC("cmovnbe Gv,Ev");
    CMOV_X(IEM_MC_IF_EFL_NO_BITS_SET(X86_EFL_CF | X86_EFL_ZF));
}


/** Opcode 0x0f 0x48. */
FNIEMOP_DEF(iemOp_cmovs_Gv_Ev)
{
    IEMOP_MNEMONIC("cmovs Gv,Ev");
    CMOV_X(IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF));
}


/** Opcode 0x0f 0x49. */
FNIEMOP_DEF(iemOp_cmovns_Gv_Ev)
{
    IEMOP_MNEMONIC("cmovns Gv,Ev");
    CMOV_X(IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_SF));
}


/** Opcode 0x0f 0x4a. */
FNIEMOP_DEF(iemOp_cmovp_Gv_Ev)
{
    IEMOP_MNEMONIC("cmovp Gv,Ev");
    CMOV_X(IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF));
}


/** Opcode 0x0f 0x4b. */
FNIEMOP_DEF(iemOp_cmovnp_Gv_Ev)
{
    IEMOP_MNEMONIC("cmovnp Gv,Ev");
    CMOV_X(IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_PF));
}
1731
1732
/** Opcode 0x0f 0x4c. */
FNIEMOP_DEF(iemOp_cmovl_Gv_Ev)
{
    IEMOP_MNEMONIC("cmovl Gv,Ev");
    CMOV_X(IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF)); /* SF != OF */
}


/** Opcode 0x0f 0x4d. */
FNIEMOP_DEF(iemOp_cmovnl_Gv_Ev)
{
    IEMOP_MNEMONIC("cmovnl Gv,Ev");
    CMOV_X(IEM_MC_IF_EFL_BITS_EQ(X86_EFL_SF, X86_EFL_OF)); /* SF == OF */
}


/** Opcode 0x0f 0x4e. */
FNIEMOP_DEF(iemOp_cmovle_Gv_Ev)
{
    IEMOP_MNEMONIC("cmovle Gv,Ev");
    CMOV_X(IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF)); /* ZF or SF != OF */
}


/** Opcode 0x0f 0x4f. */
FNIEMOP_DEF(iemOp_cmovnle_Gv_Ev)
{
    IEMOP_MNEMONIC("cmovnle Gv,Ev");
    CMOV_X(IEM_MC_IF_EFL_BIT_NOT_SET_AND_BITS_EQ(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF)); /* !ZF and SF == OF */
}

#undef CMOV_X
1765
/*
 * 0x0f 0x50..0x70: SSE/SSE2 arithmetic plus MMX/SSE2 pack/unpack/compare -
 * unimplemented stubs.
 */

/** Opcode 0x0f 0x50. */
FNIEMOP_STUB(iemOp_movmskps_Gy_Ups__movmskpd_Gy_Upd);
/** Opcode 0x0f 0x51. */
FNIEMOP_STUB(iemOp_sqrtps_Wps_Vps__sqrtpd_Wpd_Vpd__sqrtss_Vss_Wss__sqrtsd_Vsd_Wsd);
/** Opcode 0x0f 0x52. */
FNIEMOP_STUB(iemOp_rsqrtps_Wps_Vps__rsqrtss_Vss_Wss);
/** Opcode 0x0f 0x53. */
FNIEMOP_STUB(iemOp_rcpps_Wps_Vps__rcpss_Vs_Wss);
/** Opcode 0x0f 0x54. */
FNIEMOP_STUB(iemOp_andps_Vps_Wps__andpd_Wpd_Vpd);
/** Opcode 0x0f 0x55. */
FNIEMOP_STUB(iemOp_andnps_Vps_Wps__andnpd_Wpd_Vpd);
/** Opcode 0x0f 0x56. */
FNIEMOP_STUB(iemOp_orps_Wpd_Vpd__orpd_Wpd_Vpd);
/** Opcode 0x0f 0x57. */
FNIEMOP_STUB(iemOp_xorps_Vps_Wps__xorpd_Wpd_Vpd);
/** Opcode 0x0f 0x58. */
FNIEMOP_STUB(iemOp_addps_Vps_Wps__addpd_Vpd_Wpd__addss_Vss_Wss__addsd_Vsd_Wsd);
/** Opcode 0x0f 0x59. */
FNIEMOP_STUB(iemOp_mulps_Vps_Wps__mulpd_Vpd_Wpd__mulss_Vss__Wss__mulsd_Vsd_Wsd);
/** Opcode 0x0f 0x5a. */
FNIEMOP_STUB(iemOp_cvtps2pd_Vpd_Wps__cvtpd2ps_Vps_Wpd__cvtss2sd_Vsd_Wss__cvtsd2ss_Vss_Wsd);
/** Opcode 0x0f 0x5b. */
FNIEMOP_STUB(iemOp_cvtdq2ps_Vps_Wdq__cvtps2dq_Vdq_Wps__cvtps2dq_Vdq_Wps);
/** Opcode 0x0f 0x5c. */
FNIEMOP_STUB(iemOp_subps_Vps_Wps__subpd_Vps_Wdp__subss_Vss_Wss__subsd_Vsd_Wsd);
/** Opcode 0x0f 0x5d. */
FNIEMOP_STUB(iemOp_minps_Vps_Wps__minpd_Vpd_Wpd__minss_Vss_Wss__minsd_Vsd_Wsd);
/** Opcode 0x0f 0x5e. */
FNIEMOP_STUB(iemOp_divps_Vps_Wps__divpd_Vpd_Wpd__divss_Vss_Wss__divsd_Vsd_Wsd);
/** Opcode 0x0f 0x5f. */
FNIEMOP_STUB(iemOp_maxps_Vps_Wps__maxpd_Vpd_Wpd__maxss_Vss_Wss__maxsd_Vsd_Wsd);
/** Opcode 0x0f 0x60. */
FNIEMOP_STUB(iemOp_punpcklbw_Pq_Qd__punpcklbw_Vdq_Wdq);
/** Opcode 0x0f 0x61. */
FNIEMOP_STUB(iemOp_punpcklwd_Pq_Qd__punpcklwd_Vdq_Wdq);
/** Opcode 0x0f 0x62. */
FNIEMOP_STUB(iemOp_punpckldq_Pq_Qd__punpckldq_Vdq_Wdq);
/** Opcode 0x0f 0x63. */
FNIEMOP_STUB(iemOp_packsswb_Pq_Qq__packsswb_Vdq_Wdq);
/** Opcode 0x0f 0x64. */
FNIEMOP_STUB(iemOp_pcmpgtb_Pq_Qq__pcmpgtb_Vdq_Wdq);
/** Opcode 0x0f 0x65. */
FNIEMOP_STUB(iemOp_pcmpgtw_Pq_Qq__pcmpgtw_Vdq_Wdq);
/** Opcode 0x0f 0x66. */
FNIEMOP_STUB(iemOp_pcmpgtd_Pq_Qq__pcmpgtd_Vdq_Wdq);
/** Opcode 0x0f 0x67. */
FNIEMOP_STUB(iemOp_packuswb_Pq_Qq__packuswb_Vdq_Wdq);
/** Opcode 0x0f 0x68. */
FNIEMOP_STUB(iemOp_punpckhbw_Pq_Qq__punpckhbw_Vdq_Wdq);
/** Opcode 0x0f 0x69. */
FNIEMOP_STUB(iemOp_punpckhwd_Pq_Qd__punpckhwd_Vdq_Wdq);
/** Opcode 0x0f 0x6a. */
FNIEMOP_STUB(iemOp_punpckhdq_Pq_Qd__punpckhdq_Vdq_Wdq);
/** Opcode 0x0f 0x6b. */
FNIEMOP_STUB(iemOp_packssdw_Pq_Qd__packssdq_Vdq_Wdq);
/** Opcode 0x0f 0x6c. */
FNIEMOP_STUB(iemOp_punpcklqdq_Vdq_Wdq);
/** Opcode 0x0f 0x6d. */
FNIEMOP_STUB(iemOp_punpckhqdq_Vdq_Wdq);
/** Opcode 0x0f 0x6e. */
FNIEMOP_STUB(iemOp_movd_q_Pd_Ey__movd_q_Vy_Ey);
/** Opcode 0x0f 0x6f. */
FNIEMOP_STUB(iemOp_movq_Pq_Qq__movdqa_Vdq_Wdq__movdqu_Vdq_Wdq);
/** Opcode 0x0f 0x70. */
FNIEMOP_STUB(iemOp_pshufw_Pq_Qq_Ib__pshufd_Vdq_Wdq_Ib__pshufhw_Vdq_Wdq_Ib__pshuflq_Vdq_Wdq_Ib);
1832
/*
 * Group 12 workers (0x0f 0x71, register form only): word shifts by
 * immediate.  Nq = MMX register form, Udq = SSE (0x66 prefix) form.
 */

/** Opcode 0x0f 0x71 11/2. */
FNIEMOP_STUB_1(iemOp_Grp12_psrlw_Nq_Ib, uint8_t, bRm);

/** Opcode 0x66 0x0f 0x71 11/2. */
FNIEMOP_STUB_1(iemOp_Grp12_psrlw_Udq_Ib, uint8_t, bRm);

/** Opcode 0x0f 0x71 11/4. */
FNIEMOP_STUB_1(iemOp_Grp12_psraw_Nq_Ib, uint8_t, bRm);

/** Opcode 0x66 0x0f 0x71 11/4. */
FNIEMOP_STUB_1(iemOp_Grp12_psraw_Udq_Ib, uint8_t, bRm);

/** Opcode 0x0f 0x71 11/6. */
FNIEMOP_STUB_1(iemOp_Grp12_psllw_Nq_Ib, uint8_t, bRm);

/** Opcode 0x66 0x0f 0x71 11/6. */
FNIEMOP_STUB_1(iemOp_Grp12_psllw_Udq_Ib, uint8_t, bRm);
1850
1851
1852/** Opcode 0x0f 0x71. */
1853FNIEMOP_DEF(iemOp_Grp12)
1854{
1855 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1856 if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
1857 return IEMOP_RAISE_INVALID_OPCODE();
1858 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
1859 {
1860 case 0: case 1: case 3: case 5: case 7:
1861 return IEMOP_RAISE_INVALID_OPCODE();
1862 case 2:
1863 switch (pIemCpu->fPrefixes & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPZ | IEM_OP_PRF_REPNZ))
1864 {
1865 case 0: return FNIEMOP_CALL_1(iemOp_Grp12_psrlw_Nq_Ib, bRm);
1866 case IEM_OP_PRF_SIZE_OP: return FNIEMOP_CALL_1(iemOp_Grp12_psrlw_Udq_Ib, bRm);
1867 default: return IEMOP_RAISE_INVALID_OPCODE();
1868 }
1869 case 4:
1870 switch (pIemCpu->fPrefixes & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPZ | IEM_OP_PRF_REPNZ))
1871 {
1872 case 0: return FNIEMOP_CALL_1(iemOp_Grp12_psraw_Nq_Ib, bRm);
1873 case IEM_OP_PRF_SIZE_OP: return FNIEMOP_CALL_1(iemOp_Grp12_psraw_Udq_Ib, bRm);
1874 default: return IEMOP_RAISE_INVALID_OPCODE();
1875 }
1876 case 6:
1877 switch (pIemCpu->fPrefixes & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPZ | IEM_OP_PRF_REPNZ))
1878 {
1879 case 0: return FNIEMOP_CALL_1(iemOp_Grp12_psllw_Nq_Ib, bRm);
1880 case IEM_OP_PRF_SIZE_OP: return FNIEMOP_CALL_1(iemOp_Grp12_psllw_Udq_Ib, bRm);
1881 default: return IEMOP_RAISE_INVALID_OPCODE();
1882 }
1883 IEM_NOT_REACHED_DEFAULT_CASE_RET();
1884 }
1885}
1886
1887
/*
 * Group 13 workers (0x0f 0x72, register form only): doubleword shifts by
 * immediate.  Nq = MMX register form, Udq = SSE (0x66 prefix) form.
 */

/** Opcode 0x0f 0x72 11/2. */
FNIEMOP_STUB_1(iemOp_Grp13_psrld_Nq_Ib, uint8_t, bRm);

/** Opcode 0x66 0x0f 0x72 11/2. */
FNIEMOP_STUB_1(iemOp_Grp13_psrld_Udq_Ib, uint8_t, bRm);

/** Opcode 0x0f 0x72 11/4. */
FNIEMOP_STUB_1(iemOp_Grp13_psrad_Nq_Ib, uint8_t, bRm);

/** Opcode 0x66 0x0f 0x72 11/4. */
FNIEMOP_STUB_1(iemOp_Grp13_psrad_Udq_Ib, uint8_t, bRm);

/** Opcode 0x0f 0x72 11/6. */
FNIEMOP_STUB_1(iemOp_Grp13_pslld_Nq_Ib, uint8_t, bRm);

/** Opcode 0x66 0x0f 0x72 11/6. */
FNIEMOP_STUB_1(iemOp_Grp13_pslld_Udq_Ib, uint8_t, bRm);
1905
1906
1907/** Opcode 0x0f 0x72. */
1908FNIEMOP_DEF(iemOp_Grp13)
1909{
1910 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1911 if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
1912 return IEMOP_RAISE_INVALID_OPCODE();
1913 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
1914 {
1915 case 0: case 1: case 3: case 5: case 7:
1916 return IEMOP_RAISE_INVALID_OPCODE();
1917 case 2:
1918 switch (pIemCpu->fPrefixes & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPZ | IEM_OP_PRF_REPNZ))
1919 {
1920 case 0: return FNIEMOP_CALL_1(iemOp_Grp13_psrld_Nq_Ib, bRm);
1921 case IEM_OP_PRF_SIZE_OP: return FNIEMOP_CALL_1(iemOp_Grp13_psrld_Udq_Ib, bRm);
1922 default: return IEMOP_RAISE_INVALID_OPCODE();
1923 }
1924 case 4:
1925 switch (pIemCpu->fPrefixes & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPZ | IEM_OP_PRF_REPNZ))
1926 {
1927 case 0: return FNIEMOP_CALL_1(iemOp_Grp13_psrad_Nq_Ib, bRm);
1928 case IEM_OP_PRF_SIZE_OP: return FNIEMOP_CALL_1(iemOp_Grp13_psrad_Udq_Ib, bRm);
1929 default: return IEMOP_RAISE_INVALID_OPCODE();
1930 }
1931 case 6:
1932 switch (pIemCpu->fPrefixes & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPZ | IEM_OP_PRF_REPNZ))
1933 {
1934 case 0: return FNIEMOP_CALL_1(iemOp_Grp13_pslld_Nq_Ib, bRm);
1935 case IEM_OP_PRF_SIZE_OP: return FNIEMOP_CALL_1(iemOp_Grp13_pslld_Udq_Ib, bRm);
1936 default: return IEMOP_RAISE_INVALID_OPCODE();
1937 }
1938 IEM_NOT_REACHED_DEFAULT_CASE_RET();
1939 }
1940}
1941
1942
/*
 * Group 14 workers (0x0f 0x73, register form only): quadword/octoword
 * shifts by immediate.  Nq = MMX register form, Udq = SSE (0x66 prefix)
 * form; the psrldq/pslldq byte shifts exist only in the SSE form.
 */

/** Opcode 0x0f 0x73 11/2. */
FNIEMOP_STUB_1(iemOp_Grp14_psrlq_Nq_Ib, uint8_t, bRm);

/** Opcode 0x66 0x0f 0x73 11/2. */
FNIEMOP_STUB_1(iemOp_Grp14_psrlq_Udq_Ib, uint8_t, bRm);

/** Opcode 0x66 0x0f 0x73 11/3. */
FNIEMOP_STUB_1(iemOp_Grp14_psrldq_Udq_Ib, uint8_t, bRm);

/** Opcode 0x0f 0x73 11/6. */
FNIEMOP_STUB_1(iemOp_Grp14_psllq_Nq_Ib, uint8_t, bRm);

/** Opcode 0x66 0x0f 0x73 11/6. */
FNIEMOP_STUB_1(iemOp_Grp14_psllq_Udq_Ib, uint8_t, bRm);

/** Opcode 0x66 0x0f 0x73 11/7. */
FNIEMOP_STUB_1(iemOp_Grp14_pslldq_Udq_Ib, uint8_t, bRm);
1960
1961
1962/** Opcode 0x0f 0x73. */
1963FNIEMOP_DEF(iemOp_Grp14)
1964{
1965 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1966 if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
1967 return IEMOP_RAISE_INVALID_OPCODE();
1968 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
1969 {
1970 case 0: case 1: case 4: case 5:
1971 return IEMOP_RAISE_INVALID_OPCODE();
1972 case 2:
1973 switch (pIemCpu->fPrefixes & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPZ | IEM_OP_PRF_REPNZ))
1974 {
1975 case 0: return FNIEMOP_CALL_1(iemOp_Grp14_psrlq_Nq_Ib, bRm);
1976 case IEM_OP_PRF_SIZE_OP: return FNIEMOP_CALL_1(iemOp_Grp14_psrlq_Udq_Ib, bRm);
1977 default: return IEMOP_RAISE_INVALID_OPCODE();
1978 }
1979 case 3:
1980 switch (pIemCpu->fPrefixes & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPZ | IEM_OP_PRF_REPNZ))
1981 {
1982 case IEM_OP_PRF_SIZE_OP: return FNIEMOP_CALL_1(iemOp_Grp14_psrldq_Udq_Ib, bRm);
1983 default: return IEMOP_RAISE_INVALID_OPCODE();
1984 }
1985 case 6:
1986 switch (pIemCpu->fPrefixes & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPZ | IEM_OP_PRF_REPNZ))
1987 {
1988 case 0: return FNIEMOP_CALL_1(iemOp_Grp14_psllq_Nq_Ib, bRm);
1989 case IEM_OP_PRF_SIZE_OP: return FNIEMOP_CALL_1(iemOp_Grp14_psllq_Udq_Ib, bRm);
1990 default: return IEMOP_RAISE_INVALID_OPCODE();
1991 }
1992 case 7:
1993 switch (pIemCpu->fPrefixes & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPZ | IEM_OP_PRF_REPNZ))
1994 {
1995 case IEM_OP_PRF_SIZE_OP: return FNIEMOP_CALL_1(iemOp_Grp14_pslldq_Udq_Ib, bRm);
1996 default: return IEMOP_RAISE_INVALID_OPCODE();
1997 }
1998 IEM_NOT_REACHED_DEFAULT_CASE_RET();
1999 }
2000}
2001
2002
/*
 * 0x0f 0x74..0x7f: MMX/SSE compares, emms, VMX vmread/vmwrite, SSE3
 * horizontal arithmetic and moves - unimplemented stubs.
 */

/** Opcode 0x0f 0x74. */
FNIEMOP_STUB(iemOp_pcmpeqb_Pq_Qq__pcmpeqb_Vdq_Wdq);
/** Opcode 0x0f 0x75. */
FNIEMOP_STUB(iemOp_pcmpeqw_Pq_Qq__pcmpeqw_Vdq_Wdq);
/** Opcode 0x0f 0x76. */
FNIEMOP_STUB(iemOp_pcmped_Pq_Qq__pcmpeqd_Vdq_Wdq);
/** Opcode 0x0f 0x77. */
FNIEMOP_STUB(iemOp_emms);
/** Opcode 0x0f 0x78. */
FNIEMOP_UD_STUB(iemOp_vmread_AmdGrp17);
/** Opcode 0x0f 0x79. */
FNIEMOP_UD_STUB(iemOp_vmwrite);
/** Opcode 0x0f 0x7c. */
FNIEMOP_STUB(iemOp_haddpd_Vdp_Wpd__haddps_Vps_Wps);
/** Opcode 0x0f 0x7d. */
FNIEMOP_STUB(iemOp_hsubpd_Vpd_Wpd__hsubps_Vps_Wps);
/** Opcode 0x0f 0x7e. */
FNIEMOP_STUB(iemOp_movd_q_Ey_Pd__movd_q_Ey_Vy__movq_Vq_Wq);
/** Opcode 0x0f 0x7f. */
FNIEMOP_STUB(iemOp_movq_Qq_Pq__movq_movdqa_Wdq_Vdq__movdqu_Wdq_Vdq);
2023
2024
/** Opcode 0x0f 0x80. */
FNIEMOP_DEF(iemOp_jo_Jv)
{
    /* JO rel16/32: near jump, taken when the overflow flag (OF) is set. */
    IEMOP_MNEMONIC("jo Jv");
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE(); /* operand size defaults to 64-bit in long mode (still a 32-bit displacement below) */
    if (pIemCpu->enmEffOpSize == IEMMODE_16BIT)
    {
        int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
        IEMOP_HLP_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
            IEM_MC_REL_JMP_S16(i16Imm);
        } IEM_MC_ELSE() {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    else
    {
        int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
        IEMOP_HLP_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
            IEM_MC_REL_JMP_S32(i32Imm);
        } IEM_MC_ELSE() {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
2058
2059
/** Opcode 0x0f 0x81. */
FNIEMOP_DEF(iemOp_jno_Jv)
{
    /* JNO rel16/32: near jump, taken when the overflow flag (OF) is clear
       (inverted sense: OF set -> fall through). */
    IEMOP_MNEMONIC("jno Jv");
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
    if (pIemCpu->enmEffOpSize == IEMMODE_16BIT)
    {
        int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
        IEMOP_HLP_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ELSE() {
            IEM_MC_REL_JMP_S16(i16Imm);
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    else
    {
        int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
        IEMOP_HLP_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ELSE() {
            IEM_MC_REL_JMP_S32(i32Imm);
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
2093
2094
/** Opcode 0x0f 0x82. */
FNIEMOP_DEF(iemOp_jc_Jv)
{
    /* JC/JB/JNAE rel16/32: near jump, taken when the carry flag (CF) is set. */
    IEMOP_MNEMONIC("jc/jb/jnae Jv");
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
    if (pIemCpu->enmEffOpSize == IEMMODE_16BIT)
    {
        int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
        IEMOP_HLP_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
            IEM_MC_REL_JMP_S16(i16Imm);
        } IEM_MC_ELSE() {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    else
    {
        int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
        IEMOP_HLP_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
            IEM_MC_REL_JMP_S32(i32Imm);
        } IEM_MC_ELSE() {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
2128
2129
/** Opcode 0x0f 0x83. */
FNIEMOP_DEF(iemOp_jnc_Jv)
{
    /* JNC/JNB/JAE rel16/32: near jump, taken when the carry flag (CF) is clear. */
    IEMOP_MNEMONIC("jnc/jnb/jae Jv");
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
    if (pIemCpu->enmEffOpSize == IEMMODE_16BIT)
    {
        int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
        IEMOP_HLP_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ELSE() {
            IEM_MC_REL_JMP_S16(i16Imm);
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    else
    {
        int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
        IEMOP_HLP_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ELSE() {
            IEM_MC_REL_JMP_S32(i32Imm);
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
2163
2164
/** Opcode 0x0f 0x84. */
FNIEMOP_DEF(iemOp_je_Jv)
{
    /* JE/JZ rel16/32: near jump, taken when the zero flag (ZF) is set. */
    IEMOP_MNEMONIC("je/jz Jv");
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
    if (pIemCpu->enmEffOpSize == IEMMODE_16BIT)
    {
        int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
        IEMOP_HLP_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
            IEM_MC_REL_JMP_S16(i16Imm);
        } IEM_MC_ELSE() {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    else
    {
        int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
        IEMOP_HLP_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
            IEM_MC_REL_JMP_S32(i32Imm);
        } IEM_MC_ELSE() {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
2198
2199
/** Opcode 0x0f 0x85. */
FNIEMOP_DEF(iemOp_jne_Jv)
{
    /* JNE/JNZ rel16/32: near jump, taken when the zero flag (ZF) is clear. */
    IEMOP_MNEMONIC("jne/jnz Jv");
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
    if (pIemCpu->enmEffOpSize == IEMMODE_16BIT)
    {
        int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
        IEMOP_HLP_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ELSE() {
            IEM_MC_REL_JMP_S16(i16Imm);
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    else
    {
        int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
        IEMOP_HLP_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ELSE() {
            IEM_MC_REL_JMP_S32(i32Imm);
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
2233
2234
/** Opcode 0x0f 0x86. */
FNIEMOP_DEF(iemOp_jbe_Jv)
{
    /* JBE/JNA rel16/32: near jump, taken when CF or ZF is set (unsigned <=). */
    IEMOP_MNEMONIC("jbe/jna Jv");
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
    if (pIemCpu->enmEffOpSize == IEMMODE_16BIT)
    {
        int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
        IEMOP_HLP_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
            IEM_MC_REL_JMP_S16(i16Imm);
        } IEM_MC_ELSE() {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    else
    {
        int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
        IEMOP_HLP_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
            IEM_MC_REL_JMP_S32(i32Imm);
        } IEM_MC_ELSE() {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
2268
2269
/** Opcode 0x0f 0x87. */
FNIEMOP_DEF(iemOp_jnbe_Jv)
{
    /* JNBE/JA rel16/32: near jump, taken when both CF and ZF are clear (unsigned >). */
    IEMOP_MNEMONIC("jnbe/ja Jv");
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
    if (pIemCpu->enmEffOpSize == IEMMODE_16BIT)
    {
        int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
        IEMOP_HLP_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ELSE() {
            IEM_MC_REL_JMP_S16(i16Imm);
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    else
    {
        int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
        IEMOP_HLP_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ELSE() {
            IEM_MC_REL_JMP_S32(i32Imm);
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
2303
2304
/** Opcode 0x0f 0x88. */
FNIEMOP_DEF(iemOp_js_Jv)
{
    /* JS rel16/32: near jump, taken when the sign flag (SF) is set. */
    IEMOP_MNEMONIC("js Jv");
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
    if (pIemCpu->enmEffOpSize == IEMMODE_16BIT)
    {
        int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
        IEMOP_HLP_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
            IEM_MC_REL_JMP_S16(i16Imm);
        } IEM_MC_ELSE() {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    else
    {
        int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
        IEMOP_HLP_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
            IEM_MC_REL_JMP_S32(i32Imm);
        } IEM_MC_ELSE() {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
2338
2339
/** Opcode 0x0f 0x89. */
FNIEMOP_DEF(iemOp_jns_Jv)
{
    /* JNS rel16/32: near jump, taken when the sign flag (SF) is clear. */
    IEMOP_MNEMONIC("jns Jv");
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
    if (pIemCpu->enmEffOpSize == IEMMODE_16BIT)
    {
        int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
        IEMOP_HLP_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ELSE() {
            IEM_MC_REL_JMP_S16(i16Imm);
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    else
    {
        int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
        IEMOP_HLP_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ELSE() {
            IEM_MC_REL_JMP_S32(i32Imm);
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
2373
2374
/** Opcode 0x0f 0x8a. */
FNIEMOP_DEF(iemOp_jp_Jv)
{
    /* JP/JPE rel16/32: near jump, taken when the parity flag (PF) is set. */
    IEMOP_MNEMONIC("jp Jv");
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
    if (pIemCpu->enmEffOpSize == IEMMODE_16BIT)
    {
        int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
        IEMOP_HLP_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
            IEM_MC_REL_JMP_S16(i16Imm);
        } IEM_MC_ELSE() {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    else
    {
        int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
        IEMOP_HLP_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
            IEM_MC_REL_JMP_S32(i32Imm);
        } IEM_MC_ELSE() {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
2408
2409
2410/** Opcode 0x0f 0x8b. */
2411FNIEMOP_DEF(iemOp_jnp_Jv)
2412{
2413 IEMOP_MNEMONIC("jo Jv");
2414 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
2415 if (pIemCpu->enmEffOpSize == IEMMODE_16BIT)
2416 {
2417 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
2418 IEMOP_HLP_NO_LOCK_PREFIX();
2419
2420 IEM_MC_BEGIN(0, 0);
2421 IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
2422 IEM_MC_ADVANCE_RIP();
2423 } IEM_MC_ELSE() {
2424 IEM_MC_REL_JMP_S16(i16Imm);
2425 } IEM_MC_ENDIF();
2426 IEM_MC_END();
2427 }
2428 else
2429 {
2430 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
2431 IEMOP_HLP_NO_LOCK_PREFIX();
2432
2433 IEM_MC_BEGIN(0, 0);
2434 IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
2435 IEM_MC_ADVANCE_RIP();
2436 } IEM_MC_ELSE() {
2437 IEM_MC_REL_JMP_S32(i32Imm);
2438 } IEM_MC_ENDIF();
2439 IEM_MC_END();
2440 }
2441 return VINF_SUCCESS;
2442}
2443
2444
/** Opcode 0x0f 0x8c. */
FNIEMOP_DEF(iemOp_jl_Jv)
{
    /* JL/JNGE rel16/32: near jump, taken when SF != OF (signed <). */
    IEMOP_MNEMONIC("jl/jnge Jv");
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
    if (pIemCpu->enmEffOpSize == IEMMODE_16BIT)
    {
        int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
        IEMOP_HLP_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
            IEM_MC_REL_JMP_S16(i16Imm);
        } IEM_MC_ELSE() {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    else
    {
        int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
        IEMOP_HLP_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
            IEM_MC_REL_JMP_S32(i32Imm);
        } IEM_MC_ELSE() {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
2478
2479
/** Opcode 0x0f 0x8d. */
FNIEMOP_DEF(iemOp_jnl_Jv)
{
    /* JNL/JGE rel16/32: near jump, taken when SF == OF (signed >=). */
    IEMOP_MNEMONIC("jnl/jge Jv");
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
    if (pIemCpu->enmEffOpSize == IEMMODE_16BIT)
    {
        int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
        IEMOP_HLP_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ELSE() {
            IEM_MC_REL_JMP_S16(i16Imm);
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    else
    {
        int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
        IEMOP_HLP_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ELSE() {
            IEM_MC_REL_JMP_S32(i32Imm);
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
2513
2514
/** Opcode 0x0f 0x8e. */
FNIEMOP_DEF(iemOp_jle_Jv)
{
    /* JLE/JNG rel16/32: near jump, taken when ZF is set or SF != OF (signed <=). */
    IEMOP_MNEMONIC("jle/jng Jv");
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
    if (pIemCpu->enmEffOpSize == IEMMODE_16BIT)
    {
        int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
        IEMOP_HLP_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
            IEM_MC_REL_JMP_S16(i16Imm);
        } IEM_MC_ELSE() {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    else
    {
        int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
        IEMOP_HLP_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
            IEM_MC_REL_JMP_S32(i32Imm);
        } IEM_MC_ELSE() {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
2548
2549
/** Opcode 0x0f 0x8f. */
FNIEMOP_DEF(iemOp_jnle_Jv)
{
    /* JNLE/JG rel16/32: near jump, taken when ZF is clear and SF == OF (signed >). */
    IEMOP_MNEMONIC("jnle/jg Jv");
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
    if (pIemCpu->enmEffOpSize == IEMMODE_16BIT)
    {
        int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
        IEMOP_HLP_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ELSE() {
            IEM_MC_REL_JMP_S16(i16Imm);
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    else
    {
        int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
        IEMOP_HLP_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ELSE() {
            IEM_MC_REL_JMP_S32(i32Imm);
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
2583
2584
/** Opcode 0x0f 0x90. */
FNIEMOP_DEF(iemOp_seto_Eb)
{
    /* SETO r/m8: store 1 into the byte operand if OF is set, 0 otherwise. */
    IEMOP_MNEMONIC("seto Eb");
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo too early? */

    /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
     *        any way. AMD says it's "unused", whatever that means. We're
     *        ignoring for now. */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register target */
        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, 1);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, 0);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /* memory target */
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
            IEM_MC_STORE_MEM_U8_CONST(pIemCpu->iEffSeg, GCPtrEffDst, 1);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_MEM_U8_CONST(pIemCpu->iEffSeg, GCPtrEffDst, 0);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
2623
2624
/** Opcode 0x0f 0x91. */
FNIEMOP_DEF(iemOp_setno_Eb)
{
    /* SETNO r/m8: store 1 into the byte operand if OF is clear, 0 otherwise. */
    IEMOP_MNEMONIC("setno Eb");
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo too early? */

    /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
     *        any way. AMD says it's "unused", whatever that means. We're
     *        ignoring for now. */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register target */
        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, 0);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, 1);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /* memory target */
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
            IEM_MC_STORE_MEM_U8_CONST(pIemCpu->iEffSeg, GCPtrEffDst, 0);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_MEM_U8_CONST(pIemCpu->iEffSeg, GCPtrEffDst, 1);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
2663
2664
/** Opcode 0x0f 0x92. */
FNIEMOP_DEF(iemOp_setc_Eb)
{
    /* SETC/SETB/SETNAE r/m8: store 1 into the byte operand if CF is set, 0 otherwise. */
    IEMOP_MNEMONIC("setc Eb");
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo too early? */

    /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
     *        any way. AMD says it's "unused", whatever that means. We're
     *        ignoring for now. */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register target */
        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, 1);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, 0);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /* memory target */
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
            IEM_MC_STORE_MEM_U8_CONST(pIemCpu->iEffSeg, GCPtrEffDst, 1);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_MEM_U8_CONST(pIemCpu->iEffSeg, GCPtrEffDst, 0);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
2703
2704
/** Opcode 0x0f 0x93. */
FNIEMOP_DEF(iemOp_setnc_Eb)
{
    /* SETNC/SETNB/SETAE r/m8: store 1 into the byte operand if CF is clear, 0 otherwise. */
    IEMOP_MNEMONIC("setnc Eb");
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo too early? */

    /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
     *        any way. AMD says it's "unused", whatever that means. We're
     *        ignoring for now. */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register target */
        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, 0);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, 1);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /* memory target */
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
            IEM_MC_STORE_MEM_U8_CONST(pIemCpu->iEffSeg, GCPtrEffDst, 0);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_MEM_U8_CONST(pIemCpu->iEffSeg, GCPtrEffDst, 1);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
2743
2744
/** Opcode 0x0f 0x94. */
FNIEMOP_DEF(iemOp_sete_Eb)
{
    /* SETE/SETZ r/m8: store 1 into the byte operand if ZF is set, 0 otherwise. */
    IEMOP_MNEMONIC("sete Eb");
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo too early? */

    /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
     *        any way. AMD says it's "unused", whatever that means. We're
     *        ignoring for now. */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register target */
        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, 1);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, 0);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /* memory target */
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
            IEM_MC_STORE_MEM_U8_CONST(pIemCpu->iEffSeg, GCPtrEffDst, 1);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_MEM_U8_CONST(pIemCpu->iEffSeg, GCPtrEffDst, 0);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
2783
2784
/** Opcode 0x0f 0x95. */
FNIEMOP_DEF(iemOp_setne_Eb)
{
    /* SETNE/SETNZ r/m8: store 1 into the byte operand if ZF is clear, 0 otherwise. */
    IEMOP_MNEMONIC("setne Eb");
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo too early? */

    /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
     *        any way. AMD says it's "unused", whatever that means. We're
     *        ignoring for now. */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register target */
        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, 0);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, 1);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /* memory target */
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
            IEM_MC_STORE_MEM_U8_CONST(pIemCpu->iEffSeg, GCPtrEffDst, 0);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_MEM_U8_CONST(pIemCpu->iEffSeg, GCPtrEffDst, 1);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
2823
2824
/** Opcode 0x0f 0x96. */
FNIEMOP_DEF(iemOp_setbe_Eb)
{
    /* SETBE/SETNA r/m8: store 1 into the byte operand if CF or ZF is set
       (unsigned <=), 0 otherwise. */
    IEMOP_MNEMONIC("setbe Eb");
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo too early? */

    /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
     *        any way. AMD says it's "unused", whatever that means. We're
     *        ignoring for now. */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register target */
        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, 1);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, 0);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /* memory target */
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm);
        IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
            IEM_MC_STORE_MEM_U8_CONST(pIemCpu->iEffSeg, GCPtrEffDst, 1);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_MEM_U8_CONST(pIemCpu->iEffSeg, GCPtrEffDst, 0);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
2863
2864
/** Opcode 0x0f 0x97. */
FNIEMOP_DEF(iemOp_setnbe_Eb)
{
    /* SETNBE/SETA r/m8: store 1 into the byte operand if both CF and ZF are
       clear (unsigned >), 0 otherwise. */
    IEMOP_MNEMONIC("setnbe Eb");
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo too early? */

    /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
     *        any way. AMD says it's "unused", whatever that means. We're
     *        ignoring for now. */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register target */
        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, 0);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, 1);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /* memory target */
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm);
        IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
            IEM_MC_STORE_MEM_U8_CONST(pIemCpu->iEffSeg, GCPtrEffDst, 0);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_MEM_U8_CONST(pIemCpu->iEffSeg, GCPtrEffDst, 1);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
2903
2904
/** Opcode 0x0f 0x98. */
FNIEMOP_DEF(iemOp_sets_Eb)
{
    /* SETS r/m8: store 1 into the byte operand if SF is set, 0 otherwise. */
    IEMOP_MNEMONIC("sets Eb");
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo too early? */

    /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
     *        any way. AMD says it's "unused", whatever that means. We're
     *        ignoring for now. */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register target */
        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, 1);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, 0);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /* memory target */
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
            IEM_MC_STORE_MEM_U8_CONST(pIemCpu->iEffSeg, GCPtrEffDst, 1);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_MEM_U8_CONST(pIemCpu->iEffSeg, GCPtrEffDst, 0);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
2943
2944
/** Opcode 0x0f 0x99. */
FNIEMOP_DEF(iemOp_setns_Eb)
{
    /* SETNS r/m8: store 1 into the byte operand if SF is clear, 0 otherwise. */
    IEMOP_MNEMONIC("setns Eb");
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo too early? */

    /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
     *        any way. AMD says it's "unused", whatever that means. We're
     *        ignoring for now. */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register target */
        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, 0);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, 1);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /* memory target */
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
            IEM_MC_STORE_MEM_U8_CONST(pIemCpu->iEffSeg, GCPtrEffDst, 0);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_MEM_U8_CONST(pIemCpu->iEffSeg, GCPtrEffDst, 1);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
2983
2984
2985/** Opcode 0x0f 0x9a. */
2986FNIEMOP_DEF(iemOp_setp_Eb)
2987{
2988 IEMOP_MNEMONIC("setnp Eb");
2989 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2990 IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo too early? */
2991
2992 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
2993 * any way. AMD says it's "unused", whatever that means. We're
2994 * ignoring for now. */
2995 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
2996 {
2997 /* register target */
2998 IEM_MC_BEGIN(0, 0);
2999 IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
3000 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, 1);
3001 } IEM_MC_ELSE() {
3002 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, 0);
3003 } IEM_MC_ENDIF();
3004 IEM_MC_ADVANCE_RIP();
3005 IEM_MC_END();
3006 }
3007 else
3008 {
3009 /* memory target */
3010 IEM_MC_BEGIN(0, 1);
3011 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
3012 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm);
3013 IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
3014 IEM_MC_STORE_MEM_U8_CONST(pIemCpu->iEffSeg, GCPtrEffDst, 1);
3015 } IEM_MC_ELSE() {
3016 IEM_MC_STORE_MEM_U8_CONST(pIemCpu->iEffSeg, GCPtrEffDst, 0);
3017 } IEM_MC_ENDIF();
3018 IEM_MC_ADVANCE_RIP();
3019 IEM_MC_END();
3020 }
3021 return VINF_SUCCESS;
3022}
3023
3024
/** Opcode 0x0f 0x9b. */
FNIEMOP_DEF(iemOp_setnp_Eb)
{
    /* SETNP/SETPO r/m8: store 1 into the byte operand if PF is clear, 0 otherwise. */
    IEMOP_MNEMONIC("setnp Eb");
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo too early? */

    /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
     *        any way. AMD says it's "unused", whatever that means. We're
     *        ignoring for now. */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register target */
        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, 0);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, 1);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /* memory target */
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
            IEM_MC_STORE_MEM_U8_CONST(pIemCpu->iEffSeg, GCPtrEffDst, 0);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_MEM_U8_CONST(pIemCpu->iEffSeg, GCPtrEffDst, 1);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
3063
3064
/** Opcode 0x0f 0x9c. */
FNIEMOP_DEF(iemOp_setl_Eb)
{
    /* SETL/SETNGE r/m8: store 1 into the byte operand if SF != OF (signed <),
       0 otherwise. */
    IEMOP_MNEMONIC("setl Eb");
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo too early? */

    /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
     *        any way. AMD says it's "unused", whatever that means. We're
     *        ignoring for now. */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register target */
        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, 1);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, 0);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /* memory target */
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm);
        IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
            IEM_MC_STORE_MEM_U8_CONST(pIemCpu->iEffSeg, GCPtrEffDst, 1);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_MEM_U8_CONST(pIemCpu->iEffSeg, GCPtrEffDst, 0);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
3103
3104
/** Opcode 0x0f 0x9d. */
FNIEMOP_DEF(iemOp_setnl_Eb)
{
    /* SETNL/SETGE r/m8: store 1 into the byte operand if SF == OF (signed >=),
       0 otherwise. */
    IEMOP_MNEMONIC("setnl Eb");
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo too early? */

    /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
     *        any way. AMD says it's "unused", whatever that means. We're
     *        ignoring for now. */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register target */
        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, 0);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, 1);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /* memory target */
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm);
        IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
            IEM_MC_STORE_MEM_U8_CONST(pIemCpu->iEffSeg, GCPtrEffDst, 0);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_MEM_U8_CONST(pIemCpu->iEffSeg, GCPtrEffDst, 1);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
3143
3144
/** Opcode 0x0f 0x9e. */
FNIEMOP_DEF(iemOp_setle_Eb)
{
    /* SETLE/SETNG r/m8: store 1 into the byte operand if ZF is set or SF != OF
       (signed <=), 0 otherwise. */
    IEMOP_MNEMONIC("setle Eb");
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo too early? */

    /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
     *        any way. AMD says it's "unused", whatever that means. We're
     *        ignoring for now. */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register target */
        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, 1);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, 0);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /* memory target */
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm);
        IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
            IEM_MC_STORE_MEM_U8_CONST(pIemCpu->iEffSeg, GCPtrEffDst, 1);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_MEM_U8_CONST(pIemCpu->iEffSeg, GCPtrEffDst, 0);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
3183
3184
/** Opcode 0x0f 0x9f.
 * SETNLE/SETG Eb - sets the byte destination to 1 when ZF is clear and
 * SF == OF, else 0; implemented as the inverse of SETLE (branches swapped). */
FNIEMOP_DEF(iemOp_setnle_Eb)
{
    IEMOP_MNEMONIC("setnle Eb");
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo too early? */

    /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
     *        any way. AMD says it's "unused", whatever that means.  We're
     *        ignoring for now. */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register target */
        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, 0);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, 1);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /* memory target */
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm);
        IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
            IEM_MC_STORE_MEM_U8_CONST(pIemCpu->iEffSeg, GCPtrEffDst, 0);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_MEM_U8_CONST(pIemCpu->iEffSeg, GCPtrEffDst, 1);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
3223
3224
3225/**
3226 * Common 'push segment-register' helper.
3227 */
3228FNIEMOP_DEF_1(iemOpCommonPushSReg, uint8_t, iReg)
3229{
3230 IEMOP_HLP_NO_LOCK_PREFIX();
3231 if (iReg < X86_SREG_FS)
3232 IEMOP_HLP_NO_64BIT();
3233 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
3234
3235 switch (pIemCpu->enmEffOpSize)
3236 {
3237 case IEMMODE_16BIT:
3238 IEM_MC_BEGIN(0, 1);
3239 IEM_MC_LOCAL(uint16_t, u16Value);
3240 IEM_MC_FETCH_SREG_U16(u16Value, iReg);
3241 IEM_MC_PUSH_U16(u16Value);
3242 IEM_MC_ADVANCE_RIP();
3243 IEM_MC_END();
3244 break;
3245
3246 case IEMMODE_32BIT:
3247 IEM_MC_BEGIN(0, 1);
3248 IEM_MC_LOCAL(uint32_t, u32Value);
3249 IEM_MC_FETCH_SREG_ZX_U32(u32Value, iReg);
3250 IEM_MC_PUSH_U32(u32Value);
3251 IEM_MC_ADVANCE_RIP();
3252 IEM_MC_END();
3253 break;
3254
3255 case IEMMODE_64BIT:
3256 IEM_MC_BEGIN(0, 1);
3257 IEM_MC_LOCAL(uint64_t, u64Value);
3258 IEM_MC_FETCH_SREG_ZX_U64(u64Value, iReg);
3259 IEM_MC_PUSH_U64(u64Value);
3260 IEM_MC_ADVANCE_RIP();
3261 IEM_MC_END();
3262 break;
3263 }
3264
3265 return VINF_SUCCESS;
3266}
3267
3268
/** Opcode 0x0f 0xa0.
 * PUSH FS - defers to the common segment-register push helper. */
FNIEMOP_DEF(iemOp_push_fs)
{
    IEMOP_MNEMONIC("push fs");
    IEMOP_HLP_NO_LOCK_PREFIX();
    return FNIEMOP_CALL_1(iemOpCommonPushSReg, X86_SREG_FS);
}
3276
3277
/** Opcode 0x0f 0xa1.
 * POP FS - segment register loads have side effects, so this is handled by a
 * C implementation (iemCImpl_pop_Sreg) rather than inline MC blocks. */
FNIEMOP_DEF(iemOp_pop_fs)
{
    IEMOP_MNEMONIC("pop fs");
    IEMOP_HLP_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_pop_Sreg, X86_SREG_FS, pIemCpu->enmEffOpSize);
}
3285
3286
/** Opcode 0x0f 0xa2.
 * CPUID - fully implemented in C (iemCImpl_cpuid). */
FNIEMOP_DEF(iemOp_cpuid)
{
    IEMOP_MNEMONIC("cpuid");
    IEMOP_HLP_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_cpuid);
}
3294
3295
3296/**
3297 * Common worker for iemOp_bt_Ev_Gv, iemOp_btc_Ev_Gv, iemOp_btr_Ev_Gv and
3298 * iemOp_bts_Ev_Gv.
3299 */
3300FNIEMOP_DEF_1(iemOpCommonBit_Ev_Gv, PCIEMOPBINSIZES, pImpl)
3301{
3302 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3303 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);
3304
3305 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
3306 {
3307 /* register destination. */
3308 IEMOP_HLP_NO_LOCK_PREFIX();
3309 switch (pIemCpu->enmEffOpSize)
3310 {
3311 case IEMMODE_16BIT:
3312 IEM_MC_BEGIN(3, 0);
3313 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
3314 IEM_MC_ARG(uint16_t, u16Src, 1);
3315 IEM_MC_ARG(uint32_t *, pEFlags, 2);
3316
3317 IEM_MC_FETCH_GREG_U16(u16Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
3318 IEM_MC_AND_LOCAL_U16(u16Src, 0xf);
3319 IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
3320 IEM_MC_REF_EFLAGS(pEFlags);
3321 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, u16Src, pEFlags);
3322
3323 IEM_MC_ADVANCE_RIP();
3324 IEM_MC_END();
3325 return VINF_SUCCESS;
3326
3327 case IEMMODE_32BIT:
3328 IEM_MC_BEGIN(3, 0);
3329 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
3330 IEM_MC_ARG(uint32_t, u32Src, 1);
3331 IEM_MC_ARG(uint32_t *, pEFlags, 2);
3332
3333 IEM_MC_FETCH_GREG_U32(u32Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
3334 IEM_MC_AND_LOCAL_U32(u32Src, 0x1f);
3335 IEM_MC_REF_GREG_U32(pu32Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
3336 IEM_MC_REF_EFLAGS(pEFlags);
3337 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, u32Src, pEFlags);
3338
3339 IEM_MC_ADVANCE_RIP();
3340 IEM_MC_END();
3341 return VINF_SUCCESS;
3342
3343 case IEMMODE_64BIT:
3344 IEM_MC_BEGIN(3, 0);
3345 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
3346 IEM_MC_ARG(uint64_t, u64Src, 1);
3347 IEM_MC_ARG(uint32_t *, pEFlags, 2);
3348
3349 IEM_MC_FETCH_GREG_U64(u64Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
3350 IEM_MC_AND_LOCAL_U64(u64Src, 0x3f);
3351 IEM_MC_REF_GREG_U64(pu64Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
3352 IEM_MC_REF_EFLAGS(pEFlags);
3353 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, u64Src, pEFlags);
3354
3355 IEM_MC_ADVANCE_RIP();
3356 IEM_MC_END();
3357 return VINF_SUCCESS;
3358
3359 IEM_NOT_REACHED_DEFAULT_CASE_RET();
3360 }
3361 }
3362 else
3363 {
3364 /* memory destination. */
3365
3366 uint32_t fAccess;
3367 if (pImpl->pfnLockedU16)
3368 fAccess = IEM_ACCESS_DATA_RW;
3369 else /* BT */
3370 {
3371 IEMOP_HLP_NO_LOCK_PREFIX();
3372 fAccess = IEM_ACCESS_DATA_R;
3373 }
3374
3375 /** @todo test negative bit offsets! */
3376 switch (pIemCpu->enmEffOpSize)
3377 {
3378 case IEMMODE_16BIT:
3379 IEM_MC_BEGIN(3, 2);
3380 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
3381 IEM_MC_ARG(uint16_t, u16Src, 1);
3382 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
3383 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
3384 IEM_MC_LOCAL(int16_t, i16AddrAdj);
3385
3386 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm);
3387 IEM_MC_FETCH_GREG_U16(u16Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
3388 IEM_MC_ASSIGN(i16AddrAdj, u16Src);
3389 IEM_MC_AND_ARG_U16(u16Src, 0x0f);
3390 IEM_MC_SAR_LOCAL_S16(i16AddrAdj, 4);
3391 IEM_MC_SAR_LOCAL_S16(i16AddrAdj, 1);
3392 IEM_MC_ADD_LOCAL_S16_TO_EFF_ADDR(GCPtrEffDst, i16AddrAdj);
3393 IEM_MC_FETCH_EFLAGS(EFlags);
3394
3395 IEM_MC_MEM_MAP(pu16Dst, IEM_ACCESS_DATA_RW, pIemCpu->iEffSeg, GCPtrEffDst, 0);
3396 if (!(pIemCpu->fPrefixes & IEM_OP_PRF_LOCK))
3397 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, u16Src, pEFlags);
3398 else
3399 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU16, pu16Dst, u16Src, pEFlags);
3400 IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, IEM_ACCESS_DATA_RW);
3401
3402 IEM_MC_COMMIT_EFLAGS(EFlags);
3403 IEM_MC_ADVANCE_RIP();
3404 IEM_MC_END();
3405 return VINF_SUCCESS;
3406
3407 case IEMMODE_32BIT:
3408 IEM_MC_BEGIN(3, 2);
3409 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
3410 IEM_MC_ARG(uint32_t, u32Src, 1);
3411 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
3412 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
3413 IEM_MC_LOCAL(int32_t, i32AddrAdj);
3414
3415 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm);
3416 IEM_MC_FETCH_GREG_U32(u32Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
3417 IEM_MC_ASSIGN(i32AddrAdj, u32Src);
3418 IEM_MC_AND_ARG_U32(u32Src, 0x1f);
3419 IEM_MC_SAR_LOCAL_S32(i32AddrAdj, 5);
3420 IEM_MC_SHL_LOCAL_S32(i32AddrAdj, 2);
3421 IEM_MC_ADD_LOCAL_S32_TO_EFF_ADDR(GCPtrEffDst, i32AddrAdj);
3422 IEM_MC_FETCH_EFLAGS(EFlags);
3423
3424 IEM_MC_MEM_MAP(pu32Dst, IEM_ACCESS_DATA_RW, pIemCpu->iEffSeg, GCPtrEffDst, 0);
3425 if (!(pIemCpu->fPrefixes & IEM_OP_PRF_LOCK))
3426 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, u32Src, pEFlags);
3427 else
3428 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU32, pu32Dst, u32Src, pEFlags);
3429 IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, IEM_ACCESS_DATA_RW);
3430
3431 IEM_MC_COMMIT_EFLAGS(EFlags);
3432 IEM_MC_ADVANCE_RIP();
3433 IEM_MC_END();
3434 return VINF_SUCCESS;
3435
3436 case IEMMODE_64BIT:
3437 IEM_MC_BEGIN(3, 2);
3438 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
3439 IEM_MC_ARG(uint64_t, u64Src, 1);
3440 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
3441 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
3442 IEM_MC_LOCAL(int64_t, i64AddrAdj);
3443
3444 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm);
3445 IEM_MC_FETCH_GREG_U64(u64Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
3446 IEM_MC_ASSIGN(i64AddrAdj, u64Src);
3447 IEM_MC_AND_ARG_U64(u64Src, 0x3f);
3448 IEM_MC_SAR_LOCAL_S64(i64AddrAdj, 6);
3449 IEM_MC_SHL_LOCAL_S64(i64AddrAdj, 3);
3450 IEM_MC_ADD_LOCAL_S64_TO_EFF_ADDR(GCPtrEffDst, i64AddrAdj);
3451 IEM_MC_FETCH_EFLAGS(EFlags);
3452
3453 IEM_MC_MEM_MAP(pu64Dst, IEM_ACCESS_DATA_RW, pIemCpu->iEffSeg, GCPtrEffDst, 0);
3454 if (!(pIemCpu->fPrefixes & IEM_OP_PRF_LOCK))
3455 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, u64Src, pEFlags);
3456 else
3457 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU64, pu64Dst, u64Src, pEFlags);
3458 IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, IEM_ACCESS_DATA_RW);
3459
3460 IEM_MC_COMMIT_EFLAGS(EFlags);
3461 IEM_MC_ADVANCE_RIP();
3462 IEM_MC_END();
3463 return VINF_SUCCESS;
3464
3465 IEM_NOT_REACHED_DEFAULT_CASE_RET();
3466 }
3467 }
3468}
3469
3470
3471/** Opcode 0x0f 0xa3. */
3472FNIEMOP_DEF(iemOp_bt_Ev_Gv)
3473{
3474 IEMOP_MNEMONIC("bt Gv,Gv");
3475 return FNIEMOP_CALL_1(iemOpCommonBit_Ev_Gv, &g_iemAImpl_bt);
3476}
3477
3478
3479/**
3480 * Common worker for iemOp_shrd_Ev_Gv_Ib and iemOp_shld_Ev_Gv_Ib.
3481 */
3482FNIEMOP_DEF_1(iemOpCommonShldShrd_Ib, PCIEMOPSHIFTDBLSIZES, pImpl)
3483{
3484 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3485 IEMOP_HLP_NO_LOCK_PREFIX();
3486 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF | X86_EFL_OF);
3487
3488 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
3489 {
3490 uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
3491 IEMOP_HLP_NO_LOCK_PREFIX();
3492
3493 switch (pIemCpu->enmEffOpSize)
3494 {
3495 case IEMMODE_16BIT:
3496 IEM_MC_BEGIN(4, 0);
3497 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
3498 IEM_MC_ARG(uint16_t, u16Src, 1);
3499 IEM_MC_ARG_CONST(uint8_t, cShiftArg, /*=*/cShift, 2);
3500 IEM_MC_ARG(uint32_t *, pEFlags, 3);
3501
3502 IEM_MC_FETCH_GREG_U16(u16Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
3503 IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
3504 IEM_MC_REF_EFLAGS(pEFlags);
3505 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU16, pu16Dst, u16Src, cShiftArg, pEFlags);
3506
3507 IEM_MC_ADVANCE_RIP();
3508 IEM_MC_END();
3509 return VINF_SUCCESS;
3510
3511 case IEMMODE_32BIT:
3512 IEM_MC_BEGIN(4, 0);
3513 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
3514 IEM_MC_ARG(uint32_t, u32Src, 1);
3515 IEM_MC_ARG_CONST(uint8_t, cShiftArg, /*=*/cShift, 2);
3516 IEM_MC_ARG(uint32_t *, pEFlags, 3);
3517
3518 IEM_MC_FETCH_GREG_U32(u32Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
3519 IEM_MC_REF_GREG_U32(pu32Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
3520 IEM_MC_REF_EFLAGS(pEFlags);
3521 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU32, pu32Dst, u32Src, cShiftArg, pEFlags);
3522
3523 IEM_MC_ADVANCE_RIP();
3524 IEM_MC_END();
3525 return VINF_SUCCESS;
3526
3527 case IEMMODE_64BIT:
3528 IEM_MC_BEGIN(4, 0);
3529 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
3530 IEM_MC_ARG(uint64_t, u64Src, 1);
3531 IEM_MC_ARG_CONST(uint8_t, cShiftArg, /*=*/cShift, 2);
3532 IEM_MC_ARG(uint32_t *, pEFlags, 3);
3533
3534 IEM_MC_FETCH_GREG_U64(u64Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
3535 IEM_MC_REF_GREG_U64(pu64Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
3536 IEM_MC_REF_EFLAGS(pEFlags);
3537 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU64, pu64Dst, u64Src, cShiftArg, pEFlags);
3538
3539 IEM_MC_ADVANCE_RIP();
3540 IEM_MC_END();
3541 return VINF_SUCCESS;
3542
3543 IEM_NOT_REACHED_DEFAULT_CASE_RET();
3544 }
3545 }
3546 else
3547 {
3548 IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo too early? */
3549
3550 switch (pIemCpu->enmEffOpSize)
3551 {
3552 case IEMMODE_16BIT:
3553 IEM_MC_BEGIN(4, 2);
3554 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
3555 IEM_MC_ARG(uint16_t, u16Src, 1);
3556 IEM_MC_ARG(uint8_t, cShiftArg, 2);
3557 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
3558 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
3559
3560 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm);
3561 uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
3562 IEM_MC_ASSIGN(cShiftArg, cShift);
3563 IEM_MC_FETCH_GREG_U16(u16Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
3564 IEM_MC_FETCH_EFLAGS(EFlags);
3565 IEM_MC_MEM_MAP(pu16Dst, IEM_ACCESS_DATA_RW, pIemCpu->iEffSeg, GCPtrEffDst, 0);
3566 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU16, pu16Dst, u16Src, cShiftArg, pEFlags);
3567
3568 IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, IEM_ACCESS_DATA_RW);
3569 IEM_MC_COMMIT_EFLAGS(EFlags);
3570 IEM_MC_ADVANCE_RIP();
3571 IEM_MC_END();
3572 return VINF_SUCCESS;
3573
3574 case IEMMODE_32BIT:
3575 IEM_MC_BEGIN(4, 2);
3576 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
3577 IEM_MC_ARG(uint32_t, u32Src, 1);
3578 IEM_MC_ARG(uint8_t, cShiftArg, 2);
3579 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
3580 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
3581
3582 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm);
3583 uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
3584 IEM_MC_ASSIGN(cShiftArg, cShift);
3585 IEM_MC_FETCH_GREG_U32(u32Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
3586 IEM_MC_FETCH_EFLAGS(EFlags);
3587 IEM_MC_MEM_MAP(pu32Dst, IEM_ACCESS_DATA_RW, pIemCpu->iEffSeg, GCPtrEffDst, 0);
3588 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU32, pu32Dst, u32Src, cShiftArg, pEFlags);
3589
3590 IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, IEM_ACCESS_DATA_RW);
3591 IEM_MC_COMMIT_EFLAGS(EFlags);
3592 IEM_MC_ADVANCE_RIP();
3593 IEM_MC_END();
3594 return VINF_SUCCESS;
3595
3596 case IEMMODE_64BIT:
3597 IEM_MC_BEGIN(4, 2);
3598 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
3599 IEM_MC_ARG(uint64_t, u64Src, 1);
3600 IEM_MC_ARG(uint8_t, cShiftArg, 2);
3601 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
3602 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
3603
3604 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm);
3605 uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
3606 IEM_MC_ASSIGN(cShiftArg, cShift);
3607 IEM_MC_FETCH_GREG_U64(u64Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
3608 IEM_MC_FETCH_EFLAGS(EFlags);
3609 IEM_MC_MEM_MAP(pu64Dst, IEM_ACCESS_DATA_RW, pIemCpu->iEffSeg, GCPtrEffDst, 0);
3610 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU64, pu64Dst, u64Src, cShiftArg, pEFlags);
3611
3612 IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, IEM_ACCESS_DATA_RW);
3613 IEM_MC_COMMIT_EFLAGS(EFlags);
3614 IEM_MC_ADVANCE_RIP();
3615 IEM_MC_END();
3616 return VINF_SUCCESS;
3617
3618 IEM_NOT_REACHED_DEFAULT_CASE_RET();
3619 }
3620 }
3621}
3622
3623
3624/**
3625 * Common worker for iemOp_shrd_Ev_Gv_CL and iemOp_shld_Ev_Gv_CL.
3626 */
3627FNIEMOP_DEF_1(iemOpCommonShldShrd_CL, PCIEMOPSHIFTDBLSIZES, pImpl)
3628{
3629 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3630 IEMOP_HLP_NO_LOCK_PREFIX();
3631 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF | X86_EFL_OF);
3632
3633 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
3634 {
3635 IEMOP_HLP_NO_LOCK_PREFIX();
3636
3637 switch (pIemCpu->enmEffOpSize)
3638 {
3639 case IEMMODE_16BIT:
3640 IEM_MC_BEGIN(4, 0);
3641 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
3642 IEM_MC_ARG(uint16_t, u16Src, 1);
3643 IEM_MC_ARG(uint8_t, cShiftArg, 2);
3644 IEM_MC_ARG(uint32_t *, pEFlags, 3);
3645
3646 IEM_MC_FETCH_GREG_U16(u16Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
3647 IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
3648 IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
3649 IEM_MC_REF_EFLAGS(pEFlags);
3650 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU16, pu16Dst, u16Src, cShiftArg, pEFlags);
3651
3652 IEM_MC_ADVANCE_RIP();
3653 IEM_MC_END();
3654 return VINF_SUCCESS;
3655
3656 case IEMMODE_32BIT:
3657 IEM_MC_BEGIN(4, 0);
3658 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
3659 IEM_MC_ARG(uint32_t, u32Src, 1);
3660 IEM_MC_ARG(uint8_t, cShiftArg, 2);
3661 IEM_MC_ARG(uint32_t *, pEFlags, 3);
3662
3663 IEM_MC_FETCH_GREG_U32(u32Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
3664 IEM_MC_REF_GREG_U32(pu32Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
3665 IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
3666 IEM_MC_REF_EFLAGS(pEFlags);
3667 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU32, pu32Dst, u32Src, cShiftArg, pEFlags);
3668
3669 IEM_MC_ADVANCE_RIP();
3670 IEM_MC_END();
3671 return VINF_SUCCESS;
3672
3673 case IEMMODE_64BIT:
3674 IEM_MC_BEGIN(4, 0);
3675 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
3676 IEM_MC_ARG(uint64_t, u64Src, 1);
3677 IEM_MC_ARG(uint8_t, cShiftArg, 2);
3678 IEM_MC_ARG(uint32_t *, pEFlags, 3);
3679
3680 IEM_MC_FETCH_GREG_U64(u64Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
3681 IEM_MC_REF_GREG_U64(pu64Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
3682 IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
3683 IEM_MC_REF_EFLAGS(pEFlags);
3684 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU64, pu64Dst, u64Src, cShiftArg, pEFlags);
3685
3686 IEM_MC_ADVANCE_RIP();
3687 IEM_MC_END();
3688 return VINF_SUCCESS;
3689
3690 IEM_NOT_REACHED_DEFAULT_CASE_RET();
3691 }
3692 }
3693 else
3694 {
3695 IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo too early? */
3696
3697 switch (pIemCpu->enmEffOpSize)
3698 {
3699 case IEMMODE_16BIT:
3700 IEM_MC_BEGIN(4, 2);
3701 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
3702 IEM_MC_ARG(uint16_t, u16Src, 1);
3703 IEM_MC_ARG(uint8_t, cShiftArg, 2);
3704 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
3705 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
3706
3707 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm);
3708 IEM_MC_FETCH_GREG_U16(u16Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
3709 IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
3710 IEM_MC_FETCH_EFLAGS(EFlags);
3711 IEM_MC_MEM_MAP(pu16Dst, IEM_ACCESS_DATA_RW, pIemCpu->iEffSeg, GCPtrEffDst, 0);
3712 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU16, pu16Dst, u16Src, cShiftArg, pEFlags);
3713
3714 IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, IEM_ACCESS_DATA_RW);
3715 IEM_MC_COMMIT_EFLAGS(EFlags);
3716 IEM_MC_ADVANCE_RIP();
3717 IEM_MC_END();
3718 return VINF_SUCCESS;
3719
3720 case IEMMODE_32BIT:
3721 IEM_MC_BEGIN(4, 2);
3722 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
3723 IEM_MC_ARG(uint32_t, u32Src, 1);
3724 IEM_MC_ARG(uint8_t, cShiftArg, 2);
3725 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
3726 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
3727
3728 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm);
3729 IEM_MC_FETCH_GREG_U32(u32Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
3730 IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
3731 IEM_MC_FETCH_EFLAGS(EFlags);
3732 IEM_MC_MEM_MAP(pu32Dst, IEM_ACCESS_DATA_RW, pIemCpu->iEffSeg, GCPtrEffDst, 0);
3733 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU32, pu32Dst, u32Src, cShiftArg, pEFlags);
3734
3735 IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, IEM_ACCESS_DATA_RW);
3736 IEM_MC_COMMIT_EFLAGS(EFlags);
3737 IEM_MC_ADVANCE_RIP();
3738 IEM_MC_END();
3739 return VINF_SUCCESS;
3740
3741 case IEMMODE_64BIT:
3742 IEM_MC_BEGIN(4, 2);
3743 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
3744 IEM_MC_ARG(uint64_t, u64Src, 1);
3745 IEM_MC_ARG(uint8_t, cShiftArg, 2);
3746 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
3747 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
3748
3749 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm);
3750 IEM_MC_FETCH_GREG_U64(u64Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
3751 IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
3752 IEM_MC_FETCH_EFLAGS(EFlags);
3753 IEM_MC_MEM_MAP(pu64Dst, IEM_ACCESS_DATA_RW, pIemCpu->iEffSeg, GCPtrEffDst, 0);
3754 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU64, pu64Dst, u64Src, cShiftArg, pEFlags);
3755
3756 IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, IEM_ACCESS_DATA_RW);
3757 IEM_MC_COMMIT_EFLAGS(EFlags);
3758 IEM_MC_ADVANCE_RIP();
3759 IEM_MC_END();
3760 return VINF_SUCCESS;
3761
3762 IEM_NOT_REACHED_DEFAULT_CASE_RET();
3763 }
3764 }
3765}
3766
3767
3768
/** Opcode 0x0f 0xa4.
 * SHLD Ev,Gv,Ib - immediate-count double-precision left shift. */
FNIEMOP_DEF(iemOp_shld_Ev_Gv_Ib)
{
    IEMOP_MNEMONIC("shld Ev,Gv,Ib");
    return FNIEMOP_CALL_1(iemOpCommonShldShrd_Ib, &g_iemAImpl_shld);
}
3775
3776
/** Opcode 0x0f 0xa7.
 * SHLD Ev,Gv,CL - CL-count double-precision left shift.
 * NOTE(review): comment says 0xa7 but SHLD-by-CL is encoded 0x0f 0xa5 in the
 * SDM - presumably a typo in the opcode comment only; verify against the
 * dispatch table. */
FNIEMOP_DEF(iemOp_shld_Ev_Gv_CL)
{
    IEMOP_MNEMONIC("shld Ev,Gv,CL");
    return FNIEMOP_CALL_1(iemOpCommonShldShrd_CL, &g_iemAImpl_shld);
}
3783
3784
/** Opcode 0x0f 0xa8.
 * PUSH GS - defers to the common segment-register push helper. */
FNIEMOP_DEF(iemOp_push_gs)
{
    IEMOP_MNEMONIC("push gs");
    IEMOP_HLP_NO_LOCK_PREFIX();
    return FNIEMOP_CALL_1(iemOpCommonPushSReg, X86_SREG_GS);
}
3792
3793
/** Opcode 0x0f 0xa9.
 * POP GS - handled by the C implementation, same as POP FS. */
FNIEMOP_DEF(iemOp_pop_gs)
{
    IEMOP_MNEMONIC("pop gs");
    IEMOP_HLP_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_pop_Sreg, X86_SREG_GS, pIemCpu->enmEffOpSize);
}
3801
3802
/** Opcode 0x0f 0xaa. RSM - not implemented yet (stub). */
FNIEMOP_STUB(iemOp_rsm);
3805
3806
/** Opcode 0x0f 0xab.
 * BTS Ev,Gv - bit test and set, via the common bit worker. */
FNIEMOP_DEF(iemOp_bts_Ev_Gv)
{
    IEMOP_MNEMONIC("bts Ev,Gv");
    return FNIEMOP_CALL_1(iemOpCommonBit_Ev_Gv, &g_iemAImpl_bts);
}
3813
3814
/** Opcode 0x0f 0xac.
 * SHRD Ev,Gv,Ib - immediate-count double-precision right shift. */
FNIEMOP_DEF(iemOp_shrd_Ev_Gv_Ib)
{
    IEMOP_MNEMONIC("shrd Ev,Gv,Ib");
    return FNIEMOP_CALL_1(iemOpCommonShldShrd_Ib, &g_iemAImpl_shrd);
}
3821
3822
/** Opcode 0x0f 0xad.
 * SHRD Ev,Gv,CL - CL-count double-precision right shift. */
FNIEMOP_DEF(iemOp_shrd_Ev_Gv_CL)
{
    IEMOP_MNEMONIC("shrd Ev,Gv,CL");
    return FNIEMOP_CALL_1(iemOpCommonShldShrd_CL, &g_iemAImpl_shrd);
}
3829
3830
3831/** Opcode 0x0f 0xae mem/0. */
3832FNIEMOP_DEF_1(iemOp_Grp15_fxsave, uint8_t, bRm)
3833{
3834 IEMOP_MNEMONIC("fxsave m512");
3835 IEMOP_HLP_NO_LOCK_PREFIX();
3836 if (!IEM_IS_INTEL_CPUID_FEATURE_PRESENT_EDX(X86_CPUID_FEATURE_EDX_FXSR))
3837 return IEMOP_RAISE_INVALID_LOCK_PREFIX();
3838
3839 IEM_MC_BEGIN(3, 1);
3840 IEM_MC_ARG_CONST(uint8_t, iEffSeg,/*=*/pIemCpu->iEffSeg, 0);
3841 IEM_MC_ARG(RTGCPTR, GCPtrEff, 1);
3842 IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize,/*=*/pIemCpu->enmEffOpSize, 2);
3843 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm);
3844 IEM_MC_CALL_CIMPL_3(iemCImpl_fxsave, iEffSeg, GCPtrEff, enmEffOpSize);
3845 IEM_MC_END();
3846 return VINF_SUCCESS;
3847}
3848
3849
3850/** Opcode 0x0f 0xae mem/1. */
3851FNIEMOP_DEF_1(iemOp_Grp15_fxrstor, uint8_t, bRm)
3852{
3853 IEMOP_MNEMONIC("fxrstor m512");
3854 IEMOP_HLP_NO_LOCK_PREFIX();
3855 if (!IEM_IS_INTEL_CPUID_FEATURE_PRESENT_EDX(X86_CPUID_FEATURE_EDX_FXSR))
3856 return IEMOP_RAISE_INVALID_LOCK_PREFIX();
3857
3858 IEM_MC_BEGIN(3, 1);
3859 IEM_MC_ARG_CONST(uint8_t, iEffSeg,/*=*/pIemCpu->iEffSeg, 0);
3860 IEM_MC_ARG(RTGCPTR, GCPtrEff, 1);
3861 IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize,/*=*/pIemCpu->enmEffOpSize, 2);
3862 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm);
3863 IEM_MC_CALL_CIMPL_3(iemCImpl_fxrstor, iEffSeg, GCPtrEff, enmEffOpSize);
3864 IEM_MC_END();
3865 return VINF_SUCCESS;
3866}
3867
3868
/* Remaining group 15 (0x0f 0xae) forms - all unimplemented stubs for now.
   FNIEMOP_STUB_1 = to-be-implemented, FNIEMOP_UD_STUB_1 = raises #UD. */

/** Opcode 0x0f 0xae mem/2. LDMXCSR. */
FNIEMOP_STUB_1(iemOp_Grp15_ldmxcsr, uint8_t, bRm);

/** Opcode 0x0f 0xae mem/3. STMXCSR. */
FNIEMOP_STUB_1(iemOp_Grp15_stmxcsr, uint8_t, bRm);

/** Opcode 0x0f 0xae mem/4. XSAVE. */
FNIEMOP_UD_STUB_1(iemOp_Grp15_xsave, uint8_t, bRm);

/** Opcode 0x0f 0xae mem/5. XRSTOR. */
FNIEMOP_UD_STUB_1(iemOp_Grp15_xrstor, uint8_t, bRm);

/** Opcode 0x0f 0xae mem/6. XSAVEOPT. */
FNIEMOP_UD_STUB_1(iemOp_Grp15_xsaveopt, uint8_t, bRm);

/** Opcode 0x0f 0xae mem/7. CLFLUSH. */
FNIEMOP_STUB_1(iemOp_Grp15_clflush, uint8_t, bRm);

/** Opcode 0x0f 0xae 11b/5. LFENCE. */
FNIEMOP_STUB_1(iemOp_Grp15_lfence, uint8_t, bRm);

/** Opcode 0x0f 0xae 11b/6. MFENCE. */
FNIEMOP_STUB_1(iemOp_Grp15_mfence, uint8_t, bRm);

/** Opcode 0x0f 0xae 11b/7. SFENCE. */
FNIEMOP_STUB_1(iemOp_Grp15_sfence, uint8_t, bRm);

/** Opcode 0xf3 0x0f 0xae 11b/0. RDFSBASE. */
FNIEMOP_UD_STUB_1(iemOp_Grp15_rdfsbase, uint8_t, bRm);

/** Opcode 0xf3 0x0f 0xae 11b/1. RDGSBASE. */
FNIEMOP_UD_STUB_1(iemOp_Grp15_rdgsbase, uint8_t, bRm);

/** Opcode 0xf3 0x0f 0xae 11b/2. WRFSBASE. */
FNIEMOP_UD_STUB_1(iemOp_Grp15_wrfsbase, uint8_t, bRm);

/** Opcode 0xf3 0x0f 0xae 11b/3. WRGSBASE. */
FNIEMOP_UD_STUB_1(iemOp_Grp15_wrgsbase, uint8_t, bRm);
3907
3908
/** Opcode 0x0f 0xae.
 * Group 15 dispatcher: memory forms select on the ModR/M reg field alone;
 * register (11b) forms additionally select on the repeat/size/lock prefixes
 * (no prefix -> fence instructions, F3 -> rd/wr fs/gs base, anything
 * else -> \#UD).  Every path returns via the inner switches, so the trailing
 * break statements are never reached. */
FNIEMOP_DEF(iemOp_Grp15)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
    {
        /* Memory operand forms. */
        switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
        {
            case 0: return FNIEMOP_CALL_1(iemOp_Grp15_fxsave,  bRm);
            case 1: return FNIEMOP_CALL_1(iemOp_Grp15_fxrstor, bRm);
            case 2: return FNIEMOP_CALL_1(iemOp_Grp15_ldmxcsr, bRm);
            case 3: return FNIEMOP_CALL_1(iemOp_Grp15_stmxcsr, bRm);
            case 4: return FNIEMOP_CALL_1(iemOp_Grp15_xsave,   bRm);
            case 5: return FNIEMOP_CALL_1(iemOp_Grp15_xrstor,  bRm);
            case 6: return FNIEMOP_CALL_1(iemOp_Grp15_xsaveopt,bRm);
            case 7: return FNIEMOP_CALL_1(iemOp_Grp15_clflush, bRm);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /* Register (mod=11b) forms - meaning depends on the prefixes. */
        switch (pIemCpu->fPrefixes & (IEM_OP_PRF_REPZ | IEM_OP_PRF_REPNZ | IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_LOCK))
        {
            case 0:
                switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
                {
                    case 0: return IEMOP_RAISE_INVALID_OPCODE();
                    case 1: return IEMOP_RAISE_INVALID_OPCODE();
                    case 2: return IEMOP_RAISE_INVALID_OPCODE();
                    case 3: return IEMOP_RAISE_INVALID_OPCODE();
                    case 4: return IEMOP_RAISE_INVALID_OPCODE();
                    case 5: return FNIEMOP_CALL_1(iemOp_Grp15_lfence, bRm);
                    case 6: return FNIEMOP_CALL_1(iemOp_Grp15_mfence, bRm);
                    case 7: return FNIEMOP_CALL_1(iemOp_Grp15_sfence, bRm);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                break;

            case IEM_OP_PRF_REPZ:
                switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
                {
                    case 0: return FNIEMOP_CALL_1(iemOp_Grp15_rdfsbase, bRm);
                    case 1: return FNIEMOP_CALL_1(iemOp_Grp15_rdgsbase, bRm);
                    case 2: return FNIEMOP_CALL_1(iemOp_Grp15_wrfsbase, bRm);
                    case 3: return FNIEMOP_CALL_1(iemOp_Grp15_wrgsbase, bRm);
                    case 4: return IEMOP_RAISE_INVALID_OPCODE();
                    case 5: return IEMOP_RAISE_INVALID_OPCODE();
                    case 6: return IEMOP_RAISE_INVALID_OPCODE();
                    case 7: return IEMOP_RAISE_INVALID_OPCODE();
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                break;

            default:
                return IEMOP_RAISE_INVALID_OPCODE();
        }
    }
}
3967
3968
/** Opcode 0x0f 0xaf.
 * IMUL Gv,Ev - two-operand signed multiply, sharing the generic reg,r/m
 * binary-operator worker.  SF/ZF/AF/PF are undefined for IMUL. */
FNIEMOP_DEF(iemOp_imul_Gv_Ev)
{
    IEMOP_MNEMONIC("imul Gv,Ev");
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rv_rm, &g_iemAImpl_imul_two);
}
3976
3977
/** Opcode 0x0f 0xb0.
 * CMPXCHG Eb,Gb - compare AL with the destination; the assembly helper
 * (iemAImpl_cmpxchg_u8[_locked]) gets the destination, a reference to AL and
 * the source and updates them plus EFLAGS as appropriate.  For the memory
 * form, AL is shadowed in a local and written back after the memory commit. */
FNIEMOP_DEF(iemOp_cmpxchg_Eb_Gb)
{
    IEMOP_MNEMONIC("cmpxchg Eb,Gb");
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* Register destination: both operands referenced in place. */
        IEMOP_HLP_DONE_DECODING();
        IEM_MC_BEGIN(4, 0);
        IEM_MC_ARG(uint8_t *,       pu8Dst,                 0);
        IEM_MC_ARG(uint8_t *,       pu8Al,                  1);
        IEM_MC_ARG(uint8_t,         u8Src,                  2);
        IEM_MC_ARG(uint32_t *,      pEFlags,                3);

        IEM_MC_FETCH_GREG_U8(u8Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
        IEM_MC_REF_GREG_U8(pu8Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
        IEM_MC_REF_GREG_U8(pu8Al, X86_GREG_xAX);
        IEM_MC_REF_EFLAGS(pEFlags);
        if (!(pIemCpu->fPrefixes & IEM_OP_PRF_LOCK))
            IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u8, pu8Dst, pu8Al, u8Src, pEFlags);
        else
            IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u8_locked, pu8Dst, pu8Al, u8Src, pEFlags);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /* Memory destination: map it R/W, work on a local copy of AL, and
           store AL back only after the memory write committed. */
        IEM_MC_BEGIN(4, 3);
        IEM_MC_ARG(uint8_t *,       pu8Dst,                 0);
        IEM_MC_ARG(uint8_t *,       pu8Al,                  1);
        IEM_MC_ARG(uint8_t,         u8Src,                  2);
        IEM_MC_ARG_LOCAL_EFLAGS(    pEFlags, EFlags,        3);
        IEM_MC_LOCAL(RTGCPTR,       GCPtrEffDst);
        IEM_MC_LOCAL(uint8_t,       u8Al);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm);
        IEMOP_HLP_DONE_DECODING();
        IEM_MC_MEM_MAP(pu8Dst, IEM_ACCESS_DATA_RW, pIemCpu->iEffSeg, GCPtrEffDst, 0);
        IEM_MC_FETCH_GREG_U8(u8Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
        IEM_MC_FETCH_GREG_U8(u8Al, X86_GREG_xAX);
        IEM_MC_FETCH_EFLAGS(EFlags);
        IEM_MC_REF_LOCAL(pu8Al, u8Al);
        if (!(pIemCpu->fPrefixes & IEM_OP_PRF_LOCK))
            IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u8, pu8Dst, pu8Al, u8Src, pEFlags);
        else
            IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u8_locked, pu8Dst, pu8Al, u8Src, pEFlags);

        IEM_MC_MEM_COMMIT_AND_UNMAP(pu8Dst, IEM_ACCESS_DATA_RW);
        IEM_MC_COMMIT_EFLAGS(EFlags);
        IEM_MC_STORE_GREG_U8(X86_GREG_xAX, u8Al);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
4035
/** Opcode 0x0f 0xb1. */
FNIEMOP_DEF(iemOp_cmpxchg_Ev_Gv)
{
    /*
     * CMPXCHG Ev,Gv: compares rAX with Ev; if equal, stores Gv into Ev,
     * otherwise loads Ev into rAX.  The assembly helper receives the
     * destination, the accumulator and the source plus EFLAGS and does the
     * compare-and-exchange itself; this decoder only routes operands.
     */
    IEMOP_MNEMONIC("cmpxchg Ev,Gv");
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /*
         * Register destination: operands are referenced directly, so the
         * helper writes straight into the guest registers.
         * NOTE(review): the locked helper is still selected here when a LOCK
         * prefix is present even though LOCK with a register operand should
         * raise #UD - confirm the prefix is rejected elsewhere in decoding.
         */
        IEMOP_HLP_DONE_DECODING();
        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(4, 0);
                IEM_MC_ARG(uint16_t *, pu16Dst, 0);
                IEM_MC_ARG(uint16_t *, pu16Ax, 1);
                IEM_MC_ARG(uint16_t, u16Src, 2);
                IEM_MC_ARG(uint32_t *, pEFlags, 3);

                IEM_MC_FETCH_GREG_U16(u16Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_REF_GREG_U16(pu16Ax, X86_GREG_xAX);
                IEM_MC_REF_EFLAGS(pEFlags);
                if (!(pIemCpu->fPrefixes & IEM_OP_PRF_LOCK))
                    IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u16, pu16Dst, pu16Ax, u16Src, pEFlags);
                else
                    IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u16_locked, pu16Dst, pu16Ax, u16Src, pEFlags);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(4, 0);
                IEM_MC_ARG(uint32_t *, pu32Dst, 0);
                IEM_MC_ARG(uint32_t *, pu32Eax, 1);
                IEM_MC_ARG(uint32_t, u32Src, 2);
                IEM_MC_ARG(uint32_t *, pEFlags, 3);

                IEM_MC_FETCH_GREG_U32(u32Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_REF_GREG_U32(pu32Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_REF_GREG_U32(pu32Eax, X86_GREG_xAX);
                IEM_MC_REF_EFLAGS(pEFlags);
                if (!(pIemCpu->fPrefixes & IEM_OP_PRF_LOCK))
                    IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u32, pu32Dst, pu32Eax, u32Src, pEFlags);
                else
                    IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u32_locked, pu32Dst, pu32Eax, u32Src, pEFlags);

                /* 32-bit writes zero the upper halves of both 64-bit registers
                   that were written through the references above. */
                IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Eax);
                IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(4, 0);
                IEM_MC_ARG(uint64_t *, pu64Dst, 0);
                IEM_MC_ARG(uint64_t *, pu64Rax, 1);
#ifdef RT_ARCH_X86
                /* On 32-bit hosts the 64-bit source is passed by reference;
                   presumably the assembly helper ABI cannot take a 64-bit
                   by-value argument there - confirm against the helper. */
                IEM_MC_ARG(uint64_t *, pu64Src, 2);
#else
                IEM_MC_ARG(uint64_t, u64Src, 2);
#endif
                IEM_MC_ARG(uint32_t *, pEFlags, 3);

                IEM_MC_REF_GREG_U64(pu64Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_REF_GREG_U64(pu64Rax, X86_GREG_xAX);
                IEM_MC_REF_EFLAGS(pEFlags);
#ifdef RT_ARCH_X86
                IEM_MC_REF_GREG_U64(pu64Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                if (!(pIemCpu->fPrefixes & IEM_OP_PRF_LOCK))
                    IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u64, pu64Dst, pu64Rax, pu64Src, pEFlags);
                else
                    IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u64_locked, pu64Dst, pu64Rax, pu64Src, pEFlags);
#else
                IEM_MC_FETCH_GREG_U64(u64Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                if (!(pIemCpu->fPrefixes & IEM_OP_PRF_LOCK))
                    IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u64, pu64Dst, pu64Rax, u64Src, pEFlags);
                else
                    IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u64_locked, pu64Dst, pu64Rax, u64Src, pEFlags);
#endif

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /*
         * Memory destination: the destination is mapped R/W, while the
         * accumulator and EFLAGS are worked on in locals and committed back
         * to guest state only after the helper ran and the mapping was
         * successfully committed.
         */
        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(4, 3);
                IEM_MC_ARG(uint16_t *, pu16Dst, 0);
                IEM_MC_ARG(uint16_t *, pu16Ax, 1);
                IEM_MC_ARG(uint16_t, u16Src, 2);
                IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_LOCAL(uint16_t, u16Ax);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm);
                IEMOP_HLP_DONE_DECODING();
                IEM_MC_MEM_MAP(pu16Dst, IEM_ACCESS_DATA_RW, pIemCpu->iEffSeg, GCPtrEffDst, 0);
                IEM_MC_FETCH_GREG_U16(u16Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_FETCH_GREG_U16(u16Ax, X86_GREG_xAX);
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_REF_LOCAL(pu16Ax, u16Ax);
                if (!(pIemCpu->fPrefixes & IEM_OP_PRF_LOCK))
                    IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u16, pu16Dst, pu16Ax, u16Src, pEFlags);
                else
                    IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u16_locked, pu16Dst, pu16Ax, u16Src, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, IEM_ACCESS_DATA_RW);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                /* The helper updated the local copy; write it back to AX. */
                IEM_MC_STORE_GREG_U16(X86_GREG_xAX, u16Ax);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(4, 3);
                IEM_MC_ARG(uint32_t *, pu32Dst, 0);
                IEM_MC_ARG(uint32_t *, pu32Eax, 1);
                IEM_MC_ARG(uint32_t, u32Src, 2);
                IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_LOCAL(uint32_t, u32Eax);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm);
                IEMOP_HLP_DONE_DECODING();
                IEM_MC_MEM_MAP(pu32Dst, IEM_ACCESS_DATA_RW, pIemCpu->iEffSeg, GCPtrEffDst, 0);
                IEM_MC_FETCH_GREG_U32(u32Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_FETCH_GREG_U32(u32Eax, X86_GREG_xAX);
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_REF_LOCAL(pu32Eax, u32Eax);
                if (!(pIemCpu->fPrefixes & IEM_OP_PRF_LOCK))
                    IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u32, pu32Dst, pu32Eax, u32Src, pEFlags);
                else
                    IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u32_locked, pu32Dst, pu32Eax, u32Src, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, IEM_ACCESS_DATA_RW);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_STORE_GREG_U32(X86_GREG_xAX, u32Eax);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(4, 3);
                IEM_MC_ARG(uint64_t *, pu64Dst, 0);
                IEM_MC_ARG(uint64_t *, pu64Rax, 1);
#ifdef RT_ARCH_X86
                IEM_MC_ARG(uint64_t *, pu64Src, 2);
#else
                IEM_MC_ARG(uint64_t, u64Src, 2);
#endif
                IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_LOCAL(uint64_t, u64Rax);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm);
                IEMOP_HLP_DONE_DECODING();
                IEM_MC_MEM_MAP(pu64Dst, IEM_ACCESS_DATA_RW, pIemCpu->iEffSeg, GCPtrEffDst, 0);
                IEM_MC_FETCH_GREG_U64(u64Rax, X86_GREG_xAX);
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_REF_LOCAL(pu64Rax, u64Rax);
#ifdef RT_ARCH_X86
                IEM_MC_REF_GREG_U64(pu64Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                if (!(pIemCpu->fPrefixes & IEM_OP_PRF_LOCK))
                    IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u64, pu64Dst, pu64Rax, pu64Src, pEFlags);
                else
                    IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u64_locked, pu64Dst, pu64Rax, pu64Src, pEFlags);
#else
                IEM_MC_FETCH_GREG_U64(u64Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                if (!(pIemCpu->fPrefixes & IEM_OP_PRF_LOCK))
                    IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u64, pu64Dst, pu64Rax, u64Src, pEFlags);
                else
                    IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u64_locked, pu64Dst, pu64Rax, u64Src, pEFlags);
#endif

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, IEM_ACCESS_DATA_RW);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_STORE_GREG_U64(X86_GREG_xAX, u64Rax);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
4228
4229
/**
 * Common worker for LSS, LFS and LGS (and friends): loads a far pointer
 * (offset followed by a 16-bit selector) from memory into the ModR/M
 * specified general register and the given segment register.
 *
 * @param   iSegReg     The segment register to load (X86_SREG_XXX).
 */
FNIEMOP_DEF_1(iemOpCommonLoadSRegAndGreg, uint8_t, iSegReg)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo should probably not be raised until we've fetched all the opcode bytes? */

    /* The source cannot be a register. */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
        return IEMOP_RAISE_INVALID_OPCODE();
    uint8_t const iGReg = ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg;

    /* The offset width follows the effective operand size; the selector is
       always the 16 bits immediately after the offset.  The actual loading
       (including segment checks) is done by iemCImpl_load_SReg_Greg. */
    switch (pIemCpu->enmEffOpSize)
    {
        case IEMMODE_16BIT:
            IEM_MC_BEGIN(5, 1);
            IEM_MC_ARG(uint16_t, uSel, 0);
            IEM_MC_ARG(uint16_t, offSeg, 1);
            IEM_MC_ARG_CONST(uint8_t, iSegRegArg,/*=*/iSegReg, 2);
            IEM_MC_ARG_CONST(uint8_t, iGRegArg, /*=*/iGReg, 3);
            IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize,/*=*/pIemCpu->enmEffOpSize, 4);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEff);
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm);
            IEM_MC_FETCH_MEM_U16(offSeg, pIemCpu->iEffSeg, GCPtrEff);
            IEM_MC_FETCH_MEM_U16_DISP(uSel, pIemCpu->iEffSeg, GCPtrEff, 2);
            IEM_MC_CALL_CIMPL_5(iemCImpl_load_SReg_Greg, uSel, offSeg, iSegRegArg, iGRegArg, enmEffOpSize);
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_32BIT:
            IEM_MC_BEGIN(5, 1);
            IEM_MC_ARG(uint16_t, uSel, 0);
            IEM_MC_ARG(uint32_t, offSeg, 1);
            IEM_MC_ARG_CONST(uint8_t, iSegRegArg,/*=*/iSegReg, 2);
            IEM_MC_ARG_CONST(uint8_t, iGRegArg, /*=*/iGReg, 3);
            IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize,/*=*/pIemCpu->enmEffOpSize, 4);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEff);
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm);
            IEM_MC_FETCH_MEM_U32(offSeg, pIemCpu->iEffSeg, GCPtrEff);
            IEM_MC_FETCH_MEM_U16_DISP(uSel, pIemCpu->iEffSeg, GCPtrEff, 4);
            IEM_MC_CALL_CIMPL_5(iemCImpl_load_SReg_Greg, uSel, offSeg, iSegRegArg, iGRegArg, enmEffOpSize);
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_64BIT:
            IEM_MC_BEGIN(5, 1);
            IEM_MC_ARG(uint16_t, uSel, 0);
            IEM_MC_ARG(uint64_t, offSeg, 1);
            IEM_MC_ARG_CONST(uint8_t, iSegRegArg,/*=*/iSegReg, 2);
            IEM_MC_ARG_CONST(uint8_t, iGRegArg, /*=*/iGReg, 3);
            IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize,/*=*/pIemCpu->enmEffOpSize, 4);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEff);
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm);
            IEM_MC_FETCH_MEM_U64(offSeg, pIemCpu->iEffSeg, GCPtrEff);
            IEM_MC_FETCH_MEM_U16_DISP(uSel, pIemCpu->iEffSeg, GCPtrEff, 8);
            IEM_MC_CALL_CIMPL_5(iemCImpl_load_SReg_Greg, uSel, offSeg, iSegRegArg, iGRegArg, enmEffOpSize);
            IEM_MC_END();
            return VINF_SUCCESS;

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
4290
4291
/** Opcode 0x0f 0xb2. LSS: load far pointer from memory into SS:Gv. */
FNIEMOP_DEF(iemOp_lss_Gv_Mp)
{
    IEMOP_MNEMONIC("lss Gv,Mp");
    return FNIEMOP_CALL_1(iemOpCommonLoadSRegAndGreg, X86_SREG_SS);
}
4298
4299
/** Opcode 0x0f 0xb3. BTR: test bit and reset it. */
FNIEMOP_DEF(iemOp_btr_Ev_Gv)
{
    IEMOP_MNEMONIC("btr Ev,Gv");
    return FNIEMOP_CALL_1(iemOpCommonBit_Ev_Gv, &g_iemAImpl_btr);
}
4306
4307
/** Opcode 0x0f 0xb4. LFS: load far pointer from memory into FS:Gv. */
FNIEMOP_DEF(iemOp_lfs_Gv_Mp)
{
    IEMOP_MNEMONIC("lfs Gv,Mp");
    return FNIEMOP_CALL_1(iemOpCommonLoadSRegAndGreg, X86_SREG_FS);
}
4314
4315
/** Opcode 0x0f 0xb5. LGS: load far pointer from memory into GS:Gv. */
FNIEMOP_DEF(iemOp_lgs_Gv_Mp)
{
    IEMOP_MNEMONIC("lgs Gv,Mp");
    return FNIEMOP_CALL_1(iemOpCommonLoadSRegAndGreg, X86_SREG_GS);
}
4322
4323
/** Opcode 0x0f 0xb6. MOVZX Gv,Eb: zero-extend a byte into a 16/32/64-bit
 *  general register. */
FNIEMOP_DEF(iemOp_movzx_Gv_Eb)
{
    IEMOP_MNEMONIC("movzx Gv,Eb");

    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo should probably not be raised until we've fetched all the opcode bytes? */

    /*
     * If rm is denoting a register, no more instruction bytes.
     */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint16_t, u16Value);
                IEM_MC_FETCH_GREG_U8_ZX_U16(u16Value, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u16Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint32_t, u32Value);
                IEM_MC_FETCH_GREG_U8_ZX_U32(u32Value, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u32Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint64_t, u64Value);
                IEM_MC_FETCH_GREG_U8_ZX_U64(u64Value, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u64Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /*
         * We're loading a register from memory.
         */
        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(uint16_t, u16Value);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm);
                IEM_MC_FETCH_MEM_U8_ZX_U16(u16Value, pIemCpu->iEffSeg, GCPtrEffDst);
                IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u16Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(uint32_t, u32Value);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm);
                IEM_MC_FETCH_MEM_U8_ZX_U32(u32Value, pIemCpu->iEffSeg, GCPtrEffDst);
                IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u32Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(uint64_t, u64Value);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm);
                IEM_MC_FETCH_MEM_U8_ZX_U64(u64Value, pIemCpu->iEffSeg, GCPtrEffDst);
                IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u64Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
4413
4414
/** Opcode 0x0f 0xb7. MOVZX Gv,Ew: zero-extend a word into a 32/64-bit
 *  general register (a 16-bit destination degenerates to a plain move, so
 *  only the 64-bit vs. not-64-bit distinction matters below). */
FNIEMOP_DEF(iemOp_movzx_Gv_Ew)
{
    IEMOP_MNEMONIC("movzx Gv,Ew");

    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo should probably not be raised until we've fetched all the opcode bytes? */

    /** @todo Not entirely sure how the operand size prefix is handled here,
     *        assuming that it will be ignored. Would be nice to have a few
     *        test for this. */
    /*
     * If rm is denoting a register, no more instruction bytes.
     */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        if (pIemCpu->enmEffOpSize != IEMMODE_64BIT)
        {
            IEM_MC_BEGIN(0, 1);
            IEM_MC_LOCAL(uint32_t, u32Value);
            IEM_MC_FETCH_GREG_U16_ZX_U32(u32Value, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
            IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u32Value);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
        }
        else
        {
            IEM_MC_BEGIN(0, 1);
            IEM_MC_LOCAL(uint64_t, u64Value);
            IEM_MC_FETCH_GREG_U16_ZX_U64(u64Value, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
            IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u64Value);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
        }
    }
    else
    {
        /*
         * We're loading a register from memory.
         */
        if (pIemCpu->enmEffOpSize != IEMMODE_64BIT)
        {
            IEM_MC_BEGIN(0, 2);
            IEM_MC_LOCAL(uint32_t, u32Value);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm);
            IEM_MC_FETCH_MEM_U16_ZX_U32(u32Value, pIemCpu->iEffSeg, GCPtrEffDst);
            IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u32Value);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
        }
        else
        {
            IEM_MC_BEGIN(0, 2);
            IEM_MC_LOCAL(uint64_t, u64Value);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm);
            IEM_MC_FETCH_MEM_U16_ZX_U64(u64Value, pIemCpu->iEffSeg, GCPtrEffDst);
            IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u64Value);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
        }
    }
    return VINF_SUCCESS;
}
4480
4481
/** Opcode 0x0f 0xb8. POPCNT Gv,Ev / JMPE - not implemented yet (stub). */
FNIEMOP_STUB(iemOp_popcnt_Gv_Ev_jmpe);
4484
4485
/** Opcode 0x0f 0xb9. Group 10 - reserved encoding, always raises \#UD. */
FNIEMOP_DEF(iemOp_Grp10)
{
    Log(("iemOp_Grp10 -> #UD\n"));
    return IEMOP_RAISE_INVALID_OPCODE();
}
4492
4493
4494/** Opcode 0x0f 0xba. */
4495FNIEMOP_DEF(iemOp_Grp8)
4496{
4497 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4498 PCIEMOPBINSIZES pImpl;
4499 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
4500 {
4501 case 0: case 1: case 2: case 3:
4502 return IEMOP_RAISE_INVALID_OPCODE();
4503 case 4: pImpl = &g_iemAImpl_bt; IEMOP_MNEMONIC("bt Ev,Ib"); break;
4504 case 5: pImpl = &g_iemAImpl_bts; IEMOP_MNEMONIC("bts Ev,Ib"); break;
4505 case 6: pImpl = &g_iemAImpl_btr; IEMOP_MNEMONIC("btr Ev,Ib"); break;
4506 case 7: pImpl = &g_iemAImpl_btc; IEMOP_MNEMONIC("btc Ev,Ib"); break;
4507 IEM_NOT_REACHED_DEFAULT_CASE_RET();
4508 }
4509 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);
4510
4511 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
4512 {
4513 /* register destination. */
4514 uint8_t u8Bit; IEM_OPCODE_GET_NEXT_U8(&u8Bit);
4515 IEMOP_HLP_NO_LOCK_PREFIX();
4516
4517 switch (pIemCpu->enmEffOpSize)
4518 {
4519 case IEMMODE_16BIT:
4520 IEM_MC_BEGIN(3, 0);
4521 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
4522 IEM_MC_ARG_CONST(uint16_t, u16Src, /*=*/ u8Bit & 0x0f, 1);
4523 IEM_MC_ARG(uint32_t *, pEFlags, 2);
4524
4525 IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
4526 IEM_MC_REF_EFLAGS(pEFlags);
4527 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, u16Src, pEFlags);
4528
4529 IEM_MC_ADVANCE_RIP();
4530 IEM_MC_END();
4531 return VINF_SUCCESS;
4532
4533 case IEMMODE_32BIT:
4534 IEM_MC_BEGIN(3, 0);
4535 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
4536 IEM_MC_ARG_CONST(uint32_t, u32Src, /*=*/ u8Bit & 0x1f, 1);
4537 IEM_MC_ARG(uint32_t *, pEFlags, 2);
4538
4539 IEM_MC_REF_GREG_U32(pu32Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
4540 IEM_MC_REF_EFLAGS(pEFlags);
4541 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, u32Src, pEFlags);
4542
4543 IEM_MC_ADVANCE_RIP();
4544 IEM_MC_END();
4545 return VINF_SUCCESS;
4546
4547 case IEMMODE_64BIT:
4548 IEM_MC_BEGIN(3, 0);
4549 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
4550 IEM_MC_ARG_CONST(uint64_t, u64Src, /*=*/ u8Bit & 0x3f, 1);
4551 IEM_MC_ARG(uint32_t *, pEFlags, 2);
4552
4553 IEM_MC_REF_GREG_U64(pu64Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
4554 IEM_MC_REF_EFLAGS(pEFlags);
4555 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, u64Src, pEFlags);
4556
4557 IEM_MC_ADVANCE_RIP();
4558 IEM_MC_END();
4559 return VINF_SUCCESS;
4560
4561 IEM_NOT_REACHED_DEFAULT_CASE_RET();
4562 }
4563 }
4564 else
4565 {
4566 /* memory destination. */
4567
4568 uint32_t fAccess;
4569 if (pImpl->pfnLockedU16)
4570 fAccess = IEM_ACCESS_DATA_RW;
4571 else /* BT */
4572 {
4573 IEMOP_HLP_NO_LOCK_PREFIX();
4574 fAccess = IEM_ACCESS_DATA_R;
4575 }
4576
4577 /** @todo test negative bit offsets! */
4578 switch (pIemCpu->enmEffOpSize)
4579 {
4580 case IEMMODE_16BIT:
4581 IEM_MC_BEGIN(3, 1);
4582 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
4583 IEM_MC_ARG(uint16_t, u16Src, 1);
4584 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
4585 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
4586
4587 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm);
4588 uint8_t u8Bit; IEM_OPCODE_GET_NEXT_U8(&u8Bit);
4589 IEM_MC_ASSIGN(u16Src, u8Bit & 0x0f);
4590 IEM_MC_FETCH_EFLAGS(EFlags);
4591 IEM_MC_MEM_MAP(pu16Dst, IEM_ACCESS_DATA_RW, pIemCpu->iEffSeg, GCPtrEffDst, 0);
4592 if (!(pIemCpu->fPrefixes & IEM_OP_PRF_LOCK))
4593 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, u16Src, pEFlags);
4594 else
4595 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU16, pu16Dst, u16Src, pEFlags);
4596 IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, IEM_ACCESS_DATA_RW);
4597
4598 IEM_MC_COMMIT_EFLAGS(EFlags);
4599 IEM_MC_ADVANCE_RIP();
4600 IEM_MC_END();
4601 return VINF_SUCCESS;
4602
4603 case IEMMODE_32BIT:
4604 IEM_MC_BEGIN(3, 1);
4605 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
4606 IEM_MC_ARG(uint32_t, u32Src, 1);
4607 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
4608 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
4609
4610 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm);
4611 uint8_t u8Bit; IEM_OPCODE_GET_NEXT_U8(&u8Bit);
4612 IEM_MC_ASSIGN(u32Src, u8Bit & 0x1f);
4613 IEM_MC_FETCH_EFLAGS(EFlags);
4614 IEM_MC_MEM_MAP(pu32Dst, IEM_ACCESS_DATA_RW, pIemCpu->iEffSeg, GCPtrEffDst, 0);
4615 if (!(pIemCpu->fPrefixes & IEM_OP_PRF_LOCK))
4616 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, u32Src, pEFlags);
4617 else
4618 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU32, pu32Dst, u32Src, pEFlags);
4619 IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, IEM_ACCESS_DATA_RW);
4620
4621 IEM_MC_COMMIT_EFLAGS(EFlags);
4622 IEM_MC_ADVANCE_RIP();
4623 IEM_MC_END();
4624 return VINF_SUCCESS;
4625
4626 case IEMMODE_64BIT:
4627 IEM_MC_BEGIN(3, 1);
4628 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
4629 IEM_MC_ARG(uint64_t, u64Src, 1);
4630 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
4631 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
4632
4633 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm);
4634 uint8_t u8Bit; IEM_OPCODE_GET_NEXT_U8(&u8Bit);
4635 IEM_MC_ASSIGN(u64Src, u8Bit & 0x3f);
4636 IEM_MC_FETCH_EFLAGS(EFlags);
4637 IEM_MC_MEM_MAP(pu64Dst, IEM_ACCESS_DATA_RW, pIemCpu->iEffSeg, GCPtrEffDst, 0);
4638 if (!(pIemCpu->fPrefixes & IEM_OP_PRF_LOCK))
4639 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, u64Src, pEFlags);
4640 else
4641 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU64, pu64Dst, u64Src, pEFlags);
4642 IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, IEM_ACCESS_DATA_RW);
4643
4644 IEM_MC_COMMIT_EFLAGS(EFlags);
4645 IEM_MC_ADVANCE_RIP();
4646 IEM_MC_END();
4647 return VINF_SUCCESS;
4648
4649 IEM_NOT_REACHED_DEFAULT_CASE_RET();
4650 }
4651 }
4652
4653}
4654
4655
/** Opcode 0x0f 0xbb. BTC: test bit and complement it. */
FNIEMOP_DEF(iemOp_btc_Ev_Gv)
{
    IEMOP_MNEMONIC("btc Ev,Gv");
    return FNIEMOP_CALL_1(iemOpCommonBit_Ev_Gv, &g_iemAImpl_btc);
}
4662
4663
/** Opcode 0x0f 0xbc. BSF: bit scan forward.  All EFLAGS except ZF are
 *  architecturally undefined, hence the verification mask below. */
FNIEMOP_DEF(iemOp_bsf_Gv_Ev)
{
    IEMOP_MNEMONIC("bsf Gv,Ev");
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_SF | X86_EFL_AF | X86_EFL_PF | X86_EFL_CF);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rv_rm, &g_iemAImpl_bsf);
}
4671
4672
/** Opcode 0x0f 0xbd. BSR: bit scan reverse.  All EFLAGS except ZF are
 *  architecturally undefined, hence the verification mask below. */
FNIEMOP_DEF(iemOp_bsr_Gv_Ev)
{
    IEMOP_MNEMONIC("bsr Gv,Ev");
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_SF | X86_EFL_AF | X86_EFL_PF | X86_EFL_CF);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rv_rm, &g_iemAImpl_bsr);
}
4680
4681
/** Opcode 0x0f 0xbe. MOVSX Gv,Eb: sign-extend a byte into a 16/32/64-bit
 *  general register.  Mirrors iemOp_movzx_Gv_Eb with _SX_ fetches. */
FNIEMOP_DEF(iemOp_movsx_Gv_Eb)
{
    IEMOP_MNEMONIC("movsx Gv,Eb");

    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo should probably not be raised until we've fetched all the opcode bytes? */

    /*
     * If rm is denoting a register, no more instruction bytes.
     */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint16_t, u16Value);
                IEM_MC_FETCH_GREG_U8_SX_U16(u16Value, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u16Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint32_t, u32Value);
                IEM_MC_FETCH_GREG_U8_SX_U32(u32Value, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u32Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint64_t, u64Value);
                IEM_MC_FETCH_GREG_U8_SX_U64(u64Value, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u64Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /*
         * We're loading a register from memory.
         */
        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(uint16_t, u16Value);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm);
                IEM_MC_FETCH_MEM_U8_SX_U16(u16Value, pIemCpu->iEffSeg, GCPtrEffDst);
                IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u16Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(uint32_t, u32Value);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm);
                IEM_MC_FETCH_MEM_U8_SX_U32(u32Value, pIemCpu->iEffSeg, GCPtrEffDst);
                IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u32Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(uint64_t, u64Value);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm);
                IEM_MC_FETCH_MEM_U8_SX_U64(u64Value, pIemCpu->iEffSeg, GCPtrEffDst);
                IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u64Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
4771
4772
/** Opcode 0x0f 0xbf. MOVSX Gv,Ew: sign-extend a word into a 32/64-bit
 *  general register.  Mirrors iemOp_movzx_Gv_Ew with _SX_ fetches. */
FNIEMOP_DEF(iemOp_movsx_Gv_Ew)
{
    IEMOP_MNEMONIC("movsx Gv,Ew");

    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo should probably not be raised until we've fetched all the opcode bytes? */

    /** @todo Not entirely sure how the operand size prefix is handled here,
     *        assuming that it will be ignored. Would be nice to have a few
     *        test for this. */
    /*
     * If rm is denoting a register, no more instruction bytes.
     */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        if (pIemCpu->enmEffOpSize != IEMMODE_64BIT)
        {
            IEM_MC_BEGIN(0, 1);
            IEM_MC_LOCAL(uint32_t, u32Value);
            IEM_MC_FETCH_GREG_U16_SX_U32(u32Value, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
            IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u32Value);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
        }
        else
        {
            IEM_MC_BEGIN(0, 1);
            IEM_MC_LOCAL(uint64_t, u64Value);
            IEM_MC_FETCH_GREG_U16_SX_U64(u64Value, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
            IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u64Value);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
        }
    }
    else
    {
        /*
         * We're loading a register from memory.
         */
        if (pIemCpu->enmEffOpSize != IEMMODE_64BIT)
        {
            IEM_MC_BEGIN(0, 2);
            IEM_MC_LOCAL(uint32_t, u32Value);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm);
            IEM_MC_FETCH_MEM_U16_SX_U32(u32Value, pIemCpu->iEffSeg, GCPtrEffDst);
            IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u32Value);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
        }
        else
        {
            IEM_MC_BEGIN(0, 2);
            IEM_MC_LOCAL(uint64_t, u64Value);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm);
            IEM_MC_FETCH_MEM_U16_SX_U64(u64Value, pIemCpu->iEffSeg, GCPtrEffDst);
            IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u64Value);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
        }
    }
    return VINF_SUCCESS;
}
4838
4839
/** Opcode 0x0f 0xc0. XADD Eb,Gb: exchange and add - the destination
 *  receives the sum while the source register receives the old destination
 *  value. */
FNIEMOP_DEF(iemOp_xadd_Eb_Gb)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_MNEMONIC("xadd Eb,Gb");

    /*
     * If rm is denoting a register, no more instruction bytes.
     */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        IEMOP_HLP_NO_LOCK_PREFIX();

        /* Both operands are referenced directly, so the helper does the
           exchange in place; falls through to the return at the bottom. */
        IEM_MC_BEGIN(3, 0);
        IEM_MC_ARG(uint8_t *, pu8Dst, 0);
        IEM_MC_ARG(uint8_t *, pu8Reg, 1);
        IEM_MC_ARG(uint32_t *, pEFlags, 2);

        IEM_MC_REF_GREG_U8(pu8Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
        IEM_MC_REF_GREG_U8(pu8Reg, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
        IEM_MC_REF_EFLAGS(pEFlags);
        IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u8, pu8Dst, pu8Reg, pEFlags);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * We're accessing memory.  The register operand is copied into a
         * local, the helper updates the copy with the old memory value, and
         * it is written back to the guest register after the commit.
         */
        IEM_MC_BEGIN(3, 3);
        IEM_MC_ARG(uint8_t *, pu8Dst, 0);
        IEM_MC_ARG(uint8_t *, pu8Reg, 1);
        IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
        IEM_MC_LOCAL(uint8_t, u8RegCopy);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm);
        IEM_MC_MEM_MAP(pu8Dst, IEM_ACCESS_DATA_RW, pIemCpu->iEffSeg, GCPtrEffDst, 0 /*arg*/);
        IEM_MC_FETCH_GREG_U8(u8RegCopy, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
        IEM_MC_REF_LOCAL(pu8Reg, u8RegCopy);
        IEM_MC_FETCH_EFLAGS(EFlags);
        if (!(pIemCpu->fPrefixes & IEM_OP_PRF_LOCK))
            IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u8, pu8Dst, pu8Reg, pEFlags);
        else
            IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u8_locked, pu8Dst, pu8Reg, pEFlags);

        IEM_MC_MEM_COMMIT_AND_UNMAP(pu8Dst, IEM_ACCESS_DATA_RW);
        IEM_MC_COMMIT_EFLAGS(EFlags);
        IEM_MC_STORE_GREG_U8(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u8RegCopy);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
        return VINF_SUCCESS;
    }
    return VINF_SUCCESS;
}
4897
4898
/** Opcode 0x0f 0xc1. XADD Ev,Gv: exchange and add - the destination
 *  receives the sum while the source register receives the old destination
 *  value.  Word/dword/qword sibling of iemOp_xadd_Eb_Gb. */
FNIEMOP_DEF(iemOp_xadd_Ev_Gv)
{
    IEMOP_MNEMONIC("xadd Ev,Gv");
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    /*
     * If rm is denoting a register, no more instruction bytes.
     */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        IEMOP_HLP_NO_LOCK_PREFIX();

        /* Register form: both operands referenced directly, helper works
           in place.  (32-bit writes via IEM_MC_REF_GREG_U32 - NOTE(review):
           high-dword clearing is presumably handled by the reference/commit
           machinery; confirm, cf. the explicit clears in cmpxchg.) */
        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint16_t *, pu16Dst, 0);
                IEM_MC_ARG(uint16_t *, pu16Reg, 1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);

                IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_REF_GREG_U16(pu16Reg, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u16, pu16Dst, pu16Reg, pEFlags);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint32_t *, pu32Dst, 0);
                IEM_MC_ARG(uint32_t *, pu32Reg, 1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);

                IEM_MC_REF_GREG_U32(pu32Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_REF_GREG_U32(pu32Reg, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u32, pu32Dst, pu32Reg, pEFlags);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint64_t *, pu64Dst, 0);
                IEM_MC_ARG(uint64_t *, pu64Reg, 1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);

                IEM_MC_REF_GREG_U64(pu64Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_REF_GREG_U64(pu64Reg, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u64, pu64Dst, pu64Reg, pEFlags);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /*
         * We're accessing memory.  The register operand is copied into a
         * local, the helper updates the copy with the old memory value, and
         * it is written back to the guest register after the commit.
         */
        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(3, 3);
                IEM_MC_ARG(uint16_t *, pu16Dst, 0);
                IEM_MC_ARG(uint16_t *, pu16Reg, 1);
                IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
                IEM_MC_LOCAL(uint16_t, u16RegCopy);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm);
                IEM_MC_MEM_MAP(pu16Dst, IEM_ACCESS_DATA_RW, pIemCpu->iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_GREG_U16(u16RegCopy, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_REF_LOCAL(pu16Reg, u16RegCopy);
                IEM_MC_FETCH_EFLAGS(EFlags);
                if (!(pIemCpu->fPrefixes & IEM_OP_PRF_LOCK))
                    IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u16, pu16Dst, pu16Reg, pEFlags);
                else
                    IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u16_locked, pu16Dst, pu16Reg, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, IEM_ACCESS_DATA_RW);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u16RegCopy);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(3, 3);
                IEM_MC_ARG(uint32_t *, pu32Dst, 0);
                IEM_MC_ARG(uint32_t *, pu32Reg, 1);
                IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
                IEM_MC_LOCAL(uint32_t, u32RegCopy);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm);
                IEM_MC_MEM_MAP(pu32Dst, IEM_ACCESS_DATA_RW, pIemCpu->iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_GREG_U32(u32RegCopy, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_REF_LOCAL(pu32Reg, u32RegCopy);
                IEM_MC_FETCH_EFLAGS(EFlags);
                if (!(pIemCpu->fPrefixes & IEM_OP_PRF_LOCK))
                    IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u32, pu32Dst, pu32Reg, pEFlags);
                else
                    IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u32_locked, pu32Dst, pu32Reg, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, IEM_ACCESS_DATA_RW);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u32RegCopy);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(3, 3);
                IEM_MC_ARG(uint64_t *, pu64Dst, 0);
                IEM_MC_ARG(uint64_t *, pu64Reg, 1);
                IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
                IEM_MC_LOCAL(uint64_t, u64RegCopy);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm);
                IEM_MC_MEM_MAP(pu64Dst, IEM_ACCESS_DATA_RW, pIemCpu->iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_GREG_U64(u64RegCopy, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_REF_LOCAL(pu64Reg, u64RegCopy);
                IEM_MC_FETCH_EFLAGS(EFlags);
                if (!(pIemCpu->fPrefixes & IEM_OP_PRF_LOCK))
                    IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u64, pu64Dst, pu64Reg, pEFlags);
                else
                    IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u64_locked, pu64Dst, pu64Reg, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, IEM_ACCESS_DATA_RW);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u64RegCopy);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
5048
/* SSE/SSE2 opcodes 0x0f 0xc2..0xc6 are not implemented yet; FNIEMOP_STUB
   generates placeholder decoder functions for them. */

/** Opcode 0x0f 0xc2. */
FNIEMOP_STUB(iemOp_cmpps_Vps_Wps_Ib__cmppd_Vpd_Wpd_Ib__cmpss_Vss_Wss_Ib__cmpsd_Vsd_Wsd_Ib);

/** Opcode 0x0f 0xc3. */
FNIEMOP_STUB(iemOp_movnti_My_Gy);

/** Opcode 0x0f 0xc4. */
FNIEMOP_STUB(iemOp_pinsrw_Pq_Ry_Mw_Ib__pinsrw_Vdq_Ry_Mw_Ib);

/** Opcode 0x0f 0xc5. */
FNIEMOP_STUB(iemOp_pextrw_Gd_Nq_Ib__pextrw_Gd_Udq_Ib);

/** Opcode 0x0f 0xc6. */
FNIEMOP_STUB(iemOp_shufps_Vps_Wps_Ib__shufdp_Vpd_Wpd_Ib);
5064
/** Opcode 0x0f 0xc7 !11/1. */
FNIEMOP_DEF_1(iemOp_Grp9_cmpxchg8b_Mq, uint8_t, bRm)
{
    /*
     * CMPXCHG8B m64: compares EDX:EAX with the memory operand; on match the
     * memory is replaced by ECX:EBX (ZF set), otherwise the memory value is
     * loaded into EDX:EAX (ZF clear).  The actual compare/exchange is done by
     * the assembly worker; this block only marshals registers and memory.
     */
    IEMOP_MNEMONIC("cmpxchg8b Mq");

    IEM_MC_BEGIN(4, 3);
    IEM_MC_ARG(uint64_t *,                  pu64MemDst,     0);
    IEM_MC_ARG(PRTUINT64U,                  pu64EaxEdx,     1);
    IEM_MC_ARG(PRTUINT64U,                  pu64EbxEcx,     2);
    IEM_MC_ARG_LOCAL_EFLAGS(                pEFlags, EFlags, 3);
    IEM_MC_LOCAL(RTUINT64U,                 u64EaxEdx);
    IEM_MC_LOCAL(RTUINT64U,                 u64EbxEcx);
    IEM_MC_LOCAL(RTGCPTR,                   GCPtrEffDst);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm);
    IEMOP_HLP_DONE_DECODING();
    IEM_MC_MEM_MAP(pu64MemDst, IEM_ACCESS_DATA_RW, pIemCpu->iEffSeg, GCPtrEffDst, 0 /*arg*/);

    /* Gather EDX:EAX into a 64-bit local and pass a reference to it. */
    IEM_MC_FETCH_GREG_U32(u64EaxEdx.s.Lo, X86_GREG_xAX);
    IEM_MC_FETCH_GREG_U32(u64EaxEdx.s.Hi, X86_GREG_xDX);
    IEM_MC_REF_LOCAL(pu64EaxEdx, u64EaxEdx);

    /* Ditto for ECX:EBX. */
    IEM_MC_FETCH_GREG_U32(u64EbxEcx.s.Lo, X86_GREG_xBX);
    IEM_MC_FETCH_GREG_U32(u64EbxEcx.s.Hi, X86_GREG_xCX);
    IEM_MC_REF_LOCAL(pu64EbxEcx, u64EbxEcx);

    IEM_MC_FETCH_EFLAGS(EFlags);
    if (!(pIemCpu->fPrefixes & IEM_OP_PRF_LOCK))
        IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg8b, pu64MemDst, pu64EaxEdx, pu64EbxEcx, pEFlags);
    else
        IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg8b_locked, pu64MemDst, pu64EaxEdx, pu64EbxEcx, pEFlags);

    IEM_MC_MEM_COMMIT_AND_UNMAP(pu64MemDst, IEM_ACCESS_DATA_RW);
    IEM_MC_COMMIT_EFLAGS(EFlags);
    /* ZF clear: the compare failed; the worker presumably put the old memory
       value into the EDX:EAX copy, which we write back here. */
    IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_ZF)
        /** @todo Testcase: Check effect of cmpxchg8b on bits 63:32 in rax and rdx. */
        IEM_MC_STORE_GREG_U32(X86_GREG_xAX, u64EaxEdx.s.Lo);
        IEM_MC_STORE_GREG_U32(X86_GREG_xDX, u64EaxEdx.s.Hi);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
5109
5110
/* Remaining group 9 encodings; FNIEMOP_UD_STUB_1 makes them raise \#UD. */

/** Opcode REX.W 0x0f 0xc7 !11/1. */
FNIEMOP_UD_STUB_1(iemOp_Grp9_cmpxchg16b_Mdq, uint8_t, bRm);

/** Opcode 0x0f 0xc7 11/6. */
FNIEMOP_UD_STUB_1(iemOp_Grp9_rdrand_Rv, uint8_t, bRm);

/** Opcode 0x0f 0xc7 !11/6. */
FNIEMOP_UD_STUB_1(iemOp_Grp9_vmptrld_Mq, uint8_t, bRm);

/** Opcode 0x66 0x0f 0xc7 !11/6. */
FNIEMOP_UD_STUB_1(iemOp_Grp9_vmclear_Mq, uint8_t, bRm);

/** Opcode 0xf3 0x0f 0xc7 !11/6. */
FNIEMOP_UD_STUB_1(iemOp_Grp9_vmxon_Mq, uint8_t, bRm);

/** Opcode [0xf3] 0x0f 0xc7 !11/7. */
FNIEMOP_UD_STUB_1(iemOp_Grp9_vmptrst_Mq, uint8_t, bRm);
5128
5129
5130/** Opcode 0x0f 0xc7. */
5131FNIEMOP_DEF(iemOp_Grp9)
5132{
5133 /** @todo Testcase: Check mixing 0x66 and 0xf3. Check the effect of 0xf2. */
5134 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5135 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
5136 {
5137 case 0: case 2: case 3: case 4: case 5:
5138 return IEMOP_RAISE_INVALID_OPCODE();
5139 case 1:
5140 /** @todo Testcase: Check prefix effects on cmpxchg8b/16b. */
5141 if ( (bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT)
5142 || (pIemCpu->fPrefixes & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPZ))) /** @todo Testcase: AMD seems to express a different idea here wrt prefixes. */
5143 return IEMOP_RAISE_INVALID_OPCODE();
5144 if (bRm & IEM_OP_PRF_SIZE_REX_W)
5145 return FNIEMOP_CALL_1(iemOp_Grp9_cmpxchg16b_Mdq, bRm);
5146 return FNIEMOP_CALL_1(iemOp_Grp9_cmpxchg8b_Mq, bRm);
5147 case 6:
5148 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
5149 return FNIEMOP_CALL_1(iemOp_Grp9_rdrand_Rv, bRm);
5150 switch (pIemCpu->fPrefixes & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPZ))
5151 {
5152 case 0:
5153 return FNIEMOP_CALL_1(iemOp_Grp9_vmptrld_Mq, bRm);
5154 case IEM_OP_PRF_SIZE_OP:
5155 return FNIEMOP_CALL_1(iemOp_Grp9_vmclear_Mq, bRm);
5156 case IEM_OP_PRF_REPZ:
5157 return FNIEMOP_CALL_1(iemOp_Grp9_vmxon_Mq, bRm);
5158 default:
5159 return IEMOP_RAISE_INVALID_OPCODE();
5160 }
5161 case 7:
5162 switch (pIemCpu->fPrefixes & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPZ))
5163 {
5164 case 0:
5165 case IEM_OP_PRF_REPZ:
5166 return FNIEMOP_CALL_1(iemOp_Grp9_vmptrst_Mq, bRm);
5167 default:
5168 return IEMOP_RAISE_INVALID_OPCODE();
5169 }
5170 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5171 }
5172}
5173
5174
/**
 * Common 'bswap register' helper.
 *
 * Byte-swaps the general register @a iReg according to the effective operand
 * size.  Called by the 0x0f 0xc8..0xcf decoders.
 *
 * @param   iReg    The register index (including any REX extension).
 */
FNIEMOP_DEF_1(iemOpCommonBswapGReg, uint8_t, iReg)
{
    IEMOP_HLP_NO_LOCK_PREFIX();
    switch (pIemCpu->enmEffOpSize)
    {
        case IEMMODE_16BIT:
            /* 16-bit bswap is handled by a dedicated worker operating on a
               32-bit register reference (real CPU behavior for this operand
               size is dubious - see the worker for details). */
            IEM_MC_BEGIN(1, 0);
            IEM_MC_ARG(uint32_t *,  pu32Dst, 0);
            IEM_MC_REF_GREG_U32(pu32Dst, iReg);     /* Don't clear the high dword! */
            IEM_MC_CALL_VOID_AIMPL_1(iemAImpl_bswap_u16, pu32Dst);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_32BIT:
            /* 32-bit writes clear the high dword of the 64-bit register. */
            IEM_MC_BEGIN(1, 0);
            IEM_MC_ARG(uint32_t *,  pu32Dst, 0);
            IEM_MC_REF_GREG_U32(pu32Dst, iReg);
            IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
            IEM_MC_CALL_VOID_AIMPL_1(iemAImpl_bswap_u32, pu32Dst);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_64BIT:
            IEM_MC_BEGIN(1, 0);
            IEM_MC_ARG(uint64_t *,  pu64Dst, 0);
            IEM_MC_REF_GREG_U64(pu64Dst, iReg);
            IEM_MC_CALL_VOID_AIMPL_1(iemAImpl_bswap_u64, pu64Dst);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
5214
5215
5216/** Opcode 0x0f 0xc8. */
5217FNIEMOP_DEF(iemOp_bswap_rAX_r8)
5218{
5219 IEMOP_MNEMONIC("bswap rAX/r8");
5220 return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xAX | pIemCpu->uRexReg);
5221}
5222
5223
5224/** Opcode 0x0f 0xc9. */
5225FNIEMOP_DEF(iemOp_bswap_rCX_r9)
5226{
5227 IEMOP_MNEMONIC("bswap rCX/r9");
5228 return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xCX | pIemCpu->uRexReg);
5229}
5230
5231
5232/** Opcode 0x0f 0xca. */
5233FNIEMOP_DEF(iemOp_bswap_rDX_r10)
5234{
5235 IEMOP_MNEMONIC("bswap rDX/r9");
5236 return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xDX | pIemCpu->uRexReg);
5237}
5238
5239
5240/** Opcode 0x0f 0xcb. */
5241FNIEMOP_DEF(iemOp_bswap_rBX_r11)
5242{
5243 IEMOP_MNEMONIC("bswap rBX/r9");
5244 return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xBX | pIemCpu->uRexReg);
5245}
5246
5247
5248/** Opcode 0x0f 0xcc. */
5249FNIEMOP_DEF(iemOp_bswap_rSP_r12)
5250{
5251 IEMOP_MNEMONIC("bswap rSP/r12");
5252 return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xSP | pIemCpu->uRexReg);
5253}
5254
5255
5256/** Opcode 0x0f 0xcd. */
5257FNIEMOP_DEF(iemOp_bswap_rBP_r13)
5258{
5259 IEMOP_MNEMONIC("bswap rBP/r13");
5260 return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xBP | pIemCpu->uRexReg);
5261}
5262
5263
5264/** Opcode 0x0f 0xce. */
5265FNIEMOP_DEF(iemOp_bswap_rSI_r14)
5266{
5267 IEMOP_MNEMONIC("bswap rSI/r14");
5268 return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xSI | pIemCpu->uRexReg);
5269}
5270
5271
5272/** Opcode 0x0f 0xcf. */
5273FNIEMOP_DEF(iemOp_bswap_rDI_r15)
5274{
5275 IEMOP_MNEMONIC("bswap rDI/r15");
5276 return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xDI | pIemCpu->uRexReg);
5277}
5278
5279
5280
/* MMX/SSE opcodes 0x0f 0xd0..0xfe are not implemented yet; FNIEMOP_STUB
   generates placeholder decoder functions for them. */

/** Opcode 0x0f 0xd0. */
FNIEMOP_STUB(iemOp_addsubpd_Vpd_Wpd__addsubps_Vps_Wps);
/** Opcode 0x0f 0xd1. */
FNIEMOP_STUB(iemOp_psrlw_Pp_Qp__psrlw_Vdp_Wdq);
/** Opcode 0x0f 0xd2. */
FNIEMOP_STUB(iemOp_psrld_Pq_Qq__psrld_Vdq_Wdq);
/** Opcode 0x0f 0xd3. */
FNIEMOP_STUB(iemOp_psrlq_Pq_Qq__psrlq_Vdq_Wdq);
/** Opcode 0x0f 0xd4. */
FNIEMOP_STUB(iemOp_paddq_Pq_Qq__paddq_Vdq_Wdq);
/** Opcode 0x0f 0xd5. */
FNIEMOP_STUB(iemOp_pmulq_Pq_Qq__pmullw_Vdq_Wdq);
/** Opcode 0x0f 0xd6. */
FNIEMOP_STUB(iemOp_movq_Wq_Vq__movq2dq_Vdq_Nq__movdq2q_Pq_Uq);
/** Opcode 0x0f 0xd7. */
FNIEMOP_STUB(iemOp_pmovmskb_Gd_Nq__pmovmskb_Gd_Udq);
/** Opcode 0x0f 0xd8. */
FNIEMOP_STUB(iemOp_psubusb_Pq_Qq__psubusb_Vdq_Wdq);
/** Opcode 0x0f 0xd9. */
FNIEMOP_STUB(iemOp_psubusw_Pq_Qq__psubusw_Vdq_Wdq);
/** Opcode 0x0f 0xda. */
FNIEMOP_STUB(iemOp_pminub_Pq_Qq__pminub_Vdq_Wdq);
/** Opcode 0x0f 0xdb. */
FNIEMOP_STUB(iemOp_pand_Pq_Qq__pand_Vdq_Wdq);
/** Opcode 0x0f 0xdc. */
FNIEMOP_STUB(iemOp_paddusb_Pq_Qq__paddusb_Vdq_Wdq);
/** Opcode 0x0f 0xdd. */
FNIEMOP_STUB(iemOp_paddusw_Pq_Qq__paddusw_Vdq_Wdq);
/** Opcode 0x0f 0xde. */
FNIEMOP_STUB(iemOp_pmaxub_Pq_Qq__pamxub_Vdq_Wdq);
/** Opcode 0x0f 0xdf. */
FNIEMOP_STUB(iemOp_pandn_Pq_Qq__pandn_Vdq_Wdq);
/** Opcode 0x0f 0xe0. */
FNIEMOP_STUB(iemOp_pavgb_Pq_Qq__pavgb_Vdq_Wdq);
/** Opcode 0x0f 0xe1. */
FNIEMOP_STUB(iemOp_psraw_Pq_Qq__psraw_Vdq_Wdq);
/** Opcode 0x0f 0xe2. */
FNIEMOP_STUB(iemOp_psrad_Pq_Qq__psrad_Vdq_Wdq);
/** Opcode 0x0f 0xe3. */
FNIEMOP_STUB(iemOp_pavgw_Pq_Qq__pavgw_Vdq_Wdq);
/** Opcode 0x0f 0xe4. */
FNIEMOP_STUB(iemOp_pmulhuw_Pq_Qq__pmulhuw_Vdq_Wdq);
/** Opcode 0x0f 0xe5. */
FNIEMOP_STUB(iemOp_pmulhw_Pq_Qq__pmulhw_Vdq_Wdq);
/** Opcode 0x0f 0xe6. */
FNIEMOP_STUB(iemOp_cvttpd2dq_Vdq_Wdp__cvtdq2pd_Vdq_Wpd__cvtpd2dq_Vdq_Wpd);
/** Opcode 0x0f 0xe7. */
FNIEMOP_STUB(iemOp_movntq_Mq_Pq__movntdq_Mdq_Vdq);
/** Opcode 0x0f 0xe8. */
FNIEMOP_STUB(iemOp_psubsb_Pq_Qq__psubsb_Vdq_Wdq);
/** Opcode 0x0f 0xe9. */
FNIEMOP_STUB(iemOp_psubsw_Pq_Qq__psubsw_Vdq_Wdq);
/** Opcode 0x0f 0xea. */
FNIEMOP_STUB(iemOp_pminsw_Pq_Qq__pminsw_Vdq_Wdq);
/** Opcode 0x0f 0xeb. */
FNIEMOP_STUB(iemOp_por_Pq_Qq__por_Vdq_Wdq);
/** Opcode 0x0f 0xec. */
FNIEMOP_STUB(iemOp_paddsb_Pq_Qq__paddsb_Vdq_Wdq);
/** Opcode 0x0f 0xed. */
FNIEMOP_STUB(iemOp_paddsw_Pq_Qq__paddsw_Vdq_Wdq);
/** Opcode 0x0f 0xee. */
FNIEMOP_STUB(iemOp_pmaxsw_Pq_Qq__pmaxsw_Vdq_Wdq);
/** Opcode 0x0f 0xef. */
FNIEMOP_STUB(iemOp_pxor_Pq_Qq__pxor_Vdq_Wdq);
/** Opcode 0x0f 0xf0. */
FNIEMOP_STUB(iemOp_lddqu_Vdq_Mdq);
/** Opcode 0x0f 0xf1. */
FNIEMOP_STUB(iemOp_psllw_Pq_Qq__pslw_Vdq_Wdq);
/** Opcode 0x0f 0xf2. */
FNIEMOP_STUB(iemOp_psld_Pq_Qq__pslld_Vdq_Wdq);
/** Opcode 0x0f 0xf3. */
FNIEMOP_STUB(iemOp_psllq_Pq_Qq__pslq_Vdq_Wdq);
/** Opcode 0x0f 0xf4. */
FNIEMOP_STUB(iemOp_pmuludq_Pq_Qq__pmuludq_Vdq_Wdq);
/** Opcode 0x0f 0xf5. */
FNIEMOP_STUB(iemOp_pmaddwd_Pq_Qq__pmaddwd_Vdq_Wdq);
/** Opcode 0x0f 0xf6. */
FNIEMOP_STUB(iemOp_psadbw_Pq_Qq__psadbw_Vdq_Wdq);
/** Opcode 0x0f 0xf7. */
FNIEMOP_STUB(iemOp_maskmovq_Pq_Nq__maskmovdqu_Vdq_Udq);
/** Opcode 0x0f 0xf8. */
FNIEMOP_STUB(iemOp_psubb_Pq_Qq_psubb_Vdq_Wdq);
/** Opcode 0x0f 0xf9. */
FNIEMOP_STUB(iemOp_psubw_Pq_Qq__psubw_Vdq_Wdq);
/** Opcode 0x0f 0xfa. */
FNIEMOP_STUB(iemOp_psubd_Pq_Qq__psubd_Vdq_Wdq);
/** Opcode 0x0f 0xfb. */
FNIEMOP_STUB(iemOp_psubq_Pq_Qq__psbuq_Vdq_Wdq);
/** Opcode 0x0f 0xfc. */
FNIEMOP_STUB(iemOp_paddb_Pq_Qq__paddb_Vdq_Wdq);
/** Opcode 0x0f 0xfd. */
FNIEMOP_STUB(iemOp_paddw_Pq_Qq__paddw_Vdq_Wdq);
/** Opcode 0x0f 0xfe. */
FNIEMOP_STUB(iemOp_paddd_Pq_Qq__paddd_Vdq_Wdq);
5375
5376
/**
 * The two-byte (0x0f prefixed) opcode dispatch table, indexed by the second
 * opcode byte.  NOTE: the entry at index 0xbb was previously mislabelled
 * "0xbd" in its comment; the table contents were correct.
 */
const PFNIEMOP g_apfnTwoByteMap[256] =
{
    /* 0x00 */ iemOp_Grp6,
    /* 0x01 */ iemOp_Grp7,
    /* 0x02 */ iemOp_lar_Gv_Ew,
    /* 0x03 */ iemOp_lsl_Gv_Ew,
    /* 0x04 */ iemOp_Invalid,
    /* 0x05 */ iemOp_syscall,
    /* 0x06 */ iemOp_clts,
    /* 0x07 */ iemOp_sysret,
    /* 0x08 */ iemOp_invd,
    /* 0x09 */ iemOp_wbinvd,
    /* 0x0a */ iemOp_Invalid,
    /* 0x0b */ iemOp_ud2,
    /* 0x0c */ iemOp_Invalid,
    /* 0x0d */ iemOp_nop_Ev_GrpP,
    /* 0x0e */ iemOp_femms,
    /* 0x0f */ iemOp_3Dnow,
    /* 0x10 */ iemOp_movups_Vps_Wps__movupd_Vpd_Wpd__movss_Vss_Wss__movsd_Vsd_Wsd,
    /* 0x11 */ iemOp_movups_Wps_Vps__movupd_Wpd_Vpd__movss_Wss_Vss__movsd_Vsd_Wsd,
    /* 0x12 */ iemOp_movlps_Vq_Mq__movhlps_Vq_Uq__movlpd_Vq_Mq__movsldup_Vq_Wq__movddup_Vq_Wq,
    /* 0x13 */ iemOp_movlps_Mq_Vq__movlpd_Mq_Vq,
    /* 0x14 */ iemOp_unpckhlps_Vps_Wq__unpcklpd_Vpd_Wq,
    /* 0x15 */ iemOp_unpckhps_Vps_Wq__unpckhpd_Vpd_Wq,
    /* 0x16 */ iemOp_movhps_Vq_Mq__movlhps_Vq_Uq__movhpd_Vq_Mq__movshdup_Vq_Wq,
    /* 0x17 */ iemOp_movhps_Mq_Vq__movhpd_Mq_Vq,
    /* 0x18 */ iemOp_prefetch_Grp16,
    /* 0x19 */ iemOp_nop_Ev,
    /* 0x1a */ iemOp_nop_Ev,
    /* 0x1b */ iemOp_nop_Ev,
    /* 0x1c */ iemOp_nop_Ev,
    /* 0x1d */ iemOp_nop_Ev,
    /* 0x1e */ iemOp_nop_Ev,
    /* 0x1f */ iemOp_nop_Ev,
    /* 0x20 */ iemOp_mov_Rd_Cd,
    /* 0x21 */ iemOp_mov_Rd_Dd,
    /* 0x22 */ iemOp_mov_Cd_Rd,
    /* 0x23 */ iemOp_mov_Dd_Rd,
    /* 0x24 */ iemOp_mov_Rd_Td,
    /* 0x25 */ iemOp_Invalid,
    /* 0x26 */ iemOp_mov_Td_Rd,
    /* 0x27 */ iemOp_Invalid,
    /* 0x28 */ iemOp_movaps_Vps_Wps__movapd_Vpd_Wpd,
    /* 0x29 */ iemOp_movaps_Wps_Vps__movapd_Wpd_Vpd,
    /* 0x2a */ iemOp_cvtpi2ps_Vps_Qpi__cvtpi2pd_Vpd_Qpi__cvtsi2ss_Vss_Ey__cvtsi2sd_Vsd_Ey,
    /* 0x2b */ iemOp_movntps_Mps_Vps__movntpd_Mpd_Vpd,
    /* 0x2c */ iemOp_cvttps2pi_Ppi_Wps__cvttpd2pi_Ppi_Wpd__cvttss2si_Gy_Wss__cvttsd2si_Yu_Wsd,
    /* 0x2d */ iemOp_cvtps2pi_Ppi_Wps__cvtpd2pi_QpiWpd__cvtss2si_Gy_Wss__cvtsd2si_Gy_Wsd,
    /* 0x2e */ iemOp_ucomiss_Vss_Wss__ucomisd_Vsd_Wsd,
    /* 0x2f */ iemOp_comiss_Vss_Wss__comisd_Vsd_Wsd,
    /* 0x30 */ iemOp_wrmsr,
    /* 0x31 */ iemOp_rdtsc,
    /* 0x32 */ iemOp_rdmsr,
    /* 0x33 */ iemOp_rdpmc,
    /* 0x34 */ iemOp_sysenter,
    /* 0x35 */ iemOp_sysexit,
    /* 0x36 */ iemOp_Invalid,
    /* 0x37 */ iemOp_getsec,
    /* 0x38 */ iemOp_3byte_Esc_A4,
    /* 0x39 */ iemOp_Invalid,
    /* 0x3a */ iemOp_3byte_Esc_A5,
    /* 0x3b */ iemOp_Invalid,
    /* 0x3c */ iemOp_movnti_Gv_Ev/*??*/,
    /* 0x3d */ iemOp_Invalid,
    /* 0x3e */ iemOp_Invalid,
    /* 0x3f */ iemOp_Invalid,
    /* 0x40 */ iemOp_cmovo_Gv_Ev,
    /* 0x41 */ iemOp_cmovno_Gv_Ev,
    /* 0x42 */ iemOp_cmovc_Gv_Ev,
    /* 0x43 */ iemOp_cmovnc_Gv_Ev,
    /* 0x44 */ iemOp_cmove_Gv_Ev,
    /* 0x45 */ iemOp_cmovne_Gv_Ev,
    /* 0x46 */ iemOp_cmovbe_Gv_Ev,
    /* 0x47 */ iemOp_cmovnbe_Gv_Ev,
    /* 0x48 */ iemOp_cmovs_Gv_Ev,
    /* 0x49 */ iemOp_cmovns_Gv_Ev,
    /* 0x4a */ iemOp_cmovp_Gv_Ev,
    /* 0x4b */ iemOp_cmovnp_Gv_Ev,
    /* 0x4c */ iemOp_cmovl_Gv_Ev,
    /* 0x4d */ iemOp_cmovnl_Gv_Ev,
    /* 0x4e */ iemOp_cmovle_Gv_Ev,
    /* 0x4f */ iemOp_cmovnle_Gv_Ev,
    /* 0x50 */ iemOp_movmskps_Gy_Ups__movmskpd_Gy_Upd,
    /* 0x51 */ iemOp_sqrtps_Wps_Vps__sqrtpd_Wpd_Vpd__sqrtss_Vss_Wss__sqrtsd_Vsd_Wsd,
    /* 0x52 */ iemOp_rsqrtps_Wps_Vps__rsqrtss_Vss_Wss,
    /* 0x53 */ iemOp_rcpps_Wps_Vps__rcpss_Vs_Wss,
    /* 0x54 */ iemOp_andps_Vps_Wps__andpd_Wpd_Vpd,
    /* 0x55 */ iemOp_andnps_Vps_Wps__andnpd_Wpd_Vpd,
    /* 0x56 */ iemOp_orps_Wpd_Vpd__orpd_Wpd_Vpd,
    /* 0x57 */ iemOp_xorps_Vps_Wps__xorpd_Wpd_Vpd,
    /* 0x58 */ iemOp_addps_Vps_Wps__addpd_Vpd_Wpd__addss_Vss_Wss__addsd_Vsd_Wsd,
    /* 0x59 */ iemOp_mulps_Vps_Wps__mulpd_Vpd_Wpd__mulss_Vss__Wss__mulsd_Vsd_Wsd,
    /* 0x5a */ iemOp_cvtps2pd_Vpd_Wps__cvtpd2ps_Vps_Wpd__cvtss2sd_Vsd_Wss__cvtsd2ss_Vss_Wsd,
    /* 0x5b */ iemOp_cvtdq2ps_Vps_Wdq__cvtps2dq_Vdq_Wps__cvtps2dq_Vdq_Wps,
    /* 0x5c */ iemOp_subps_Vps_Wps__subpd_Vps_Wdp__subss_Vss_Wss__subsd_Vsd_Wsd,
    /* 0x5d */ iemOp_minps_Vps_Wps__minpd_Vpd_Wpd__minss_Vss_Wss__minsd_Vsd_Wsd,
    /* 0x5e */ iemOp_divps_Vps_Wps__divpd_Vpd_Wpd__divss_Vss_Wss__divsd_Vsd_Wsd,
    /* 0x5f */ iemOp_maxps_Vps_Wps__maxpd_Vpd_Wpd__maxss_Vss_Wss__maxsd_Vsd_Wsd,
    /* 0x60 */ iemOp_punpcklbw_Pq_Qd__punpcklbw_Vdq_Wdq,
    /* 0x61 */ iemOp_punpcklwd_Pq_Qd__punpcklwd_Vdq_Wdq,
    /* 0x62 */ iemOp_punpckldq_Pq_Qd__punpckldq_Vdq_Wdq,
    /* 0x63 */ iemOp_packsswb_Pq_Qq__packsswb_Vdq_Wdq,
    /* 0x64 */ iemOp_pcmpgtb_Pq_Qq__pcmpgtb_Vdq_Wdq,
    /* 0x65 */ iemOp_pcmpgtw_Pq_Qq__pcmpgtw_Vdq_Wdq,
    /* 0x66 */ iemOp_pcmpgtd_Pq_Qq__pcmpgtd_Vdq_Wdq,
    /* 0x67 */ iemOp_packuswb_Pq_Qq__packuswb_Vdq_Wdq,
    /* 0x68 */ iemOp_punpckhbw_Pq_Qq__punpckhbw_Vdq_Wdq,
    /* 0x69 */ iemOp_punpckhwd_Pq_Qd__punpckhwd_Vdq_Wdq,
    /* 0x6a */ iemOp_punpckhdq_Pq_Qd__punpckhdq_Vdq_Wdq,
    /* 0x6b */ iemOp_packssdw_Pq_Qd__packssdq_Vdq_Wdq,
    /* 0x6c */ iemOp_punpcklqdq_Vdq_Wdq,
    /* 0x6d */ iemOp_punpckhqdq_Vdq_Wdq,
    /* 0x6e */ iemOp_movd_q_Pd_Ey__movd_q_Vy_Ey,
    /* 0x6f */ iemOp_movq_Pq_Qq__movdqa_Vdq_Wdq__movdqu_Vdq_Wdq,
    /* 0x70 */ iemOp_pshufw_Pq_Qq_Ib__pshufd_Vdq_Wdq_Ib__pshufhw_Vdq_Wdq_Ib__pshuflq_Vdq_Wdq_Ib,
    /* 0x71 */ iemOp_Grp12,
    /* 0x72 */ iemOp_Grp13,
    /* 0x73 */ iemOp_Grp14,
    /* 0x74 */ iemOp_pcmpeqb_Pq_Qq__pcmpeqb_Vdq_Wdq,
    /* 0x75 */ iemOp_pcmpeqw_Pq_Qq__pcmpeqw_Vdq_Wdq,
    /* 0x76 */ iemOp_pcmped_Pq_Qq__pcmpeqd_Vdq_Wdq,
    /* 0x77 */ iemOp_emms,
    /* 0x78 */ iemOp_vmread_AmdGrp17,
    /* 0x79 */ iemOp_vmwrite,
    /* 0x7a */ iemOp_Invalid,
    /* 0x7b */ iemOp_Invalid,
    /* 0x7c */ iemOp_haddpd_Vdp_Wpd__haddps_Vps_Wps,
    /* 0x7d */ iemOp_hsubpd_Vpd_Wpd__hsubps_Vps_Wps,
    /* 0x7e */ iemOp_movd_q_Ey_Pd__movd_q_Ey_Vy__movq_Vq_Wq,
    /* 0x7f */ iemOp_movq_Qq_Pq__movq_movdqa_Wdq_Vdq__movdqu_Wdq_Vdq,
    /* 0x80 */ iemOp_jo_Jv,
    /* 0x81 */ iemOp_jno_Jv,
    /* 0x82 */ iemOp_jc_Jv,
    /* 0x83 */ iemOp_jnc_Jv,
    /* 0x84 */ iemOp_je_Jv,
    /* 0x85 */ iemOp_jne_Jv,
    /* 0x86 */ iemOp_jbe_Jv,
    /* 0x87 */ iemOp_jnbe_Jv,
    /* 0x88 */ iemOp_js_Jv,
    /* 0x89 */ iemOp_jns_Jv,
    /* 0x8a */ iemOp_jp_Jv,
    /* 0x8b */ iemOp_jnp_Jv,
    /* 0x8c */ iemOp_jl_Jv,
    /* 0x8d */ iemOp_jnl_Jv,
    /* 0x8e */ iemOp_jle_Jv,
    /* 0x8f */ iemOp_jnle_Jv,
    /* 0x90 */ iemOp_seto_Eb,
    /* 0x91 */ iemOp_setno_Eb,
    /* 0x92 */ iemOp_setc_Eb,
    /* 0x93 */ iemOp_setnc_Eb,
    /* 0x94 */ iemOp_sete_Eb,
    /* 0x95 */ iemOp_setne_Eb,
    /* 0x96 */ iemOp_setbe_Eb,
    /* 0x97 */ iemOp_setnbe_Eb,
    /* 0x98 */ iemOp_sets_Eb,
    /* 0x99 */ iemOp_setns_Eb,
    /* 0x9a */ iemOp_setp_Eb,
    /* 0x9b */ iemOp_setnp_Eb,
    /* 0x9c */ iemOp_setl_Eb,
    /* 0x9d */ iemOp_setnl_Eb,
    /* 0x9e */ iemOp_setle_Eb,
    /* 0x9f */ iemOp_setnle_Eb,
    /* 0xa0 */ iemOp_push_fs,
    /* 0xa1 */ iemOp_pop_fs,
    /* 0xa2 */ iemOp_cpuid,
    /* 0xa3 */ iemOp_bt_Ev_Gv,
    /* 0xa4 */ iemOp_shld_Ev_Gv_Ib,
    /* 0xa5 */ iemOp_shld_Ev_Gv_CL,
    /* 0xa6 */ iemOp_Invalid,
    /* 0xa7 */ iemOp_Invalid,
    /* 0xa8 */ iemOp_push_gs,
    /* 0xa9 */ iemOp_pop_gs,
    /* 0xaa */ iemOp_rsm,
    /* 0xab */ iemOp_bts_Ev_Gv,
    /* 0xac */ iemOp_shrd_Ev_Gv_Ib,
    /* 0xad */ iemOp_shrd_Ev_Gv_CL,
    /* 0xae */ iemOp_Grp15,
    /* 0xaf */ iemOp_imul_Gv_Ev,
    /* 0xb0 */ iemOp_cmpxchg_Eb_Gb,
    /* 0xb1 */ iemOp_cmpxchg_Ev_Gv,
    /* 0xb2 */ iemOp_lss_Gv_Mp,
    /* 0xb3 */ iemOp_btr_Ev_Gv,
    /* 0xb4 */ iemOp_lfs_Gv_Mp,
    /* 0xb5 */ iemOp_lgs_Gv_Mp,
    /* 0xb6 */ iemOp_movzx_Gv_Eb,
    /* 0xb7 */ iemOp_movzx_Gv_Ew,
    /* 0xb8 */ iemOp_popcnt_Gv_Ev_jmpe,
    /* 0xb9 */ iemOp_Grp10,
    /* 0xba */ iemOp_Grp8,
    /* 0xbb */ iemOp_btc_Ev_Gv,
    /* 0xbc */ iemOp_bsf_Gv_Ev,
    /* 0xbd */ iemOp_bsr_Gv_Ev,
    /* 0xbe */ iemOp_movsx_Gv_Eb,
    /* 0xbf */ iemOp_movsx_Gv_Ew,
    /* 0xc0 */ iemOp_xadd_Eb_Gb,
    /* 0xc1 */ iemOp_xadd_Ev_Gv,
    /* 0xc2 */ iemOp_cmpps_Vps_Wps_Ib__cmppd_Vpd_Wpd_Ib__cmpss_Vss_Wss_Ib__cmpsd_Vsd_Wsd_Ib,
    /* 0xc3 */ iemOp_movnti_My_Gy,
    /* 0xc4 */ iemOp_pinsrw_Pq_Ry_Mw_Ib__pinsrw_Vdq_Ry_Mw_Ib,
    /* 0xc5 */ iemOp_pextrw_Gd_Nq_Ib__pextrw_Gd_Udq_Ib,
    /* 0xc6 */ iemOp_shufps_Vps_Wps_Ib__shufdp_Vpd_Wpd_Ib,
    /* 0xc7 */ iemOp_Grp9,
    /* 0xc8 */ iemOp_bswap_rAX_r8,
    /* 0xc9 */ iemOp_bswap_rCX_r9,
    /* 0xca */ iemOp_bswap_rDX_r10,
    /* 0xcb */ iemOp_bswap_rBX_r11,
    /* 0xcc */ iemOp_bswap_rSP_r12,
    /* 0xcd */ iemOp_bswap_rBP_r13,
    /* 0xce */ iemOp_bswap_rSI_r14,
    /* 0xcf */ iemOp_bswap_rDI_r15,
    /* 0xd0 */ iemOp_addsubpd_Vpd_Wpd__addsubps_Vps_Wps,
    /* 0xd1 */ iemOp_psrlw_Pp_Qp__psrlw_Vdp_Wdq,
    /* 0xd2 */ iemOp_psrld_Pq_Qq__psrld_Vdq_Wdq,
    /* 0xd3 */ iemOp_psrlq_Pq_Qq__psrlq_Vdq_Wdq,
    /* 0xd4 */ iemOp_paddq_Pq_Qq__paddq_Vdq_Wdq,
    /* 0xd5 */ iemOp_pmulq_Pq_Qq__pmullw_Vdq_Wdq,
    /* 0xd6 */ iemOp_movq_Wq_Vq__movq2dq_Vdq_Nq__movdq2q_Pq_Uq,
    /* 0xd7 */ iemOp_pmovmskb_Gd_Nq__pmovmskb_Gd_Udq,
    /* 0xd8 */ iemOp_psubusb_Pq_Qq__psubusb_Vdq_Wdq,
    /* 0xd9 */ iemOp_psubusw_Pq_Qq__psubusw_Vdq_Wdq,
    /* 0xda */ iemOp_pminub_Pq_Qq__pminub_Vdq_Wdq,
    /* 0xdb */ iemOp_pand_Pq_Qq__pand_Vdq_Wdq,
    /* 0xdc */ iemOp_paddusb_Pq_Qq__paddusb_Vdq_Wdq,
    /* 0xdd */ iemOp_paddusw_Pq_Qq__paddusw_Vdq_Wdq,
    /* 0xde */ iemOp_pmaxub_Pq_Qq__pamxub_Vdq_Wdq,
    /* 0xdf */ iemOp_pandn_Pq_Qq__pandn_Vdq_Wdq,
    /* 0xe0 */ iemOp_pavgb_Pq_Qq__pavgb_Vdq_Wdq,
    /* 0xe1 */ iemOp_psraw_Pq_Qq__psraw_Vdq_Wdq,
    /* 0xe2 */ iemOp_psrad_Pq_Qq__psrad_Vdq_Wdq,
    /* 0xe3 */ iemOp_pavgw_Pq_Qq__pavgw_Vdq_Wdq,
    /* 0xe4 */ iemOp_pmulhuw_Pq_Qq__pmulhuw_Vdq_Wdq,
    /* 0xe5 */ iemOp_pmulhw_Pq_Qq__pmulhw_Vdq_Wdq,
    /* 0xe6 */ iemOp_cvttpd2dq_Vdq_Wdp__cvtdq2pd_Vdq_Wpd__cvtpd2dq_Vdq_Wpd,
    /* 0xe7 */ iemOp_movntq_Mq_Pq__movntdq_Mdq_Vdq,
    /* 0xe8 */ iemOp_psubsb_Pq_Qq__psubsb_Vdq_Wdq,
    /* 0xe9 */ iemOp_psubsw_Pq_Qq__psubsw_Vdq_Wdq,
    /* 0xea */ iemOp_pminsw_Pq_Qq__pminsw_Vdq_Wdq,
    /* 0xeb */ iemOp_por_Pq_Qq__por_Vdq_Wdq,
    /* 0xec */ iemOp_paddsb_Pq_Qq__paddsb_Vdq_Wdq,
    /* 0xed */ iemOp_paddsw_Pq_Qq__paddsw_Vdq_Wdq,
    /* 0xee */ iemOp_pmaxsw_Pq_Qq__pmaxsw_Vdq_Wdq,
    /* 0xef */ iemOp_pxor_Pq_Qq__pxor_Vdq_Wdq,
    /* 0xf0 */ iemOp_lddqu_Vdq_Mdq,
    /* 0xf1 */ iemOp_psllw_Pq_Qq__pslw_Vdq_Wdq,
    /* 0xf2 */ iemOp_psld_Pq_Qq__pslld_Vdq_Wdq,
    /* 0xf3 */ iemOp_psllq_Pq_Qq__pslq_Vdq_Wdq,
    /* 0xf4 */ iemOp_pmuludq_Pq_Qq__pmuludq_Vdq_Wdq,
    /* 0xf5 */ iemOp_pmaddwd_Pq_Qq__pmaddwd_Vdq_Wdq,
    /* 0xf6 */ iemOp_psadbw_Pq_Qq__psadbw_Vdq_Wdq,
    /* 0xf7 */ iemOp_maskmovq_Pq_Nq__maskmovdqu_Vdq_Udq,
    /* 0xf8 */ iemOp_psubb_Pq_Qq_psubb_Vdq_Wdq,
    /* 0xf9 */ iemOp_psubw_Pq_Qq__psubw_Vdq_Wdq,
    /* 0xfa */ iemOp_psubd_Pq_Qq__psubd_Vdq_Wdq,
    /* 0xfb */ iemOp_psubq_Pq_Qq__psbuq_Vdq_Wdq,
    /* 0xfc */ iemOp_paddb_Pq_Qq__paddb_Vdq_Wdq,
    /* 0xfd */ iemOp_paddw_Pq_Qq__paddw_Vdq_Wdq,
    /* 0xfe */ iemOp_paddd_Pq_Qq__paddd_Vdq_Wdq,
    /* 0xff */ iemOp_Invalid
};
5636
5637/** @} */
5638
5639
5640/** @name One byte opcodes.
5641 *
5642 * @{
5643 */
5644
/* Opcodes 0x00..0x05: the ADD instruction in its six classic encodings; all
   the work is done by the shared binary-operator helpers with the ADD
   assembly worker table. */

/** Opcode 0x00. */
FNIEMOP_DEF(iemOp_add_Eb_Gb)
{
    IEMOP_MNEMONIC("add Eb,Gb");
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_r8, &g_iemAImpl_add);
}


/** Opcode 0x01. */
FNIEMOP_DEF(iemOp_add_Ev_Gv)
{
    IEMOP_MNEMONIC("add Ev,Gv");
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_rv, &g_iemAImpl_add);
}


/** Opcode 0x02. */
FNIEMOP_DEF(iemOp_add_Gb_Eb)
{
    IEMOP_MNEMONIC("add Gb,Eb");
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_r8_rm, &g_iemAImpl_add);
}


/** Opcode 0x03. */
FNIEMOP_DEF(iemOp_add_Gv_Ev)
{
    IEMOP_MNEMONIC("add Gv,Ev");
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rv_rm, &g_iemAImpl_add);
}


/** Opcode 0x04. */
FNIEMOP_DEF(iemOp_add_Al_Ib)
{
    IEMOP_MNEMONIC("add al,Ib");
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_AL_Ib, &g_iemAImpl_add);
}


/** Opcode 0x05. */
FNIEMOP_DEF(iemOp_add_eAX_Iz)
{
    IEMOP_MNEMONIC("add rAX,Iz");
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rAX_Iz, &g_iemAImpl_add);
}


/** Opcode 0x06. */
FNIEMOP_DEF(iemOp_push_ES)
{
    IEMOP_MNEMONIC("push es");
    return FNIEMOP_CALL_1(iemOpCommonPushSReg, X86_SREG_ES);
}


/** Opcode 0x07. */
FNIEMOP_DEF(iemOp_pop_ES)
{
    IEMOP_MNEMONIC("pop es");
    /* POP ES is invalid in 64-bit mode and does not take a lock prefix. */
    IEMOP_HLP_NO_64BIT();
    IEMOP_HLP_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_pop_Sreg, X86_SREG_ES, pIemCpu->enmEffOpSize);
}
5709
5710
/** Opcode 0x08. */
FNIEMOP_DEF(iemOp_or_Eb_Gb)
{
    IEMOP_MNEMONIC("or  Eb,Gb");
    /* AF is architecturally undefined after OR; tell the verifier so. */
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_r8, &g_iemAImpl_or);
}
5718
5719
5720/** Opcode 0x09. */
5721FNIEMOP_DEF(iemOp_or_Ev_Gv)
5722{
5723 IEMOP_MNEMONIC("or Ev,Gv ");
5724 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
5725 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_rv, &g_iemAImpl_or);
5726}
5727
5728
/** Opcode 0x0a. */
FNIEMOP_DEF(iemOp_or_Gb_Eb)
{
    IEMOP_MNEMONIC("or  Gb,Eb");
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF); /* AF undefined after OR */
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_r8_rm, &g_iemAImpl_or);
}


/** Opcode 0x0b. */
FNIEMOP_DEF(iemOp_or_Gv_Ev)
{
    IEMOP_MNEMONIC("or  Gv,Ev");
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF); /* AF undefined after OR */
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rv_rm, &g_iemAImpl_or);
}


/** Opcode 0x0c. */
FNIEMOP_DEF(iemOp_or_Al_Ib)
{
    IEMOP_MNEMONIC("or  al,Ib");
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF); /* AF undefined after OR */
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_AL_Ib, &g_iemAImpl_or);
}


/** Opcode 0x0d. */
FNIEMOP_DEF(iemOp_or_eAX_Iz)
{
    IEMOP_MNEMONIC("or  rAX,Iz");
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF); /* AF undefined after OR */
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rAX_Iz, &g_iemAImpl_or);
}


/** Opcode 0x0e. */
FNIEMOP_DEF(iemOp_push_CS)
{
    IEMOP_MNEMONIC("push cs");
    return FNIEMOP_CALL_1(iemOpCommonPushSReg, X86_SREG_CS);
}


/** Opcode 0x0f.  Escape byte: fetch the second opcode byte and dispatch
 *  through the two-byte opcode map. */
FNIEMOP_DEF(iemOp_2byteEscape)
{
    uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
    return FNIEMOP_CALL(g_apfnTwoByteMap[b]);
}
5779
/* Opcodes 0x10..0x15: the ADC instruction; shared helpers + ADC workers. */

/** Opcode 0x10. */
FNIEMOP_DEF(iemOp_adc_Eb_Gb)
{
    IEMOP_MNEMONIC("adc Eb,Gb");
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_r8, &g_iemAImpl_adc);
}


/** Opcode 0x11. */
FNIEMOP_DEF(iemOp_adc_Ev_Gv)
{
    IEMOP_MNEMONIC("adc Ev,Gv");
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_rv, &g_iemAImpl_adc);
}


/** Opcode 0x12. */
FNIEMOP_DEF(iemOp_adc_Gb_Eb)
{
    IEMOP_MNEMONIC("adc Gb,Eb");
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_r8_rm, &g_iemAImpl_adc);
}


/** Opcode 0x13. */
FNIEMOP_DEF(iemOp_adc_Gv_Ev)
{
    IEMOP_MNEMONIC("adc Gv,Ev");
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rv_rm, &g_iemAImpl_adc);
}


/** Opcode 0x14. */
FNIEMOP_DEF(iemOp_adc_Al_Ib)
{
    IEMOP_MNEMONIC("adc al,Ib");
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_AL_Ib, &g_iemAImpl_adc);
}


/** Opcode 0x15. */
FNIEMOP_DEF(iemOp_adc_eAX_Iz)
{
    IEMOP_MNEMONIC("adc rAX,Iz");
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rAX_Iz, &g_iemAImpl_adc);
}


/** Opcode 0x16. */
FNIEMOP_DEF(iemOp_push_SS)
{
    IEMOP_MNEMONIC("push ss");
    return FNIEMOP_CALL_1(iemOpCommonPushSReg, X86_SREG_SS);
}


/** Opcode 0x17. */
FNIEMOP_DEF(iemOp_pop_SS)
{
    IEMOP_MNEMONIC("pop ss"); /** @todo implies instruction fusing? */
    /* POP SS is invalid in 64-bit mode and does not take a lock prefix. */
    IEMOP_HLP_NO_LOCK_PREFIX();
    IEMOP_HLP_NO_64BIT();
    return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_pop_Sreg, X86_SREG_SS, pIemCpu->enmEffOpSize);
}
5844
5845
/* Opcodes 0x18..0x1d: the SBB instruction; shared helpers + SBB workers. */

/** Opcode 0x18. */
FNIEMOP_DEF(iemOp_sbb_Eb_Gb)
{
    IEMOP_MNEMONIC("sbb Eb,Gb");
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_r8, &g_iemAImpl_sbb);
}


/** Opcode 0x19. */
FNIEMOP_DEF(iemOp_sbb_Ev_Gv)
{
    IEMOP_MNEMONIC("sbb Ev,Gv");
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_rv, &g_iemAImpl_sbb);
}


/** Opcode 0x1a. */
FNIEMOP_DEF(iemOp_sbb_Gb_Eb)
{
    IEMOP_MNEMONIC("sbb Gb,Eb");
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_r8_rm, &g_iemAImpl_sbb);
}


/** Opcode 0x1b. */
FNIEMOP_DEF(iemOp_sbb_Gv_Ev)
{
    IEMOP_MNEMONIC("sbb Gv,Ev");
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rv_rm, &g_iemAImpl_sbb);
}


/** Opcode 0x1c. */
FNIEMOP_DEF(iemOp_sbb_Al_Ib)
{
    IEMOP_MNEMONIC("sbb al,Ib");
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_AL_Ib, &g_iemAImpl_sbb);
}


/** Opcode 0x1d. */
FNIEMOP_DEF(iemOp_sbb_eAX_Iz)
{
    IEMOP_MNEMONIC("sbb rAX,Iz");
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rAX_Iz, &g_iemAImpl_sbb);
}


/** Opcode 0x1e. */
FNIEMOP_DEF(iemOp_push_DS)
{
    IEMOP_MNEMONIC("push ds");
    return FNIEMOP_CALL_1(iemOpCommonPushSReg, X86_SREG_DS);
}


/** Opcode 0x1f. */
FNIEMOP_DEF(iemOp_pop_DS)
{
    IEMOP_MNEMONIC("pop ds");
    /* POP DS is invalid in 64-bit mode and does not take a lock prefix. */
    IEMOP_HLP_NO_LOCK_PREFIX();
    IEMOP_HLP_NO_64BIT();
    return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_pop_Sreg, X86_SREG_DS, pIemCpu->enmEffOpSize);
}
5910
5911
/* Opcodes 0x20..0x25: the AND instruction (AF undefined afterwards),
   0x26: ES segment override prefix, 0x27: DAA (unimplemented stub). */

/** Opcode 0x20. */
FNIEMOP_DEF(iemOp_and_Eb_Gb)
{
    IEMOP_MNEMONIC("and Eb,Gb");
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_r8, &g_iemAImpl_and);
}


/** Opcode 0x21. */
FNIEMOP_DEF(iemOp_and_Ev_Gv)
{
    IEMOP_MNEMONIC("and Ev,Gv");
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_rv, &g_iemAImpl_and);
}


/** Opcode 0x22. */
FNIEMOP_DEF(iemOp_and_Gb_Eb)
{
    IEMOP_MNEMONIC("and Gb,Eb");
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_r8_rm, &g_iemAImpl_and);
}


/** Opcode 0x23. */
FNIEMOP_DEF(iemOp_and_Gv_Ev)
{
    IEMOP_MNEMONIC("and Gv,Ev");
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rv_rm, &g_iemAImpl_and);
}


/** Opcode 0x24. */
FNIEMOP_DEF(iemOp_and_Al_Ib)
{
    IEMOP_MNEMONIC("and al,Ib");
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_AL_Ib, &g_iemAImpl_and);
}


/** Opcode 0x25. */
FNIEMOP_DEF(iemOp_and_eAX_Iz)
{
    IEMOP_MNEMONIC("and rAX,Iz");
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rAX_Iz, &g_iemAImpl_and);
}


/** Opcode 0x26.  ES segment override prefix: records the prefix and recurses
 *  into the one-byte map for the next opcode byte. */
FNIEMOP_DEF(iemOp_seg_ES)
{
    pIemCpu->fPrefixes |= IEM_OP_PRF_SEG_ES;
    pIemCpu->iEffSeg = X86_SREG_ES;

    uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
    return FNIEMOP_CALL(g_apfnOneByteMap[b]);
}


/** Opcode 0x27. */
FNIEMOP_STUB(iemOp_daa);
5979
5980
/** Opcode 0x28 - SUB Eb,Gb (r/m8 -= r8). */
FNIEMOP_DEF(iemOp_sub_Eb_Gb)
{
    IEMOP_MNEMONIC("sub Eb,Gb");
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_r8, &g_iemAImpl_sub);
}


/** Opcode 0x29 - SUB Ev,Gv (r/m -= reg). */
FNIEMOP_DEF(iemOp_sub_Ev_Gv)
{
    IEMOP_MNEMONIC("sub Ev,Gv");
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_rv, &g_iemAImpl_sub);
}


/** Opcode 0x2a - SUB Gb,Eb (r8 -= r/m8). */
FNIEMOP_DEF(iemOp_sub_Gb_Eb)
{
    IEMOP_MNEMONIC("sub Gb,Eb");
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_r8_rm, &g_iemAImpl_sub);
}


/** Opcode 0x2b - SUB Gv,Ev (reg -= r/m). */
FNIEMOP_DEF(iemOp_sub_Gv_Ev)
{
    IEMOP_MNEMONIC("sub Gv,Ev");
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rv_rm, &g_iemAImpl_sub);
}


/** Opcode 0x2c - SUB AL,Ib. */
FNIEMOP_DEF(iemOp_sub_Al_Ib)
{
    IEMOP_MNEMONIC("sub al,Ib");
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_AL_Ib, &g_iemAImpl_sub);
}


/** Opcode 0x2d - SUB rAX,Iz. */
FNIEMOP_DEF(iemOp_sub_eAX_Iz)
{
    IEMOP_MNEMONIC("sub rAX,Iz");
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rAX_Iz, &g_iemAImpl_sub);
}


/** Opcode 0x2e - CS segment override prefix; restarts decoding on next byte. */
FNIEMOP_DEF(iemOp_seg_CS)
{
    pIemCpu->fPrefixes |= IEM_OP_PRF_SEG_CS;
    pIemCpu->iEffSeg = X86_SREG_CS;

    uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
    return FNIEMOP_CALL(g_apfnOneByteMap[b]);
}


/** Opcode 0x2f - DAS; not implemented yet (stub). */
FNIEMOP_STUB(iemOp_das);
6042
6043
/** Opcode 0x30 - XOR Eb,Gb (r/m8 ^= r8).  AF is undefined per spec. */
FNIEMOP_DEF(iemOp_xor_Eb_Gb)
{
    IEMOP_MNEMONIC("xor Eb,Gb");
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_r8, &g_iemAImpl_xor);
}


/** Opcode 0x31 - XOR Ev,Gv (r/m ^= reg). */
FNIEMOP_DEF(iemOp_xor_Ev_Gv)
{
    IEMOP_MNEMONIC("xor Ev,Gv");
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_rv, &g_iemAImpl_xor);
}


/** Opcode 0x32 - XOR Gb,Eb (r8 ^= r/m8). */
FNIEMOP_DEF(iemOp_xor_Gb_Eb)
{
    IEMOP_MNEMONIC("xor Gb,Eb");
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_r8_rm, &g_iemAImpl_xor);
}


/** Opcode 0x33 - XOR Gv,Ev (reg ^= r/m). */
FNIEMOP_DEF(iemOp_xor_Gv_Ev)
{
    IEMOP_MNEMONIC("xor Gv,Ev");
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rv_rm, &g_iemAImpl_xor);
}


/** Opcode 0x34 - XOR AL,Ib. */
FNIEMOP_DEF(iemOp_xor_Al_Ib)
{
    IEMOP_MNEMONIC("xor al,Ib");
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_AL_Ib, &g_iemAImpl_xor);
}


/** Opcode 0x35 - XOR rAX,Iz. */
FNIEMOP_DEF(iemOp_xor_eAX_Iz)
{
    IEMOP_MNEMONIC("xor rAX,Iz");
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rAX_Iz, &g_iemAImpl_xor);
}


/** Opcode 0x36 - SS segment override prefix; restarts decoding on next byte. */
FNIEMOP_DEF(iemOp_seg_SS)
{
    pIemCpu->fPrefixes |= IEM_OP_PRF_SEG_SS;
    pIemCpu->iEffSeg = X86_SREG_SS;

    uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
    return FNIEMOP_CALL(g_apfnOneByteMap[b]);
}


/** Opcode 0x37 - AAA; not implemented yet (stub). */
FNIEMOP_STUB(iemOp_aaa);
6111
6112
/** Opcode 0x38 - CMP Eb,Gb.  Same decoder as SUB but the impl only sets flags. */
FNIEMOP_DEF(iemOp_cmp_Eb_Gb)
{
    IEMOP_MNEMONIC("cmp Eb,Gb");
    IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo do we have to decode the whole instruction first? */
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_r8, &g_iemAImpl_cmp);
}


/** Opcode 0x39 - CMP Ev,Gv. */
FNIEMOP_DEF(iemOp_cmp_Ev_Gv)
{
    IEMOP_MNEMONIC("cmp Ev,Gv");
    IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo do we have to decode the whole instruction first? */
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_rv, &g_iemAImpl_cmp);
}


/** Opcode 0x3a - CMP Gb,Eb. */
FNIEMOP_DEF(iemOp_cmp_Gb_Eb)
{
    IEMOP_MNEMONIC("cmp Gb,Eb");
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_r8_rm, &g_iemAImpl_cmp);
}


/** Opcode 0x3b - CMP Gv,Ev. */
FNIEMOP_DEF(iemOp_cmp_Gv_Ev)
{
    IEMOP_MNEMONIC("cmp Gv,Ev");
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rv_rm, &g_iemAImpl_cmp);
}


/** Opcode 0x3c - CMP AL,Ib. */
FNIEMOP_DEF(iemOp_cmp_Al_Ib)
{
    IEMOP_MNEMONIC("cmp al,Ib");
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_AL_Ib, &g_iemAImpl_cmp);
}


/** Opcode 0x3d - CMP rAX,Iz. */
FNIEMOP_DEF(iemOp_cmp_eAX_Iz)
{
    IEMOP_MNEMONIC("cmp rAX,Iz");
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rAX_Iz, &g_iemAImpl_cmp);
}


/** Opcode 0x3e - DS segment override prefix; restarts decoding on next byte. */
FNIEMOP_DEF(iemOp_seg_DS)
{
    pIemCpu->fPrefixes |= IEM_OP_PRF_SEG_DS;
    pIemCpu->iEffSeg = X86_SREG_DS;

    uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
    return FNIEMOP_CALL(g_apfnOneByteMap[b]);
}


/** Opcode 0x3f - AAS; not implemented yet (stub). */
FNIEMOP_STUB(iemOp_aas);
6176
6177/**
6178 * Common 'inc/dec/not/neg register' helper.
6179 */
6180FNIEMOP_DEF_2(iemOpCommonUnaryGReg, PCIEMOPUNARYSIZES, pImpl, uint8_t, iReg)
6181{
6182 IEMOP_HLP_NO_LOCK_PREFIX();
6183 switch (pIemCpu->enmEffOpSize)
6184 {
6185 case IEMMODE_16BIT:
6186 IEM_MC_BEGIN(2, 0);
6187 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
6188 IEM_MC_ARG(uint32_t *, pEFlags, 1);
6189 IEM_MC_REF_GREG_U16(pu16Dst, iReg);
6190 IEM_MC_REF_EFLAGS(pEFlags);
6191 IEM_MC_CALL_VOID_AIMPL_2(pImpl->pfnNormalU16, pu16Dst, pEFlags);
6192 IEM_MC_ADVANCE_RIP();
6193 IEM_MC_END();
6194 return VINF_SUCCESS;
6195
6196 case IEMMODE_32BIT:
6197 IEM_MC_BEGIN(2, 0);
6198 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
6199 IEM_MC_ARG(uint32_t *, pEFlags, 1);
6200 IEM_MC_REF_GREG_U32(pu32Dst, iReg);
6201 IEM_MC_REF_EFLAGS(pEFlags);
6202 IEM_MC_CALL_VOID_AIMPL_2(pImpl->pfnNormalU32, pu32Dst, pEFlags);
6203 IEM_MC_ADVANCE_RIP();
6204 IEM_MC_END();
6205 return VINF_SUCCESS;
6206
6207 case IEMMODE_64BIT:
6208 IEM_MC_BEGIN(2, 0);
6209 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
6210 IEM_MC_ARG(uint32_t *, pEFlags, 1);
6211 IEM_MC_REF_GREG_U64(pu64Dst, iReg);
6212 IEM_MC_REF_EFLAGS(pEFlags);
6213 IEM_MC_CALL_VOID_AIMPL_2(pImpl->pfnNormalU64, pu64Dst, pEFlags);
6214 IEM_MC_ADVANCE_RIP();
6215 IEM_MC_END();
6216 return VINF_SUCCESS;
6217 }
6218 return VINF_SUCCESS;
6219}
6220
6221
/** Opcode 0x40 - INC eAX, or the bare REX prefix in 64-bit mode. */
FNIEMOP_DEF(iemOp_inc_eAX)
{
    /*
     * This is a REX prefix in 64-bit mode.
     */
    if (pIemCpu->enmCpuMode == IEMMODE_64BIT)
    {
        pIemCpu->fPrefixes |= IEM_OP_PRF_REX;

        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    IEMOP_MNEMONIC("inc eAX");
    return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_inc, X86_GREG_xAX);
}


/** Opcode 0x41 - INC eCX, or REX.B in 64-bit mode. */
FNIEMOP_DEF(iemOp_inc_eCX)
{
    /*
     * This is a REX prefix in 64-bit mode.
     */
    if (pIemCpu->enmCpuMode == IEMMODE_64BIT)
    {
        pIemCpu->fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_B;
        pIemCpu->uRexB = 1 << 3; /* extends r/m (and base/opcode-reg) by bit 3 */

        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    IEMOP_MNEMONIC("inc eCX");
    return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_inc, X86_GREG_xCX);
}


/** Opcode 0x42 - INC eDX, or REX.X in 64-bit mode. */
FNIEMOP_DEF(iemOp_inc_eDX)
{
    /*
     * This is a REX prefix in 64-bit mode.
     */
    if (pIemCpu->enmCpuMode == IEMMODE_64BIT)
    {
        pIemCpu->fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_X;
        pIemCpu->uRexIndex = 1 << 3; /* extends the SIB index register */

        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    IEMOP_MNEMONIC("inc eDX");
    return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_inc, X86_GREG_xDX);
}



/** Opcode 0x43 - INC eBX, or REX.BX in 64-bit mode. */
FNIEMOP_DEF(iemOp_inc_eBX)
{
    /*
     * This is a REX prefix in 64-bit mode.
     */
    if (pIemCpu->enmCpuMode == IEMMODE_64BIT)
    {
        pIemCpu->fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_B | IEM_OP_PRF_REX_X;
        pIemCpu->uRexB     = 1 << 3;
        pIemCpu->uRexIndex = 1 << 3;

        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    IEMOP_MNEMONIC("inc eBX");
    return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_inc, X86_GREG_xBX);
}


/** Opcode 0x44 - INC eSP, or REX.R in 64-bit mode. */
FNIEMOP_DEF(iemOp_inc_eSP)
{
    /*
     * This is a REX prefix in 64-bit mode.
     */
    if (pIemCpu->enmCpuMode == IEMMODE_64BIT)
    {
        pIemCpu->fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_R;
        pIemCpu->uRexReg = 1 << 3; /* extends the ModRM reg field */

        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    IEMOP_MNEMONIC("inc eSP");
    return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_inc, X86_GREG_xSP);
}


/** Opcode 0x45 - INC eBP, or REX.RB in 64-bit mode. */
FNIEMOP_DEF(iemOp_inc_eBP)
{
    /*
     * This is a REX prefix in 64-bit mode.
     */
    if (pIemCpu->enmCpuMode == IEMMODE_64BIT)
    {
        pIemCpu->fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_R | IEM_OP_PRF_REX_B;
        pIemCpu->uRexReg = 1 << 3;
        pIemCpu->uRexB   = 1 << 3;

        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    IEMOP_MNEMONIC("inc eBP");
    return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_inc, X86_GREG_xBP);
}


/** Opcode 0x46 - INC eSI, or REX.RX in 64-bit mode. */
FNIEMOP_DEF(iemOp_inc_eSI)
{
    /*
     * This is a REX prefix in 64-bit mode.
     */
    if (pIemCpu->enmCpuMode == IEMMODE_64BIT)
    {
        pIemCpu->fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_R | IEM_OP_PRF_REX_X;
        pIemCpu->uRexReg   = 1 << 3;
        pIemCpu->uRexIndex = 1 << 3;

        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    IEMOP_MNEMONIC("inc eSI");
    return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_inc, X86_GREG_xSI);
}


/** Opcode 0x47 - INC eDI, or REX.RBX in 64-bit mode. */
FNIEMOP_DEF(iemOp_inc_eDI)
{
    /*
     * This is a REX prefix in 64-bit mode.
     */
    if (pIemCpu->enmCpuMode == IEMMODE_64BIT)
    {
        pIemCpu->fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_R | IEM_OP_PRF_REX_B | IEM_OP_PRF_REX_X;
        pIemCpu->uRexReg   = 1 << 3;
        pIemCpu->uRexB     = 1 << 3;
        pIemCpu->uRexIndex = 1 << 3;

        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    IEMOP_MNEMONIC("inc eDI");
    return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_inc, X86_GREG_xDI);
}
6385
6386
/** Opcode 0x48 - DEC eAX, or REX.W in 64-bit mode (forces 64-bit operand
 *  size, hence the effective operand size recalculation). */
FNIEMOP_DEF(iemOp_dec_eAX)
{
    /*
     * This is a REX prefix in 64-bit mode.
     */
    if (pIemCpu->enmCpuMode == IEMMODE_64BIT)
    {
        pIemCpu->fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_SIZE_REX_W;
        iemRecalEffOpSize(pIemCpu);

        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    IEMOP_MNEMONIC("dec eAX");
    return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_dec, X86_GREG_xAX);
}


/** Opcode 0x49 - DEC eCX, or REX.WB in 64-bit mode. */
FNIEMOP_DEF(iemOp_dec_eCX)
{
    /*
     * This is a REX prefix in 64-bit mode.
     */
    if (pIemCpu->enmCpuMode == IEMMODE_64BIT)
    {
        pIemCpu->fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_B | IEM_OP_PRF_SIZE_REX_W;
        pIemCpu->uRexB = 1 << 3;
        iemRecalEffOpSize(pIemCpu);

        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    IEMOP_MNEMONIC("dec eCX");
    return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_dec, X86_GREG_xCX);
}


/** Opcode 0x4a - DEC eDX, or REX.WX in 64-bit mode. */
FNIEMOP_DEF(iemOp_dec_eDX)
{
    /*
     * This is a REX prefix in 64-bit mode.
     */
    if (pIemCpu->enmCpuMode == IEMMODE_64BIT)
    {
        pIemCpu->fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_X | IEM_OP_PRF_SIZE_REX_W;
        pIemCpu->uRexIndex = 1 << 3;
        iemRecalEffOpSize(pIemCpu);

        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    IEMOP_MNEMONIC("dec eDX");
    return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_dec, X86_GREG_xDX);
}


/** Opcode 0x4b - DEC eBX, or REX.WBX in 64-bit mode. */
FNIEMOP_DEF(iemOp_dec_eBX)
{
    /*
     * This is a REX prefix in 64-bit mode.
     */
    if (pIemCpu->enmCpuMode == IEMMODE_64BIT)
    {
        pIemCpu->fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_B | IEM_OP_PRF_REX_X | IEM_OP_PRF_SIZE_REX_W;
        pIemCpu->uRexB     = 1 << 3;
        pIemCpu->uRexIndex = 1 << 3;
        iemRecalEffOpSize(pIemCpu);

        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    IEMOP_MNEMONIC("dec eBX");
    return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_dec, X86_GREG_xBX);
}


/** Opcode 0x4c - DEC eSP, or REX.WR in 64-bit mode. */
FNIEMOP_DEF(iemOp_dec_eSP)
{
    /*
     * This is a REX prefix in 64-bit mode.
     */
    if (pIemCpu->enmCpuMode == IEMMODE_64BIT)
    {
        pIemCpu->fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_R | IEM_OP_PRF_SIZE_REX_W;
        pIemCpu->uRexReg = 1 << 3;
        iemRecalEffOpSize(pIemCpu);

        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    IEMOP_MNEMONIC("dec eSP");
    return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_dec, X86_GREG_xSP);
}


/** Opcode 0x4d - DEC eBP, or REX.WRB in 64-bit mode. */
FNIEMOP_DEF(iemOp_dec_eBP)
{
    /*
     * This is a REX prefix in 64-bit mode.
     */
    if (pIemCpu->enmCpuMode == IEMMODE_64BIT)
    {
        pIemCpu->fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_R | IEM_OP_PRF_REX_B | IEM_OP_PRF_SIZE_REX_W;
        pIemCpu->uRexReg = 1 << 3;
        pIemCpu->uRexB   = 1 << 3;
        iemRecalEffOpSize(pIemCpu);

        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    IEMOP_MNEMONIC("dec eBP");
    return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_dec, X86_GREG_xBP);
}


/** Opcode 0x4e - DEC eSI, or REX.WRX in 64-bit mode. */
FNIEMOP_DEF(iemOp_dec_eSI)
{
    /*
     * This is a REX prefix in 64-bit mode.
     */
    if (pIemCpu->enmCpuMode == IEMMODE_64BIT)
    {
        pIemCpu->fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_R | IEM_OP_PRF_REX_X | IEM_OP_PRF_SIZE_REX_W;
        pIemCpu->uRexReg   = 1 << 3;
        pIemCpu->uRexIndex = 1 << 3;
        iemRecalEffOpSize(pIemCpu);

        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    IEMOP_MNEMONIC("dec eSI");
    return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_dec, X86_GREG_xSI);
}


/** Opcode 0x4f - DEC eDI, or REX.WRBX in 64-bit mode. */
FNIEMOP_DEF(iemOp_dec_eDI)
{
    /*
     * This is a REX prefix in 64-bit mode.
     */
    if (pIemCpu->enmCpuMode == IEMMODE_64BIT)
    {
        pIemCpu->fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_R | IEM_OP_PRF_REX_B | IEM_OP_PRF_REX_X | IEM_OP_PRF_SIZE_REX_W;
        pIemCpu->uRexReg   = 1 << 3;
        pIemCpu->uRexB     = 1 << 3;
        pIemCpu->uRexIndex = 1 << 3;
        iemRecalEffOpSize(pIemCpu);

        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    IEMOP_MNEMONIC("dec eDI");
    return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_dec, X86_GREG_xDI);
}
6557
6558
6559/**
6560 * Common 'push register' helper.
6561 */
6562FNIEMOP_DEF_1(iemOpCommonPushGReg, uint8_t, iReg)
6563{
6564 IEMOP_HLP_NO_LOCK_PREFIX();
6565 if (pIemCpu->enmCpuMode == IEMMODE_64BIT)
6566 {
6567 iReg |= pIemCpu->uRexB;
6568 pIemCpu->enmDefOpSize = IEMMODE_64BIT;
6569 pIemCpu->enmEffOpSize = !(pIemCpu->fPrefixes & IEM_OP_PRF_SIZE_OP) ? IEMMODE_64BIT : IEMMODE_16BIT;
6570 }
6571
6572 switch (pIemCpu->enmEffOpSize)
6573 {
6574 case IEMMODE_16BIT:
6575 IEM_MC_BEGIN(0, 1);
6576 IEM_MC_LOCAL(uint16_t, u16Value);
6577 IEM_MC_FETCH_GREG_U16(u16Value, iReg);
6578 IEM_MC_PUSH_U16(u16Value);
6579 IEM_MC_ADVANCE_RIP();
6580 IEM_MC_END();
6581 break;
6582
6583 case IEMMODE_32BIT:
6584 IEM_MC_BEGIN(0, 1);
6585 IEM_MC_LOCAL(uint32_t, u32Value);
6586 IEM_MC_FETCH_GREG_U32(u32Value, iReg);
6587 IEM_MC_PUSH_U32(u32Value);
6588 IEM_MC_ADVANCE_RIP();
6589 IEM_MC_END();
6590 break;
6591
6592 case IEMMODE_64BIT:
6593 IEM_MC_BEGIN(0, 1);
6594 IEM_MC_LOCAL(uint64_t, u64Value);
6595 IEM_MC_FETCH_GREG_U64(u64Value, iReg);
6596 IEM_MC_PUSH_U64(u64Value);
6597 IEM_MC_ADVANCE_RIP();
6598 IEM_MC_END();
6599 break;
6600 }
6601
6602 return VINF_SUCCESS;
6603}
6604
6605
/** Opcode 0x50 - PUSH rAX.  (REX.B extension handled by the common helper.) */
FNIEMOP_DEF(iemOp_push_eAX)
{
    IEMOP_MNEMONIC("push rAX");
    return FNIEMOP_CALL_1(iemOpCommonPushGReg, X86_GREG_xAX);
}


/** Opcode 0x51 - PUSH rCX. */
FNIEMOP_DEF(iemOp_push_eCX)
{
    IEMOP_MNEMONIC("push rCX");
    return FNIEMOP_CALL_1(iemOpCommonPushGReg, X86_GREG_xCX);
}


/** Opcode 0x52 - PUSH rDX. */
FNIEMOP_DEF(iemOp_push_eDX)
{
    IEMOP_MNEMONIC("push rDX");
    return FNIEMOP_CALL_1(iemOpCommonPushGReg, X86_GREG_xDX);
}


/** Opcode 0x53 - PUSH rBX. */
FNIEMOP_DEF(iemOp_push_eBX)
{
    IEMOP_MNEMONIC("push rBX");
    return FNIEMOP_CALL_1(iemOpCommonPushGReg, X86_GREG_xBX);
}


/** Opcode 0x54 - PUSH rSP. */
FNIEMOP_DEF(iemOp_push_eSP)
{
    IEMOP_MNEMONIC("push rSP");
    return FNIEMOP_CALL_1(iemOpCommonPushGReg, X86_GREG_xSP);
}


/** Opcode 0x55 - PUSH rBP. */
FNIEMOP_DEF(iemOp_push_eBP)
{
    IEMOP_MNEMONIC("push rBP");
    return FNIEMOP_CALL_1(iemOpCommonPushGReg, X86_GREG_xBP);
}


/** Opcode 0x56 - PUSH rSI. */
FNIEMOP_DEF(iemOp_push_eSI)
{
    IEMOP_MNEMONIC("push rSI");
    return FNIEMOP_CALL_1(iemOpCommonPushGReg, X86_GREG_xSI);
}


/** Opcode 0x57 - PUSH rDI. */
FNIEMOP_DEF(iemOp_push_eDI)
{
    IEMOP_MNEMONIC("push rDI");
    return FNIEMOP_CALL_1(iemOpCommonPushGReg, X86_GREG_xDI);
}
6668
6669
6670/**
6671 * Common 'pop register' helper.
6672 */
6673FNIEMOP_DEF_1(iemOpCommonPopGReg, uint8_t, iReg)
6674{
6675 IEMOP_HLP_NO_LOCK_PREFIX();
6676 if (pIemCpu->enmCpuMode == IEMMODE_64BIT)
6677 {
6678 iReg |= pIemCpu->uRexB;
6679 pIemCpu->enmDefOpSize = IEMMODE_64BIT;
6680 pIemCpu->enmEffOpSize = !(pIemCpu->fPrefixes & IEM_OP_PRF_SIZE_OP) ? IEMMODE_64BIT : IEMMODE_16BIT;
6681 }
6682
6683/** @todo How does this code handle iReg==X86_GREG_xSP. How does a real CPU
6684 * handle it, for that matter (Intel pseudo code hints that the popped
6685 * value is incremented by the stack item size.) Test it, both encodings
6686 * and all three register sizes. */
6687 switch (pIemCpu->enmEffOpSize)
6688 {
6689 case IEMMODE_16BIT:
6690 IEM_MC_BEGIN(0, 1);
6691 IEM_MC_LOCAL(uint16_t, *pu16Dst);
6692 IEM_MC_REF_GREG_U16(pu16Dst, iReg);
6693 IEM_MC_POP_U16(pu16Dst);
6694 IEM_MC_ADVANCE_RIP();
6695 IEM_MC_END();
6696 break;
6697
6698 case IEMMODE_32BIT:
6699 IEM_MC_BEGIN(0, 1);
6700 IEM_MC_LOCAL(uint32_t, *pu32Dst);
6701 IEM_MC_REF_GREG_U32(pu32Dst, iReg);
6702 IEM_MC_POP_U32(pu32Dst);
6703 IEM_MC_ADVANCE_RIP();
6704 IEM_MC_END();
6705 break;
6706
6707 case IEMMODE_64BIT:
6708 IEM_MC_BEGIN(0, 1);
6709 IEM_MC_LOCAL(uint64_t, *pu64Dst);
6710 IEM_MC_REF_GREG_U64(pu64Dst, iReg);
6711 IEM_MC_POP_U64(pu64Dst);
6712 IEM_MC_ADVANCE_RIP();
6713 IEM_MC_END();
6714 break;
6715 }
6716
6717 return VINF_SUCCESS;
6718}
6719
6720
/** Opcode 0x58 - POP rAX.  (REX.B extension handled by the common helper.) */
FNIEMOP_DEF(iemOp_pop_eAX)
{
    IEMOP_MNEMONIC("pop rAX");
    return FNIEMOP_CALL_1(iemOpCommonPopGReg, X86_GREG_xAX);
}


/** Opcode 0x59 - POP rCX. */
FNIEMOP_DEF(iemOp_pop_eCX)
{
    IEMOP_MNEMONIC("pop rCX");
    return FNIEMOP_CALL_1(iemOpCommonPopGReg, X86_GREG_xCX);
}


/** Opcode 0x5a - POP rDX. */
FNIEMOP_DEF(iemOp_pop_eDX)
{
    IEMOP_MNEMONIC("pop rDX");
    return FNIEMOP_CALL_1(iemOpCommonPopGReg, X86_GREG_xDX);
}


/** Opcode 0x5b - POP rBX. */
FNIEMOP_DEF(iemOp_pop_eBX)
{
    IEMOP_MNEMONIC("pop rBX");
    return FNIEMOP_CALL_1(iemOpCommonPopGReg, X86_GREG_xBX);
}


/** Opcode 0x5c - POP rSP (see the SP-handling todo in the common helper). */
FNIEMOP_DEF(iemOp_pop_eSP)
{
    IEMOP_MNEMONIC("pop rSP");
    return FNIEMOP_CALL_1(iemOpCommonPopGReg, X86_GREG_xSP);
}


/** Opcode 0x5d - POP rBP. */
FNIEMOP_DEF(iemOp_pop_eBP)
{
    IEMOP_MNEMONIC("pop rBP");
    return FNIEMOP_CALL_1(iemOpCommonPopGReg, X86_GREG_xBP);
}


/** Opcode 0x5e - POP rSI. */
FNIEMOP_DEF(iemOp_pop_eSI)
{
    IEMOP_MNEMONIC("pop rSI");
    return FNIEMOP_CALL_1(iemOpCommonPopGReg, X86_GREG_xSI);
}


/** Opcode 0x5f - POP rDI. */
FNIEMOP_DEF(iemOp_pop_eDI)
{
    IEMOP_MNEMONIC("pop rDI");
    return FNIEMOP_CALL_1(iemOpCommonPopGReg, X86_GREG_xDI);
}
6783
6784
/** Opcode 0x60 - PUSHA/PUSHAD.  Invalid in 64-bit mode; deferred to the C
 *  implementation selected by the effective operand size (16 vs 32 bit). */
FNIEMOP_DEF(iemOp_pusha)
{
    IEMOP_MNEMONIC("pusha");
    IEMOP_HLP_NO_64BIT();
    if (pIemCpu->enmEffOpSize == IEMMODE_16BIT)
        return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_pusha_16);
    Assert(pIemCpu->enmEffOpSize == IEMMODE_32BIT); /* 64-bit already excluded above */
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_pusha_32);
}
6795
6796
/** Opcode 0x61 - POPA/POPAD.  Invalid in 64-bit mode; deferred to the C
 *  implementation selected by the effective operand size (16 vs 32 bit). */
FNIEMOP_DEF(iemOp_popa)
{
    IEMOP_MNEMONIC("popa");
    IEMOP_HLP_NO_64BIT();
    if (pIemCpu->enmEffOpSize == IEMMODE_16BIT)
        return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_popa_16);
    Assert(pIemCpu->enmEffOpSize == IEMMODE_32BIT); /* 64-bit already excluded above */
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_popa_32);
}
6807
6808
/** Opcode 0x62 - BOUND Gv,Ma; not implemented yet (stub). */
FNIEMOP_STUB(iemOp_bound_Gv_Ma);

/** Opcode 0x63, non-64-bit modes - ARPL Ew,Gw; not implemented yet (stub).
 *  (In 64-bit mode this opcode is MOVSXD, handled by iemOp_movsxd_Gv_Ev.) */
FNIEMOP_STUB(iemOp_arpl_Ew_Gw);
6814
6815
/** Opcode 0x63 - MOVSXD Gv,Ev (64-bit mode only): sign-extend a 32-bit
 * register/memory operand into a 64-bit register.
 * @note This is a weird one. It works like a regular move instruction if
 * REX.W isn't set, at least according to AMD docs (rev 3.15, 2009-11).
 * @todo This definitely needs a testcase to verify the odd cases. */
FNIEMOP_DEF(iemOp_movsxd_Gv_Ev)
{
    Assert(pIemCpu->enmEffOpSize == IEMMODE_64BIT); /* Caller branched already . */

    IEMOP_MNEMONIC("movsxd Gv,Ev");
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /*
         * Register to register.
         */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(uint64_t, u64Value);
        IEM_MC_FETCH_GREG_U32_SX_U64(u64Value, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
        IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u64Value);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * We're loading a register from memory.
         */
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(uint64_t, u64Value);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_FETCH_MEM_U32_SX_U64(u64Value, pIemCpu->iEffSeg, GCPtrEffDst);
        IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u64Value);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
6857
6858
/** Opcode 0x64 - FS segment override prefix; restarts decoding on next byte. */
FNIEMOP_DEF(iemOp_seg_FS)
{
    pIemCpu->fPrefixes |= IEM_OP_PRF_SEG_FS;
    pIemCpu->iEffSeg = X86_SREG_FS;

    uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
    return FNIEMOP_CALL(g_apfnOneByteMap[b]);
}


/** Opcode 0x65 - GS segment override prefix; restarts decoding on next byte. */
FNIEMOP_DEF(iemOp_seg_GS)
{
    pIemCpu->fPrefixes |= IEM_OP_PRF_SEG_GS;
    pIemCpu->iEffSeg = X86_SREG_GS;

    uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
    return FNIEMOP_CALL(g_apfnOneByteMap[b]);
}
6879
6880
/** Opcode 0x66 - operand-size override prefix.  Records the prefix,
 *  recomputes the effective operand size and restarts decoding. */
FNIEMOP_DEF(iemOp_op_size)
{
    pIemCpu->fPrefixes |= IEM_OP_PRF_SIZE_OP;
    iemRecalEffOpSize(pIemCpu);

    uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
    return FNIEMOP_CALL(g_apfnOneByteMap[b]);
}
6890
6891
/** Opcode 0x67 - address-size override prefix.  Toggles the effective
 *  address mode relative to the default (16<->32; 64-bit defaults to 32)
 *  and restarts decoding. */
FNIEMOP_DEF(iemOp_addr_size)
{
    pIemCpu->fPrefixes |= IEM_OP_PRF_SIZE_ADDR;
    switch (pIemCpu->enmDefAddrMode)
    {
        case IEMMODE_16BIT: pIemCpu->enmEffAddrMode = IEMMODE_32BIT; break;
        case IEMMODE_32BIT: pIemCpu->enmEffAddrMode = IEMMODE_16BIT; break;
        case IEMMODE_64BIT: pIemCpu->enmEffAddrMode = IEMMODE_32BIT; break;
        default: AssertFailed();
    }

    uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
    return FNIEMOP_CALL(g_apfnOneByteMap[b]);
}
6907
6908
/** Opcode 0x68 - PUSH Iz: push an immediate sized by the effective operand
 *  size (64-bit uses a sign-extended 32-bit immediate). */
FNIEMOP_DEF(iemOp_push_Iz)
{
    IEMOP_MNEMONIC("push Iz");
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
    switch (pIemCpu->enmEffOpSize)
    {
        case IEMMODE_16BIT:
        {
            uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
            IEMOP_HLP_NO_LOCK_PREFIX();
            IEM_MC_BEGIN(0,0);
            IEM_MC_PUSH_U16(u16Imm);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;
        }

        case IEMMODE_32BIT:
        {
            uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
            IEMOP_HLP_NO_LOCK_PREFIX();
            IEM_MC_BEGIN(0,0);
            IEM_MC_PUSH_U32(u32Imm);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;
        }

        case IEMMODE_64BIT:
        {
            uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm);
            IEMOP_HLP_NO_LOCK_PREFIX();
            IEM_MC_BEGIN(0,0);
            IEM_MC_PUSH_U64(u64Imm);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;
        }

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
6952
6953
/** Opcode 0x69 - IMUL Gv,Ev,Iz: three-operand signed multiply, Gv = Ev * Iz.
 * For the memory forms the effective address (ModRM/SIB/disp) is decoded
 * before the trailing immediate, matching the instruction byte order.
 * SF/ZF/AF/PF are registered as undefined for the verifier. */
FNIEMOP_DEF(iemOp_imul_Gv_Ev_Iz)
{
    IEMOP_MNEMONIC("imul Gv,Ev,Iz"); /* Gv = Ev * Iz; */
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);

    switch (pIemCpu->enmEffOpSize)
    {
        case IEMMODE_16BIT:
        {
            if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
            {
                /* register operand */
                uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

                IEM_MC_BEGIN(3, 1);
                IEM_MC_ARG(uint16_t *,      pu16Dst,            0);
                IEM_MC_ARG_CONST(uint16_t,  u16Src,/*=*/ u16Imm,1);
                IEM_MC_ARG(uint32_t *,      pEFlags,            2);
                IEM_MC_LOCAL(uint16_t,      u16Tmp);

                /* Multiply in a local, then store the product into the reg field register. */
                IEM_MC_FETCH_GREG_U16(u16Tmp, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_REF_LOCAL(pu16Dst, u16Tmp);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_imul_two_u16, pu16Dst, u16Src, pEFlags);
                IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u16Tmp);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            else
            {
                /* memory operand */
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint16_t *,      pu16Dst,            0);
                IEM_MC_ARG(uint16_t,        u16Src,             1);
                IEM_MC_ARG(uint32_t *,      pEFlags,            2);
                IEM_MC_LOCAL(uint16_t,      u16Tmp);
                IEM_MC_LOCAL(RTGCPTR,       GCPtrEffDst);

                /* Effective address first, then the immediate that follows it. */
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm);
                uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
                IEM_MC_ASSIGN(u16Src, u16Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_MEM_U16(u16Tmp, pIemCpu->iEffSeg, GCPtrEffDst);
                IEM_MC_REF_LOCAL(pu16Dst, u16Tmp);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_imul_two_u16, pu16Dst, u16Src, pEFlags);
                IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u16Tmp);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            return VINF_SUCCESS;
        }

        case IEMMODE_32BIT:
        {
            if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
            {
                /* register operand */
                uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

                IEM_MC_BEGIN(3, 1);
                IEM_MC_ARG(uint32_t *,      pu32Dst,            0);
                IEM_MC_ARG_CONST(uint32_t,  u32Src,/*=*/ u32Imm,1);
                IEM_MC_ARG(uint32_t *,      pEFlags,            2);
                IEM_MC_LOCAL(uint32_t,      u32Tmp);

                IEM_MC_FETCH_GREG_U32(u32Tmp, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_REF_LOCAL(pu32Dst, u32Tmp);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_imul_two_u32, pu32Dst, u32Src, pEFlags);
                IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u32Tmp);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            else
            {
                /* memory operand */
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint32_t *,      pu32Dst,            0);
                IEM_MC_ARG(uint32_t,        u32Src,             1);
                IEM_MC_ARG(uint32_t *,      pEFlags,            2);
                IEM_MC_LOCAL(uint32_t,      u32Tmp);
                IEM_MC_LOCAL(RTGCPTR,       GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm);
                uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
                IEM_MC_ASSIGN(u32Src, u32Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_MEM_U32(u32Tmp, pIemCpu->iEffSeg, GCPtrEffDst);
                IEM_MC_REF_LOCAL(pu32Dst, u32Tmp);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_imul_two_u32, pu32Dst, u32Src, pEFlags);
                IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u32Tmp);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            return VINF_SUCCESS;
        }

        case IEMMODE_64BIT:
        {
            if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
            {
                /* register operand; the imm32 is sign-extended to 64 bits */
                uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

                IEM_MC_BEGIN(3, 1);
                IEM_MC_ARG(uint64_t *,      pu64Dst,            0);
                IEM_MC_ARG_CONST(uint64_t,  u64Src,/*=*/ u64Imm,1);
                IEM_MC_ARG(uint32_t *,      pEFlags,            2);
                IEM_MC_LOCAL(uint64_t,      u64Tmp);

                IEM_MC_FETCH_GREG_U64(u64Tmp, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_REF_LOCAL(pu64Dst, u64Tmp);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_imul_two_u64, pu64Dst, u64Src, pEFlags);
                IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u64Tmp);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            else
            {
                /* memory operand */
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint64_t *,      pu64Dst,            0);
                IEM_MC_ARG(uint64_t,        u64Src,             1);
                IEM_MC_ARG(uint32_t *,      pEFlags,            2);
                IEM_MC_LOCAL(uint64_t,      u64Tmp);
                IEM_MC_LOCAL(RTGCPTR,       GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm);
                uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm);
                IEM_MC_ASSIGN(u64Src, u64Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_MEM_U64(u64Tmp, pIemCpu->iEffSeg, GCPtrEffDst);
                IEM_MC_REF_LOCAL(pu64Dst, u64Tmp);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_imul_two_u64, pu64Dst, u64Src, pEFlags);
                IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u64Tmp);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            return VINF_SUCCESS;
        }
    }
    AssertFailedReturn(VERR_INTERNAL_ERROR_3);
}
7112
7113
/** Opcode 0x6a - PUSH Ib: push a sign-extended byte immediate; i8Imm is
 *  widened implicitly to the effective operand size by the push macros. */
FNIEMOP_DEF(iemOp_push_Ib)
{
    IEMOP_MNEMONIC("push Ib");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();

    IEM_MC_BEGIN(0,0);
    switch (pIemCpu->enmEffOpSize)
    {
        case IEMMODE_16BIT:
            IEM_MC_PUSH_U16(i8Imm);
            break;
        case IEMMODE_32BIT:
            IEM_MC_PUSH_U32(i8Imm);
            break;
        case IEMMODE_64BIT:
            IEM_MC_PUSH_U64(i8Imm);
            break;
    }
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
7139
7140
/** Opcode 0x6b. */
FNIEMOP_DEF(iemOp_imul_Gv_Ev_Ib)
{
    /*
     * IMUL Gv,Ev,Ib - three-operand signed multiply with a sign-extended
     * 8-bit immediate: Gv = Ev * (sign-extended)Ib.
     *
     * For each effective operand size the source value is fetched into a
     * local temporary, multiplied in place via iemAImpl_imul_two_uXX (which
     * takes the destination by reference), and the result is then stored
     * into the general register named by ModRM.reg.
     */
    IEMOP_MNEMONIC("imul Gv,Ev,Ib"); /* Gv = Ev * Ib (sign-extended byte immediate) */
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    /* IMUL leaves SF, ZF, AF and PF undefined. */
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);

    switch (pIemCpu->enmEffOpSize)
    {
        case IEMMODE_16BIT:
            if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
            {
                /* register operand */
                uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

                IEM_MC_BEGIN(3, 1);
                IEM_MC_ARG(uint16_t *,      pu16Dst,                    0);
                IEM_MC_ARG_CONST(uint16_t,  u16Src,/*=*/ (int8_t)u8Imm, 1); /* cast sign-extends the immediate */
                IEM_MC_ARG(uint32_t *,      pEFlags,                    2);
                IEM_MC_LOCAL(uint16_t,      u16Tmp);

                IEM_MC_FETCH_GREG_U16(u16Tmp, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_REF_LOCAL(pu16Dst, u16Tmp);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_imul_two_u16, pu16Dst, u16Src, pEFlags);
                IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u16Tmp);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            else
            {
                /* memory operand */
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint16_t *,  pu16Dst, 0);
                IEM_MC_ARG(uint16_t,    u16Src,  1);
                IEM_MC_ARG(uint32_t *,  pEFlags, 2);
                IEM_MC_LOCAL(uint16_t,  u16Tmp);
                IEM_MC_LOCAL(RTGCPTR,   GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm);
                /* The immediate follows the ModR/M displacement bytes, so it
                   is fetched only after the effective address is decoded. */
                uint16_t u16Imm; IEM_OPCODE_GET_NEXT_S8_SX_U16(&u16Imm);
                IEM_MC_ASSIGN(u16Src, u16Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_MEM_U16(u16Tmp, pIemCpu->iEffSeg, GCPtrEffDst);
                IEM_MC_REF_LOCAL(pu16Dst, u16Tmp);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_imul_two_u16, pu16Dst, u16Src, pEFlags);
                IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u16Tmp);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            return VINF_SUCCESS;

        case IEMMODE_32BIT:
            if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
            {
                /* register operand */
                uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

                IEM_MC_BEGIN(3, 1);
                IEM_MC_ARG(uint32_t *,      pu32Dst,                    0);
                IEM_MC_ARG_CONST(uint32_t,  u32Src,/*=*/ (int8_t)u8Imm, 1); /* cast sign-extends the immediate */
                IEM_MC_ARG(uint32_t *,      pEFlags,                    2);
                IEM_MC_LOCAL(uint32_t,      u32Tmp);

                IEM_MC_FETCH_GREG_U32(u32Tmp, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_REF_LOCAL(pu32Dst, u32Tmp);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_imul_two_u32, pu32Dst, u32Src, pEFlags);
                IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u32Tmp);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            else
            {
                /* memory operand */
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint32_t *,  pu32Dst, 0);
                IEM_MC_ARG(uint32_t,    u32Src,  1);
                IEM_MC_ARG(uint32_t *,  pEFlags, 2);
                IEM_MC_LOCAL(uint32_t,  u32Tmp);
                IEM_MC_LOCAL(RTGCPTR,   GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm);
                /* Immediate follows the ModR/M displacement; fetch afterwards. */
                uint32_t u32Imm; IEM_OPCODE_GET_NEXT_S8_SX_U32(&u32Imm);
                IEM_MC_ASSIGN(u32Src, u32Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_MEM_U32(u32Tmp, pIemCpu->iEffSeg, GCPtrEffDst);
                IEM_MC_REF_LOCAL(pu32Dst, u32Tmp);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_imul_two_u32, pu32Dst, u32Src, pEFlags);
                IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u32Tmp);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            return VINF_SUCCESS;

        case IEMMODE_64BIT:
            if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
            {
                /* register operand */
                uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

                IEM_MC_BEGIN(3, 1);
                IEM_MC_ARG(uint64_t *,      pu64Dst,                    0);
                IEM_MC_ARG_CONST(uint64_t,  u64Src,/*=*/ (int8_t)u8Imm, 1); /* cast sign-extends the immediate */
                IEM_MC_ARG(uint32_t *,      pEFlags,                    2);
                IEM_MC_LOCAL(uint64_t,      u64Tmp);

                IEM_MC_FETCH_GREG_U64(u64Tmp, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_REF_LOCAL(pu64Dst, u64Tmp);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_imul_two_u64, pu64Dst, u64Src, pEFlags);
                IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u64Tmp);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            else
            {
                /* memory operand */
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint64_t *,  pu64Dst, 0);
                IEM_MC_ARG(uint64_t,    u64Src,  1);
                IEM_MC_ARG(uint32_t *,  pEFlags, 2);
                IEM_MC_LOCAL(uint64_t,  u64Tmp);
                IEM_MC_LOCAL(RTGCPTR,   GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm);
                /* Immediate follows the ModR/M displacement; fetch afterwards. */
                uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S8_SX_U64(&u64Imm);
                IEM_MC_ASSIGN(u64Src, u64Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_MEM_U64(u64Tmp, pIemCpu->iEffSeg, GCPtrEffDst);
                IEM_MC_REF_LOCAL(pu64Dst, u64Tmp);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_imul_two_u64, pu64Dst, u64Src, pEFlags);
                IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u64Tmp);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            return VINF_SUCCESS;
    }
    AssertFailedReturn(VERR_INTERNAL_ERROR_3);
}
7293
7294
/** Opcode 0x6c. */
FNIEMOP_DEF(iemOp_insb_Yb_DX)
{
    /*
     * INS/INSB - input byte(s) from port DX to ES:[(r|e)di].
     *
     * Too complex for the micro-code macros: defers to a C implementation,
     * selecting the REP vs. plain variant and the address-size flavour.
     * REPNZ is treated the same as REPZ here (both select the rep variant).
     */
    IEMOP_HLP_NO_LOCK_PREFIX();
    if (pIemCpu->fPrefixes & (IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
    {
        IEMOP_MNEMONIC("rep ins Yb,DX");
        switch (pIemCpu->enmEffAddrMode)
        {
            case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_rep_ins_op8_addr16);
            case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_rep_ins_op8_addr32);
            case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_rep_ins_op8_addr64);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        IEMOP_MNEMONIC("ins Yb,DX");
        switch (pIemCpu->enmEffAddrMode)
        {
            case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_ins_op8_addr16);
            case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_ins_op8_addr32);
            case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_ins_op8_addr64);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
7322
7323
/** Opcode 0x6d. */
FNIEMOP_DEF(iemOp_inswd_Yv_DX)
{
    /*
     * INS/INSW/INSD - input word/dword(s) from port DX to ES:[(r|e)di].
     *
     * Defers to the C implementation matching the REP prefix, effective
     * operand size and address size.  A 64-bit operand size is folded into
     * the 32-bit case (case fall-through below), matching the op32 workers.
     */
    IEMOP_HLP_NO_LOCK_PREFIX();
    if (pIemCpu->fPrefixes & (IEM_OP_PRF_REPZ | IEM_OP_PRF_REPNZ))
    {
        IEMOP_MNEMONIC("rep ins Yv,DX");
        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
                switch (pIemCpu->enmEffAddrMode)
                {
                    case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_rep_ins_op16_addr16);
                    case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_rep_ins_op16_addr32);
                    case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_rep_ins_op16_addr64);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                break;
            case IEMMODE_64BIT: /* 64-bit operand size uses the 32-bit workers. */
            case IEMMODE_32BIT:
                switch (pIemCpu->enmEffAddrMode)
                {
                    case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_rep_ins_op32_addr16);
                    case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_rep_ins_op32_addr32);
                    case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_rep_ins_op32_addr64);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                break;
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        IEMOP_MNEMONIC("ins Yv,DX");
        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
                switch (pIemCpu->enmEffAddrMode)
                {
                    case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_ins_op16_addr16);
                    case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_ins_op16_addr32);
                    case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_ins_op16_addr64);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                break;
            case IEMMODE_64BIT: /* 64-bit operand size uses the 32-bit workers. */
            case IEMMODE_32BIT:
                switch (pIemCpu->enmEffAddrMode)
                {
                    case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_ins_op32_addr16);
                    case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_ins_op32_addr32);
                    case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_ins_op32_addr64);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                break;
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
7383
7384
/** Opcode 0x6e. */
FNIEMOP_DEF(iemOp_outsb_Yb_DX)
{
    /*
     * OUTS/OUTSB - output byte(s) from DS:[(r|e)si] (segment overridable,
     * hence iEffSeg is passed along) to port DX.
     *
     * Defers to a C implementation, selecting the REP vs. plain variant and
     * the address-size flavour.
     */
    IEMOP_HLP_NO_LOCK_PREFIX();
    if (pIemCpu->fPrefixes & (IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
    {
        IEMOP_MNEMONIC("rep out DX,Yb");
        switch (pIemCpu->enmEffAddrMode)
        {
            case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_outs_op8_addr16, pIemCpu->iEffSeg);
            case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_outs_op8_addr32, pIemCpu->iEffSeg);
            case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_outs_op8_addr64, pIemCpu->iEffSeg);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        IEMOP_MNEMONIC("out DX,Yb");
        switch (pIemCpu->enmEffAddrMode)
        {
            case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_outs_op8_addr16, pIemCpu->iEffSeg);
            case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_outs_op8_addr32, pIemCpu->iEffSeg);
            case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_outs_op8_addr64, pIemCpu->iEffSeg);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
7412
7413
/** Opcode 0x6f. */
FNIEMOP_DEF(iemOp_outswd_Yv_DX)
{
    /*
     * OUTS/OUTSW/OUTSD - output word/dword(s) from DS:[(r|e)si] (segment
     * overridable, hence iEffSeg is passed along) to port DX.
     *
     * Defers to the C implementation matching the REP prefix, effective
     * operand size and address size.  A 64-bit operand size is folded into
     * the 32-bit case (case fall-through below), matching the op32 workers.
     */
    IEMOP_HLP_NO_LOCK_PREFIX();
    if (pIemCpu->fPrefixes & (IEM_OP_PRF_REPZ | IEM_OP_PRF_REPNZ))
    {
        IEMOP_MNEMONIC("rep outs DX,Yv");
        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
                switch (pIemCpu->enmEffAddrMode)
                {
                    case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_outs_op16_addr16, pIemCpu->iEffSeg);
                    case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_outs_op16_addr32, pIemCpu->iEffSeg);
                    case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_outs_op16_addr64, pIemCpu->iEffSeg);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                break;
            case IEMMODE_64BIT: /* 64-bit operand size uses the 32-bit workers. */
            case IEMMODE_32BIT:
                switch (pIemCpu->enmEffAddrMode)
                {
                    case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_outs_op32_addr16, pIemCpu->iEffSeg);
                    case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_outs_op32_addr32, pIemCpu->iEffSeg);
                    case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_outs_op32_addr64, pIemCpu->iEffSeg);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                break;
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        IEMOP_MNEMONIC("outs DX,Yv");
        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
                switch (pIemCpu->enmEffAddrMode)
                {
                    case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_outs_op16_addr16, pIemCpu->iEffSeg);
                    case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_outs_op16_addr32, pIemCpu->iEffSeg);
                    case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_outs_op16_addr64, pIemCpu->iEffSeg);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                break;
            case IEMMODE_64BIT: /* 64-bit operand size uses the 32-bit workers. */
            case IEMMODE_32BIT:
                switch (pIemCpu->enmEffAddrMode)
                {
                    case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_outs_op32_addr16, pIemCpu->iEffSeg);
                    case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_outs_op32_addr32, pIemCpu->iEffSeg);
                    case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_outs_op32_addr64, pIemCpu->iEffSeg);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                break;
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
7473
7474
/** Opcode 0x70. */
FNIEMOP_DEF(iemOp_jo_Jb)
{
    /* JO rel8: jump short if the overflow flag (OF) is set. */
    IEMOP_MNEMONIC("jo Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE(); /* near branches default to 64-bit operand size in long mode */

    IEM_MC_BEGIN(0, 0);
    IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
        IEM_MC_REL_JMP_S8(i8Imm);
    } IEM_MC_ELSE() {
        IEM_MC_ADVANCE_RIP();
    } IEM_MC_ENDIF();
    IEM_MC_END();
    return VINF_SUCCESS;
}
7492
7493
/** Opcode 0x71. */
FNIEMOP_DEF(iemOp_jno_Jb)
{
    /* JNO rel8: jump short if the overflow flag (OF) is clear (branches are swapped vs. JO). */
    IEMOP_MNEMONIC("jno Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();

    IEM_MC_BEGIN(0, 0);
    IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
        IEM_MC_ADVANCE_RIP();
    } IEM_MC_ELSE() {
        IEM_MC_REL_JMP_S8(i8Imm);
    } IEM_MC_ENDIF();
    IEM_MC_END();
    return VINF_SUCCESS;
}
7511
/** Opcode 0x72. */
FNIEMOP_DEF(iemOp_jc_Jb)
{
    /* JC/JB/JNAE rel8: jump short if the carry flag (CF) is set. */
    IEMOP_MNEMONIC("jc/jnae Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();

    IEM_MC_BEGIN(0, 0);
    IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
        IEM_MC_REL_JMP_S8(i8Imm);
    } IEM_MC_ELSE() {
        IEM_MC_ADVANCE_RIP();
    } IEM_MC_ENDIF();
    IEM_MC_END();
    return VINF_SUCCESS;
}
7529
7530
/** Opcode 0x73. */
FNIEMOP_DEF(iemOp_jnc_Jb)
{
    /* JNC/JNB/JAE rel8: jump short if the carry flag (CF) is clear. */
    IEMOP_MNEMONIC("jnc/jnb Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();

    IEM_MC_BEGIN(0, 0);
    IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
        IEM_MC_ADVANCE_RIP();
    } IEM_MC_ELSE() {
        IEM_MC_REL_JMP_S8(i8Imm);
    } IEM_MC_ENDIF();
    IEM_MC_END();
    return VINF_SUCCESS;
}
7548
7549
/** Opcode 0x74. */
FNIEMOP_DEF(iemOp_je_Jb)
{
    /* JE/JZ rel8: jump short if the zero flag (ZF) is set. */
    IEMOP_MNEMONIC("je/jz Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();

    IEM_MC_BEGIN(0, 0);
    IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
        IEM_MC_REL_JMP_S8(i8Imm);
    } IEM_MC_ELSE() {
        IEM_MC_ADVANCE_RIP();
    } IEM_MC_ENDIF();
    IEM_MC_END();
    return VINF_SUCCESS;
}
7567
7568
/** Opcode 0x75. */
FNIEMOP_DEF(iemOp_jne_Jb)
{
    /* JNE/JNZ rel8: jump short if the zero flag (ZF) is clear. */
    IEMOP_MNEMONIC("jne/jnz Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();

    IEM_MC_BEGIN(0, 0);
    IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
        IEM_MC_ADVANCE_RIP();
    } IEM_MC_ELSE() {
        IEM_MC_REL_JMP_S8(i8Imm);
    } IEM_MC_ENDIF();
    IEM_MC_END();
    return VINF_SUCCESS;
}
7586
7587
/** Opcode 0x76. */
FNIEMOP_DEF(iemOp_jbe_Jb)
{
    /* JBE/JNA rel8: jump short if CF or ZF is set (unsigned below-or-equal). */
    IEMOP_MNEMONIC("jbe/jna Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();

    IEM_MC_BEGIN(0, 0);
    IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
        IEM_MC_REL_JMP_S8(i8Imm);
    } IEM_MC_ELSE() {
        IEM_MC_ADVANCE_RIP();
    } IEM_MC_ENDIF();
    IEM_MC_END();
    return VINF_SUCCESS;
}
7605
7606
/** Opcode 0x77. */
FNIEMOP_DEF(iemOp_jnbe_Jb)
{
    /* JNBE/JA rel8: jump short if both CF and ZF are clear (unsigned above). */
    IEMOP_MNEMONIC("jnbe/ja Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();

    IEM_MC_BEGIN(0, 0);
    IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
        IEM_MC_ADVANCE_RIP();
    } IEM_MC_ELSE() {
        IEM_MC_REL_JMP_S8(i8Imm);
    } IEM_MC_ENDIF();
    IEM_MC_END();
    return VINF_SUCCESS;
}
7624
7625
/** Opcode 0x78. */
FNIEMOP_DEF(iemOp_js_Jb)
{
    /* JS rel8: jump short if the sign flag (SF) is set. */
    IEMOP_MNEMONIC("js Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();

    IEM_MC_BEGIN(0, 0);
    IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
        IEM_MC_REL_JMP_S8(i8Imm);
    } IEM_MC_ELSE() {
        IEM_MC_ADVANCE_RIP();
    } IEM_MC_ENDIF();
    IEM_MC_END();
    return VINF_SUCCESS;
}
7643
7644
/** Opcode 0x79. */
FNIEMOP_DEF(iemOp_jns_Jb)
{
    /* JNS rel8: jump short if the sign flag (SF) is clear. */
    IEMOP_MNEMONIC("jns Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();

    IEM_MC_BEGIN(0, 0);
    IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
        IEM_MC_ADVANCE_RIP();
    } IEM_MC_ELSE() {
        IEM_MC_REL_JMP_S8(i8Imm);
    } IEM_MC_ENDIF();
    IEM_MC_END();
    return VINF_SUCCESS;
}
7662
7663
/** Opcode 0x7a. */
FNIEMOP_DEF(iemOp_jp_Jb)
{
    /* JP/JPE rel8: jump short if the parity flag (PF) is set. */
    IEMOP_MNEMONIC("jp Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();

    IEM_MC_BEGIN(0, 0);
    IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
        IEM_MC_REL_JMP_S8(i8Imm);
    } IEM_MC_ELSE() {
        IEM_MC_ADVANCE_RIP();
    } IEM_MC_ENDIF();
    IEM_MC_END();
    return VINF_SUCCESS;
}
7681
7682
/** Opcode 0x7b. */
FNIEMOP_DEF(iemOp_jnp_Jb)
{
    /* JNP/JPO rel8: jump short if the parity flag (PF) is clear. */
    IEMOP_MNEMONIC("jnp Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();

    IEM_MC_BEGIN(0, 0);
    IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
        IEM_MC_ADVANCE_RIP();
    } IEM_MC_ELSE() {
        IEM_MC_REL_JMP_S8(i8Imm);
    } IEM_MC_ENDIF();
    IEM_MC_END();
    return VINF_SUCCESS;
}
7700
7701
/** Opcode 0x7c. */
FNIEMOP_DEF(iemOp_jl_Jb)
{
    /* JL/JNGE rel8: jump short if SF != OF (signed less). */
    IEMOP_MNEMONIC("jl/jnge Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();

    IEM_MC_BEGIN(0, 0);
    IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
        IEM_MC_REL_JMP_S8(i8Imm);
    } IEM_MC_ELSE() {
        IEM_MC_ADVANCE_RIP();
    } IEM_MC_ENDIF();
    IEM_MC_END();
    return VINF_SUCCESS;
}
7719
7720
/** Opcode 0x7d. */
FNIEMOP_DEF(iemOp_jnl_Jb)
{
    /* JNL/JGE rel8: jump short if SF == OF (signed greater-or-equal). */
    IEMOP_MNEMONIC("jnl/jge Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();

    IEM_MC_BEGIN(0, 0);
    IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
        IEM_MC_ADVANCE_RIP();
    } IEM_MC_ELSE() {
        IEM_MC_REL_JMP_S8(i8Imm);
    } IEM_MC_ENDIF();
    IEM_MC_END();
    return VINF_SUCCESS;
}
7738
7739
/** Opcode 0x7e. */
FNIEMOP_DEF(iemOp_jle_Jb)
{
    /* JLE/JNG rel8: jump short if ZF is set or SF != OF (signed less-or-equal). */
    IEMOP_MNEMONIC("jle/jng Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();

    IEM_MC_BEGIN(0, 0);
    IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
        IEM_MC_REL_JMP_S8(i8Imm);
    } IEM_MC_ELSE() {
        IEM_MC_ADVANCE_RIP();
    } IEM_MC_ENDIF();
    IEM_MC_END();
    return VINF_SUCCESS;
}
7757
7758
/** Opcode 0x7f. */
FNIEMOP_DEF(iemOp_jnle_Jb)
{
    /* JNLE/JG rel8: jump short if ZF is clear and SF == OF (signed greater). */
    IEMOP_MNEMONIC("jnle/jg Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();

    IEM_MC_BEGIN(0, 0);
    IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
        IEM_MC_ADVANCE_RIP();
    } IEM_MC_ELSE() {
        IEM_MC_REL_JMP_S8(i8Imm);
    } IEM_MC_ENDIF();
    IEM_MC_END();
    return VINF_SUCCESS;
}
7776
7777
/** Opcode 0x80. */
FNIEMOP_DEF(iemOp_Grp1_Eb_Ib_80)
{
    /*
     * Group 1, Eb,Ib: ADD/OR/ADC/SBB/AND/SUB/XOR/CMP with byte destination
     * and 8-bit immediate.  ModRM.reg selects the operation from
     * g_apIemImplGrp1; the packed mnemonic string below holds eight
     * 4-byte (NUL-padded) names indexed by reg*4.
     */
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_MNEMONIC2("add\0or\0\0adc\0sbb\0and\0sub\0xor\0cmp" + ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)*4, "Eb,Ib");
    PCIEMOPBINSIZES pImpl = g_apIemImplGrp1[(bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK];

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register target */
        uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
        IEMOP_HLP_NO_LOCK_PREFIX(); /* LOCK is only valid with a memory destination. */
        IEM_MC_BEGIN(3, 0);
        IEM_MC_ARG(uint8_t *,       pu8Dst,                 0);
        IEM_MC_ARG_CONST(uint8_t,   u8Src, /*=*/ u8Imm,     1);
        IEM_MC_ARG(uint32_t *,      pEFlags,                2);

        IEM_MC_REF_GREG_U8(pu8Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
        IEM_MC_REF_EFLAGS(pEFlags);
        IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU8, pu8Dst, u8Src, pEFlags);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /* memory target */
        uint32_t fAccess;
        if (pImpl->pfnLockedU8)
            fAccess = IEM_ACCESS_DATA_RW;
        else
        { /* CMP - has no locked variant and only reads the destination. */
            IEMOP_HLP_NO_LOCK_PREFIX();
            fAccess = IEM_ACCESS_DATA_R;
        }
        IEM_MC_BEGIN(3, 2);
        IEM_MC_ARG(uint8_t *,       pu8Dst,                 0);
        IEM_MC_ARG_LOCAL_EFLAGS(    pEFlags, EFlags,        2);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm);
        /* The immediate follows the ModR/M displacement bytes, so it is
           fetched only after the effective address is decoded. */
        uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
        IEM_MC_ARG_CONST(uint8_t,   u8Src, /*=*/ u8Imm,     1);

        IEM_MC_MEM_MAP(pu8Dst, fAccess, pIemCpu->iEffSeg, GCPtrEffDst, 0 /*arg*/);
        IEM_MC_FETCH_EFLAGS(EFlags);
        if (!(pIemCpu->fPrefixes & IEM_OP_PRF_LOCK))
            IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU8, pu8Dst, u8Src, pEFlags);
        else
            IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU8, pu8Dst, u8Src, pEFlags);

        IEM_MC_MEM_COMMIT_AND_UNMAP(pu8Dst, fAccess);
        IEM_MC_COMMIT_EFLAGS(EFlags);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
7836
7837
/** Opcode 0x81. */
FNIEMOP_DEF(iemOp_Grp1_Ev_Iz)
{
    /*
     * Group 1, Ev,Iz: ADD/OR/ADC/SBB/AND/SUB/XOR/CMP with word/dword/qword
     * destination and a full-size immediate (sign-extended dword in 64-bit
     * mode).  ModRM.reg selects the operation from g_apIemImplGrp1; the
     * packed mnemonic string holds eight 4-byte (NUL-padded) names indexed
     * by reg*4.
     */
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_MNEMONIC2("add\0or\0\0adc\0sbb\0and\0sub\0xor\0cmp" + ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)*4, "Ev,Iz");
    PCIEMOPBINSIZES pImpl = g_apIemImplGrp1[(bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK];

    switch (pIemCpu->enmEffOpSize)
    {
        case IEMMODE_16BIT:
        {
            if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
            {
                /* register target */
                uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
                IEMOP_HLP_NO_LOCK_PREFIX(); /* LOCK is only valid with a memory destination. */
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint16_t *,      pu16Dst,                0);
                IEM_MC_ARG_CONST(uint16_t,  u16Src, /*=*/ u16Imm,   1);
                IEM_MC_ARG(uint32_t *,      pEFlags,                2);

                IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, u16Src, pEFlags);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            else
            {
                /* memory target */
                uint32_t fAccess;
                if (pImpl->pfnLockedU16)
                    fAccess = IEM_ACCESS_DATA_RW;
                else
                { /* CMP, TEST - no locked variant, read-only destination. */
                    IEMOP_HLP_NO_LOCK_PREFIX();
                    fAccess = IEM_ACCESS_DATA_R;
                }
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint16_t *,      pu16Dst,                0);
                IEM_MC_ARG(uint16_t,        u16Src,                 1);
                IEM_MC_ARG_LOCAL_EFLAGS(    pEFlags, EFlags,        2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm);
                /* Immediate follows the ModR/M displacement; fetch afterwards. */
                uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
                IEM_MC_ASSIGN(u16Src, u16Imm);
                IEM_MC_MEM_MAP(pu16Dst, fAccess, pIemCpu->iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_EFLAGS(EFlags);
                if (!(pIemCpu->fPrefixes & IEM_OP_PRF_LOCK))
                    IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, u16Src, pEFlags);
                else
                    IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU16, pu16Dst, u16Src, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, fAccess);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            break;
        }

        case IEMMODE_32BIT:
        {
            if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
            {
                /* register target */
                uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
                IEMOP_HLP_NO_LOCK_PREFIX();
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint32_t *,      pu32Dst,                0);
                IEM_MC_ARG_CONST(uint32_t,  u32Src, /*=*/ u32Imm,   1);
                IEM_MC_ARG(uint32_t *,      pEFlags,                2);

                IEM_MC_REF_GREG_U32(pu32Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, u32Src, pEFlags);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            else
            {
                /* memory target */
                uint32_t fAccess;
                if (pImpl->pfnLockedU32)
                    fAccess = IEM_ACCESS_DATA_RW;
                else
                { /* CMP, TEST - no locked variant, read-only destination. */
                    IEMOP_HLP_NO_LOCK_PREFIX();
                    fAccess = IEM_ACCESS_DATA_R;
                }
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint32_t *,      pu32Dst,                0);
                IEM_MC_ARG(uint32_t,        u32Src,                 1);
                IEM_MC_ARG_LOCAL_EFLAGS(    pEFlags, EFlags,        2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm);
                /* Immediate follows the ModR/M displacement; fetch afterwards. */
                uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
                IEM_MC_ASSIGN(u32Src, u32Imm);
                IEM_MC_MEM_MAP(pu32Dst, fAccess, pIemCpu->iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_EFLAGS(EFlags);
                if (!(pIemCpu->fPrefixes & IEM_OP_PRF_LOCK))
                    IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, u32Src, pEFlags);
                else
                    IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU32, pu32Dst, u32Src, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, fAccess);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            break;
        }

        case IEMMODE_64BIT:
        {
            if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
            {
                /* register target */
                uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm); /* Iz is a sign-extended dword in 64-bit mode. */
                IEMOP_HLP_NO_LOCK_PREFIX();
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint64_t *,      pu64Dst,                0);
                IEM_MC_ARG_CONST(uint64_t,  u64Src, /*=*/ u64Imm,   1);
                IEM_MC_ARG(uint32_t *,      pEFlags,                2);

                IEM_MC_REF_GREG_U64(pu64Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, u64Src, pEFlags);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            else
            {
                /* memory target */
                uint32_t fAccess;
                if (pImpl->pfnLockedU64)
                    fAccess = IEM_ACCESS_DATA_RW;
                else
                { /* CMP - no locked variant, read-only destination. */
                    IEMOP_HLP_NO_LOCK_PREFIX();
                    fAccess = IEM_ACCESS_DATA_R;
                }
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint64_t *,      pu64Dst,                0);
                IEM_MC_ARG(uint64_t,        u64Src,                 1);
                IEM_MC_ARG_LOCAL_EFLAGS(    pEFlags, EFlags,        2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm);
                /* Immediate follows the ModR/M displacement; fetch afterwards. */
                uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm);
                IEM_MC_ASSIGN(u64Src, u64Imm);
                IEM_MC_MEM_MAP(pu64Dst, fAccess, pIemCpu->iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_EFLAGS(EFlags);
                if (!(pIemCpu->fPrefixes & IEM_OP_PRF_LOCK))
                    IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, u64Src, pEFlags);
                else
                    IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU64, pu64Dst, u64Src, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, fAccess);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            break;
        }
    }
    return VINF_SUCCESS;
}
8011
8012
/** Opcode 0x82. */
FNIEMOP_DEF(iemOp_Grp1_Eb_Ib_82)
{
    /* Alias of opcode 0x80 (Group 1 Eb,Ib); valid only outside 64-bit mode. */
    IEMOP_HLP_NO_64BIT(); /** @todo do we need to decode the whole instruction or is this ok? */
    return FNIEMOP_CALL(iemOp_Grp1_Eb_Ib_80);
}
8019
8020
/** Opcode 0x83. */
FNIEMOP_DEF(iemOp_Grp1_Ev_Ib)
{
    /*
     * Group 1, Ev,Ib: ADD/OR/ADC/SBB/AND/SUB/XOR/CMP with word/dword/qword
     * destination and a sign-extended 8-bit immediate.  ModRM.reg selects
     * the operation from g_apIemImplGrp1; the packed mnemonic string holds
     * eight 4-byte (NUL-padded) names indexed by reg*4.
     */
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_MNEMONIC2("add\0or\0\0adc\0sbb\0and\0sub\0xor\0cmp" + ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)*4, "Ev,Ib");
    PCIEMOPBINSIZES pImpl = g_apIemImplGrp1[(bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK];

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /*
         * Register target
         */
        IEMOP_HLP_NO_LOCK_PREFIX(); /* LOCK is only valid with a memory destination. */
        uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
            {
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint16_t *,      pu16Dst,                    0);
                IEM_MC_ARG_CONST(uint16_t,  u16Src, /*=*/ (int8_t)u8Imm,1); /* cast sign-extends the immediate */
                IEM_MC_ARG(uint32_t *,      pEFlags,                    2);

                IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, u16Src, pEFlags);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;
            }

            case IEMMODE_32BIT:
            {
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint32_t *,      pu32Dst,                    0);
                IEM_MC_ARG_CONST(uint32_t,  u32Src, /*=*/ (int8_t)u8Imm,1); /* cast sign-extends the immediate */
                IEM_MC_ARG(uint32_t *,      pEFlags,                    2);

                IEM_MC_REF_GREG_U32(pu32Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, u32Src, pEFlags);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;
            }

            case IEMMODE_64BIT:
            {
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint64_t *,      pu64Dst,                    0);
                IEM_MC_ARG_CONST(uint64_t,  u64Src, /*=*/ (int8_t)u8Imm,1); /* cast sign-extends the immediate */
                IEM_MC_ARG(uint32_t *,      pEFlags,                    2);

                IEM_MC_REF_GREG_U64(pu64Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, u64Src, pEFlags);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;
            }
        }
    }
    else
    {
        /*
         * Memory target.
         */
        uint32_t fAccess;
        if (pImpl->pfnLockedU16)
            fAccess = IEM_ACCESS_DATA_RW;
        else
        { /* CMP - no locked variant, read-only destination. */
            IEMOP_HLP_NO_LOCK_PREFIX();
            fAccess = IEM_ACCESS_DATA_R;
        }

        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
            {
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint16_t *,      pu16Dst,                    0);
                IEM_MC_ARG(uint16_t,        u16Src,                     1);
                IEM_MC_ARG_LOCAL_EFLAGS(    pEFlags, EFlags,            2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm);
                /* Immediate follows the ModR/M displacement; fetch afterwards. */
                uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
                IEM_MC_ASSIGN(u16Src, (int8_t)u8Imm); /* cast sign-extends */
                IEM_MC_MEM_MAP(pu16Dst, fAccess, pIemCpu->iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_EFLAGS(EFlags);
                if (!(pIemCpu->fPrefixes & IEM_OP_PRF_LOCK))
                    IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, u16Src, pEFlags);
                else
                    IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU16, pu16Dst, u16Src, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, fAccess);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;
            }

            case IEMMODE_32BIT:
            {
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint32_t *,      pu32Dst,                    0);
                IEM_MC_ARG(uint32_t,        u32Src,                     1);
                IEM_MC_ARG_LOCAL_EFLAGS(    pEFlags, EFlags,            2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm);
                /* Immediate follows the ModR/M displacement; fetch afterwards. */
                uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
                IEM_MC_ASSIGN(u32Src, (int8_t)u8Imm); /* cast sign-extends */
                IEM_MC_MEM_MAP(pu32Dst, fAccess, pIemCpu->iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_EFLAGS(EFlags);
                if (!(pIemCpu->fPrefixes & IEM_OP_PRF_LOCK))
                    IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, u32Src, pEFlags);
                else
                    IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU32, pu32Dst, u32Src, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, fAccess);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;
            }

            case IEMMODE_64BIT:
            {
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint64_t *,      pu64Dst,                    0);
                IEM_MC_ARG(uint64_t,        u64Src,                     1);
                IEM_MC_ARG_LOCAL_EFLAGS(    pEFlags, EFlags,            2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm);
                /* Immediate follows the ModR/M displacement; fetch afterwards. */
                uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
                IEM_MC_ASSIGN(u64Src, (int8_t)u8Imm); /* cast sign-extends */
                IEM_MC_MEM_MAP(pu64Dst, fAccess, pIemCpu->iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_EFLAGS(EFlags);
                if (!(pIemCpu->fPrefixes & IEM_OP_PRF_LOCK))
                    IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, u64Src, pEFlags);
                else
                    IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU64, pu64Dst, u64Src, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, fAccess);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;
            }
        }
    }
    return VINF_SUCCESS;
}
8180
8181
/**
 * Opcode 0x84 - TEST Eb,Gb.
 *
 * Byte AND of r/m8 and r8 with the result discarded; only EFLAGS is updated.
 * Decoding/dispatch is shared with the other byte r/m,reg binary operators.
 */
FNIEMOP_DEF(iemOp_test_Eb_Gb)
{
    IEMOP_MNEMONIC("test Eb,Gb");
    IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo do we have to decode the whole instruction first? */
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF); /* AF is architecturally undefined for TEST. */
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_r8, &g_iemAImpl_test);
}
8190
8191
/**
 * Opcode 0x85 - TEST Ev,Gv.
 *
 * Word/dword/qword AND of r/m and reg with the result discarded; only EFLAGS
 * is updated.  Uses the common operand-size dispatching r/m,reg worker.
 */
FNIEMOP_DEF(iemOp_test_Ev_Gv)
{
    IEMOP_MNEMONIC("test Ev,Gv");
    IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo do we have to decode the whole instruction first? */
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF); /* AF is architecturally undefined for TEST. */
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_rv, &g_iemAImpl_test);
}
8200
8201
/**
 * Opcode 0x86 - XCHG Eb,Gb.
 *
 * Exchanges a byte register with another byte register or with memory.  The
 * memory form maps the destination R/W and calls the assembly xchg worker
 * (NOTE(review): memory-form XCHG is presumably implicitly locked - the lock
 * prefix is only rejected in the register form here; confirm intent).
 */
FNIEMOP_DEF(iemOp_xchg_Eb_Gb)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_MNEMONIC("xchg Eb,Gb");

    /*
     * If rm is denoting a register, no more instruction bytes.
     */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        IEMOP_HLP_NO_LOCK_PREFIX();

        /* Register form: fetch both, store them crosswise. */
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(uint8_t, uTmp1);
        IEM_MC_LOCAL(uint8_t, uTmp2);

        IEM_MC_FETCH_GREG_U8(uTmp1, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
        IEM_MC_FETCH_GREG_U8(uTmp2, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
        IEM_MC_STORE_GREG_U8((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, uTmp1);
        IEM_MC_STORE_GREG_U8(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, uTmp2);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * We're accessing memory.
         */
/** @todo the register must be committed separately! */
        IEM_MC_BEGIN(2, 2);
        IEM_MC_ARG(uint8_t *, pu8Mem, 0);
        IEM_MC_ARG(uint8_t *, pu8Reg, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm);
        IEM_MC_MEM_MAP(pu8Mem, IEM_ACCESS_DATA_RW, pIemCpu->iEffSeg, GCPtrEffDst, 0 /*arg*/);
        IEM_MC_REF_GREG_U8(pu8Reg, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
        IEM_MC_CALL_VOID_AIMPL_2(iemAImpl_xchg_u8, pu8Mem, pu8Reg);
        IEM_MC_MEM_COMMIT_AND_UNMAP(pu8Mem, IEM_ACCESS_DATA_RW);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
8249
8250
/**
 * Opcode 0x87 - XCHG Ev,Gv.
 *
 * Exchanges a word/dword/qword register with another register or with memory,
 * dispatching on the effective operand size.  The memory forms map the
 * destination R/W and call the assembly xchg workers.
 */
FNIEMOP_DEF(iemOp_xchg_Ev_Gv)
{
    IEMOP_MNEMONIC("xchg Ev,Gv");
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    /*
     * If rm is denoting a register, no more instruction bytes.
     */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        IEMOP_HLP_NO_LOCK_PREFIX();

        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(uint16_t, uTmp1);
                IEM_MC_LOCAL(uint16_t, uTmp2);

                IEM_MC_FETCH_GREG_U16(uTmp1, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_FETCH_GREG_U16(uTmp2, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_STORE_GREG_U16((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, uTmp1);
                IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, uTmp2);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(uint32_t, uTmp1);
                IEM_MC_LOCAL(uint32_t, uTmp2);

                IEM_MC_FETCH_GREG_U32(uTmp1, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_FETCH_GREG_U32(uTmp2, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_STORE_GREG_U32((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, uTmp1);
                IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, uTmp2);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(uint64_t, uTmp1);
                IEM_MC_LOCAL(uint64_t, uTmp2);

                IEM_MC_FETCH_GREG_U64(uTmp1, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_FETCH_GREG_U64(uTmp2, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_STORE_GREG_U64((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, uTmp1);
                IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, uTmp2);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /*
         * We're accessing memory.
         */
        switch (pIemCpu->enmEffOpSize)
        {
/** @todo the register must be committed separately! */
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(2, 2);
                IEM_MC_ARG(uint16_t *, pu16Mem, 0);
                IEM_MC_ARG(uint16_t *, pu16Reg, 1);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm);
                IEM_MC_MEM_MAP(pu16Mem, IEM_ACCESS_DATA_RW, pIemCpu->iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_REF_GREG_U16(pu16Reg, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_CALL_VOID_AIMPL_2(iemAImpl_xchg_u16, pu16Mem, pu16Reg);
                IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Mem, IEM_ACCESS_DATA_RW);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(2, 2);
                IEM_MC_ARG(uint32_t *, pu32Mem, 0);
                IEM_MC_ARG(uint32_t *, pu32Reg, 1);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm);
                IEM_MC_MEM_MAP(pu32Mem, IEM_ACCESS_DATA_RW, pIemCpu->iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_REF_GREG_U32(pu32Reg, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_CALL_VOID_AIMPL_2(iemAImpl_xchg_u32, pu32Mem, pu32Reg);
                IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Mem, IEM_ACCESS_DATA_RW);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(2, 2);
                IEM_MC_ARG(uint64_t *, pu64Mem, 0);
                IEM_MC_ARG(uint64_t *, pu64Reg, 1);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm);
                IEM_MC_MEM_MAP(pu64Mem, IEM_ACCESS_DATA_RW, pIemCpu->iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_REF_GREG_U64(pu64Reg, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_CALL_VOID_AIMPL_2(iemAImpl_xchg_u64, pu64Mem, pu64Reg);
                IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Mem, IEM_ACCESS_DATA_RW);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
8371
8372
/**
 * Opcode 0x88 - MOV Eb,Gb.
 *
 * Stores a byte register into another byte register or into memory.
 */
FNIEMOP_DEF(iemOp_mov_Eb_Gb)
{
    IEMOP_MNEMONIC("mov Eb,Gb");

    uint8_t bRm;
    IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo should probably not be raised until we've fetched all the opcode bytes? */

    /*
     * If rm is denoting a register, no more instruction bytes.
     */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(uint8_t, u8Value);
        IEM_MC_FETCH_GREG_U8(u8Value, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
        IEM_MC_STORE_GREG_U8((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, u8Value);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * We're writing a register to memory.
         */
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(uint8_t, u8Value);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm);
        IEM_MC_FETCH_GREG_U8(u8Value, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
        IEM_MC_STORE_MEM_U8(pIemCpu->iEffSeg, GCPtrEffDst, u8Value);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;

}
8411
8412
/**
 * Opcode 0x89 - MOV Ev,Gv.
 *
 * Stores a word/dword/qword register into another register or into memory,
 * dispatching on the effective operand size.
 */
FNIEMOP_DEF(iemOp_mov_Ev_Gv)
{
    IEMOP_MNEMONIC("mov Ev,Gv");

    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo should probably not be raised until we've fetched all the opcode bytes? */

    /*
     * If rm is denoting a register, no more instruction bytes.
     */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint16_t, u16Value);
                IEM_MC_FETCH_GREG_U16(u16Value, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_STORE_GREG_U16((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, u16Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint32_t, u32Value);
                IEM_MC_FETCH_GREG_U32(u32Value, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_STORE_GREG_U32((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, u32Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint64_t, u64Value);
                IEM_MC_FETCH_GREG_U64(u64Value, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_STORE_GREG_U64((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, u64Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;
        }
    }
    else
    {
        /*
         * We're writing a register to memory.
         */
        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(uint16_t, u16Value);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm);
                IEM_MC_FETCH_GREG_U16(u16Value, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_STORE_MEM_U16(pIemCpu->iEffSeg, GCPtrEffDst, u16Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(uint32_t, u32Value);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm);
                IEM_MC_FETCH_GREG_U32(u32Value, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_STORE_MEM_U32(pIemCpu->iEffSeg, GCPtrEffDst, u32Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(uint64_t, u64Value);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm);
                IEM_MC_FETCH_GREG_U64(u64Value, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_STORE_MEM_U64(pIemCpu->iEffSeg, GCPtrEffDst, u64Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;
        }
    }
    return VINF_SUCCESS;
}
8499
8500
/**
 * Opcode 0x8a - MOV Gb,Eb.
 *
 * Loads a byte register from another byte register or from memory.
 */
FNIEMOP_DEF(iemOp_mov_Gb_Eb)
{
    IEMOP_MNEMONIC("mov Gb,Eb");

    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo should probably not be raised until we've fetched all the opcode bytes? */

    /*
     * If rm is denoting a register, no more instruction bytes.
     */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(uint8_t, u8Value);
        IEM_MC_FETCH_GREG_U8(u8Value, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
        IEM_MC_STORE_GREG_U8(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u8Value);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * We're loading a register from memory.
         */
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(uint8_t, u8Value);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm);
        IEM_MC_FETCH_MEM_U8(u8Value, pIemCpu->iEffSeg, GCPtrEffDst);
        IEM_MC_STORE_GREG_U8(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u8Value);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
8537
8538
/**
 * Opcode 0x8b - MOV Gv,Ev.
 *
 * Loads a word/dword/qword register from another register or from memory,
 * dispatching on the effective operand size.  Also reused by the 0x63
 * dispatcher for the 32/16-bit operand-size cases in long mode.
 */
FNIEMOP_DEF(iemOp_mov_Gv_Ev)
{
    IEMOP_MNEMONIC("mov Gv,Ev");

    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo should probably not be raised until we've fetched all the opcode bytes? */

    /*
     * If rm is denoting a register, no more instruction bytes.
     */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint16_t, u16Value);
                IEM_MC_FETCH_GREG_U16(u16Value, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u16Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint32_t, u32Value);
                IEM_MC_FETCH_GREG_U32(u32Value, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u32Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint64_t, u64Value);
                IEM_MC_FETCH_GREG_U64(u64Value, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u64Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;
        }
    }
    else
    {
        /*
         * We're loading a register from memory.
         */
        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(uint16_t, u16Value);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm);
                IEM_MC_FETCH_MEM_U16(u16Value, pIemCpu->iEffSeg, GCPtrEffDst);
                IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u16Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(uint32_t, u32Value);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm);
                IEM_MC_FETCH_MEM_U32(u32Value, pIemCpu->iEffSeg, GCPtrEffDst);
                IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u32Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(uint64_t, u64Value);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm);
                IEM_MC_FETCH_MEM_U64(u64Value, pIemCpu->iEffSeg, GCPtrEffDst);
                IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u64Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;
        }
    }
    return VINF_SUCCESS;
}
8625
8626
/**
 * Opcode 0x63 - ARPL Ew,Gw (legacy/compat mode) or MOVSXD Gv,Ev (64-bit mode).
 *
 * Dispatches on CPU mode and effective operand size: outside 64-bit mode this
 * is ARPL; in 64-bit mode without a 64-bit operand size it behaves like a
 * plain MOV Gv,Ev, otherwise it is the sign-extending MOVSXD.
 */
FNIEMOP_DEF(iemOp_arpl_Ew_Gw_movsx_Gv_Ev)
{
    if (pIemCpu->enmCpuMode != IEMMODE_64BIT)
        return FNIEMOP_CALL(iemOp_arpl_Ew_Gw);
    if (pIemCpu->enmEffOpSize != IEMMODE_64BIT)
        return FNIEMOP_CALL(iemOp_mov_Gv_Ev);
    return FNIEMOP_CALL(iemOp_movsxd_Gv_Ev);
}
8636
8637
/**
 * Opcode 0x8c - MOV Ev,Sw.
 *
 * Stores a segment register into a general-purpose register (operand size
 * respected, upper bits zero-extended) or into memory (always a word store).
 */
FNIEMOP_DEF(iemOp_mov_Ev_Sw)
{
    IEMOP_MNEMONIC("mov Ev,Sw");

    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo should probably not be raised until we've fetched all the opcode bytes? */

    /*
     * Check that the destination register exists. The REX.R prefix is ignored.
     */
    uint8_t const iSegReg = ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
    if (   iSegReg > X86_SREG_GS)
        return IEMOP_RAISE_INVALID_OPCODE(); /** @todo should probably not be raised until we've fetched all the opcode bytes? */

    /*
     * If rm is denoting a register, no more instruction bytes.
     * In that case, the operand size is respected and the upper bits are
     * cleared (starting with some pentium).
     */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint16_t, u16Value);
                IEM_MC_FETCH_SREG_U16(u16Value, iSegReg);
                IEM_MC_STORE_GREG_U16((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, u16Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint32_t, u32Value);
                IEM_MC_FETCH_SREG_ZX_U32(u32Value, iSegReg);
                IEM_MC_STORE_GREG_U32((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, u32Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint64_t, u64Value);
                IEM_MC_FETCH_SREG_ZX_U64(u64Value, iSegReg);
                IEM_MC_STORE_GREG_U64((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, u64Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;
        }
    }
    else
    {
        /*
         * We're saving the register to memory.  The access is word sized
         * regardless of operand size prefixes.
         */
#if 0 /* not necessary */
        pIemCpu->enmEffOpSize = pIemCpu->enmDefOpSize = IEMMODE_16BIT;
#endif
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(uint16_t, u16Value);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm);
        IEM_MC_FETCH_SREG_U16(u16Value, iSegReg);
        IEM_MC_STORE_MEM_U16(pIemCpu->iEffSeg, GCPtrEffDst, u16Value);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
8710
8711
8712
8713
/**
 * Opcode 0x8d - LEA Gv,M.
 *
 * Stores the effective address of the memory operand into the destination
 * register; no memory access is performed.  The register form is invalid.
 */
FNIEMOP_DEF(iemOp_lea_Gv_M)
{
    IEMOP_MNEMONIC("lea Gv,M");
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo should probably not be raised until we've fetched all the opcode bytes? */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
        return IEMOP_RAISE_INVALID_LOCK_PREFIX(); /* no register form */ /* NOTE(review): looks like this was meant to be IEMOP_RAISE_INVALID_OPCODE - confirm both raise #UD. */

    switch (pIemCpu->enmEffOpSize)
    {
        case IEMMODE_16BIT:
            IEM_MC_BEGIN(0, 2);
            IEM_MC_LOCAL(RTGCPTR,  GCPtrEffSrc);
            IEM_MC_LOCAL(uint16_t, u16Cast);
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm);
            IEM_MC_ASSIGN_TO_SMALLER(u16Cast, GCPtrEffSrc); /* truncate the address to 16 bits */
            IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u16Cast);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_32BIT:
            IEM_MC_BEGIN(0, 2);
            IEM_MC_LOCAL(RTGCPTR,  GCPtrEffSrc);
            IEM_MC_LOCAL(uint32_t, u32Cast);
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm);
            IEM_MC_ASSIGN_TO_SMALLER(u32Cast, GCPtrEffSrc); /* truncate the address to 32 bits */
            IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u32Cast);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_64BIT:
            IEM_MC_BEGIN(0, 1);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm);
            IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, GCPtrEffSrc);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;
    }
    AssertFailedReturn(VERR_INTERNAL_ERROR_5);
}
8758
8759
/**
 * Opcode 0x8e - MOV Sw,Ev.
 *
 * Loads a segment register from a general-purpose register or from memory
 * (word sized either way).  CS as destination and out-of-range selectors are
 * invalid.  The actual load is deferred to iemCImpl_load_SReg.
 */
FNIEMOP_DEF(iemOp_mov_Sw_Ev)
{
    IEMOP_MNEMONIC("mov Sw,Ev");

    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo should probably not be raised until we've fetched all the opcode bytes? */

    /*
     * The practical operand size is 16-bit.
     */
#if 0 /* not necessary */
    pIemCpu->enmEffOpSize = pIemCpu->enmDefOpSize = IEMMODE_16BIT;
#endif

    /*
     * Check that the destination register exists and can be used with this
     * instruction.  The REX.R prefix is ignored.
     */
    uint8_t const iSegReg = ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
    if (   iSegReg == X86_SREG_CS
        || iSegReg > X86_SREG_GS)
        return IEMOP_RAISE_INVALID_OPCODE(); /** @todo should probably not be raised until we've fetched all the opcode bytes? */

    /*
     * If rm is denoting a register, no more instruction bytes.
     */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        IEM_MC_BEGIN(2, 0);
        IEM_MC_ARG_CONST(uint8_t, iSRegArg, iSegReg, 0);
        IEM_MC_ARG(uint16_t,      u16Value,          1);
        IEM_MC_FETCH_GREG_U16(u16Value, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
        IEM_MC_CALL_CIMPL_2(iemCImpl_load_SReg, iSRegArg, u16Value);
        IEM_MC_END();
    }
    else
    {
        /*
         * We're loading the register from memory.  The access is word sized
         * regardless of operand size prefixes.
         */
        IEM_MC_BEGIN(2, 1);
        IEM_MC_ARG_CONST(uint8_t, iSRegArg, iSegReg, 0);
        IEM_MC_ARG(uint16_t,      u16Value,          1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm);
        IEM_MC_FETCH_MEM_U16(u16Value, pIemCpu->iEffSeg, GCPtrEffDst);
        IEM_MC_CALL_CIMPL_2(iemCImpl_load_SReg, iSRegArg, u16Value);
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
8813
8814
8815/** Opcode 0x8f /0. */
8816FNIEMOP_DEF_1(iemOp_pop_Ev, uint8_t, bRm)
8817{
8818 /* This bugger is rather annoying as it requires rSP to be updated before
8819 doing the effective address calculations. Will eventually require a
8820 split between the R/M+SIB decoding and the effective address
8821 calculation - which is something that is required for any attempt at
8822 reusing this code for a recompiler. It may also be good to have if we
8823 need to delay #UD exception caused by invalid lock prefixes.
8824
8825 For now, we'll do a mostly safe interpreter-only implementation here. */
8826 /** @todo What's the deal with the 'reg' field and pop Ev? Ignorning it for
8827 * now until tests show it's checked.. */
8828 IEMOP_MNEMONIC("pop Ev");
8829 IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo should probably not be raised until we've fetched all the opcode bytes? */
8830
8831 /* Register access is relatively easy and can share code. */
8832 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
8833 return FNIEMOP_CALL_1(iemOpCommonPopGReg, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
8834
8835 /*
8836 * Memory target.
8837 *
8838 * Intel says that RSP is incremented before it's used in any effective
8839 * address calcuations. This means some serious extra annoyance here since
8840 * we decode and calculate the effective address in one step and like to
8841 * delay committing registers till everything is done.
8842 *
8843 * So, we'll decode and calculate the effective address twice. This will
8844 * require some recoding if turned into a recompiler.
8845 */
8846 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE(); /* The common code does this differently. */
8847
8848#ifndef TST_IEM_CHECK_MC
8849 /* Calc effective address with modified ESP. */
8850 uint8_t const offOpcodeSaved = pIemCpu->offOpcode;
8851 RTGCPTR GCPtrEff;
8852 VBOXSTRICTRC rcStrict;
8853 rcStrict = iemOpHlpCalcRmEffAddr(pIemCpu, bRm, &GCPtrEff);
8854 if (rcStrict != VINF_SUCCESS)
8855 return rcStrict;
8856 pIemCpu->offOpcode = offOpcodeSaved;
8857
8858 PCPUMCTX pCtx = pIemCpu->CTX_SUFF(pCtx);
8859 uint64_t const RspSaved = pCtx->rsp;
8860 switch (pIemCpu->enmEffOpSize)
8861 {
8862 case IEMMODE_16BIT: iemRegAddToRsp(pCtx, 2); break;
8863 case IEMMODE_32BIT: iemRegAddToRsp(pCtx, 4); break;
8864 case IEMMODE_64BIT: iemRegAddToRsp(pCtx, 8); break;
8865 IEM_NOT_REACHED_DEFAULT_CASE_RET();
8866 }
8867 rcStrict = iemOpHlpCalcRmEffAddr(pIemCpu, bRm, &GCPtrEff);
8868 Assert(rcStrict == VINF_SUCCESS);
8869 pCtx->rsp = RspSaved;
8870
8871 /* Perform the operation - this should be CImpl. */
8872 RTUINT64U TmpRsp;
8873 TmpRsp.u = pCtx->rsp;
8874 switch (pIemCpu->enmEffOpSize)
8875 {
8876 case IEMMODE_16BIT:
8877 {
8878 uint16_t u16Value;
8879 rcStrict = iemMemStackPopU16Ex(pIemCpu, &u16Value, &TmpRsp);
8880 if (rcStrict == VINF_SUCCESS)
8881 rcStrict = iemMemStoreDataU16(pIemCpu, pIemCpu->iEffSeg, GCPtrEff, u16Value);
8882 break;
8883 }
8884
8885 case IEMMODE_32BIT:
8886 {
8887 uint32_t u32Value;
8888 rcStrict = iemMemStackPopU32Ex(pIemCpu, &u32Value, &TmpRsp);
8889 if (rcStrict == VINF_SUCCESS)
8890 rcStrict = iemMemStoreDataU32(pIemCpu, pIemCpu->iEffSeg, GCPtrEff, u32Value);
8891 break;
8892 }
8893
8894 case IEMMODE_64BIT:
8895 {
8896 uint64_t u64Value;
8897 rcStrict = iemMemStackPopU64Ex(pIemCpu, &u64Value, &TmpRsp);
8898 if (rcStrict == VINF_SUCCESS)
8899 rcStrict = iemMemStoreDataU16(pIemCpu, pIemCpu->iEffSeg, GCPtrEff, u64Value);
8900 break;
8901 }
8902
8903 IEM_NOT_REACHED_DEFAULT_CASE_RET();
8904 }
8905 if (rcStrict == VINF_SUCCESS)
8906 {
8907 pCtx->rsp = TmpRsp.u;
8908 iemRegUpdateRip(pIemCpu);
8909 }
8910 return rcStrict;
8911
8912#else
8913 return VERR_IEM_IPE_2;
8914#endif
8915}
8916
8917
/**
 * Opcode 0x8f - Group 1A.
 *
 * Only /0 (POP Ev) is defined in this group; all other reg-field values
 * raise \#UD.
 */
FNIEMOP_DEF(iemOp_Grp1A)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_REG_MASK) != (0 << X86_MODRM_REG_SHIFT)) /* only pop Ev in this group. */
        return IEMOP_RAISE_INVALID_OPCODE();
    return FNIEMOP_CALL_1(iemOp_pop_Ev, bRm);
}
8926
8927
/**
 * Common 'xchg reg,rAX' helper.
 *
 * Exchanges rAX (AX/EAX/RAX by operand size) with the given general register.
 * The caller passes the low three bits of the register index; REX.B is OR'ed
 * in here, so 0x90-0x97 can address r8-r15 too.
 *
 * @param   iReg    Low 3 bits of the register index to exchange with rAX.
 */
FNIEMOP_DEF_1(iemOpCommonXchgGRegRax, uint8_t, iReg)
{
    IEMOP_HLP_NO_LOCK_PREFIX();

    iReg |= pIemCpu->uRexB;
    switch (pIemCpu->enmEffOpSize)
    {
        case IEMMODE_16BIT:
            IEM_MC_BEGIN(0, 2);
            IEM_MC_LOCAL(uint16_t, u16Tmp1);
            IEM_MC_LOCAL(uint16_t, u16Tmp2);
            IEM_MC_FETCH_GREG_U16(u16Tmp1, iReg);
            IEM_MC_FETCH_GREG_U16(u16Tmp2, X86_GREG_xAX);
            IEM_MC_STORE_GREG_U16(X86_GREG_xAX, u16Tmp1);
            IEM_MC_STORE_GREG_U16(iReg, u16Tmp2);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_32BIT:
            IEM_MC_BEGIN(0, 2);
            IEM_MC_LOCAL(uint32_t, u32Tmp1);
            IEM_MC_LOCAL(uint32_t, u32Tmp2);
            IEM_MC_FETCH_GREG_U32(u32Tmp1, iReg);
            IEM_MC_FETCH_GREG_U32(u32Tmp2, X86_GREG_xAX);
            IEM_MC_STORE_GREG_U32(X86_GREG_xAX, u32Tmp1);
            IEM_MC_STORE_GREG_U32(iReg, u32Tmp2);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_64BIT:
            IEM_MC_BEGIN(0, 2);
            IEM_MC_LOCAL(uint64_t, u64Tmp1);
            IEM_MC_LOCAL(uint64_t, u64Tmp2);
            IEM_MC_FETCH_GREG_U64(u64Tmp1, iReg);
            IEM_MC_FETCH_GREG_U64(u64Tmp2, X86_GREG_xAX);
            IEM_MC_STORE_GREG_U64(X86_GREG_xAX, u64Tmp1);
            IEM_MC_STORE_GREG_U64(iReg, u64Tmp2);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
8977
8978
8979/** Opcode 0x90. */
8980FNIEMOP_DEF(iemOp_nop)
8981{
8982 /* R8/R8D and RAX/EAX can be exchanged. */
8983 if (pIemCpu->fPrefixes & IEM_OP_PRF_REX_B)
8984 {
8985 IEMOP_MNEMONIC("xchg r8,rAX");
8986 return FNIEMOP_CALL_1(iemOpCommonXchgGRegRax, X86_GREG_xAX);
8987 }
8988
8989 if (pIemCpu->fPrefixes & IEM_OP_PRF_LOCK)
8990 IEMOP_MNEMONIC("pause");
8991 else
8992 IEMOP_MNEMONIC("nop");
8993 IEM_MC_BEGIN(0, 0);
8994 IEM_MC_ADVANCE_RIP();
8995 IEM_MC_END();
8996 return VINF_SUCCESS;
8997}
8998
8999
/** Opcode 0x91 - XCHG rCX,rAX (REX.B selects r9 via the common helper). */
FNIEMOP_DEF(iemOp_xchg_eCX_eAX)
{
    IEMOP_MNEMONIC("xchg rCX,rAX");
    return FNIEMOP_CALL_1(iemOpCommonXchgGRegRax, X86_GREG_xCX);
}
9006
9007
/** Opcode 0x92 - XCHG rDX,rAX (REX.B selects r10 via the common helper). */
FNIEMOP_DEF(iemOp_xchg_eDX_eAX)
{
    IEMOP_MNEMONIC("xchg rDX,rAX");
    return FNIEMOP_CALL_1(iemOpCommonXchgGRegRax, X86_GREG_xDX);
}
9014
9015
/** Opcode 0x93 - XCHG rBX,rAX (REX.B selects r11 via the common helper). */
FNIEMOP_DEF(iemOp_xchg_eBX_eAX)
{
    IEMOP_MNEMONIC("xchg rBX,rAX");
    return FNIEMOP_CALL_1(iemOpCommonXchgGRegRax, X86_GREG_xBX);
}
9022
9023
9024/** Opcode 0x94. */
9025FNIEMOP_DEF(iemOp_xchg_eSP_eAX)
9026{
9027 IEMOP_MNEMONIC("xchg rSX,rAX");
9028 return FNIEMOP_CALL_1(iemOpCommonXchgGRegRax, X86_GREG_xSP);
9029}
9030
9031
/** Opcode 0x95 - XCHG rBP,rAX (REX.B selects r13 via the common helper). */
FNIEMOP_DEF(iemOp_xchg_eBP_eAX)
{
    IEMOP_MNEMONIC("xchg rBP,rAX");
    return FNIEMOP_CALL_1(iemOpCommonXchgGRegRax, X86_GREG_xBP);
}
9038
9039
/** Opcode 0x96 - XCHG rSI,rAX (REX.B selects r14 via the common helper). */
FNIEMOP_DEF(iemOp_xchg_eSI_eAX)
{
    IEMOP_MNEMONIC("xchg rSI,rAX");
    return FNIEMOP_CALL_1(iemOpCommonXchgGRegRax, X86_GREG_xSI);
}
9046
9047
/** Opcode 0x97 - XCHG rDI,rAX (REX.B selects r15 via the common helper). */
FNIEMOP_DEF(iemOp_xchg_eDI_eAX)
{
    IEMOP_MNEMONIC("xchg rDI,rAX");
    return FNIEMOP_CALL_1(iemOpCommonXchgGRegRax, X86_GREG_xDI);
}
9054
9055
/**
 * Opcode 0x98 - CBW / CWDE / CDQE.
 *
 * Sign-extends AL into AX, AX into EAX, or EAX into RAX depending on the
 * effective operand size.  Implemented by testing the old sign bit and then
 * OR'ing in all-ones or AND'ing the high part away.
 */
FNIEMOP_DEF(iemOp_cbw)
{
    IEMOP_HLP_NO_LOCK_PREFIX();
    switch (pIemCpu->enmEffOpSize)
    {
        case IEMMODE_16BIT:
            IEMOP_MNEMONIC("cbw");
            IEM_MC_BEGIN(0, 1);
            IEM_MC_IF_GREG_BIT_SET(X86_GREG_xAX, 7) {   /* AL sign bit */
                IEM_MC_OR_GREG_U16(X86_GREG_xAX, UINT16_C(0xff00));
            } IEM_MC_ELSE() {
                IEM_MC_AND_GREG_U16(X86_GREG_xAX, UINT16_C(0x00ff));
            } IEM_MC_ENDIF();
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_32BIT:
            IEMOP_MNEMONIC("cwde");
            IEM_MC_BEGIN(0, 1);
            IEM_MC_IF_GREG_BIT_SET(X86_GREG_xAX, 15) {  /* AX sign bit */
                IEM_MC_OR_GREG_U32(X86_GREG_xAX, UINT32_C(0xffff0000));
            } IEM_MC_ELSE() {
                IEM_MC_AND_GREG_U32(X86_GREG_xAX, UINT32_C(0x0000ffff));
            } IEM_MC_ENDIF();
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_64BIT:
            IEMOP_MNEMONIC("cdqe");
            IEM_MC_BEGIN(0, 1);
            IEM_MC_IF_GREG_BIT_SET(X86_GREG_xAX, 31) {  /* EAX sign bit */
                IEM_MC_OR_GREG_U64(X86_GREG_xAX, UINT64_C(0xffffffff00000000));
            } IEM_MC_ELSE() {
                IEM_MC_AND_GREG_U64(X86_GREG_xAX, UINT64_C(0x00000000ffffffff));
            } IEM_MC_ENDIF();
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
9101
9102
/**
 * Opcode 0x99 - CWD / CDQ / CQO.
 *
 * Sign-extends AX/EAX/RAX into DX/EDX/RDX depending on the effective operand
 * size, by storing all-ones or zero into rDX based on the rAX sign bit.
 */
FNIEMOP_DEF(iemOp_cwd)
{
    IEMOP_HLP_NO_LOCK_PREFIX();
    switch (pIemCpu->enmEffOpSize)
    {
        case IEMMODE_16BIT:
            IEMOP_MNEMONIC("cwd");
            IEM_MC_BEGIN(0, 1);
            IEM_MC_IF_GREG_BIT_SET(X86_GREG_xAX, 15) {  /* AX sign bit */
                IEM_MC_STORE_GREG_U16_CONST(X86_GREG_xDX, UINT16_C(0xffff));
            } IEM_MC_ELSE() {
                IEM_MC_STORE_GREG_U16_CONST(X86_GREG_xDX, 0);
            } IEM_MC_ENDIF();
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_32BIT:
            IEMOP_MNEMONIC("cdq");
            IEM_MC_BEGIN(0, 1);
            IEM_MC_IF_GREG_BIT_SET(X86_GREG_xAX, 31) {  /* EAX sign bit */
                IEM_MC_STORE_GREG_U32_CONST(X86_GREG_xDX, UINT32_C(0xffffffff));
            } IEM_MC_ELSE() {
                IEM_MC_STORE_GREG_U32_CONST(X86_GREG_xDX, 0);
            } IEM_MC_ENDIF();
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_64BIT:
            IEMOP_MNEMONIC("cqo");
            IEM_MC_BEGIN(0, 1);
            IEM_MC_IF_GREG_BIT_SET(X86_GREG_xAX, 63) {  /* RAX sign bit */
                IEM_MC_STORE_GREG_U64_CONST(X86_GREG_xDX, UINT64_C(0xffffffffffffffff));
            } IEM_MC_ELSE() {
                IEM_MC_STORE_GREG_U64_CONST(X86_GREG_xDX, 0);
            } IEM_MC_ENDIF();
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
9148
9149
/**
 * Opcode 0x9a - CALL Ap (far call with immediate ptr16:16 / ptr16:32).
 *
 * Invalid in 64-bit mode.  Decodes the offset (16 or 32 bits by operand
 * size) and selector, then defers to the far-call C implementation.
 */
FNIEMOP_DEF(iemOp_call_Ap)
{
    IEMOP_MNEMONIC("call Ap");
    IEMOP_HLP_NO_64BIT();

    /* Decode the far pointer address and pass it on to the far call C implementation. */
    uint32_t offSeg;
    if (pIemCpu->enmEffOpSize != IEMMODE_16BIT)
        IEM_OPCODE_GET_NEXT_U32(&offSeg);
    else
        IEM_OPCODE_GET_NEXT_U16_ZX_U32(&offSeg);
    uint16_t uSel;  IEM_OPCODE_GET_NEXT_U16(&uSel);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_3(iemCImpl_callf, uSel, offSeg, pIemCpu->enmEffOpSize);
}
9166
9167
/** Opcode 0x9b. (aka fwait)
 * WAIT/FWAIT - checks for pending unmasked x87 FPU exceptions (and #NM when
 * CR0.MP+TS are set); otherwise a no-op. */
FNIEMOP_DEF(iemOp_wait)
{
    IEMOP_MNEMONIC("wait");
    IEMOP_HLP_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(0, 0);
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
9181
9182
9183/** Opcode 0x9c. */
9184FNIEMOP_DEF(iemOp_pushf_Fv)
9185{
9186 IEMOP_HLP_NO_LOCK_PREFIX();
9187 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
9188 return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_pushf, pIemCpu->enmEffOpSize);
9189}
9190
9191
9192/** Opcode 0x9d. */
9193FNIEMOP_DEF(iemOp_popf_Fv)
9194{
9195 IEMOP_HLP_NO_LOCK_PREFIX();
9196 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
9197 return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_popf, pIemCpu->enmEffOpSize);
9198}
9199
9200
/** Opcode 0x9e.
 * SAHF - store AH into the low byte of EFLAGS (SF, ZF, AF, PF, CF), leaving
 * the upper 24 bits untouched. */
FNIEMOP_DEF(iemOp_sahf)
{
    IEMOP_MNEMONIC("sahf");
    IEMOP_HLP_NO_LOCK_PREFIX();
    /* In 64-bit mode LAHF/SAHF are only valid when the CPUID LahfSahf feature
       bit is present (early AMD64 CPUs dropped them). */
    if (   pIemCpu->enmCpuMode == IEMMODE_64BIT
        && !IEM_IS_AMD_CPUID_FEATURE_PRESENT_ECX(X86_CPUID_EXT_FEATURE_ECX_LAHF_SAHF))
        return IEMOP_RAISE_INVALID_OPCODE();
    IEM_MC_BEGIN(0, 2);
    IEM_MC_LOCAL(uint32_t, u32Flags);
    IEM_MC_LOCAL(uint32_t, EFlags);
    IEM_MC_FETCH_EFLAGS(EFlags);
    /* Without REX, byte register index 4 addresses AH. */
    IEM_MC_FETCH_GREG_U8_ZX_U32(u32Flags, X86_GREG_xSP/*=AH*/);
    /* Keep only the status flags SAHF is defined to load ... */
    IEM_MC_AND_LOCAL_U32(u32Flags, X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF | X86_EFL_CF);
    /* ... clear the low byte of EFLAGS and merge, forcing reserved bit 1 on. */
    IEM_MC_AND_LOCAL_U32(EFlags, UINT32_C(0xffffff00));
    IEM_MC_OR_LOCAL_U32(u32Flags, X86_EFL_1);
    IEM_MC_OR_2LOCS_U32(EFlags, u32Flags);
    IEM_MC_COMMIT_EFLAGS(EFlags);
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
9223
9224
/** Opcode 0x9f.
 * LAHF - load the low byte of EFLAGS (SF, ZF, AF, PF, CF plus the always-set
 * bit 1) into AH. */
FNIEMOP_DEF(iemOp_lahf)
{
    IEMOP_MNEMONIC("lahf");
    IEMOP_HLP_NO_LOCK_PREFIX();
    /* In 64-bit mode LAHF/SAHF are only valid when the CPUID LahfSahf feature
       bit is present (early AMD64 CPUs dropped them). */
    if (   pIemCpu->enmCpuMode == IEMMODE_64BIT
        && !IEM_IS_AMD_CPUID_FEATURE_PRESENT_ECX(X86_CPUID_EXT_FEATURE_ECX_LAHF_SAHF))
        return IEMOP_RAISE_INVALID_OPCODE();
    IEM_MC_BEGIN(0, 1);
    IEM_MC_LOCAL(uint8_t, u8Flags);
    IEM_MC_FETCH_EFLAGS_U8(u8Flags);
    /* Without REX, byte register index 4 addresses AH. */
    IEM_MC_STORE_GREG_U8(X86_GREG_xSP/*=AH*/, u8Flags);
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
9241
9242
/**
 * Macro used by iemOp_mov_Al_Ob, iemOp_mov_rAX_Ov, iemOp_mov_Ob_AL and
 * iemOp_mov_Ov_rAX to fetch the moffsXX bit of the opcode and fend of lock
 * prefixes. Will return on failures.
 *
 * The moffs operand is an absolute memory offset embedded in the instruction
 * stream; its width follows the effective address size (16, 32 or 64 bits)
 * and it is always zero extended to 64 bits here.
 *
 * @param a_GCPtrMemOff The variable to store the offset in.
 */
#define IEMOP_FETCH_MOFFS_XX(a_GCPtrMemOff) \
    do \
    { \
        switch (pIemCpu->enmEffAddrMode) \
        { \
            case IEMMODE_16BIT: \
                IEM_OPCODE_GET_NEXT_U16_ZX_U64(&(a_GCPtrMemOff)); \
                break; \
            case IEMMODE_32BIT: \
                IEM_OPCODE_GET_NEXT_U32_ZX_U64(&(a_GCPtrMemOff)); \
                break; \
            case IEMMODE_64BIT: \
                IEM_OPCODE_GET_NEXT_U64(&(a_GCPtrMemOff)); \
                break; \
            IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
        } \
        IEMOP_HLP_NO_LOCK_PREFIX(); \
    } while (0)
9267
9268/** Opcode 0xa0. */
9269FNIEMOP_DEF(iemOp_mov_Al_Ob)
9270{
9271 /*
9272 * Get the offset and fend of lock prefixes.
9273 */
9274 RTGCPTR GCPtrMemOff;
9275 IEMOP_FETCH_MOFFS_XX(GCPtrMemOff);
9276
9277 /*
9278 * Fetch AL.
9279 */
9280 IEM_MC_BEGIN(0,1);
9281 IEM_MC_LOCAL(uint8_t, u8Tmp);
9282 IEM_MC_FETCH_MEM_U8(u8Tmp, pIemCpu->iEffSeg, GCPtrMemOff);
9283 IEM_MC_STORE_GREG_U8(X86_GREG_xAX, u8Tmp);
9284 IEM_MC_ADVANCE_RIP();
9285 IEM_MC_END();
9286 return VINF_SUCCESS;
9287}
9288
9289
/** Opcode 0xa1.
 * MOV rAX, moffs16/32/64 - load AX/EAX/RAX from an absolute memory offset,
 * operand width selected by the effective operand size. */
FNIEMOP_DEF(iemOp_mov_rAX_Ov)
{
    /*
     * Get the offset and fend of lock prefixes.
     */
    IEMOP_MNEMONIC("mov rAX,Ov");
    RTGCPTR GCPtrMemOff;
    IEMOP_FETCH_MOFFS_XX(GCPtrMemOff);

    /*
     * Fetch rAX.
     */
    switch (pIemCpu->enmEffOpSize)
    {
        case IEMMODE_16BIT:
            IEM_MC_BEGIN(0,1);
            IEM_MC_LOCAL(uint16_t, u16Tmp);
            IEM_MC_FETCH_MEM_U16(u16Tmp, pIemCpu->iEffSeg, GCPtrMemOff);
            IEM_MC_STORE_GREG_U16(X86_GREG_xAX, u16Tmp);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_32BIT:
            IEM_MC_BEGIN(0,1);
            IEM_MC_LOCAL(uint32_t, u32Tmp);
            IEM_MC_FETCH_MEM_U32(u32Tmp, pIemCpu->iEffSeg, GCPtrMemOff);
            IEM_MC_STORE_GREG_U32(X86_GREG_xAX, u32Tmp);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_64BIT:
            IEM_MC_BEGIN(0,1);
            IEM_MC_LOCAL(uint64_t, u64Tmp);
            IEM_MC_FETCH_MEM_U64(u64Tmp, pIemCpu->iEffSeg, GCPtrMemOff);
            IEM_MC_STORE_GREG_U64(X86_GREG_xAX, u64Tmp);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
9335
9336
9337/** Opcode 0xa2. */
9338FNIEMOP_DEF(iemOp_mov_Ob_AL)
9339{
9340 /*
9341 * Get the offset and fend of lock prefixes.
9342 */
9343 RTGCPTR GCPtrMemOff;
9344 IEMOP_FETCH_MOFFS_XX(GCPtrMemOff);
9345
9346 /*
9347 * Store AL.
9348 */
9349 IEM_MC_BEGIN(0,1);
9350 IEM_MC_LOCAL(uint8_t, u8Tmp);
9351 IEM_MC_FETCH_GREG_U8(u8Tmp, X86_GREG_xAX);
9352 IEM_MC_STORE_MEM_U8(pIemCpu->iEffSeg, GCPtrMemOff, u8Tmp);
9353 IEM_MC_ADVANCE_RIP();
9354 IEM_MC_END();
9355 return VINF_SUCCESS;
9356}
9357
9358
9359/** Opcode 0xa3. */
9360FNIEMOP_DEF(iemOp_mov_Ov_rAX)
9361{
9362 /*
9363 * Get the offset and fend of lock prefixes.
9364 */
9365 RTGCPTR GCPtrMemOff;
9366 IEMOP_FETCH_MOFFS_XX(GCPtrMemOff);
9367
9368 /*
9369 * Store rAX.
9370 */
9371 switch (pIemCpu->enmEffOpSize)
9372 {
9373 case IEMMODE_16BIT:
9374 IEM_MC_BEGIN(0,1);
9375 IEM_MC_LOCAL(uint16_t, u16Tmp);
9376 IEM_MC_FETCH_GREG_U16(u16Tmp, X86_GREG_xAX);
9377 IEM_MC_STORE_MEM_U16(pIemCpu->iEffSeg, GCPtrMemOff, u16Tmp);
9378 IEM_MC_ADVANCE_RIP();
9379 IEM_MC_END();
9380 return VINF_SUCCESS;
9381
9382 case IEMMODE_32BIT:
9383 IEM_MC_BEGIN(0,1);
9384 IEM_MC_LOCAL(uint32_t, u32Tmp);
9385 IEM_MC_FETCH_GREG_U32(u32Tmp, X86_GREG_xAX);
9386 IEM_MC_STORE_MEM_U32(pIemCpu->iEffSeg, GCPtrMemOff, u32Tmp);
9387 IEM_MC_ADVANCE_RIP();
9388 IEM_MC_END();
9389 return VINF_SUCCESS;
9390
9391 case IEMMODE_64BIT:
9392 IEM_MC_BEGIN(0,1);
9393 IEM_MC_LOCAL(uint64_t, u64Tmp);
9394 IEM_MC_FETCH_GREG_U64(u64Tmp, X86_GREG_xAX);
9395 IEM_MC_STORE_MEM_U64(pIemCpu->iEffSeg, GCPtrMemOff, u64Tmp);
9396 IEM_MC_ADVANCE_RIP();
9397 IEM_MC_END();
9398 return VINF_SUCCESS;
9399
9400 IEM_NOT_REACHED_DEFAULT_CASE_RET();
9401 }
9402}
9403
/** Macro used by iemOp_movsb_Xb_Yb and iemOp_movswd_Xv_Yv.
 * Implements one non-repeated MOVS iteration: load ValBits from
 * iEffSeg:xSI, store to ES:xDI, then advance (DF clear) or retreat (DF set)
 * both index registers by the operand size in bytes. Index registers are
 * AddrBits wide, zero extended to 64 bits for the memory access. */
#define IEM_MOVS_CASE(ValBits, AddrBits) \
    IEM_MC_BEGIN(0, 2); \
    IEM_MC_LOCAL(uint##ValBits##_t, uValue); \
    IEM_MC_LOCAL(RTGCPTR,  uAddr); \
    IEM_MC_FETCH_GREG_U##AddrBits##_ZX_U64(uAddr, X86_GREG_xSI); \
    IEM_MC_FETCH_MEM_U##ValBits(uValue, pIemCpu->iEffSeg, uAddr); \
    IEM_MC_FETCH_GREG_U##AddrBits##_ZX_U64(uAddr, X86_GREG_xDI); \
    IEM_MC_STORE_MEM_U##ValBits(X86_SREG_ES, uAddr, uValue); \
    IEM_MC_IF_EFL_BIT_SET(X86_EFL_DF) { \
        IEM_MC_SUB_GREG_U##AddrBits(X86_GREG_xDI, ValBits / 8); \
        IEM_MC_SUB_GREG_U##AddrBits(X86_GREG_xSI, ValBits / 8); \
    } IEM_MC_ELSE() { \
        IEM_MC_ADD_GREG_U##AddrBits(X86_GREG_xDI, ValBits / 8); \
        IEM_MC_ADD_GREG_U##AddrBits(X86_GREG_xSI, ValBits / 8); \
    } IEM_MC_ENDIF(); \
    IEM_MC_ADVANCE_RIP(); \
    IEM_MC_END();
9422
/** Opcode 0xa4.
 * MOVSB - move byte from DS(or override):xSI to ES:xDI. */
FNIEMOP_DEF(iemOp_movsb_Xb_Yb)
{
    IEMOP_HLP_NO_LOCK_PREFIX();

    /*
     * Use the C implementation if a repeat prefix is encountered.
     * (REP and REPNE behave identically for MOVS.)
     */
    if (pIemCpu->fPrefixes & (IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
    {
        IEMOP_MNEMONIC("rep movsb Xb,Yb");
        switch (pIemCpu->enmEffAddrMode)
        {
            case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_movs_op8_addr16, pIemCpu->iEffSeg);
            case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_movs_op8_addr32, pIemCpu->iEffSeg);
            case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_movs_op8_addr64, pIemCpu->iEffSeg);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    IEMOP_MNEMONIC("movsb Xb,Yb");

    /*
     * Sharing case implementation with movs[wdq] below.
     */
    switch (pIemCpu->enmEffAddrMode)
    {
        case IEMMODE_16BIT: IEM_MOVS_CASE(8, 16); break;
        case IEMMODE_32BIT: IEM_MOVS_CASE(8, 32); break;
        case IEMMODE_64BIT: IEM_MOVS_CASE(8, 64); break;
        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
    return VINF_SUCCESS;
}
9456
9457
/** Opcode 0xa5.
 * MOVSW/MOVSD/MOVSQ - move word/dword/qword from DS(or override):xSI to
 * ES:xDI, width selected by the effective operand size. */
FNIEMOP_DEF(iemOp_movswd_Xv_Yv)
{
    IEMOP_HLP_NO_LOCK_PREFIX();

    /*
     * Use the C implementation if a repeat prefix is encountered.
     * (REP and REPNE behave identically for MOVS.)
     */
    if (pIemCpu->fPrefixes & (IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
    {
        IEMOP_MNEMONIC("rep movs Xv,Yv");
        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
                switch (pIemCpu->enmEffAddrMode)
                {
                    case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_movs_op16_addr16, pIemCpu->iEffSeg);
                    case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_movs_op16_addr32, pIemCpu->iEffSeg);
                    case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_movs_op16_addr64, pIemCpu->iEffSeg);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                break;
            case IEMMODE_32BIT:
                switch (pIemCpu->enmEffAddrMode)
                {
                    case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_movs_op32_addr16, pIemCpu->iEffSeg);
                    case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_movs_op32_addr32, pIemCpu->iEffSeg);
                    case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_movs_op32_addr64, pIemCpu->iEffSeg);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                /* no break needed: every inner case above returns */
            case IEMMODE_64BIT:
                switch (pIemCpu->enmEffAddrMode)
                {
                    case IEMMODE_16BIT: AssertFailedReturn(VERR_INTERNAL_ERROR_3);
                    case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_movs_op64_addr32, pIemCpu->iEffSeg);
                    case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_movs_op64_addr64, pIemCpu->iEffSeg);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    IEMOP_MNEMONIC("movs Xv,Yv");

    /*
     * Annoying double switch here.
     * Using ugly macro for implementing the cases, sharing it with movsb.
     */
    switch (pIemCpu->enmEffOpSize)
    {
        case IEMMODE_16BIT:
            switch (pIemCpu->enmEffAddrMode)
            {
                case IEMMODE_16BIT: IEM_MOVS_CASE(16, 16); break;
                case IEMMODE_32BIT: IEM_MOVS_CASE(16, 32); break;
                case IEMMODE_64BIT: IEM_MOVS_CASE(16, 64); break;
                IEM_NOT_REACHED_DEFAULT_CASE_RET();
            }
            break;

        case IEMMODE_32BIT:
            switch (pIemCpu->enmEffAddrMode)
            {
                case IEMMODE_16BIT: IEM_MOVS_CASE(32, 16); break;
                case IEMMODE_32BIT: IEM_MOVS_CASE(32, 32); break;
                case IEMMODE_64BIT: IEM_MOVS_CASE(32, 64); break;
                IEM_NOT_REACHED_DEFAULT_CASE_RET();
            }
            break;

        case IEMMODE_64BIT:
            switch (pIemCpu->enmEffAddrMode)
            {
                case IEMMODE_16BIT: AssertFailedReturn(VERR_INTERNAL_ERROR_4); /* cannot be encoded */ break;
                case IEMMODE_32BIT: IEM_MOVS_CASE(64, 32); break;
                case IEMMODE_64BIT: IEM_MOVS_CASE(64, 64); break;
                IEM_NOT_REACHED_DEFAULT_CASE_RET();
            }
            break;
        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
    return VINF_SUCCESS;
}
9540
9541#undef IEM_MOVS_CASE
9542
/** Macro used by iemOp_cmpsb_Xb_Yb and iemOp_cmpswd_Xv_Yv.
 * Implements one non-repeated CMPS iteration: load ValBits from
 * iEffSeg:xSI and from ES:xDI, compare them via the cmp assembly worker
 * (updating EFLAGS, discarding the subtraction result), then advance
 * (DF clear) or retreat (DF set) both index registers by the operand size. */
#define IEM_CMPS_CASE(ValBits, AddrBits) \
    IEM_MC_BEGIN(3, 3); \
    IEM_MC_ARG(uint##ValBits##_t *, puValue1, 0); \
    IEM_MC_ARG(uint##ValBits##_t,   uValue2,  1); \
    IEM_MC_ARG(uint32_t *,          pEFlags,  2); \
    IEM_MC_LOCAL(uint##ValBits##_t, uValue1); \
    IEM_MC_LOCAL(RTGCPTR,  uAddr); \
    \
    IEM_MC_FETCH_GREG_U##AddrBits##_ZX_U64(uAddr, X86_GREG_xSI); \
    IEM_MC_FETCH_MEM_U##ValBits(uValue1, pIemCpu->iEffSeg, uAddr); \
    IEM_MC_FETCH_GREG_U##AddrBits##_ZX_U64(uAddr, X86_GREG_xDI); \
    IEM_MC_FETCH_MEM_U##ValBits(uValue2, X86_SREG_ES, uAddr); \
    IEM_MC_REF_LOCAL(puValue1, uValue1); \
    IEM_MC_REF_EFLAGS(pEFlags); \
    IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_cmp_u##ValBits, puValue1, uValue2, pEFlags); \
    \
    IEM_MC_IF_EFL_BIT_SET(X86_EFL_DF) { \
        IEM_MC_SUB_GREG_U##AddrBits(X86_GREG_xDI, ValBits / 8); \
        IEM_MC_SUB_GREG_U##AddrBits(X86_GREG_xSI, ValBits / 8); \
    } IEM_MC_ELSE() { \
        IEM_MC_ADD_GREG_U##AddrBits(X86_GREG_xDI, ValBits / 8); \
        IEM_MC_ADD_GREG_U##AddrBits(X86_GREG_xSI, ValBits / 8); \
    } IEM_MC_ENDIF(); \
    IEM_MC_ADVANCE_RIP(); \
    IEM_MC_END(); \

9570/** Opcode 0xa6. */
9571FNIEMOP_DEF(iemOp_cmpsb_Xb_Yb)
9572{
9573 IEMOP_HLP_NO_LOCK_PREFIX();
9574
9575 /*
9576 * Use the C implementation if a repeat prefix is encountered.
9577 */
9578 if (pIemCpu->fPrefixes & IEM_OP_PRF_REPZ)
9579 {
9580 IEMOP_MNEMONIC("repe cmps Xb,Yb");
9581 switch (pIemCpu->enmEffAddrMode)
9582 {
9583 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repe_cmps_op8_addr16, pIemCpu->iEffSeg);
9584 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repe_cmps_op8_addr32, pIemCpu->iEffSeg);
9585 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repe_cmps_op8_addr64, pIemCpu->iEffSeg);
9586 IEM_NOT_REACHED_DEFAULT_CASE_RET();
9587 }
9588 }
9589 if (pIemCpu->fPrefixes & IEM_OP_PRF_REPNZ)
9590 {
9591 IEMOP_MNEMONIC("repe cmps Xb,Yb");
9592 switch (pIemCpu->enmEffAddrMode)
9593 {
9594 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repne_cmps_op8_addr16, pIemCpu->iEffSeg);
9595 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repne_cmps_op8_addr32, pIemCpu->iEffSeg);
9596 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repne_cmps_op8_addr64, pIemCpu->iEffSeg);
9597 IEM_NOT_REACHED_DEFAULT_CASE_RET();
9598 }
9599 }
9600 IEMOP_MNEMONIC("cmps Xb,Yb");
9601
9602 /*
9603 * Sharing case implementation with cmps[wdq] below.
9604 */
9605 switch (pIemCpu->enmEffAddrMode)
9606 {
9607 case IEMMODE_16BIT: IEM_CMPS_CASE(8, 16); break;
9608 case IEMMODE_32BIT: IEM_CMPS_CASE(8, 32); break;
9609 case IEMMODE_64BIT: IEM_CMPS_CASE(8, 64); break;
9610 IEM_NOT_REACHED_DEFAULT_CASE_RET();
9611 }
9612 return VINF_SUCCESS;
9613
9614}
9615
9616
/** Opcode 0xa7.
 * CMPSW/CMPSD/CMPSQ - compare word/dword/qword at DS(or override):xSI with
 * the one at ES:xDI, width selected by the effective operand size. */
FNIEMOP_DEF(iemOp_cmpswd_Xv_Yv)
{
    IEMOP_HLP_NO_LOCK_PREFIX();

    /*
     * Use the C implementation if a repeat prefix is encountered.
     * (REPE and REPNE differ for CMPS: the termination condition depends
     * on ZF, so there is a separate branch and C worker for each.)
     */
    if (pIemCpu->fPrefixes & IEM_OP_PRF_REPZ)
    {
        IEMOP_MNEMONIC("repe cmps Xv,Yv");
        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
                switch (pIemCpu->enmEffAddrMode)
                {
                    case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repe_cmps_op16_addr16, pIemCpu->iEffSeg);
                    case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repe_cmps_op16_addr32, pIemCpu->iEffSeg);
                    case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repe_cmps_op16_addr64, pIemCpu->iEffSeg);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                break;
            case IEMMODE_32BIT:
                switch (pIemCpu->enmEffAddrMode)
                {
                    case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repe_cmps_op32_addr16, pIemCpu->iEffSeg);
                    case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repe_cmps_op32_addr32, pIemCpu->iEffSeg);
                    case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repe_cmps_op32_addr64, pIemCpu->iEffSeg);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                /* no break needed: every inner case above returns */
            case IEMMODE_64BIT:
                switch (pIemCpu->enmEffAddrMode)
                {
                    case IEMMODE_16BIT: AssertFailedReturn(VERR_INTERNAL_ERROR_3);
                    case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repe_cmps_op64_addr32, pIemCpu->iEffSeg);
                    case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repe_cmps_op64_addr64, pIemCpu->iEffSeg);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }

    if (pIemCpu->fPrefixes & IEM_OP_PRF_REPNZ)
    {
        IEMOP_MNEMONIC("repne cmps Xv,Yv");
        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
                switch (pIemCpu->enmEffAddrMode)
                {
                    case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repne_cmps_op16_addr16, pIemCpu->iEffSeg);
                    case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repne_cmps_op16_addr32, pIemCpu->iEffSeg);
                    case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repne_cmps_op16_addr64, pIemCpu->iEffSeg);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                break;
            case IEMMODE_32BIT:
                switch (pIemCpu->enmEffAddrMode)
                {
                    case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repne_cmps_op32_addr16, pIemCpu->iEffSeg);
                    case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repne_cmps_op32_addr32, pIemCpu->iEffSeg);
                    case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repne_cmps_op32_addr64, pIemCpu->iEffSeg);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                /* no break needed: every inner case above returns */
            case IEMMODE_64BIT:
                switch (pIemCpu->enmEffAddrMode)
                {
                    case IEMMODE_16BIT: AssertFailedReturn(VERR_INTERNAL_ERROR_3);
                    case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repne_cmps_op64_addr32, pIemCpu->iEffSeg);
                    case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repne_cmps_op64_addr64, pIemCpu->iEffSeg);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }

    IEMOP_MNEMONIC("cmps Xv,Yv");

    /*
     * Annoying double switch here.
     * Using ugly macro for implementing the cases, sharing it with cmpsb.
     */
    switch (pIemCpu->enmEffOpSize)
    {
        case IEMMODE_16BIT:
            switch (pIemCpu->enmEffAddrMode)
            {
                case IEMMODE_16BIT: IEM_CMPS_CASE(16, 16); break;
                case IEMMODE_32BIT: IEM_CMPS_CASE(16, 32); break;
                case IEMMODE_64BIT: IEM_CMPS_CASE(16, 64); break;
                IEM_NOT_REACHED_DEFAULT_CASE_RET();
            }
            break;

        case IEMMODE_32BIT:
            switch (pIemCpu->enmEffAddrMode)
            {
                case IEMMODE_16BIT: IEM_CMPS_CASE(32, 16); break;
                case IEMMODE_32BIT: IEM_CMPS_CASE(32, 32); break;
                case IEMMODE_64BIT: IEM_CMPS_CASE(32, 64); break;
                IEM_NOT_REACHED_DEFAULT_CASE_RET();
            }
            break;

        case IEMMODE_64BIT:
            switch (pIemCpu->enmEffAddrMode)
            {
                case IEMMODE_16BIT: AssertFailedReturn(VERR_INTERNAL_ERROR_4); /* cannot be encoded */ break;
                case IEMMODE_32BIT: IEM_CMPS_CASE(64, 32); break;
                case IEMMODE_64BIT: IEM_CMPS_CASE(64, 64); break;
                IEM_NOT_REACHED_DEFAULT_CASE_RET();
            }
            break;
        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
    return VINF_SUCCESS;

}
9735
9736#undef IEM_CMPS_CASE
9737
/** Opcode 0xa8.
 * TEST AL, imm8 - AND without storing the result; only EFLAGS are updated.
 * Shares the generic AL,Ib binary-operator decoder with the test worker. */
FNIEMOP_DEF(iemOp_test_AL_Ib)
{
    IEMOP_MNEMONIC("test al,Ib");
    /* AF is architecturally undefined after TEST. */
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_AL_Ib, &g_iemAImpl_test);
}
9745
9746
/** Opcode 0xa9.
 * TEST rAX, imm16/32 - AND without storing the result; only EFLAGS change.
 * Shares the generic rAX,Iz binary-operator decoder with the test worker. */
FNIEMOP_DEF(iemOp_test_eAX_Iz)
{
    IEMOP_MNEMONIC("test rAX,Iz");
    /* AF is architecturally undefined after TEST. */
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rAX_Iz, &g_iemAImpl_test);
}
9754
9755
/** Macro used by iemOp_stosb_Yb_AL and iemOp_stoswd_Yv_eAX.
 * Implements one non-repeated STOS iteration: store the low ValBits of xAX
 * to ES:xDI, then advance (DF clear) or retreat (DF set) xDI by the operand
 * size in bytes. The segment cannot be overridden for STOS. */
#define IEM_STOS_CASE(ValBits, AddrBits) \
    IEM_MC_BEGIN(0, 2); \
    IEM_MC_LOCAL(uint##ValBits##_t, uValue); \
    IEM_MC_LOCAL(RTGCPTR,  uAddr); \
    IEM_MC_FETCH_GREG_U##ValBits(uValue, X86_GREG_xAX); \
    IEM_MC_FETCH_GREG_U##AddrBits##_ZX_U64(uAddr,  X86_GREG_xDI); \
    IEM_MC_STORE_MEM_U##ValBits(X86_SREG_ES, uAddr, uValue); \
    IEM_MC_IF_EFL_BIT_SET(X86_EFL_DF) { \
        IEM_MC_SUB_GREG_U##AddrBits(X86_GREG_xDI, ValBits / 8); \
    } IEM_MC_ELSE() { \
        IEM_MC_ADD_GREG_U##AddrBits(X86_GREG_xDI, ValBits / 8); \
    } IEM_MC_ENDIF(); \
    IEM_MC_ADVANCE_RIP(); \
    IEM_MC_END(); \

/** Opcode 0xaa.
 * STOSB - store AL at ES:xDI. */
FNIEMOP_DEF(iemOp_stosb_Yb_AL)
{
    IEMOP_HLP_NO_LOCK_PREFIX();

    /*
     * Use the C implementation if a repeat prefix is encountered.
     * (REP and REPNE behave identically for STOS.)
     */
    if (pIemCpu->fPrefixes & (IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
    {
        IEMOP_MNEMONIC("rep stos Yb,al");
        switch (pIemCpu->enmEffAddrMode)
        {
            case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_stos_al_m16);
            case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_stos_al_m32);
            case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_stos_al_m64);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    IEMOP_MNEMONIC("stos Yb,al");

    /*
     * Sharing case implementation with stos[wdq] below.
     */
    switch (pIemCpu->enmEffAddrMode)
    {
        case IEMMODE_16BIT: IEM_STOS_CASE(8, 16); break;
        case IEMMODE_32BIT: IEM_STOS_CASE(8, 32); break;
        case IEMMODE_64BIT: IEM_STOS_CASE(8, 64); break;
        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
    return VINF_SUCCESS;
}
9805
9806
/** Opcode 0xab.
 * STOSW/STOSD/STOSQ - store AX/EAX/RAX at ES:xDI, width selected by the
 * effective operand size. */
FNIEMOP_DEF(iemOp_stoswd_Yv_eAX)
{
    IEMOP_HLP_NO_LOCK_PREFIX();

    /*
     * Use the C implementation if a repeat prefix is encountered.
     * (REP and REPNE behave identically for STOS.)
     */
    if (pIemCpu->fPrefixes & (IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
    {
        IEMOP_MNEMONIC("rep stos Yv,rAX");
        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
                switch (pIemCpu->enmEffAddrMode)
                {
                    case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_stos_ax_m16);
                    case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_stos_ax_m32);
                    case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_stos_ax_m64);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                break;
            case IEMMODE_32BIT:
                switch (pIemCpu->enmEffAddrMode)
                {
                    case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_stos_eax_m16);
                    case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_stos_eax_m32);
                    case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_stos_eax_m64);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                /* no break needed: every inner case above returns */
            case IEMMODE_64BIT:
                switch (pIemCpu->enmEffAddrMode)
                {
                    case IEMMODE_16BIT: AssertFailedReturn(VERR_INTERNAL_ERROR_3);
                    case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_stos_rax_m32);
                    case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_stos_rax_m64);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    IEMOP_MNEMONIC("stos Yv,rAX");

    /*
     * Annoying double switch here.
     * Using ugly macro for implementing the cases, sharing it with stosb.
     */
    switch (pIemCpu->enmEffOpSize)
    {
        case IEMMODE_16BIT:
            switch (pIemCpu->enmEffAddrMode)
            {
                case IEMMODE_16BIT: IEM_STOS_CASE(16, 16); break;
                case IEMMODE_32BIT: IEM_STOS_CASE(16, 32); break;
                case IEMMODE_64BIT: IEM_STOS_CASE(16, 64); break;
                IEM_NOT_REACHED_DEFAULT_CASE_RET();
            }
            break;

        case IEMMODE_32BIT:
            switch (pIemCpu->enmEffAddrMode)
            {
                case IEMMODE_16BIT: IEM_STOS_CASE(32, 16); break;
                case IEMMODE_32BIT: IEM_STOS_CASE(32, 32); break;
                case IEMMODE_64BIT: IEM_STOS_CASE(32, 64); break;
                IEM_NOT_REACHED_DEFAULT_CASE_RET();
            }
            break;

        case IEMMODE_64BIT:
            switch (pIemCpu->enmEffAddrMode)
            {
                case IEMMODE_16BIT: AssertFailedReturn(VERR_INTERNAL_ERROR_4); /* cannot be encoded */ break;
                case IEMMODE_32BIT: IEM_STOS_CASE(64, 32); break;
                case IEMMODE_64BIT: IEM_STOS_CASE(64, 64); break;
                IEM_NOT_REACHED_DEFAULT_CASE_RET();
            }
            break;
        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
    return VINF_SUCCESS;
}
9889
9890#undef IEM_STOS_CASE
9891
/** Macro used by iemOp_lodsb_AL_Xb and iemOp_lodswd_eAX_Xv.
 * Implements one non-repeated LODS iteration: load ValBits from
 * iEffSeg:xSI into the low part of xAX, then advance (DF clear) or retreat
 * (DF set) xSI by the operand size in bytes. */
#define IEM_LODS_CASE(ValBits, AddrBits) \
    IEM_MC_BEGIN(0, 2); \
    IEM_MC_LOCAL(uint##ValBits##_t, uValue); \
    IEM_MC_LOCAL(RTGCPTR,  uAddr); \
    IEM_MC_FETCH_GREG_U##AddrBits##_ZX_U64(uAddr, X86_GREG_xSI); \
    IEM_MC_FETCH_MEM_U##ValBits(uValue, pIemCpu->iEffSeg, uAddr); \
    IEM_MC_STORE_GREG_U##ValBits(X86_GREG_xAX, uValue); \
    IEM_MC_IF_EFL_BIT_SET(X86_EFL_DF) { \
        IEM_MC_SUB_GREG_U##AddrBits(X86_GREG_xSI, ValBits / 8); \
    } IEM_MC_ELSE() { \
        IEM_MC_ADD_GREG_U##AddrBits(X86_GREG_xSI, ValBits / 8); \
    } IEM_MC_ENDIF(); \
    IEM_MC_ADVANCE_RIP(); \
    IEM_MC_END();
9907
/** Opcode 0xac.
 * LODSB - load AL from DS(or override):xSI. */
FNIEMOP_DEF(iemOp_lodsb_AL_Xb)
{
    IEMOP_HLP_NO_LOCK_PREFIX();

    /*
     * Use the C implementation if a repeat prefix is encountered.
     * (REP and REPNE behave identically for LODS.)
     */
    if (pIemCpu->fPrefixes & (IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
    {
        IEMOP_MNEMONIC("rep lodsb al,Xb");
        switch (pIemCpu->enmEffAddrMode)
        {
            case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_lods_al_m16, pIemCpu->iEffSeg);
            case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_lods_al_m32, pIemCpu->iEffSeg);
            case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_lods_al_m64, pIemCpu->iEffSeg);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    IEMOP_MNEMONIC("lodsb al,Xb");

    /*
     * Sharing case implementation with lods[wdq] below.
     */
    switch (pIemCpu->enmEffAddrMode)
    {
        case IEMMODE_16BIT: IEM_LODS_CASE(8, 16); break;
        case IEMMODE_32BIT: IEM_LODS_CASE(8, 32); break;
        case IEMMODE_64BIT: IEM_LODS_CASE(8, 64); break;
        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
    return VINF_SUCCESS;
}
9941
9942
/** Opcode 0xad.
 * LODSW/LODSD/LODSQ - load AX/EAX/RAX from DS(or override):xSI, width
 * selected by the effective operand size. */
FNIEMOP_DEF(iemOp_lodswd_eAX_Xv)
{
    IEMOP_HLP_NO_LOCK_PREFIX();

    /*
     * Use the C implementation if a repeat prefix is encountered.
     * (REP and REPNE behave identically for LODS.)
     */
    if (pIemCpu->fPrefixes & (IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
    {
        IEMOP_MNEMONIC("rep lods rAX,Xv");
        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
                switch (pIemCpu->enmEffAddrMode)
                {
                    case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_lods_ax_m16, pIemCpu->iEffSeg);
                    case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_lods_ax_m32, pIemCpu->iEffSeg);
                    case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_lods_ax_m64, pIemCpu->iEffSeg);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                break;
            case IEMMODE_32BIT:
                switch (pIemCpu->enmEffAddrMode)
                {
                    case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_lods_eax_m16, pIemCpu->iEffSeg);
                    case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_lods_eax_m32, pIemCpu->iEffSeg);
                    case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_lods_eax_m64, pIemCpu->iEffSeg);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                /* no break needed: every inner case above returns */
            case IEMMODE_64BIT:
                switch (pIemCpu->enmEffAddrMode)
                {
                    case IEMMODE_16BIT: AssertFailedReturn(VERR_INTERNAL_ERROR_3);
                    case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_lods_rax_m32, pIemCpu->iEffSeg);
                    case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_lods_rax_m64, pIemCpu->iEffSeg);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    IEMOP_MNEMONIC("lods rAX,Xv");

    /*
     * Annoying double switch here.
     * Using ugly macro for implementing the cases, sharing it with lodsb.
     */
    switch (pIemCpu->enmEffOpSize)
    {
        case IEMMODE_16BIT:
            switch (pIemCpu->enmEffAddrMode)
            {
                case IEMMODE_16BIT: IEM_LODS_CASE(16, 16); break;
                case IEMMODE_32BIT: IEM_LODS_CASE(16, 32); break;
                case IEMMODE_64BIT: IEM_LODS_CASE(16, 64); break;
                IEM_NOT_REACHED_DEFAULT_CASE_RET();
            }
            break;

        case IEMMODE_32BIT:
            switch (pIemCpu->enmEffAddrMode)
            {
                case IEMMODE_16BIT: IEM_LODS_CASE(32, 16); break;
                case IEMMODE_32BIT: IEM_LODS_CASE(32, 32); break;
                case IEMMODE_64BIT: IEM_LODS_CASE(32, 64); break;
                IEM_NOT_REACHED_DEFAULT_CASE_RET();
            }
            break;

        case IEMMODE_64BIT:
            switch (pIemCpu->enmEffAddrMode)
            {
                case IEMMODE_16BIT: AssertFailedReturn(VERR_INTERNAL_ERROR_4); /* cannot be encoded */ break;
                case IEMMODE_32BIT: IEM_LODS_CASE(64, 32); break;
                case IEMMODE_64BIT: IEM_LODS_CASE(64, 64); break;
                IEM_NOT_REACHED_DEFAULT_CASE_RET();
            }
            break;
        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
    return VINF_SUCCESS;
}
10025
10026#undef IEM_LODS_CASE
10027
/** Macro used by iemOp_scasb_AL_Xb and iemOp_scaswd_eAX_Xv.
 * Implements one non-repeated SCAS iteration: compare the low ValBits of
 * xAX with the value at ES:xDI via the cmp assembly worker (updating EFLAGS
 * only), then advance (DF clear) or retreat (DF set) xDI by the operand
 * size in bytes. The segment cannot be overridden for SCAS. */
#define IEM_SCAS_CASE(ValBits, AddrBits) \
    IEM_MC_BEGIN(3, 2); \
    IEM_MC_ARG(uint##ValBits##_t *, puRax,   0); \
    IEM_MC_ARG(uint##ValBits##_t,   uValue,  1); \
    IEM_MC_ARG(uint32_t *,          pEFlags, 2); \
    IEM_MC_LOCAL(RTGCPTR, uAddr); \
    \
    IEM_MC_FETCH_GREG_U##AddrBits##_ZX_U64(uAddr, X86_GREG_xDI); \
    IEM_MC_FETCH_MEM_U##ValBits(uValue, X86_SREG_ES, uAddr); \
    IEM_MC_REF_GREG_U##ValBits(puRax, X86_GREG_xAX); \
    IEM_MC_REF_EFLAGS(pEFlags); \
    IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_cmp_u##ValBits, puRax, uValue, pEFlags); \
    \
    IEM_MC_IF_EFL_BIT_SET(X86_EFL_DF) { \
        IEM_MC_SUB_GREG_U##AddrBits(X86_GREG_xDI, ValBits / 8); \
    } IEM_MC_ELSE() { \
        IEM_MC_ADD_GREG_U##AddrBits(X86_GREG_xDI, ValBits / 8); \
    } IEM_MC_ENDIF(); \
    IEM_MC_ADVANCE_RIP(); \
    IEM_MC_END();
10049
/** Opcode 0xae.
 * SCASB - compare AL with the byte at ES:xDI. */
FNIEMOP_DEF(iemOp_scasb_AL_Xb)
{
    IEMOP_HLP_NO_LOCK_PREFIX();

    /*
     * Use the C implementation if a repeat prefix is encountered.
     * (REPE and REPNE differ for SCAS: the termination condition depends
     * on ZF, so there is a separate branch and C worker for each.)
     */
    if (pIemCpu->fPrefixes & IEM_OP_PRF_REPZ)
    {
        IEMOP_MNEMONIC("repe scasb al,Xb");
        switch (pIemCpu->enmEffAddrMode)
        {
            case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repe_scas_al_m16);
            case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repe_scas_al_m32);
            case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repe_scas_al_m64);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    if (pIemCpu->fPrefixes & IEM_OP_PRF_REPNZ)
    {
        IEMOP_MNEMONIC("repne scasb al,Xb");
        switch (pIemCpu->enmEffAddrMode)
        {
            case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repne_scas_al_m16);
            case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repne_scas_al_m32);
            case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repne_scas_al_m64);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    IEMOP_MNEMONIC("scasb al,Xb");

    /*
     * Sharing case implementation with scas[wdq] below.
     */
    switch (pIemCpu->enmEffAddrMode)
    {
        case IEMMODE_16BIT: IEM_SCAS_CASE(8, 16); break;
        case IEMMODE_32BIT: IEM_SCAS_CASE(8, 32); break;
        case IEMMODE_64BIT: IEM_SCAS_CASE(8, 64); break;
        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
    return VINF_SUCCESS;
}
10094
10095
10096/** Opcode 0xaf. */
10097FNIEMOP_DEF(iemOp_scaswd_eAX_Xv)
10098{
10099 IEMOP_HLP_NO_LOCK_PREFIX();
10100
10101 /*
10102 * Use the C implementation if a repeat prefix is encountered.
10103 */
10104 if (pIemCpu->fPrefixes & IEM_OP_PRF_REPZ)
10105 {
10106 IEMOP_MNEMONIC("repe scas rAX,Xv");
10107 switch (pIemCpu->enmEffOpSize)
10108 {
10109 case IEMMODE_16BIT:
10110 switch (pIemCpu->enmEffAddrMode)
10111 {
10112 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repe_scas_ax_m16);
10113 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repe_scas_ax_m32);
10114 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repe_scas_ax_m64);
10115 IEM_NOT_REACHED_DEFAULT_CASE_RET();
10116 }
10117 break;
10118 case IEMMODE_32BIT:
10119 switch (pIemCpu->enmEffAddrMode)
10120 {
10121 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repe_scas_eax_m16);
10122 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repe_scas_eax_m32);
10123 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repe_scas_eax_m64);
10124 IEM_NOT_REACHED_DEFAULT_CASE_RET();
10125 }
10126 case IEMMODE_64BIT:
10127 switch (pIemCpu->enmEffAddrMode)
10128 {
10129 case IEMMODE_16BIT: AssertFailedReturn(VERR_INTERNAL_ERROR_3); /** @todo It's this wrong, we can do 16-bit addressing in 64-bit mode, but not 32-bit. right? */
10130 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repe_scas_rax_m32);
10131 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repe_scas_rax_m64);
10132 IEM_NOT_REACHED_DEFAULT_CASE_RET();
10133 }
10134 IEM_NOT_REACHED_DEFAULT_CASE_RET();
10135 }
10136 }
10137 if (pIemCpu->fPrefixes & IEM_OP_PRF_REPNZ)
10138 {
10139 IEMOP_MNEMONIC("repne scas rAX,Xv");
10140 switch (pIemCpu->enmEffOpSize)
10141 {
10142 case IEMMODE_16BIT:
10143 switch (pIemCpu->enmEffAddrMode)
10144 {
10145 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repne_scas_ax_m16);
10146 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repne_scas_ax_m32);
10147 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repne_scas_ax_m64);
10148 IEM_NOT_REACHED_DEFAULT_CASE_RET();
10149 }
10150 break;
10151 case IEMMODE_32BIT:
10152 switch (pIemCpu->enmEffAddrMode)
10153 {
10154 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repne_scas_eax_m16);
10155 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repne_scas_eax_m32);
10156 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repne_scas_eax_m64);
10157 IEM_NOT_REACHED_DEFAULT_CASE_RET();
10158 }
10159 case IEMMODE_64BIT:
10160 switch (pIemCpu->enmEffAddrMode)
10161 {
10162 case IEMMODE_16BIT: AssertFailedReturn(VERR_INTERNAL_ERROR_3);
10163 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repne_scas_rax_m32);
10164 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repne_scas_rax_m64);
10165 IEM_NOT_REACHED_DEFAULT_CASE_RET();
10166 }
10167 IEM_NOT_REACHED_DEFAULT_CASE_RET();
10168 }
10169 }
10170 IEMOP_MNEMONIC("scas rAX,Xv");
10171
10172 /*
10173 * Annoying double switch here.
10174 * Using ugly macro for implementing the cases, sharing it with scasb.
10175 */
10176 switch (pIemCpu->enmEffOpSize)
10177 {
10178 case IEMMODE_16BIT:
10179 switch (pIemCpu->enmEffAddrMode)
10180 {
10181 case IEMMODE_16BIT: IEM_SCAS_CASE(16, 16); break;
10182 case IEMMODE_32BIT: IEM_SCAS_CASE(16, 32); break;
10183 case IEMMODE_64BIT: IEM_SCAS_CASE(16, 64); break;
10184 IEM_NOT_REACHED_DEFAULT_CASE_RET();
10185 }
10186 break;
10187
10188 case IEMMODE_32BIT:
10189 switch (pIemCpu->enmEffAddrMode)
10190 {
10191 case IEMMODE_16BIT: IEM_SCAS_CASE(32, 16); break;
10192 case IEMMODE_32BIT: IEM_SCAS_CASE(32, 32); break;
10193 case IEMMODE_64BIT: IEM_SCAS_CASE(32, 64); break;
10194 IEM_NOT_REACHED_DEFAULT_CASE_RET();
10195 }
10196 break;
10197
10198 case IEMMODE_64BIT:
10199 switch (pIemCpu->enmEffAddrMode)
10200 {
10201 case IEMMODE_16BIT: AssertFailedReturn(VERR_INTERNAL_ERROR_4); /* cannot be encoded */ break;
10202 case IEMMODE_32BIT: IEM_SCAS_CASE(64, 32); break;
10203 case IEMMODE_64BIT: IEM_SCAS_CASE(64, 64); break;
10204 IEM_NOT_REACHED_DEFAULT_CASE_RET();
10205 }
10206 break;
10207 IEM_NOT_REACHED_DEFAULT_CASE_RET();
10208 }
10209 return VINF_SUCCESS;
10210}
10211
10212#undef IEM_SCAS_CASE
10213
10214/**
10215 * Common 'mov r8, imm8' helper.
10216 */
10217FNIEMOP_DEF_1(iemOpCommonMov_r8_Ib, uint8_t, iReg)
10218{
10219 uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
10220 IEMOP_HLP_NO_LOCK_PREFIX();
10221
10222 IEM_MC_BEGIN(0, 1);
10223 IEM_MC_LOCAL_CONST(uint8_t, u8Value,/*=*/ u8Imm);
10224 IEM_MC_STORE_GREG_U8(iReg, u8Value);
10225 IEM_MC_ADVANCE_RIP();
10226 IEM_MC_END();
10227
10228 return VINF_SUCCESS;
10229}
10230
10231
/** Opcode 0xb0. */
FNIEMOP_DEF(iemOp_mov_AL_Ib)
{
    IEMOP_MNEMONIC("mov AL,Ib");
    return FNIEMOP_CALL_1(iemOpCommonMov_r8_Ib, X86_GREG_xAX | pIemCpu->uRexB);
}


/** Opcode 0xb1. */
FNIEMOP_DEF(iemOp_CL_Ib)
{
    IEMOP_MNEMONIC("mov CL,Ib");
    return FNIEMOP_CALL_1(iemOpCommonMov_r8_Ib, X86_GREG_xCX | pIemCpu->uRexB);
}


/** Opcode 0xb2. */
FNIEMOP_DEF(iemOp_DL_Ib)
{
    IEMOP_MNEMONIC("mov DL,Ib");
    return FNIEMOP_CALL_1(iemOpCommonMov_r8_Ib, X86_GREG_xDX | pIemCpu->uRexB);
}


/** Opcode 0xb3. */
FNIEMOP_DEF(iemOp_BL_Ib)
{
    IEMOP_MNEMONIC("mov BL,Ib");
    return FNIEMOP_CALL_1(iemOpCommonMov_r8_Ib, X86_GREG_xBX | pIemCpu->uRexB);
}


/* Note: opcodes 0xb4 thru 0xb7 pass register indexes 4-7 (xSP..xDI).  For
   8-bit register accesses these indexes select AH/CH/DH/BH when no REX
   prefix is present and SPL/BPL/SIL/DIL otherwise; that mapping is resolved
   by the U8 register access done in the common worker, not here. */

/** Opcode 0xb4. */
FNIEMOP_DEF(iemOp_mov_AH_Ib)
{
    IEMOP_MNEMONIC("mov AH,Ib");
    return FNIEMOP_CALL_1(iemOpCommonMov_r8_Ib, X86_GREG_xSP | pIemCpu->uRexB);
}


/** Opcode 0xb5. */
FNIEMOP_DEF(iemOp_CH_Ib)
{
    IEMOP_MNEMONIC("mov CH,Ib");
    return FNIEMOP_CALL_1(iemOpCommonMov_r8_Ib, X86_GREG_xBP | pIemCpu->uRexB);
}


/** Opcode 0xb6. */
FNIEMOP_DEF(iemOp_DH_Ib)
{
    IEMOP_MNEMONIC("mov DH,Ib");
    return FNIEMOP_CALL_1(iemOpCommonMov_r8_Ib, X86_GREG_xSI | pIemCpu->uRexB);
}


/** Opcode 0xb7. */
FNIEMOP_DEF(iemOp_BH_Ib)
{
    IEMOP_MNEMONIC("mov BH,Ib");
    return FNIEMOP_CALL_1(iemOpCommonMov_r8_Ib, X86_GREG_xDI | pIemCpu->uRexB);
}
10294
10295
10296/**
10297 * Common 'mov regX,immX' helper.
10298 */
10299FNIEMOP_DEF_1(iemOpCommonMov_Rv_Iv, uint8_t, iReg)
10300{
10301 switch (pIemCpu->enmEffOpSize)
10302 {
10303 case IEMMODE_16BIT:
10304 {
10305 uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
10306 IEMOP_HLP_NO_LOCK_PREFIX();
10307
10308 IEM_MC_BEGIN(0, 1);
10309 IEM_MC_LOCAL_CONST(uint16_t, u16Value,/*=*/ u16Imm);
10310 IEM_MC_STORE_GREG_U16(iReg, u16Value);
10311 IEM_MC_ADVANCE_RIP();
10312 IEM_MC_END();
10313 break;
10314 }
10315
10316 case IEMMODE_32BIT:
10317 {
10318 uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
10319 IEMOP_HLP_NO_LOCK_PREFIX();
10320
10321 IEM_MC_BEGIN(0, 1);
10322 IEM_MC_LOCAL_CONST(uint32_t, u32Value,/*=*/ u32Imm);
10323 IEM_MC_STORE_GREG_U32(iReg, u32Value);
10324 IEM_MC_ADVANCE_RIP();
10325 IEM_MC_END();
10326 break;
10327 }
10328 case IEMMODE_64BIT:
10329 {
10330 uint64_t u64Imm; IEM_OPCODE_GET_NEXT_U64(&u64Imm); /* 64-bit immediate! */
10331 IEMOP_HLP_NO_LOCK_PREFIX();
10332
10333 IEM_MC_BEGIN(0, 1);
10334 IEM_MC_LOCAL_CONST(uint64_t, u64Value,/*=*/ u64Imm);
10335 IEM_MC_STORE_GREG_U64(iReg, u64Value);
10336 IEM_MC_ADVANCE_RIP();
10337 IEM_MC_END();
10338 break;
10339 }
10340 }
10341
10342 return VINF_SUCCESS;
10343}
10344
10345
10346/** Opcode 0xb8. */
10347FNIEMOP_DEF(iemOp_eAX_Iv)
10348{
10349 IEMOP_MNEMONIC("mov rAX,IV");
10350 return FNIEMOP_CALL_1(iemOpCommonMov_Rv_Iv, X86_GREG_xAX | pIemCpu->uRexB);
10351}
10352
10353
10354/** Opcode 0xb9. */
10355FNIEMOP_DEF(iemOp_eCX_Iv)
10356{
10357 IEMOP_MNEMONIC("mov rCX,IV");
10358 return FNIEMOP_CALL_1(iemOpCommonMov_Rv_Iv, X86_GREG_xCX | pIemCpu->uRexB);
10359}
10360
10361
10362/** Opcode 0xba. */
10363FNIEMOP_DEF(iemOp_eDX_Iv)
10364{
10365 IEMOP_MNEMONIC("mov rDX,IV");
10366 return FNIEMOP_CALL_1(iemOpCommonMov_Rv_Iv, X86_GREG_xDX | pIemCpu->uRexB);
10367}
10368
10369
10370/** Opcode 0xbb. */
10371FNIEMOP_DEF(iemOp_eBX_Iv)
10372{
10373 IEMOP_MNEMONIC("mov rBX,IV");
10374 return FNIEMOP_CALL_1(iemOpCommonMov_Rv_Iv, X86_GREG_xBX | pIemCpu->uRexB);
10375}
10376
10377
10378/** Opcode 0xbc. */
10379FNIEMOP_DEF(iemOp_eSP_Iv)
10380{
10381 IEMOP_MNEMONIC("mov rSP,IV");
10382 return FNIEMOP_CALL_1(iemOpCommonMov_Rv_Iv, X86_GREG_xSP | pIemCpu->uRexB);
10383}
10384
10385
10386/** Opcode 0xbd. */
10387FNIEMOP_DEF(iemOp_eBP_Iv)
10388{
10389 IEMOP_MNEMONIC("mov rBP,IV");
10390 return FNIEMOP_CALL_1(iemOpCommonMov_Rv_Iv, X86_GREG_xBP | pIemCpu->uRexB);
10391}
10392
10393
10394/** Opcode 0xbe. */
10395FNIEMOP_DEF(iemOp_eSI_Iv)
10396{
10397 IEMOP_MNEMONIC("mov rSI,IV");
10398 return FNIEMOP_CALL_1(iemOpCommonMov_Rv_Iv, X86_GREG_xSI | pIemCpu->uRexB);
10399}
10400
10401
10402/** Opcode 0xbf. */
10403FNIEMOP_DEF(iemOp_eDI_Iv)
10404{
10405 IEMOP_MNEMONIC("mov rDI,IV");
10406 return FNIEMOP_CALL_1(iemOpCommonMov_Rv_Iv, X86_GREG_xDI | pIemCpu->uRexB);
10407}
10408
10409
10410/** Opcode 0xc0. */
10411FNIEMOP_DEF(iemOp_Grp2_Eb_Ib)
10412{
10413 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
10414 PCIEMOPSHIFTSIZES pImpl;
10415 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
10416 {
10417 case 0: pImpl = &g_iemAImpl_rol; IEMOP_MNEMONIC("rol Eb,Ib"); break;
10418 case 1: pImpl = &g_iemAImpl_ror; IEMOP_MNEMONIC("ror Eb,Ib"); break;
10419 case 2: pImpl = &g_iemAImpl_rcl; IEMOP_MNEMONIC("rcl Eb,Ib"); break;
10420 case 3: pImpl = &g_iemAImpl_rcr; IEMOP_MNEMONIC("rcr Eb,Ib"); break;
10421 case 4: pImpl = &g_iemAImpl_shl; IEMOP_MNEMONIC("shl Eb,Ib"); break;
10422 case 5: pImpl = &g_iemAImpl_shr; IEMOP_MNEMONIC("shr Eb,Ib"); break;
10423 case 7: pImpl = &g_iemAImpl_sar; IEMOP_MNEMONIC("sar Eb,Ib"); break;
10424 case 6: return IEMOP_RAISE_INVALID_LOCK_PREFIX();
10425 IEM_NOT_REACHED_DEFAULT_CASE_RET(); /* gcc maybe stupid */
10426 }
10427 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_AF);
10428
10429 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
10430 {
10431 /* register */
10432 uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
10433 IEMOP_HLP_NO_LOCK_PREFIX();
10434 IEM_MC_BEGIN(3, 0);
10435 IEM_MC_ARG(uint8_t *, pu8Dst, 0);
10436 IEM_MC_ARG_CONST(uint8_t, cShiftArg, cShift, 1);
10437 IEM_MC_ARG(uint32_t *, pEFlags, 2);
10438 IEM_MC_REF_GREG_U8(pu8Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
10439 IEM_MC_REF_EFLAGS(pEFlags);
10440 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU8, pu8Dst, cShiftArg, pEFlags);
10441 IEM_MC_ADVANCE_RIP();
10442 IEM_MC_END();
10443 }
10444 else
10445 {
10446 /* memory */
10447 IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo should probably not be raised until we've fetched all the opcode bytes? */
10448 IEM_MC_BEGIN(3, 2);
10449 IEM_MC_ARG(uint8_t *, pu8Dst, 0);
10450 IEM_MC_ARG(uint8_t, cShiftArg, 1);
10451 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
10452 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
10453
10454 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm);
10455 uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
10456 IEM_MC_ASSIGN(cShiftArg, cShift);
10457 IEM_MC_MEM_MAP(pu8Dst, IEM_ACCESS_DATA_RW, pIemCpu->iEffSeg, GCPtrEffDst, 0 /*arg*/);
10458 IEM_MC_FETCH_EFLAGS(EFlags);
10459 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU8, pu8Dst, cShiftArg, pEFlags);
10460
10461 IEM_MC_MEM_COMMIT_AND_UNMAP(pu8Dst, IEM_ACCESS_DATA_RW);
10462 IEM_MC_COMMIT_EFLAGS(EFlags);
10463 IEM_MC_ADVANCE_RIP();
10464 IEM_MC_END();
10465 }
10466 return VINF_SUCCESS;
10467}
10468
10469
10470/** Opcode 0xc1. */
10471FNIEMOP_DEF(iemOp_Grp2_Ev_Ib)
10472{
10473 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
10474 PCIEMOPSHIFTSIZES pImpl;
10475 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
10476 {
10477 case 0: pImpl = &g_iemAImpl_rol; IEMOP_MNEMONIC("rol Ev,Ib"); break;
10478 case 1: pImpl = &g_iemAImpl_ror; IEMOP_MNEMONIC("ror Ev,Ib"); break;
10479 case 2: pImpl = &g_iemAImpl_rcl; IEMOP_MNEMONIC("rcl Ev,Ib"); break;
10480 case 3: pImpl = &g_iemAImpl_rcr; IEMOP_MNEMONIC("rcr Ev,Ib"); break;
10481 case 4: pImpl = &g_iemAImpl_shl; IEMOP_MNEMONIC("shl Ev,Ib"); break;
10482 case 5: pImpl = &g_iemAImpl_shr; IEMOP_MNEMONIC("shr Ev,Ib"); break;
10483 case 7: pImpl = &g_iemAImpl_sar; IEMOP_MNEMONIC("sar Ev,Ib"); break;
10484 case 6: return IEMOP_RAISE_INVALID_LOCK_PREFIX();
10485 IEM_NOT_REACHED_DEFAULT_CASE_RET(); /* gcc maybe stupid */
10486 }
10487 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_AF);
10488
10489 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
10490 {
10491 /* register */
10492 uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
10493 IEMOP_HLP_NO_LOCK_PREFIX();
10494 switch (pIemCpu->enmEffOpSize)
10495 {
10496 case IEMMODE_16BIT:
10497 IEM_MC_BEGIN(3, 0);
10498 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
10499 IEM_MC_ARG_CONST(uint8_t, cShiftArg, cShift, 1);
10500 IEM_MC_ARG(uint32_t *, pEFlags, 2);
10501 IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
10502 IEM_MC_REF_EFLAGS(pEFlags);
10503 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, cShiftArg, pEFlags);
10504 IEM_MC_ADVANCE_RIP();
10505 IEM_MC_END();
10506 return VINF_SUCCESS;
10507
10508 case IEMMODE_32BIT:
10509 IEM_MC_BEGIN(3, 0);
10510 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
10511 IEM_MC_ARG_CONST(uint8_t, cShiftArg, cShift, 1);
10512 IEM_MC_ARG(uint32_t *, pEFlags, 2);
10513 IEM_MC_REF_GREG_U32(pu32Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
10514 IEM_MC_REF_EFLAGS(pEFlags);
10515 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, cShiftArg, pEFlags);
10516 IEM_MC_ADVANCE_RIP();
10517 IEM_MC_END();
10518 return VINF_SUCCESS;
10519
10520 case IEMMODE_64BIT:
10521 IEM_MC_BEGIN(3, 0);
10522 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
10523 IEM_MC_ARG_CONST(uint8_t, cShiftArg, cShift, 1);
10524 IEM_MC_ARG(uint32_t *, pEFlags, 2);
10525 IEM_MC_REF_GREG_U64(pu64Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
10526 IEM_MC_REF_EFLAGS(pEFlags);
10527 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, cShiftArg, pEFlags);
10528 IEM_MC_ADVANCE_RIP();
10529 IEM_MC_END();
10530 return VINF_SUCCESS;
10531
10532 IEM_NOT_REACHED_DEFAULT_CASE_RET();
10533 }
10534 }
10535 else
10536 {
10537 /* memory */
10538 IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo should probably not be raised until we've fetched all the opcode bytes? */
10539 switch (pIemCpu->enmEffOpSize)
10540 {
10541 case IEMMODE_16BIT:
10542 IEM_MC_BEGIN(3, 2);
10543 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
10544 IEM_MC_ARG(uint8_t, cShiftArg, 1);
10545 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
10546 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
10547
10548 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm);
10549 uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
10550 IEM_MC_ASSIGN(cShiftArg, cShift);
10551 IEM_MC_MEM_MAP(pu16Dst, IEM_ACCESS_DATA_RW, pIemCpu->iEffSeg, GCPtrEffDst, 0 /*arg*/);
10552 IEM_MC_FETCH_EFLAGS(EFlags);
10553 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, cShiftArg, pEFlags);
10554
10555 IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, IEM_ACCESS_DATA_RW);
10556 IEM_MC_COMMIT_EFLAGS(EFlags);
10557 IEM_MC_ADVANCE_RIP();
10558 IEM_MC_END();
10559 return VINF_SUCCESS;
10560
10561 case IEMMODE_32BIT:
10562 IEM_MC_BEGIN(3, 2);
10563 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
10564 IEM_MC_ARG(uint8_t, cShiftArg, 1);
10565 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
10566 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
10567
10568 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm);
10569 uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
10570 IEM_MC_ASSIGN(cShiftArg, cShift);
10571 IEM_MC_MEM_MAP(pu32Dst, IEM_ACCESS_DATA_RW, pIemCpu->iEffSeg, GCPtrEffDst, 0 /*arg*/);
10572 IEM_MC_FETCH_EFLAGS(EFlags);
10573 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, cShiftArg, pEFlags);
10574
10575 IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, IEM_ACCESS_DATA_RW);
10576 IEM_MC_COMMIT_EFLAGS(EFlags);
10577 IEM_MC_ADVANCE_RIP();
10578 IEM_MC_END();
10579 return VINF_SUCCESS;
10580
10581 case IEMMODE_64BIT:
10582 IEM_MC_BEGIN(3, 2);
10583 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
10584 IEM_MC_ARG(uint8_t, cShiftArg, 1);
10585 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
10586 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
10587
10588 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm);
10589 uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
10590 IEM_MC_ASSIGN(cShiftArg, cShift);
10591 IEM_MC_MEM_MAP(pu64Dst, IEM_ACCESS_DATA_RW, pIemCpu->iEffSeg, GCPtrEffDst, 0 /*arg*/);
10592 IEM_MC_FETCH_EFLAGS(EFlags);
10593 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, cShiftArg, pEFlags);
10594
10595 IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, IEM_ACCESS_DATA_RW);
10596 IEM_MC_COMMIT_EFLAGS(EFlags);
10597 IEM_MC_ADVANCE_RIP();
10598 IEM_MC_END();
10599 return VINF_SUCCESS;
10600
10601 IEM_NOT_REACHED_DEFAULT_CASE_RET();
10602 }
10603 }
10604}
10605
10606
/** Opcode 0xc2. */
FNIEMOP_DEF(iemOp_retn_Iw)
{
    IEMOP_MNEMONIC("retn Iw");
    /* Near return, popping Iw extra bytes off the stack after the return address. */
    uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
    IEMOP_HLP_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
    return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_retn, pIemCpu->enmEffOpSize, u16Imm);
}


/** Opcode 0xc3. */
FNIEMOP_DEF(iemOp_retn)
{
    IEMOP_MNEMONIC("retn");
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
    IEMOP_HLP_NO_LOCK_PREFIX();
    /* Same as 0xc2 but with zero stack adjustment. */
    return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_retn, pIemCpu->enmEffOpSize, 0);
}
10626
10627
/** Opcode 0xc4. */
FNIEMOP_DEF(iemOp_les_Gv_Mp)
{
    IEMOP_MNEMONIC("les Gv,Mp");
    /* Far pointer load into ES:Gv via the common segreg+greg load worker. */
    return FNIEMOP_CALL_1(iemOpCommonLoadSRegAndGreg, X86_SREG_ES);
}


/** Opcode 0xc5. */
FNIEMOP_DEF(iemOp_lds_Gv_Mp)
{
    IEMOP_MNEMONIC("lds Gv,Mp");
    /* Far pointer load into DS:Gv via the common segreg+greg load worker. */
    return FNIEMOP_CALL_1(iemOpCommonLoadSRegAndGreg, X86_SREG_DS);
}
10642
10643
/** Opcode 0xc6. */
FNIEMOP_DEF(iemOp_Grp11_Eb_Ib)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo should probably not be raised until we've fetched all the opcode bytes? */
    if ((bRm & X86_MODRM_REG_MASK) != (0 << X86_MODRM_REG_SHIFT)) /* only mov Eb,Ib in this group. */
        return IEMOP_RAISE_INVALID_OPCODE();
    IEMOP_MNEMONIC("mov Eb,Ib");

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register access */
        uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
        IEM_MC_BEGIN(0, 0);
        IEM_MC_STORE_GREG_U8((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, u8Imm);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /* memory access. */
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm);
        /* The immediate byte follows the ModR/M (and SIB/displacement) bytes. */
        uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
        IEM_MC_STORE_MEM_U8(pIemCpu->iEffSeg, GCPtrEffDst, u8Imm);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
10675
10676
/** Opcode 0xc7. */
FNIEMOP_DEF(iemOp_Grp11_Ev_Iz)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo should probably not be raised until we've fetched all the opcode bytes? */
    if ((bRm & X86_MODRM_REG_MASK) != (0 << X86_MODRM_REG_SHIFT)) /* only mov Ev,Iz in this group. */
        return IEMOP_RAISE_INVALID_OPCODE();
    IEMOP_MNEMONIC("mov Ev,Iz");

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register access */
        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(0, 0);
                uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
                IEM_MC_STORE_GREG_U16((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, u16Imm);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(0, 0);
                uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
                IEM_MC_STORE_GREG_U32((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, u32Imm);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(0, 0);
                /* Iz: the 64-bit form takes a sign-extended 32-bit immediate. */
                uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm);
                IEM_MC_STORE_GREG_U64((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, u64Imm);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /* memory access. */
        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm);
                uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
                IEM_MC_STORE_MEM_U16(pIemCpu->iEffSeg, GCPtrEffDst, u16Imm);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm);
                uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
                IEM_MC_STORE_MEM_U32(pIemCpu->iEffSeg, GCPtrEffDst, u32Imm);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm);
                /* Iz: the 64-bit form takes a sign-extended 32-bit immediate. */
                uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm);
                IEM_MC_STORE_MEM_U64(pIemCpu->iEffSeg, GCPtrEffDst, u64Imm);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
10757
10758
10759
10760
/** Opcode 0xc8. */
FNIEMOP_DEF(iemOp_enter_Iw_Ib)
{
    IEMOP_MNEMONIC("enter Iw,Ib");
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
    IEMOP_HLP_NO_LOCK_PREFIX();
    /* Iw = frame size in bytes, Ib = nesting level; both handled by the C worker. */
    uint16_t cbFrame;        IEM_OPCODE_GET_NEXT_U16(&cbFrame);
    uint8_t u8NestingLevel;  IEM_OPCODE_GET_NEXT_U8(&u8NestingLevel);
    return IEM_MC_DEFER_TO_CIMPL_3(iemCImpl_enter, pIemCpu->enmEffOpSize, cbFrame, u8NestingLevel);
}
10771
10772
10773/** Opcode 0xc9. */
10774FNIEMOP_DEF(iemOp_leave)
10775{
10776 IEMOP_MNEMONIC("retn");
10777 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
10778 IEMOP_HLP_NO_LOCK_PREFIX();
10779 return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_leave, pIemCpu->enmEffOpSize);
10780}
10781
10782
/** Opcode 0xca. */
FNIEMOP_DEF(iemOp_retf_Iw)
{
    IEMOP_MNEMONIC("retf Iw");
    /* Far return, popping Iw extra bytes off the stack after CS:IP. */
    uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
    IEMOP_HLP_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
    return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_retf, pIemCpu->enmEffOpSize, u16Imm);
}


/** Opcode 0xcb. */
FNIEMOP_DEF(iemOp_retf)
{
    IEMOP_MNEMONIC("retf");
    IEMOP_HLP_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
    /* Same as 0xca but with zero stack adjustment. */
    return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_retf, pIemCpu->enmEffOpSize, 0);
}
10802
10803
10804/** Opcode 0xcc. */
10805FNIEMOP_DEF(iemOp_int_3)
10806{
10807 return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_int, X86_XCPT_BP, true /*fIsBpInstr*/);
10808}
10809
10810
10811/** Opcode 0xcd. */
10812FNIEMOP_DEF(iemOp_int_Ib)
10813{
10814 uint8_t u8Int; IEM_OPCODE_GET_NEXT_U8(&u8Int);
10815 return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_int, u8Int, false /*fIsBpInstr*/);
10816}
10817
10818
10819/** Opcode 0xce. */
10820FNIEMOP_DEF(iemOp_into)
10821{
10822 IEM_MC_BEGIN(2, 0);
10823 IEM_MC_ARG_CONST(uint8_t, u8Int, /*=*/ X86_XCPT_OF, 0);
10824 IEM_MC_ARG_CONST(bool, fIsBpInstr, /*=*/ false, 1);
10825 IEM_MC_CALL_CIMPL_2(iemCImpl_int, u8Int, fIsBpInstr);
10826 IEM_MC_END();
10827 return VINF_SUCCESS;
10828}
10829
10830
/** Opcode 0xcf. */
FNIEMOP_DEF(iemOp_iret)
{
    IEMOP_MNEMONIC("iret");
    IEMOP_HLP_NO_LOCK_PREFIX();
    /* Interrupt return; all mode/privilege handling lives in the C worker. */
    return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_iret, pIemCpu->enmEffOpSize);
}
10838
10839
10840/** Opcode 0xd0. */
10841FNIEMOP_DEF(iemOp_Grp2_Eb_1)
10842{
10843 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
10844 PCIEMOPSHIFTSIZES pImpl;
10845 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
10846 {
10847 case 0: pImpl = &g_iemAImpl_rol; IEMOP_MNEMONIC("rol Eb,1"); break;
10848 case 1: pImpl = &g_iemAImpl_ror; IEMOP_MNEMONIC("ror Eb,1"); break;
10849 case 2: pImpl = &g_iemAImpl_rcl; IEMOP_MNEMONIC("rcl Eb,1"); break;
10850 case 3: pImpl = &g_iemAImpl_rcr; IEMOP_MNEMONIC("rcr Eb,1"); break;
10851 case 4: pImpl = &g_iemAImpl_shl; IEMOP_MNEMONIC("shl Eb,1"); break;
10852 case 5: pImpl = &g_iemAImpl_shr; IEMOP_MNEMONIC("shr Eb,1"); break;
10853 case 7: pImpl = &g_iemAImpl_sar; IEMOP_MNEMONIC("sar Eb,1"); break;
10854 case 6: return IEMOP_RAISE_INVALID_LOCK_PREFIX();
10855 IEM_NOT_REACHED_DEFAULT_CASE_RET(); /* gcc maybe, well... */
10856 }
10857 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_AF);
10858
10859 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
10860 {
10861 /* register */
10862 IEMOP_HLP_NO_LOCK_PREFIX();
10863 IEM_MC_BEGIN(3, 0);
10864 IEM_MC_ARG(uint8_t *, pu8Dst, 0);
10865 IEM_MC_ARG_CONST(uint8_t, cShiftArg,/*=*/1, 1);
10866 IEM_MC_ARG(uint32_t *, pEFlags, 2);
10867 IEM_MC_REF_GREG_U8(pu8Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
10868 IEM_MC_REF_EFLAGS(pEFlags);
10869 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU8, pu8Dst, cShiftArg, pEFlags);
10870 IEM_MC_ADVANCE_RIP();
10871 IEM_MC_END();
10872 }
10873 else
10874 {
10875 /* memory */
10876 IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo should probably not be raised until we've fetched all the opcode bytes? */
10877 IEM_MC_BEGIN(3, 2);
10878 IEM_MC_ARG(uint8_t *, pu8Dst, 0);
10879 IEM_MC_ARG_CONST(uint8_t, cShiftArg,/*=*/1, 1);
10880 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
10881 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
10882
10883 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm);
10884 IEM_MC_MEM_MAP(pu8Dst, IEM_ACCESS_DATA_RW, pIemCpu->iEffSeg, GCPtrEffDst, 0 /*arg*/);
10885 IEM_MC_FETCH_EFLAGS(EFlags);
10886 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU8, pu8Dst, cShiftArg, pEFlags);
10887
10888 IEM_MC_MEM_COMMIT_AND_UNMAP(pu8Dst, IEM_ACCESS_DATA_RW);
10889 IEM_MC_COMMIT_EFLAGS(EFlags);
10890 IEM_MC_ADVANCE_RIP();
10891 IEM_MC_END();
10892 }
10893 return VINF_SUCCESS;
10894}
10895
10896
10897
10898/** Opcode 0xd1. */
10899FNIEMOP_DEF(iemOp_Grp2_Ev_1)
10900{
10901 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
10902 PCIEMOPSHIFTSIZES pImpl;
10903 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
10904 {
10905 case 0: pImpl = &g_iemAImpl_rol; IEMOP_MNEMONIC("rol Ev,1"); break;
10906 case 1: pImpl = &g_iemAImpl_ror; IEMOP_MNEMONIC("ror Ev,1"); break;
10907 case 2: pImpl = &g_iemAImpl_rcl; IEMOP_MNEMONIC("rcl Ev,1"); break;
10908 case 3: pImpl = &g_iemAImpl_rcr; IEMOP_MNEMONIC("rcr Ev,1"); break;
10909 case 4: pImpl = &g_iemAImpl_shl; IEMOP_MNEMONIC("shl Ev,1"); break;
10910 case 5: pImpl = &g_iemAImpl_shr; IEMOP_MNEMONIC("shr Ev,1"); break;
10911 case 7: pImpl = &g_iemAImpl_sar; IEMOP_MNEMONIC("sar Ev,1"); break;
10912 case 6: return IEMOP_RAISE_INVALID_LOCK_PREFIX();
10913 IEM_NOT_REACHED_DEFAULT_CASE_RET(); /* gcc maybe, well... */
10914 }
10915 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_AF);
10916
10917 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
10918 {
10919 /* register */
10920 IEMOP_HLP_NO_LOCK_PREFIX();
10921 switch (pIemCpu->enmEffOpSize)
10922 {
10923 case IEMMODE_16BIT:
10924 IEM_MC_BEGIN(3, 0);
10925 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
10926 IEM_MC_ARG_CONST(uint8_t, cShiftArg,/*=1*/1, 1);
10927 IEM_MC_ARG(uint32_t *, pEFlags, 2);
10928 IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
10929 IEM_MC_REF_EFLAGS(pEFlags);
10930 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, cShiftArg, pEFlags);
10931 IEM_MC_ADVANCE_RIP();
10932 IEM_MC_END();
10933 return VINF_SUCCESS;
10934
10935 case IEMMODE_32BIT:
10936 IEM_MC_BEGIN(3, 0);
10937 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
10938 IEM_MC_ARG_CONST(uint8_t, cShiftArg,/*=1*/1, 1);
10939 IEM_MC_ARG(uint32_t *, pEFlags, 2);
10940 IEM_MC_REF_GREG_U32(pu32Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
10941 IEM_MC_REF_EFLAGS(pEFlags);
10942 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, cShiftArg, pEFlags);
10943 IEM_MC_ADVANCE_RIP();
10944 IEM_MC_END();
10945 return VINF_SUCCESS;
10946
10947 case IEMMODE_64BIT:
10948 IEM_MC_BEGIN(3, 0);
10949 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
10950 IEM_MC_ARG_CONST(uint8_t, cShiftArg,/*=1*/1, 1);
10951 IEM_MC_ARG(uint32_t *, pEFlags, 2);
10952 IEM_MC_REF_GREG_U64(pu64Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
10953 IEM_MC_REF_EFLAGS(pEFlags);
10954 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, cShiftArg, pEFlags);
10955 IEM_MC_ADVANCE_RIP();
10956 IEM_MC_END();
10957 return VINF_SUCCESS;
10958
10959 IEM_NOT_REACHED_DEFAULT_CASE_RET();
10960 }
10961 }
10962 else
10963 {
10964 /* memory */
10965 IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo should probably not be raised until we've fetched all the opcode bytes? */
10966 switch (pIemCpu->enmEffOpSize)
10967 {
10968 case IEMMODE_16BIT:
10969 IEM_MC_BEGIN(3, 2);
10970 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
10971 IEM_MC_ARG_CONST(uint8_t, cShiftArg,/*=1*/1, 1);
10972 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
10973 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
10974
10975 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm);
10976 IEM_MC_MEM_MAP(pu16Dst, IEM_ACCESS_DATA_RW, pIemCpu->iEffSeg, GCPtrEffDst, 0 /*arg*/);
10977 IEM_MC_FETCH_EFLAGS(EFlags);
10978 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, cShiftArg, pEFlags);
10979
10980 IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, IEM_ACCESS_DATA_RW);
10981 IEM_MC_COMMIT_EFLAGS(EFlags);
10982 IEM_MC_ADVANCE_RIP();
10983 IEM_MC_END();
10984 return VINF_SUCCESS;
10985
10986 case IEMMODE_32BIT:
10987 IEM_MC_BEGIN(3, 2);
10988 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
10989 IEM_MC_ARG_CONST(uint8_t, cShiftArg,/*=1*/1, 1);
10990 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
10991 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
10992
10993 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm);
10994 IEM_MC_MEM_MAP(pu32Dst, IEM_ACCESS_DATA_RW, pIemCpu->iEffSeg, GCPtrEffDst, 0 /*arg*/);
10995 IEM_MC_FETCH_EFLAGS(EFlags);
10996 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, cShiftArg, pEFlags);
10997
10998 IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, IEM_ACCESS_DATA_RW);
10999 IEM_MC_COMMIT_EFLAGS(EFlags);
11000 IEM_MC_ADVANCE_RIP();
11001 IEM_MC_END();
11002 return VINF_SUCCESS;
11003
11004 case IEMMODE_64BIT:
11005 IEM_MC_BEGIN(3, 2);
11006 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
11007 IEM_MC_ARG_CONST(uint8_t, cShiftArg,/*=1*/1, 1);
11008 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
11009 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
11010
11011 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm);
11012 IEM_MC_MEM_MAP(pu64Dst, IEM_ACCESS_DATA_RW, pIemCpu->iEffSeg, GCPtrEffDst, 0 /*arg*/);
11013 IEM_MC_FETCH_EFLAGS(EFlags);
11014 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, cShiftArg, pEFlags);
11015
11016 IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, IEM_ACCESS_DATA_RW);
11017 IEM_MC_COMMIT_EFLAGS(EFlags);
11018 IEM_MC_ADVANCE_RIP();
11019 IEM_MC_END();
11020 return VINF_SUCCESS;
11021
11022 IEM_NOT_REACHED_DEFAULT_CASE_RET();
11023 }
11024 }
11025}
11026
11027
/** Opcode 0xd2. */
FNIEMOP_DEF(iemOp_Grp2_Eb_CL)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    PCIEMOPSHIFTSIZES pImpl;
    switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
    {
        case 0: pImpl = &g_iemAImpl_rol; IEMOP_MNEMONIC("rol Eb,CL"); break;
        case 1: pImpl = &g_iemAImpl_ror; IEMOP_MNEMONIC("ror Eb,CL"); break;
        case 2: pImpl = &g_iemAImpl_rcl; IEMOP_MNEMONIC("rcl Eb,CL"); break;
        case 3: pImpl = &g_iemAImpl_rcr; IEMOP_MNEMONIC("rcr Eb,CL"); break;
        case 4: pImpl = &g_iemAImpl_shl; IEMOP_MNEMONIC("shl Eb,CL"); break;
        case 5: pImpl = &g_iemAImpl_shr; IEMOP_MNEMONIC("shr Eb,CL"); break;
        case 7: pImpl = &g_iemAImpl_sar; IEMOP_MNEMONIC("sar Eb,CL"); break;
        case 6: return IEMOP_RAISE_INVALID_OPCODE(); /* /6 is an undefined encoding (#UD). */
        IEM_NOT_REACHED_DEFAULT_CASE_RET(); /* gcc, grr. */
    }
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_AF);

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register */
        IEMOP_HLP_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(3, 0);
        IEM_MC_ARG(uint8_t *,   pu8Dst,     0);
        IEM_MC_ARG(uint8_t,     cShiftArg,  1);
        IEM_MC_ARG(uint32_t *,  pEFlags,    2);
        IEM_MC_REF_GREG_U8(pu8Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
        /* The shift count comes from CL (low byte of rCX). */
        IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
        IEM_MC_REF_EFLAGS(pEFlags);
        IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU8, pu8Dst, cShiftArg, pEFlags);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /* memory */
        IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo should probably not be raised until we've fetched all the opcode bytes? */
        IEM_MC_BEGIN(3, 2);
        IEM_MC_ARG(uint8_t *,   pu8Dst,     0);
        IEM_MC_ARG(uint8_t,     cShiftArg,  1);
        IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm);
        /* The shift count comes from CL (low byte of rCX). */
        IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
        IEM_MC_MEM_MAP(pu8Dst, IEM_ACCESS_DATA_RW, pIemCpu->iEffSeg, GCPtrEffDst, 0 /*arg*/);
        IEM_MC_FETCH_EFLAGS(EFlags);
        IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU8, pu8Dst, cShiftArg, pEFlags);

        IEM_MC_MEM_COMMIT_AND_UNMAP(pu8Dst, IEM_ACCESS_DATA_RW);
        IEM_MC_COMMIT_EFLAGS(EFlags);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
11085
11086
/** Opcode 0xd3.
 * Group 2 shift/rotate Ev,CL: the reg field of ModR/M selects the operation
 * (rol/ror/rcl/rcr/shl/shr/sar); the shift count comes from the CL register. */
FNIEMOP_DEF(iemOp_Grp2_Ev_CL)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    PCIEMOPSHIFTSIZES pImpl;
    switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
    {
        case 0: pImpl = &g_iemAImpl_rol; IEMOP_MNEMONIC("rol Ev,CL"); break;
        case 1: pImpl = &g_iemAImpl_ror; IEMOP_MNEMONIC("ror Ev,CL"); break;
        case 2: pImpl = &g_iemAImpl_rcl; IEMOP_MNEMONIC("rcl Ev,CL"); break;
        case 3: pImpl = &g_iemAImpl_rcr; IEMOP_MNEMONIC("rcr Ev,CL"); break;
        case 4: pImpl = &g_iemAImpl_shl; IEMOP_MNEMONIC("shl Ev,CL"); break;
        case 5: pImpl = &g_iemAImpl_shr; IEMOP_MNEMONIC("shr Ev,CL"); break;
        case 7: pImpl = &g_iemAImpl_sar; IEMOP_MNEMONIC("sar Ev,CL"); break;
        case 6: return IEMOP_RAISE_INVALID_OPCODE(); /* /6 is undefined for group 2. */
        IEM_NOT_REACHED_DEFAULT_CASE_RET(); /* gcc maybe stupid */
    }
    /* OF and AF are left undefined by the shift/rotate instructions. */
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_AF);

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register destination: one IEM_MC sequence per effective operand size. */
        IEMOP_HLP_NO_LOCK_PREFIX();
        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint16_t *,      pu16Dst,    0);
                IEM_MC_ARG(uint8_t,         cShiftArg,  1);
                IEM_MC_ARG(uint32_t *,      pEFlags,    2);
                IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, cShiftArg, pEFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint32_t *,      pu32Dst,    0);
                IEM_MC_ARG(uint8_t,         cShiftArg,  1);
                IEM_MC_ARG(uint32_t *,      pEFlags,    2);
                IEM_MC_REF_GREG_U32(pu32Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, cShiftArg, pEFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint64_t *,      pu64Dst,    0);
                IEM_MC_ARG(uint8_t,         cShiftArg,  1);
                IEM_MC_ARG(uint32_t *,      pEFlags,    2);
                IEM_MC_REF_GREG_U64(pu64Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, cShiftArg, pEFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /* memory destination: map it read/write, apply the shift, then commit. */
        IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo should probably not be raised until we've fetched all the opcode bytes? */
        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint16_t *,          pu16Dst,        0);
                IEM_MC_ARG(uint8_t,             cShiftArg,      1);
                IEM_MC_ARG_LOCAL_EFLAGS(        pEFlags, EFlags, 2);
                IEM_MC_LOCAL(RTGCPTR,           GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm);
                IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
                IEM_MC_MEM_MAP(pu16Dst, IEM_ACCESS_DATA_RW, pIemCpu->iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, cShiftArg, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, IEM_ACCESS_DATA_RW);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint32_t *,          pu32Dst,        0);
                IEM_MC_ARG(uint8_t,             cShiftArg,      1);
                IEM_MC_ARG_LOCAL_EFLAGS(        pEFlags, EFlags, 2);
                IEM_MC_LOCAL(RTGCPTR,           GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm);
                IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
                IEM_MC_MEM_MAP(pu32Dst, IEM_ACCESS_DATA_RW, pIemCpu->iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, cShiftArg, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, IEM_ACCESS_DATA_RW);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint64_t *,          pu64Dst,        0);
                IEM_MC_ARG(uint8_t,             cShiftArg,      1);
                IEM_MC_ARG_LOCAL_EFLAGS(        pEFlags, EFlags, 2);
                IEM_MC_LOCAL(RTGCPTR,           GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm);
                IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
                IEM_MC_MEM_MAP(pu64Dst, IEM_ACCESS_DATA_RW, pIemCpu->iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, cShiftArg, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, IEM_ACCESS_DATA_RW);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
11221
/** Opcode 0xd4.
 * AAM - ASCII adjust AX after multiply; the immediate byte is the divisor.
 * Invalid in 64-bit mode; an immediate of zero raises \#DE. */
FNIEMOP_DEF(iemOp_aam_Ib)
{
    IEMOP_MNEMONIC("aam Ib");
    uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
    IEMOP_HLP_NO_LOCK_PREFIX();
    IEMOP_HLP_NO_64BIT();
    if (!bImm)
        return IEMOP_RAISE_DIVIDE_ERROR(); /* division by the immediate -> #DE on zero */
    return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_aam, bImm);
}
11233
11234
/** Opcode 0xd5.
 * AAD - ASCII adjust AX before division; the immediate byte is the multiplier.
 * Invalid in 64-bit mode. Unlike AAM, a zero immediate is legal (no division). */
FNIEMOP_DEF(iemOp_aad_Ib)
{
    IEMOP_MNEMONIC("aad Ib");
    uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
    IEMOP_HLP_NO_LOCK_PREFIX();
    IEMOP_HLP_NO_64BIT();
    return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_aad, bImm);
}
11244
11245
/** Opcode 0xd7.
 * XLAT - table lookup: AL = [seg:(r/e)BX + zero-extended AL], one variant per
 * effective address size. */
FNIEMOP_DEF(iemOp_xlat)
{
    IEMOP_MNEMONIC("xlat");
    IEMOP_HLP_NO_LOCK_PREFIX();
    switch (pIemCpu->enmEffAddrMode)
    {
        case IEMMODE_16BIT:
            IEM_MC_BEGIN(2, 0);
            IEM_MC_LOCAL(uint8_t,  u8Tmp);
            IEM_MC_LOCAL(uint16_t, u16Addr);
            IEM_MC_FETCH_GREG_U8_ZX_U16(u16Addr, X86_GREG_xAX);
            IEM_MC_ADD_GREG_U16_TO_LOCAL(u16Addr, X86_GREG_xBX);
            IEM_MC_FETCH_MEM16_U8(u8Tmp, pIemCpu->iEffSeg, u16Addr);
            IEM_MC_STORE_GREG_U8(X86_GREG_xAX, u8Tmp);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_32BIT:
            IEM_MC_BEGIN(2, 0);
            IEM_MC_LOCAL(uint8_t,  u8Tmp);
            IEM_MC_LOCAL(uint32_t, u32Addr);
            IEM_MC_FETCH_GREG_U8_ZX_U32(u32Addr, X86_GREG_xAX);
            IEM_MC_ADD_GREG_U32_TO_LOCAL(u32Addr, X86_GREG_xBX);
            IEM_MC_FETCH_MEM32_U8(u8Tmp, pIemCpu->iEffSeg, u32Addr);
            IEM_MC_STORE_GREG_U8(X86_GREG_xAX, u8Tmp);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_64BIT:
            IEM_MC_BEGIN(2, 0);
            IEM_MC_LOCAL(uint8_t,  u8Tmp);
            IEM_MC_LOCAL(uint64_t, u64Addr);
            IEM_MC_FETCH_GREG_U8_ZX_U64(u64Addr, X86_GREG_xAX);
            IEM_MC_ADD_GREG_U64_TO_LOCAL(u64Addr, X86_GREG_xBX);
            IEM_MC_FETCH_MEM_U8(u8Tmp, pIemCpu->iEffSeg, u64Addr);
            IEM_MC_STORE_GREG_U8(X86_GREG_xAX, u8Tmp);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
11292
11293
11294/**
11295 * Common worker for FPU instructions working on ST0 and STn, and storing the
11296 * result in ST0.
11297 *
11298 * @param pfnAImpl Pointer to the instruction implementation (assembly).
11299 */
11300FNIEMOP_DEF_2(iemOpHlpFpu_st0_stN, uint8_t, bRm, PFNIEMAIMPLFPUR80, pfnAImpl)
11301{
11302 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11303
11304 IEM_MC_BEGIN(3, 1);
11305 IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
11306 IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
11307 IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
11308 IEM_MC_ARG(PCRTFLOAT80U, pr80Value2, 2);
11309
11310 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
11311 IEM_MC_MAYBE_RAISE_FPU_XCPT();
11312 IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80(pr80Value1, 0, pr80Value2, bRm & X86_MODRM_RM_MASK)
11313 IEM_MC_CALL_FPU_AIMPL_3(pfnAImpl, pFpuRes, pr80Value1, pr80Value2);
11314 IEM_MC_STORE_FPU_RESULT(FpuRes, 0);
11315 IEM_MC_ELSE()
11316 IEM_MC_FPU_STACK_UNDERFLOW(0);
11317 IEM_MC_ENDIF();
11318 IEM_MC_USED_FPU();
11319 IEM_MC_ADVANCE_RIP();
11320
11321 IEM_MC_END();
11322 return VINF_SUCCESS;
11323}
11324
11325
11326/**
11327 * Common worker for FPU instructions working on ST0 and STn, and only affecting
11328 * flags.
11329 *
11330 * @param pfnAImpl Pointer to the instruction implementation (assembly).
11331 */
11332FNIEMOP_DEF_2(iemOpHlpFpuNoStore_st0_stN, uint8_t, bRm, PFNIEMAIMPLFPUR80FSW, pfnAImpl)
11333{
11334 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11335
11336 IEM_MC_BEGIN(3, 1);
11337 IEM_MC_LOCAL(uint16_t, u16Fsw);
11338 IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
11339 IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
11340 IEM_MC_ARG(PCRTFLOAT80U, pr80Value2, 2);
11341
11342 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
11343 IEM_MC_MAYBE_RAISE_FPU_XCPT();
11344 IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80(pr80Value1, 0, pr80Value2, bRm & X86_MODRM_RM_MASK)
11345 IEM_MC_CALL_FPU_AIMPL_3(pfnAImpl, pu16Fsw, pr80Value1, pr80Value2);
11346 IEM_MC_UPDATE_FSW(u16Fsw);
11347 IEM_MC_ELSE()
11348 IEM_MC_FPU_STACK_UNDERFLOW(UINT8_MAX);
11349 IEM_MC_ENDIF();
11350 IEM_MC_USED_FPU();
11351 IEM_MC_ADVANCE_RIP();
11352
11353 IEM_MC_END();
11354 return VINF_SUCCESS;
11355}
11356
11357
11358/**
11359 * Common worker for FPU instructions working on ST0 and STn, only affecting
11360 * flags, and popping when done.
11361 *
11362 * @param pfnAImpl Pointer to the instruction implementation (assembly).
11363 */
11364FNIEMOP_DEF_2(iemOpHlpFpuNoStore_st0_stN_pop, uint8_t, bRm, PFNIEMAIMPLFPUR80FSW, pfnAImpl)
11365{
11366 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11367
11368 IEM_MC_BEGIN(3, 1);
11369 IEM_MC_LOCAL(uint16_t, u16Fsw);
11370 IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
11371 IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
11372 IEM_MC_ARG(PCRTFLOAT80U, pr80Value2, 2);
11373
11374 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
11375 IEM_MC_MAYBE_RAISE_FPU_XCPT();
11376 IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80(pr80Value1, 0, pr80Value2, bRm & X86_MODRM_RM_MASK)
11377 IEM_MC_CALL_FPU_AIMPL_3(pfnAImpl, pu16Fsw, pr80Value1, pr80Value2);
11378 IEM_MC_UPDATE_FSW_THEN_POP(u16Fsw);
11379 IEM_MC_ELSE()
11380 IEM_MC_FPU_STACK_UNDERFLOW_THEN_POP(UINT8_MAX);
11381 IEM_MC_ENDIF();
11382 IEM_MC_USED_FPU();
11383 IEM_MC_ADVANCE_RIP();
11384
11385 IEM_MC_END();
11386 return VINF_SUCCESS;
11387}
11388
11389
/** Opcode 0xd8 11/0.
 * FADD ST0,STn - add STn to ST0, storing the result in ST0. */
FNIEMOP_DEF_1(iemOp_fadd_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fadd st0,stN");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_stN, bRm, iemAImpl_fadd_r80_by_r80);
}
11396
11397
/** Opcode 0xd8 11/1.
 * FMUL ST0,STn - multiply ST0 by STn, storing the result in ST0. */
FNIEMOP_DEF_1(iemOp_fmul_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fmul st0,stN");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_stN, bRm, iemAImpl_fmul_r80_by_r80);
}
11404
11405
/** Opcode 0xd8 11/2.
 * FCOM ST0,STn - compare ST0 with STn, only updating FSW condition codes. */
FNIEMOP_DEF_1(iemOp_fcom_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fcom st0,stN");
    return FNIEMOP_CALL_2(iemOpHlpFpuNoStore_st0_stN, bRm, iemAImpl_fcom_r80_by_r80);
}
11412
11413
/** Opcode 0xd8 11/3.
 * FCOMP ST0,STn - compare ST0 with STn (same worker as FCOM), then pop. */
FNIEMOP_DEF_1(iemOp_fcomp_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fcomp st0,stN");
    return FNIEMOP_CALL_2(iemOpHlpFpuNoStore_st0_stN_pop, bRm, iemAImpl_fcom_r80_by_r80);
}
11420
11421
/** Opcode 0xd8 11/4.
 * FSUB ST0,STn - subtract STn from ST0, storing the result in ST0. */
FNIEMOP_DEF_1(iemOp_fsub_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fsub st0,stN");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_stN, bRm, iemAImpl_fsub_r80_by_r80);
}
11428
11429
/** Opcode 0xd8 11/5.
 * FSUBR ST0,STn - reverse subtract (STn - ST0), storing the result in ST0. */
FNIEMOP_DEF_1(iemOp_fsubr_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fsubr st0,stN");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_stN, bRm, iemAImpl_fsubr_r80_by_r80);
}
11436
11437
/** Opcode 0xd8 11/6.
 * FDIV ST0,STn - divide ST0 by STn, storing the result in ST0. */
FNIEMOP_DEF_1(iemOp_fdiv_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fdiv st0,stN");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_stN, bRm, iemAImpl_fdiv_r80_by_r80);
}
11444
11445
/** Opcode 0xd8 11/7.
 * FDIVR ST0,STn - reverse divide (STn / ST0), storing the result in ST0. */
FNIEMOP_DEF_1(iemOp_fdivr_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fdivr st0,stN");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_stN, bRm, iemAImpl_fdivr_r80_by_r80);
}
11452
11453
11454/**
11455 * Common worker for FPU instructions working on ST0 and an m32r, and storing
11456 * the result in ST0.
11457 *
11458 * @param pfnAImpl Pointer to the instruction implementation (assembly).
11459 */
11460FNIEMOP_DEF_2(iemOpHlpFpu_st0_m32r, uint8_t, bRm, PFNIEMAIMPLFPUR32, pfnAImpl)
11461{
11462 IEM_MC_BEGIN(3, 3);
11463 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
11464 IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
11465 IEM_MC_LOCAL(RTFLOAT32U, r32Val2);
11466 IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
11467 IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
11468 IEM_MC_ARG_LOCAL_REF(PCRTFLOAT32U, pr32Val2, r32Val2, 2);
11469
11470 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm);
11471 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11472
11473 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
11474 IEM_MC_MAYBE_RAISE_FPU_XCPT();
11475 IEM_MC_FETCH_MEM_R32(r32Val2, pIemCpu->iEffSeg, GCPtrEffSrc);
11476
11477 IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0)
11478 IEM_MC_CALL_FPU_AIMPL_3(pfnAImpl, pFpuRes, pr80Value1, pr32Val2);
11479 IEM_MC_STORE_FPU_RESULT(FpuRes, 0);
11480 IEM_MC_ELSE()
11481 IEM_MC_FPU_STACK_UNDERFLOW(0);
11482 IEM_MC_ENDIF();
11483 IEM_MC_USED_FPU();
11484 IEM_MC_ADVANCE_RIP();
11485
11486 IEM_MC_END();
11487 return VINF_SUCCESS;
11488}
11489
11490
/** Opcode 0xd8 !11/0.
 * FADD ST0,m32r - add a 32-bit real memory operand to ST0. */
FNIEMOP_DEF_1(iemOp_fadd_m32r, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fadd st0,m32r");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32r, bRm, iemAImpl_fadd_r80_by_r32);
}
11497
11498
/** Opcode 0xd8 !11/1.
 * FMUL ST0,m32r - multiply ST0 by a 32-bit real memory operand. */
FNIEMOP_DEF_1(iemOp_fmul_m32r, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fmul st0,m32r");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32r, bRm, iemAImpl_fmul_r80_by_r32);
}
11505
11506
/** Opcode 0xd8 !11/2.
 * FCOM ST0,m32r - compare ST0 with a 32-bit real memory operand, updating only
 * the FSW condition codes (no value is stored). */
FNIEMOP_DEF_1(iemOp_fcom_m32r, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fcom st0,m32r");

    IEM_MC_BEGIN(3, 3);
    IEM_MC_LOCAL(RTGCPTR,               GCPtrEffSrc);
    IEM_MC_LOCAL(uint16_t,              u16Fsw);
    IEM_MC_LOCAL(RTFLOAT32U,            r32Val2);
    IEM_MC_ARG_LOCAL_REF(uint16_t *,    pu16Fsw,        u16Fsw,     0);
    IEM_MC_ARG(PCRTFLOAT80U,            pr80Value1,                 1);
    IEM_MC_ARG_LOCAL_REF(PCRTFLOAT32U,  pr32Val2,       r32Val2,    2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_R32(r32Val2, pIemCpu->iEffSeg, GCPtrEffSrc);

    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0)
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fcom_r80_by_r32, pu16Fsw, pr80Value1, pr32Val2);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP(u16Fsw, pIemCpu->iEffSeg, GCPtrEffSrc);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP(UINT8_MAX, pIemCpu->iEffSeg, GCPtrEffSrc);
    IEM_MC_ENDIF();
    IEM_MC_USED_FPU();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
11539
11540
/** Opcode 0xd8 !11/3.
 * FCOMP ST0,m32r - like FCOM m32r, but pops the register stack afterwards. */
FNIEMOP_DEF_1(iemOp_fcomp_m32r, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fcomp st0,m32r");

    IEM_MC_BEGIN(3, 3);
    IEM_MC_LOCAL(RTGCPTR,               GCPtrEffSrc);
    IEM_MC_LOCAL(uint16_t,              u16Fsw);
    IEM_MC_LOCAL(RTFLOAT32U,            r32Val2);
    IEM_MC_ARG_LOCAL_REF(uint16_t *,    pu16Fsw,        u16Fsw,     0);
    IEM_MC_ARG(PCRTFLOAT80U,            pr80Value1,                 1);
    IEM_MC_ARG_LOCAL_REF(PCRTFLOAT32U,  pr32Val2,       r32Val2,    2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_R32(r32Val2, pIemCpu->iEffSeg, GCPtrEffSrc);

    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0)
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fcom_r80_by_r32, pu16Fsw, pr80Value1, pr32Val2);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pIemCpu->iEffSeg, GCPtrEffSrc);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pIemCpu->iEffSeg, GCPtrEffSrc);
    IEM_MC_ENDIF();
    IEM_MC_USED_FPU();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
11573
11574
/** Opcode 0xd8 !11/4.
 * FSUB ST0,m32r - subtract a 32-bit real memory operand from ST0. */
FNIEMOP_DEF_1(iemOp_fsub_m32r, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fsub st0,m32r");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32r, bRm, iemAImpl_fsub_r80_by_r32);
}
11581
11582
/** Opcode 0xd8 !11/5.
 * FSUBR ST0,m32r - reverse subtract (m32r - ST0), storing the result in ST0. */
FNIEMOP_DEF_1(iemOp_fsubr_m32r, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fsubr st0,m32r");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32r, bRm, iemAImpl_fsubr_r80_by_r32);
}
11589
11590
/** Opcode 0xd8 !11/6.
 * FDIV ST0,m32r - divide ST0 by a 32-bit real memory operand. */
FNIEMOP_DEF_1(iemOp_fdiv_m32r, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fdiv st0,m32r");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32r, bRm, iemAImpl_fdiv_r80_by_r32);
}
11597
11598
/** Opcode 0xd8 !11/7.
 * FDIVR ST0,m32r - reverse divide (m32r / ST0), storing the result in ST0. */
FNIEMOP_DEF_1(iemOp_fdivr_m32r, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fdivr st0,m32r");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32r, bRm, iemAImpl_fdivr_r80_by_r32);
}
11605
11606
/** Opcode 0xd8.
 * FPU escape 0: register forms (mod=3) operate on ST0/STn; memory forms take
 * a 32-bit real operand. Dispatches on the reg field of ModR/M. */
FNIEMOP_DEF(iemOp_EscF0)
{
    /* Record the FPU opcode offset before consuming the ModR/M byte (FOP). */
    pIemCpu->offFpuOpcode = pIemCpu->offOpcode - 1;
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
        {
            case 0: return FNIEMOP_CALL_1(iemOp_fadd_stN,  bRm);
            case 1: return FNIEMOP_CALL_1(iemOp_fmul_stN,  bRm);
            case 2: return FNIEMOP_CALL_1(iemOp_fcom_stN,  bRm);
            case 3: return FNIEMOP_CALL_1(iemOp_fcomp_stN, bRm);
            case 4: return FNIEMOP_CALL_1(iemOp_fsub_stN,  bRm);
            case 5: return FNIEMOP_CALL_1(iemOp_fsubr_stN, bRm);
            case 6: return FNIEMOP_CALL_1(iemOp_fdiv_stN,  bRm);
            case 7: return FNIEMOP_CALL_1(iemOp_fdivr_stN, bRm);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
        {
            case 0: return FNIEMOP_CALL_1(iemOp_fadd_m32r,  bRm);
            case 1: return FNIEMOP_CALL_1(iemOp_fmul_m32r,  bRm);
            case 2: return FNIEMOP_CALL_1(iemOp_fcom_m32r,  bRm);
            case 3: return FNIEMOP_CALL_1(iemOp_fcomp_m32r, bRm);
            case 4: return FNIEMOP_CALL_1(iemOp_fsub_m32r,  bRm);
            case 5: return FNIEMOP_CALL_1(iemOp_fsubr_m32r, bRm);
            case 6: return FNIEMOP_CALL_1(iemOp_fdiv_m32r,  bRm);
            case 7: return FNIEMOP_CALL_1(iemOp_fdivr_m32r, bRm);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
11644
11645
/** Opcode 0xd9 /0 mem32real
 * FLD m32r - convert a 32-bit real memory operand to 80-bit and push it.
 * Pushing requires ST7 (the register that will become the new ST0) to be free.
 * @sa iemOp_fld_m64r */
FNIEMOP_DEF_1(iemOp_fld_m32r, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fld m32r");

    IEM_MC_BEGIN(2, 3);
    IEM_MC_LOCAL(RTGCPTR,               GCPtrEffSrc);
    IEM_MC_LOCAL(IEMFPURESULT,          FpuRes);
    IEM_MC_LOCAL(RTFLOAT32U,            r32Val);
    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes,    FpuRes, 0);
    IEM_MC_ARG_LOCAL_REF(PCRTFLOAT32U,  pr32Val,    r32Val, 1);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_R32(r32Val, pIemCpu->iEffSeg, GCPtrEffSrc);

    IEM_MC_IF_FPUREG_IS_EMPTY(7) /* ST7 becomes the new top of stack */
        IEM_MC_CALL_FPU_AIMPL_2(iemAImpl_fld_r32_to_r80, pFpuRes, pr32Val);
        IEM_MC_PUSH_FPU_RESULT_MEM_OP(FpuRes, pIemCpu->iEffSeg, GCPtrEffSrc);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_PUSH_OVERFLOW_MEM_OP(pIemCpu->iEffSeg, GCPtrEffSrc);
    IEM_MC_ENDIF();
    IEM_MC_USED_FPU();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
11678
11679
/** Opcode 0xd9 !11/2 mem32real
 * FST m32r - store ST0 to memory as a 32-bit real. On an empty ST0 a negative
 * QNaN is stored instead when the invalid-operation exception is masked. */
FNIEMOP_DEF_1(iemOp_fst_m32r, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fst m32r");
    IEM_MC_BEGIN(3, 2);
    IEM_MC_LOCAL(RTGCPTR,               GCPtrEffDst);
    IEM_MC_LOCAL(uint16_t,              u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *,    pu16Fsw,    u16Fsw, 0);
    IEM_MC_ARG(PRTFLOAT32U,             pr32Dst,            1);
    IEM_MC_ARG(PCRTFLOAT80U,            pr80Value,          2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_MEM_MAP(pr32Dst, IEM_ACCESS_DATA_W, pIemCpu->iEffSeg, GCPtrEffDst, 1 /*arg*/);
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fst_r80_to_r32, pu16Fsw, pr32Dst, pr80Value);
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pr32Dst, IEM_ACCESS_DATA_W, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP(u16Fsw, pIemCpu->iEffSeg, GCPtrEffDst);
    IEM_MC_ELSE()
        /* Stack underflow: store the indefinite QNaN only if #IA is masked. */
        IEM_MC_IF_FCW_IM()
            IEM_MC_STORE_MEM_NEG_QNAN_R32_BY_REF(pr32Dst);
            IEM_MC_MEM_COMMIT_AND_UNMAP(pr32Dst, IEM_ACCESS_DATA_W);
        IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP(UINT8_MAX, pIemCpu->iEffSeg, GCPtrEffDst);
    IEM_MC_ENDIF();
    IEM_MC_USED_FPU();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
11714
11715
/** Opcode 0xd9 !11/3
 * FSTP m32r - store ST0 to memory as a 32-bit real, then pop the stack. */
FNIEMOP_DEF_1(iemOp_fstp_m32r, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fstp m32r");
    IEM_MC_BEGIN(3, 2);
    IEM_MC_LOCAL(RTGCPTR,               GCPtrEffDst);
    IEM_MC_LOCAL(uint16_t,              u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *,    pu16Fsw,    u16Fsw, 0);
    IEM_MC_ARG(PRTFLOAT32U,             pr32Dst,            1);
    IEM_MC_ARG(PCRTFLOAT80U,            pr80Value,          2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_MEM_MAP(pr32Dst, IEM_ACCESS_DATA_W, pIemCpu->iEffSeg, GCPtrEffDst, 1 /*arg*/);
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fst_r80_to_r32, pu16Fsw, pr32Dst, pr80Value);
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pr32Dst, IEM_ACCESS_DATA_W, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pIemCpu->iEffSeg, GCPtrEffDst);
    IEM_MC_ELSE()
        /* Stack underflow: store the indefinite QNaN only if #IA is masked. */
        IEM_MC_IF_FCW_IM()
            IEM_MC_STORE_MEM_NEG_QNAN_R32_BY_REF(pr32Dst);
            IEM_MC_MEM_COMMIT_AND_UNMAP(pr32Dst, IEM_ACCESS_DATA_W);
        IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pIemCpu->iEffSeg, GCPtrEffDst);
    IEM_MC_ENDIF();
    IEM_MC_USED_FPU();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
11750
11751
/** Opcode 0xd9 !11/4
 * FLDENV m14/28byte - load the FPU environment from memory; the layout (14 or
 * 28 bytes) depends on the effective operand size, handled by the C impl. */
FNIEMOP_DEF_1(iemOp_fldenv, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fldenv m14/28byte");
    IEM_MC_BEGIN(3, 0);
    IEM_MC_ARG_CONST(IEMMODE,           enmEffOpSize, /*=*/ pIemCpu->enmEffOpSize,  0);
    IEM_MC_ARG_CONST(uint8_t,           iEffSeg,      /*=*/ pIemCpu->iEffSeg,       1);
    IEM_MC_ARG(RTGCPTR,                 GCPtrEffSrc,                                2);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_CALL_CIMPL_3(iemCImpl_fldenv, enmEffOpSize, iEffSeg, GCPtrEffSrc);
    IEM_MC_END();
    return VINF_SUCCESS;
}
11767
11768
11769/** Opcode 0xd9 !11/5 */
11770FNIEMOP_DEF_1(iemOp_fldcw, uint8_t, bRm)
11771{
11772 IEMOP_MNEMONIC("fldcw m2byte");
11773 IEM_MC_BEGIN(1, 1);
11774 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
11775 IEM_MC_ARG(uint16_t, u16Fsw, 0);
11776 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm);
11777 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11778 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
11779 IEM_MC_FETCH_MEM_U16(u16Fsw, pIemCpu->iEffSeg, GCPtrEffSrc);
11780 IEM_MC_CALL_CIMPL_1(iemCImpl_fldcw, u16Fsw);
11781 IEM_MC_END();
11782 return VINF_SUCCESS;
11783}
11784
11785
/** Opcode 0xd9 !11/6
 * FNSTENV m14/28byte - store the FPU environment to memory without checking
 * for pending exceptions. NOTE(review): the mnemonic string says "fstenv";
 * presumably intentional since FSTENV is FNSTENV with a WAIT prefix - confirm. */
FNIEMOP_DEF_1(iemOp_fnstenv, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fstenv m14/m28byte");
    IEM_MC_BEGIN(3, 0);
    IEM_MC_ARG_CONST(IEMMODE,           enmEffOpSize, /*=*/ pIemCpu->enmEffOpSize,  0);
    IEM_MC_ARG_CONST(uint8_t,           iEffSeg,      /*=*/ pIemCpu->iEffSeg,       1);
    IEM_MC_ARG(RTGCPTR,                 GCPtrEffDst,                                2);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_CALL_CIMPL_3(iemCImpl_fnstenv, enmEffOpSize, iEffSeg, GCPtrEffDst);
    IEM_MC_END();
    return VINF_SUCCESS;
}
11801
11802
/** Opcode 0xd9 !11/7
 * FNSTCW m2byte - store the FPU control word to memory without checking for
 * pending exceptions. */
FNIEMOP_DEF_1(iemOp_fnstcw, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fnstcw m2byte");
    IEM_MC_BEGIN(2, 0);
    IEM_MC_LOCAL(RTGCPTR,               GCPtrEffDst);
    IEM_MC_LOCAL(uint16_t,              u16Fcw);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_FETCH_FCW(u16Fcw);
    IEM_MC_STORE_MEM_U16(pIemCpu->iEffSeg, GCPtrEffDst, u16Fcw);
    IEM_MC_ADVANCE_RIP(); /* C0-C3 are documented as undefined, we leave them unmodified. */
    IEM_MC_END();
    return VINF_SUCCESS;
}
11819
11820
/** Opcode 0xd9 0xc9, 0xd9 0xd8-0xdf, ++?.
 * FNOP - FPU no-operation; still raises \#NM/\#MF and updates FOP/FPUIP. */
FNIEMOP_DEF(iemOp_fnop)
{
    IEMOP_MNEMONIC("fnop");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(0, 0);
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    /** @todo Testcase: looks like FNOP leaves FOP alone but updates FPUIP. Could be
     *        intel optimizations. Investigate. */
    IEM_MC_UPDATE_FPU_OPCODE_IP();
    IEM_MC_USED_FPU();
    IEM_MC_ADVANCE_RIP(); /* C0-C3 are documented as undefined, we leave them unmodified. */
    IEM_MC_END();
    return VINF_SUCCESS;
}
11838
11839
/** Opcode 0xd9 11/0 stN
 * FLD STn - push a copy of STn onto the register stack. */
FNIEMOP_DEF_1(iemOp_fld_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fld stN");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    /** @todo Testcase: Check if this raises \#MF?  Intel mentioned it not.  AMD
     *        indicates that it does. */
    IEM_MC_BEGIN(0, 2);
    IEM_MC_LOCAL(PCRTFLOAT80U,          pr80Value);
    IEM_MC_LOCAL(IEMFPURESULT,          FpuRes);
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, bRm & X86_MODRM_RM_MASK)
        IEM_MC_SET_FPU_RESULT(FpuRes, 0 /*FSW*/, pr80Value);
        IEM_MC_PUSH_FPU_RESULT(FpuRes);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_PUSH_UNDERFLOW(); /* source register empty */
    IEM_MC_ENDIF();
    IEM_MC_USED_FPU();
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();

    return VINF_SUCCESS;
}
11865
11866
/** Opcode 0xd9 11/3 stN
 * FXCH STn - exchange ST0 with STn; sets C1 in FSW on success. */
FNIEMOP_DEF_1(iemOp_fxch_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fxch stN");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    /** @todo Testcase: Check if this raises \#MF?  Intel mentioned it not.  AMD
     *        indicates that it does. */
    IEM_MC_BEGIN(1, 3);
    IEM_MC_LOCAL(PCRTFLOAT80U,          pr80Value1);
    IEM_MC_LOCAL(PCRTFLOAT80U,          pr80Value2);
    IEM_MC_LOCAL(IEMFPURESULT,          FpuRes);
    IEM_MC_ARG_CONST(uint8_t,           iStReg, /*=*/ bRm & X86_MODRM_RM_MASK, 0);
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80(pr80Value1, 0, pr80Value2, bRm & X86_MODRM_RM_MASK)
        /* Swap: old ST0 goes to STn, old STn goes to ST0 (via FpuRes). */
        IEM_MC_SET_FPU_RESULT(FpuRes, X86_FSW_C1, pr80Value2);
        IEM_MC_STORE_FPUREG_R80_SRC_REF(bRm & X86_MODRM_RM_MASK, pr80Value1);
        IEM_MC_STORE_FPU_RESULT(FpuRes, 0);
    IEM_MC_ELSE()
        IEM_MC_CALL_CIMPL_1(iemCImpl_fxch_underflow, iStReg);
    IEM_MC_ENDIF();
    IEM_MC_USED_FPU();
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();

    return VINF_SUCCESS;
}
11895
11896
/** Opcode 0xd9 11/4, 0xdd 11/2.
 * FSTP STn - copy ST0 to STn, then pop the register stack. */
FNIEMOP_DEF_1(iemOp_fstp_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fstp st0,stN");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    /* fstp st0, st0 is frequently used as an official 'ffreep st0' sequence. */
    uint8_t const iDstReg = bRm & X86_MODRM_RM_MASK;
    if (!iDstReg)
    {
        /* Destination is ST0 itself: just pop (no copy needed). */
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL_CONST(uint16_t,        u16Fsw, /*=*/ 0);
        IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
        IEM_MC_MAYBE_RAISE_FPU_XCPT();
        IEM_MC_IF_FPUREG_NOT_EMPTY(0)
            IEM_MC_UPDATE_FSW_THEN_POP(u16Fsw);
        IEM_MC_ELSE()
            IEM_MC_FPU_STACK_UNDERFLOW_THEN_POP(0);
        IEM_MC_ENDIF();
        IEM_MC_USED_FPU();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(PCRTFLOAT80U,          pr80Value);
        IEM_MC_LOCAL(IEMFPURESULT,          FpuRes);
        IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
        IEM_MC_MAYBE_RAISE_FPU_XCPT();
        IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
            IEM_MC_SET_FPU_RESULT(FpuRes, 0 /*FSW*/, pr80Value);
            IEM_MC_STORE_FPU_RESULT_THEN_POP(FpuRes, iDstReg);
        IEM_MC_ELSE()
            IEM_MC_FPU_STACK_UNDERFLOW_THEN_POP(iDstReg);
        IEM_MC_ENDIF();
        IEM_MC_USED_FPU();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
11939
11940
11941/**
11942 * Common worker for FPU instructions working on ST0 and replaces it with the
11943 * result, i.e. unary operators.
11944 *
11945 * @param pfnAImpl Pointer to the instruction implementation (assembly).
11946 */
11947FNIEMOP_DEF_1(iemOpHlpFpu_st0, PFNIEMAIMPLFPUR80UNARY, pfnAImpl)
11948{
11949 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11950
11951 IEM_MC_BEGIN(2, 1);
11952 IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
11953 IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
11954 IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 1);
11955
11956 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
11957 IEM_MC_MAYBE_RAISE_FPU_XCPT();
11958 IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
11959 IEM_MC_CALL_FPU_AIMPL_2(pfnAImpl, pFpuRes, pr80Value);
11960 IEM_MC_STORE_FPU_RESULT(FpuRes, 0);
11961 IEM_MC_ELSE()
11962 IEM_MC_FPU_STACK_UNDERFLOW(0);
11963 IEM_MC_ENDIF();
11964 IEM_MC_USED_FPU();
11965 IEM_MC_ADVANCE_RIP();
11966
11967 IEM_MC_END();
11968 return VINF_SUCCESS;
11969}
11970
11971
/** Opcode 0xd9 0xe0.
 * FCHS - change the sign of ST0. */
FNIEMOP_DEF(iemOp_fchs)
{
    IEMOP_MNEMONIC("fchs st0");
    return FNIEMOP_CALL_1(iemOpHlpFpu_st0, iemAImpl_fchs_r80);
}
11978
11979
/** Opcode 0xd9 0xe1.
 * FABS - clear the sign of ST0 (absolute value). */
FNIEMOP_DEF(iemOp_fabs)
{
    IEMOP_MNEMONIC("fabs st0");
    return FNIEMOP_CALL_1(iemOpHlpFpu_st0, iemAImpl_fabs_r80);
}
11986
11987
11988/**
11989 * Common worker for FPU instructions working on ST0 and only returns FSW.
11990 *
11991 * @param pfnAImpl Pointer to the instruction implementation (assembly).
11992 */
11993FNIEMOP_DEF_1(iemOpHlpFpuNoStore_st0, PFNIEMAIMPLFPUR80UNARYFSW, pfnAImpl)
11994{
11995 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11996
11997 IEM_MC_BEGIN(2, 1);
11998 IEM_MC_LOCAL(uint16_t, u16Fsw);
11999 IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
12000 IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 1);
12001
12002 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
12003 IEM_MC_MAYBE_RAISE_FPU_XCPT();
12004 IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
12005 IEM_MC_CALL_FPU_AIMPL_2(pfnAImpl, pu16Fsw, pr80Value);
12006 IEM_MC_UPDATE_FSW(u16Fsw);
12007 IEM_MC_ELSE()
12008 IEM_MC_FPU_STACK_UNDERFLOW(UINT8_MAX);
12009 IEM_MC_ENDIF();
12010 IEM_MC_USED_FPU();
12011 IEM_MC_ADVANCE_RIP();
12012
12013 IEM_MC_END();
12014 return VINF_SUCCESS;
12015}
12016
12017
/** Opcode 0xd9 0xe4.
 * FTST - compare ST0 with 0.0, setting the FSW condition codes. */
FNIEMOP_DEF(iemOp_ftst)
{
    IEMOP_MNEMONIC("ftst st0");
    return FNIEMOP_CALL_1(iemOpHlpFpuNoStore_st0, iemAImpl_ftst_r80);
}
12024
12025
/** Opcode 0xd9 0xe5 - FXAM: classify the value in ST(0) via C0/C2/C3.
 * NOTE(review): real FXAM classifies an empty ST(0) as 'empty'
 * (C3,C2,C0 = 1,0,1) rather than signalling stack underflow, but the shared
 * helper takes the underflow path when ST(0) is empty — verify against
 * hardware / the testcase. */
FNIEMOP_DEF(iemOp_fxam)
{
    IEMOP_MNEMONIC("fxam st0");
    return FNIEMOP_CALL_1(iemOpHlpFpuNoStore_st0, iemAImpl_fxam_r80);
}
12032
12033
/**
 * Common worker for FPU instructions pushing a constant onto the FPU stack
 * (FLD1, FLDL2T, FLDL2E, FLDPI, FLDLG2, FLDLN2, FLDZ).
 *
 * @param   pfnAImpl    Pointer to the instruction implementation (assembly).
 */
FNIEMOP_DEF_1(iemOpHlpFpuPushConstant, PFNIEMAIMPLFPUR80LDCONST, pfnAImpl)
{
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(1, 1);
    IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    /* ST(7) is the register that becomes ST(0) after the push; it must be free. */
    IEM_MC_IF_FPUREG_IS_EMPTY(7)
        IEM_MC_CALL_FPU_AIMPL_1(pfnAImpl, pFpuRes);
        IEM_MC_PUSH_FPU_RESULT(FpuRes);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_PUSH_OVERFLOW();
    IEM_MC_ENDIF();
    IEM_MC_USED_FPU();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
12061
12062
/** Opcode 0xd9 0xe8 - FLD1: push +1.0 onto the FPU stack. */
FNIEMOP_DEF(iemOp_fld1)
{
    IEMOP_MNEMONIC("fld1");
    return FNIEMOP_CALL_1(iemOpHlpFpuPushConstant, iemAImpl_fld1);
}
12069
12070
/** Opcode 0xd9 0xe9 - FLDL2T: push log2(10) onto the FPU stack. */
FNIEMOP_DEF(iemOp_fldl2t)
{
    IEMOP_MNEMONIC("fldl2t");
    return FNIEMOP_CALL_1(iemOpHlpFpuPushConstant, iemAImpl_fldl2t);
}
12077
12078
/** Opcode 0xd9 0xea - FLDL2E: push log2(e) onto the FPU stack. */
FNIEMOP_DEF(iemOp_fldl2e)
{
    IEMOP_MNEMONIC("fldl2e");
    return FNIEMOP_CALL_1(iemOpHlpFpuPushConstant, iemAImpl_fldl2e);
}
12085
/** Opcode 0xd9 0xeb - FLDPI: push pi onto the FPU stack. */
FNIEMOP_DEF(iemOp_fldpi)
{
    IEMOP_MNEMONIC("fldpi");
    return FNIEMOP_CALL_1(iemOpHlpFpuPushConstant, iemAImpl_fldpi);
}
12092
12093
/** Opcode 0xd9 0xec - FLDLG2: push log10(2) onto the FPU stack. */
FNIEMOP_DEF(iemOp_fldlg2)
{
    IEMOP_MNEMONIC("fldlg2");
    return FNIEMOP_CALL_1(iemOpHlpFpuPushConstant, iemAImpl_fldlg2);
}
12100
/** Opcode 0xd9 0xed - FLDLN2: push ln(2) onto the FPU stack. */
FNIEMOP_DEF(iemOp_fldln2)
{
    IEMOP_MNEMONIC("fldln2");
    return FNIEMOP_CALL_1(iemOpHlpFpuPushConstant, iemAImpl_fldln2);
}
12107
12108
/** Opcode 0xd9 0xee - FLDZ: push +0.0 onto the FPU stack. */
FNIEMOP_DEF(iemOp_fldz)
{
    IEMOP_MNEMONIC("fldz");
    return FNIEMOP_CALL_1(iemOpHlpFpuPushConstant, iemAImpl_fldz);
}
12115
12116
/** Opcode 0xd9 0xf0 - F2XM1: replace ST(0) with 2^ST(0) - 1. */
FNIEMOP_DEF(iemOp_f2xm1)
{
    IEMOP_MNEMONIC("f2xm1 st0");
    return FNIEMOP_CALL_1(iemOpHlpFpu_st0, iemAImpl_f2xm1_r80);
}
12123
12124
12125/** Opcode 0xd9 0xf1. */
12126FNIEMOP_DEF(iemOp_fylx2)
12127{
12128 IEMOP_MNEMONIC("fylx2 st0");
12129 return FNIEMOP_CALL_1(iemOpHlpFpu_st0, iemAImpl_fyl2x_r80);
12130}
12131
12132
/**
 * Common worker for FPU instructions working on ST0 and having two outputs,
 * one replacing ST0 and one pushed onto the stack (FPTAN, FXTRACT, FSINCOS).
 *
 * @param   pfnAImpl    Pointer to the instruction implementation (assembly).
 */
FNIEMOP_DEF_1(iemOpHlpFpuReplace_st0_push, PFNIEMAIMPLFPUR80UNARYTWO, pfnAImpl)
{
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(2, 1);
    IEM_MC_LOCAL(IEMFPURESULTTWO, FpuResTwo);
    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULTTWO, pFpuResTwo, FpuResTwo, 0);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 1);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
        IEM_MC_CALL_FPU_AIMPL_2(pfnAImpl, pFpuResTwo, pr80Value);
        IEM_MC_PUSH_FPU_RESULT_TWO(FpuResTwo);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_PUSH_UNDERFLOW_TWO();
    IEM_MC_ENDIF();
    IEM_MC_USED_FPU();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
12162
12163
/** Opcode 0xd9 0xf2 - FPTAN: ST(0) := tan(ST(0)), then push 1.0. */
FNIEMOP_DEF(iemOp_fptan)
{
    IEMOP_MNEMONIC("fptan st0");
    return FNIEMOP_CALL_1(iemOpHlpFpuReplace_st0_push, iemAImpl_fptan_r80_r80);
}
12170
12171
/**
 * Common worker for FPU instructions working on STn and ST0, storing the
 * result in STn, and popping the stack unless IE, DE or ZE was raised.
 *
 * Used by FPATAN and FYL2XP1 (bRm's r/m field selects STn).
 *
 * @param   bRm         The ModR/M byte; low three bits give the STn index.
 * @param   pfnAImpl    Pointer to the instruction implementation (assembly).
 */
FNIEMOP_DEF_2(iemOpHlpFpu_stN_st0_pop, uint8_t, bRm, PFNIEMAIMPLFPUR80, pfnAImpl)
{
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(3, 1);
    IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value2, 2);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    /* Operand 1 is STn (destination), operand 2 is ST0. */
    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80(pr80Value1, bRm & X86_MODRM_RM_MASK, pr80Value2, 0)
        IEM_MC_CALL_FPU_AIMPL_3(pfnAImpl, pFpuRes, pr80Value1, pr80Value2);
        IEM_MC_STORE_FPU_RESULT_THEN_POP(FpuRes, bRm & X86_MODRM_RM_MASK);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW_THEN_POP(bRm & X86_MODRM_RM_MASK);
    IEM_MC_ENDIF();
    IEM_MC_USED_FPU();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
12203
12204
/** Opcode 0xd9 0xf3 - FPATAN: ST(1) := arctan(ST(1)/ST(0)), then pop. */
FNIEMOP_DEF(iemOp_fpatan)
{
    IEMOP_MNEMONIC("fpatan st1,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0_pop, 1, iemAImpl_fpatan_r80_by_r80);
}
12211
12212
/** Opcode 0xd9 0xf4 - FXTRACT: split ST(0) into exponent (ST(1)) and
 *  significand (ST(0)). */
FNIEMOP_DEF(iemOp_fxtract)
{
    IEMOP_MNEMONIC("fxtract st0");
    return FNIEMOP_CALL_1(iemOpHlpFpuReplace_st0_push, iemAImpl_fxtract_r80_r80);
}
12219
12220
/** Opcode 0xd9 0xf5 - FPREM1: ST(0) := IEEE remainder of ST(0)/ST(1). */
FNIEMOP_DEF(iemOp_fprem1)
{
    IEMOP_MNEMONIC("fprem1 st0, st1");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_stN, 1, iemAImpl_fprem1_r80_by_r80);
}
12227
12228
/** Opcode 0xd9 0xf6 - FDECSTP: decrement the FPU stack TOP pointer. */
FNIEMOP_DEF(iemOp_fdecstp)
{
    IEMOP_MNEMONIC("fdecstp");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    /* Note! C0, C2 and C3 are documented as undefined, we clear them. */
    /** @todo Testcase: Check whether FOP, FPUIP and FPUCS are affected by
     *        FINCSTP and FDECSTP. */

    IEM_MC_BEGIN(0,0);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    /* Only TOP moves; register contents and tags are untouched. */
    IEM_MC_FPU_STACK_DEC_TOP();
    IEM_MC_UPDATE_FSW_CONST(0);

    IEM_MC_USED_FPU();
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
12251
12252
/** Opcode 0xd9 0xf7 - FINCSTP: increment the FPU stack TOP pointer. */
FNIEMOP_DEF(iemOp_fincstp)
{
    IEMOP_MNEMONIC("fincstp");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    /* Note! C0, C2 and C3 are documented as undefined, we clear them. */
    /** @todo Testcase: Check whether FOP, FPUIP and FPUCS are affected by
     *        FINCSTP and FDECSTP. */

    IEM_MC_BEGIN(0,0);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    /* Only TOP moves; register contents and tags are untouched. */
    IEM_MC_FPU_STACK_INC_TOP();
    IEM_MC_UPDATE_FSW_CONST(0);

    IEM_MC_USED_FPU();
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
12275
12276
/** Opcode 0xd9 0xf8 - FPREM: ST(0) := partial remainder of ST(0)/ST(1)
 *  (truncating, 8087-style). */
FNIEMOP_DEF(iemOp_fprem)
{
    IEMOP_MNEMONIC("fprem st0, st1");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_stN, 1, iemAImpl_fprem_r80_by_r80);
}
12283
12284
/** Opcode 0xd9 0xf9 - FYL2XP1: ST(1) := ST(1) * log2(ST(0) + 1), then pop. */
FNIEMOP_DEF(iemOp_fyl2xp1)
{
    IEMOP_MNEMONIC("fyl2xp1 st1,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0_pop, 1, iemAImpl_fyl2xp1_r80_by_r80);
}
12291
12292
/** Opcode 0xd9 0xfa - FSQRT: ST(0) := sqrt(ST(0)). */
FNIEMOP_DEF(iemOp_fsqrt)
{
    IEMOP_MNEMONIC("fsqrt st0");
    return FNIEMOP_CALL_1(iemOpHlpFpu_st0, iemAImpl_fsqrt_r80);
}
12299
12300
/** Opcode 0xd9 0xfb - FSINCOS: ST(0) := sin(ST(0)), then push cos(ST(0)). */
FNIEMOP_DEF(iemOp_fsincos)
{
    IEMOP_MNEMONIC("fsincos st0");
    return FNIEMOP_CALL_1(iemOpHlpFpuReplace_st0_push, iemAImpl_fsincos_r80_r80);
}
12307
12308
/** Opcode 0xd9 0xfc - FRNDINT: round ST(0) to integer per the FCW RC field. */
FNIEMOP_DEF(iemOp_frndint)
{
    IEMOP_MNEMONIC("frndint st0");
    return FNIEMOP_CALL_1(iemOpHlpFpu_st0, iemAImpl_frndint_r80);
}
12315
12316
/** Opcode 0xd9 0xfd - FSCALE: ST(0) := ST(0) * 2^trunc(ST(1)). */
FNIEMOP_DEF(iemOp_fscale)
{
    IEMOP_MNEMONIC("fscale st0, st1");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_stN, 1, iemAImpl_fscale_r80_by_r80);
}
12323
12324
/** Opcode 0xd9 0xfe - FSIN: ST(0) := sin(ST(0)). */
FNIEMOP_DEF(iemOp_fsin)
{
    IEMOP_MNEMONIC("fsin st0");
    return FNIEMOP_CALL_1(iemOpHlpFpu_st0, iemAImpl_fsin_r80);
}
12331
12332
/** Opcode 0xd9 0xff - FCOS: ST(0) := cos(ST(0)). */
FNIEMOP_DEF(iemOp_fcos)
{
    IEMOP_MNEMONIC("fcos st0");
    return FNIEMOP_CALL_1(iemOpHlpFpu_st0, iemAImpl_fcos_r80);
}
12339
12340
/** Dispatch table for opcode 0xd9 with mod=3 and reg=4..7, i.e. second
 *  opcode bytes 0xe0 through 0xff.  Used by iemOp_EscF1; index is
 *  (second byte - 0xe0). */
static const PFNIEMOP g_apfnEscF1_E0toFF[32] =
{
    /* 0xe0 */  iemOp_fchs,
    /* 0xe1 */  iemOp_fabs,
    /* 0xe2 */  iemOp_Invalid,
    /* 0xe3 */  iemOp_Invalid,
    /* 0xe4 */  iemOp_ftst,
    /* 0xe5 */  iemOp_fxam,
    /* 0xe6 */  iemOp_Invalid,
    /* 0xe7 */  iemOp_Invalid,
    /* 0xe8 */  iemOp_fld1,
    /* 0xe9 */  iemOp_fldl2t,
    /* 0xea */  iemOp_fldl2e,
    /* 0xeb */  iemOp_fldpi,
    /* 0xec */  iemOp_fldlg2,
    /* 0xed */  iemOp_fldln2,
    /* 0xee */  iemOp_fldz,
    /* 0xef */  iemOp_Invalid,
    /* 0xf0 */  iemOp_f2xm1,
    /* 0xf1 */  iemOp_fylx2,
    /* 0xf2 */  iemOp_fptan,
    /* 0xf3 */  iemOp_fpatan,
    /* 0xf4 */  iemOp_fxtract,
    /* 0xf5 */  iemOp_fprem1,
    /* 0xf6 */  iemOp_fdecstp,
    /* 0xf7 */  iemOp_fincstp,
    /* 0xf8 */  iemOp_fprem,
    /* 0xf9 */  iemOp_fyl2xp1,
    /* 0xfa */  iemOp_fsqrt,
    /* 0xfb */  iemOp_fsincos,
    /* 0xfc */  iemOp_frndint,
    /* 0xfd */  iemOp_fscale,
    /* 0xfe */  iemOp_fsin,
    /* 0xff */  iemOp_fcos
};
12377
12378
/** Opcode 0xd9 - first FPU escape byte.  Decodes the ModR/M byte and
 *  dispatches to the register-form (mod=3) or memory-form handlers. */
FNIEMOP_DEF(iemOp_EscF1)
{
    /* Record the offset of the ESC opcode byte for FPUIP/FOP bookkeeping. */
    pIemCpu->offFpuOpcode = pIemCpu->offOpcode - 1;
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
        {
            case 0: return FNIEMOP_CALL_1(iemOp_fld_stN,  bRm);
            case 1: return FNIEMOP_CALL_1(iemOp_fxch_stN, bRm);
            case 2:
                if (bRm == 0xc9)
                    return FNIEMOP_CALL(iemOp_fnop);
                return IEMOP_RAISE_INVALID_OPCODE();
            case 3: return FNIEMOP_CALL_1(iemOp_fstp_stN, bRm); /* Reserved. Intel behavior seems to be FSTP ST(i) though. */
            case 4:
            case 5:
            case 6:
            case 7:
                /* reg >= 4 with mod == 3 implies bRm >= 0xe0, so the index fits. */
                Assert((unsigned)bRm - 0xe0U < RT_ELEMENTS(g_apfnEscF1_E0toFF));
                return FNIEMOP_CALL(g_apfnEscF1_E0toFF[bRm - 0xe0]);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
        {
            case 0: return FNIEMOP_CALL_1(iemOp_fld_m32r,  bRm);
            case 1: return IEMOP_RAISE_INVALID_OPCODE();
            case 2: return FNIEMOP_CALL_1(iemOp_fst_m32r,  bRm);
            case 3: return FNIEMOP_CALL_1(iemOp_fstp_m32r, bRm);
            case 4: return FNIEMOP_CALL_1(iemOp_fldenv,    bRm);
            case 5: return FNIEMOP_CALL_1(iemOp_fldcw,     bRm);
            case 6: return FNIEMOP_CALL_1(iemOp_fnstenv,   bRm);
            case 7: return FNIEMOP_CALL_1(iemOp_fnstcw,    bRm);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
12420
12421
/** Opcode 0xda 11/0 - FCMOVB: copy ST(i) to ST(0) if CF is set (below). */
FNIEMOP_DEF_1(iemOp_fcmovb_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fcmovb st0,stN");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(0, 1);
    IEM_MC_LOCAL(PCRTFLOAT80U, pr80ValueN);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    /* Both ST(i) and ST(0) must be non-empty; only ST(i) needs referencing. */
    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80_FIRST(pr80ValueN, bRm & X86_MODRM_RM_MASK, 0)
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF)
            IEM_MC_STORE_FPUREG_R80_SRC_REF(0, pr80ValueN);
        IEM_MC_ENDIF();
        IEM_MC_UPDATE_FPU_OPCODE_IP();
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW(0);
    IEM_MC_ENDIF();
    IEM_MC_USED_FPU();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
12448
12449
/** Opcode 0xda 11/1 - FCMOVE: copy ST(i) to ST(0) if ZF is set (equal). */
FNIEMOP_DEF_1(iemOp_fcmove_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fcmove st0,stN");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(0, 1);
    IEM_MC_LOCAL(PCRTFLOAT80U, pr80ValueN);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80_FIRST(pr80ValueN, bRm & X86_MODRM_RM_MASK, 0)
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF)
            IEM_MC_STORE_FPUREG_R80_SRC_REF(0, pr80ValueN);
        IEM_MC_ENDIF();
        IEM_MC_UPDATE_FPU_OPCODE_IP();
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW(0);
    IEM_MC_ENDIF();
    IEM_MC_USED_FPU();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
12476
12477
/** Opcode 0xda 11/2 - FCMOVBE: copy ST(i) to ST(0) if CF or ZF is set
 *  (below or equal). */
FNIEMOP_DEF_1(iemOp_fcmovbe_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fcmovbe st0,stN");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(0, 1);
    IEM_MC_LOCAL(PCRTFLOAT80U, pr80ValueN);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80_FIRST(pr80ValueN, bRm & X86_MODRM_RM_MASK, 0)
        IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF)
            IEM_MC_STORE_FPUREG_R80_SRC_REF(0, pr80ValueN);
        IEM_MC_ENDIF();
        IEM_MC_UPDATE_FPU_OPCODE_IP();
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW(0);
    IEM_MC_ENDIF();
    IEM_MC_USED_FPU();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
12504
12505
/** Opcode 0xda 11/3 - FCMOVU: copy ST(i) to ST(0) if PF is set (unordered). */
FNIEMOP_DEF_1(iemOp_fcmovu_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fcmovu st0,stN");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(0, 1);
    IEM_MC_LOCAL(PCRTFLOAT80U, pr80ValueN);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80_FIRST(pr80ValueN, bRm & X86_MODRM_RM_MASK, 0)
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF)
            IEM_MC_STORE_FPUREG_R80_SRC_REF(0, pr80ValueN);
        IEM_MC_ENDIF();
        IEM_MC_UPDATE_FPU_OPCODE_IP();
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW(0);
    IEM_MC_ENDIF();
    IEM_MC_USED_FPU();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
12532
12533
/**
 * Common worker for FPU instructions working on ST0 and STn, only affecting
 * flags, and popping twice when done (FUCOMPP, FCOMPP).
 *
 * @param   pfnAImpl    Pointer to the instruction implementation (assembly).
 */
FNIEMOP_DEF_1(iemOpHlpFpuNoStore_st0_stN_pop_pop, PFNIEMAIMPLFPUR80FSW, pfnAImpl)
{
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(3, 1);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value2, 2);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    /* Always compares ST(0) against ST(1) for the *PP forms. */
    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80(pr80Value1, 0, pr80Value2, 1)
        IEM_MC_CALL_FPU_AIMPL_3(pfnAImpl, pu16Fsw, pr80Value1, pr80Value2);
        IEM_MC_UPDATE_FSW_THEN_POP_POP(u16Fsw);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW_THEN_POP_POP();
    IEM_MC_ENDIF();
    IEM_MC_USED_FPU();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
12564
12565
/** Opcode 0xda 0xe9 - FUCOMPP: unordered compare ST(0) with ST(1), pop twice. */
FNIEMOP_DEF(iemOp_fucompp)
{
    IEMOP_MNEMONIC("fucompp st0,stN");
    return FNIEMOP_CALL_1(iemOpHlpFpuNoStore_st0_stN_pop_pop, iemAImpl_fucom_r80_by_r80);
}
12572
12573
/**
 * Common worker for FPU instructions working on ST0 and an m32i, and storing
 * the result in ST0 (FIADD, FIMUL, FISUB, FISUBR, FIDIV, FIDIVR with a
 * 32-bit integer memory operand).
 *
 * @param   bRm         The ModR/M byte (memory form).
 * @param   pfnAImpl    Pointer to the instruction implementation (assembly).
 */
FNIEMOP_DEF_2(iemOpHlpFpu_st0_m32i, uint8_t, bRm, PFNIEMAIMPLFPUI32, pfnAImpl)
{
    IEM_MC_BEGIN(3, 3);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
    IEM_MC_LOCAL(int32_t, i32Val2);
    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
    IEM_MC_ARG_LOCAL_REF(int32_t const *, pi32Val2, i32Val2, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    /* Fetch the memory operand before touching the FPU stack. */
    IEM_MC_FETCH_MEM_I32(i32Val2, pIemCpu->iEffSeg, GCPtrEffSrc);

    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0)
        IEM_MC_CALL_FPU_AIMPL_3(pfnAImpl, pFpuRes, pr80Value1, pi32Val2);
        IEM_MC_STORE_FPU_RESULT(FpuRes, 0);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW(0);
    IEM_MC_ENDIF();
    IEM_MC_USED_FPU();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
12609
12610
/** Opcode 0xda !11/0 - FIADD m32i: ST(0) := ST(0) + (int32 at mem). */
FNIEMOP_DEF_1(iemOp_fiadd_m32i, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fiadd m32i");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32i, bRm, iemAImpl_fiadd_r80_by_i32);
}
12617
12618
/** Opcode 0xda !11/1 - FIMUL m32i: ST(0) := ST(0) * (int32 at mem). */
FNIEMOP_DEF_1(iemOp_fimul_m32i, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fimul m32i");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32i, bRm, iemAImpl_fimul_r80_by_i32);
}
12625
12626
/** Opcode 0xda !11/2 - FICOM m32i: compare ST(0) with an int32 memory
 *  operand; only FSW condition codes are updated. */
FNIEMOP_DEF_1(iemOp_ficom_m32i, uint8_t, bRm)
{
    IEMOP_MNEMONIC("ficom st0,m32i");

    IEM_MC_BEGIN(3, 3);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_LOCAL(int32_t, i32Val2);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
    IEM_MC_ARG_LOCAL_REF(int32_t const *, pi32Val2, i32Val2, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_I32(i32Val2, pIemCpu->iEffSeg, GCPtrEffSrc);

    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0)
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_ficom_r80_by_i32, pu16Fsw, pr80Value1, pi32Val2);
        /* Memory-operand variant also records FPUDP/FPUDS. */
        IEM_MC_UPDATE_FSW_WITH_MEM_OP(u16Fsw, pIemCpu->iEffSeg, GCPtrEffSrc);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP(UINT8_MAX, pIemCpu->iEffSeg, GCPtrEffSrc);
    IEM_MC_ENDIF();
    IEM_MC_USED_FPU();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
12659
12660
/** Opcode 0xda !11/3 - FICOMP m32i: compare ST(0) with an int32 memory
 *  operand and pop; only FSW condition codes are updated. */
FNIEMOP_DEF_1(iemOp_ficomp_m32i, uint8_t, bRm)
{
    IEMOP_MNEMONIC("ficomp st0,m32i");

    IEM_MC_BEGIN(3, 3);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_LOCAL(int32_t, i32Val2);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
    IEM_MC_ARG_LOCAL_REF(int32_t const *, pi32Val2, i32Val2, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_I32(i32Val2, pIemCpu->iEffSeg, GCPtrEffSrc);

    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0)
        /* Same comparison worker as FICOM; the pop happens in the FSW update. */
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_ficom_r80_by_i32, pu16Fsw, pr80Value1, pi32Val2);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pIemCpu->iEffSeg, GCPtrEffSrc);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pIemCpu->iEffSeg, GCPtrEffSrc);
    IEM_MC_ENDIF();
    IEM_MC_USED_FPU();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
12693
12694
/** Opcode 0xda !11/4 - FISUB m32i: ST(0) := ST(0) - (int32 at mem). */
FNIEMOP_DEF_1(iemOp_fisub_m32i, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fisub m32i");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32i, bRm, iemAImpl_fisub_r80_by_i32);
}
12701
12702
/** Opcode 0xda !11/5 - FISUBR m32i: ST(0) := (int32 at mem) - ST(0). */
FNIEMOP_DEF_1(iemOp_fisubr_m32i, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fisubr m32i");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32i, bRm, iemAImpl_fisubr_r80_by_i32);
}
12709
12710
/** Opcode 0xda !11/6 - FIDIV m32i: ST(0) := ST(0) / (int32 at mem). */
FNIEMOP_DEF_1(iemOp_fidiv_m32i, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fidiv m32i");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32i, bRm, iemAImpl_fidiv_r80_by_i32);
}
12717
12718
/** Opcode 0xda !11/7 - FIDIVR m32i: ST(0) := (int32 at mem) / ST(0). */
FNIEMOP_DEF_1(iemOp_fidivr_m32i, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fidivr m32i");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32i, bRm, iemAImpl_fidivr_r80_by_i32);
}
12725
12726
/** Opcode 0xda - second FPU escape byte.  Register forms are the FCMOVcc
 *  family plus FUCOMPP; memory forms are the 32-bit integer arithmetic ops. */
FNIEMOP_DEF(iemOp_EscF2)
{
    /* Record the offset of the ESC opcode byte for FPUIP/FOP bookkeeping. */
    pIemCpu->offFpuOpcode = pIemCpu->offOpcode - 1;
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
        {
            case 0: return FNIEMOP_CALL_1(iemOp_fcmovb_stN,  bRm);
            case 1: return FNIEMOP_CALL_1(iemOp_fcmove_stN,  bRm);
            case 2: return FNIEMOP_CALL_1(iemOp_fcmovbe_stN, bRm);
            case 3: return FNIEMOP_CALL_1(iemOp_fcmovu_stN,  bRm);
            case 4: return IEMOP_RAISE_INVALID_OPCODE();
            case 5:
                if (bRm == 0xe9)
                    return FNIEMOP_CALL(iemOp_fucompp);
                return IEMOP_RAISE_INVALID_OPCODE();
            case 6: return IEMOP_RAISE_INVALID_OPCODE();
            case 7: return IEMOP_RAISE_INVALID_OPCODE();
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
        {
            case 0: return FNIEMOP_CALL_1(iemOp_fiadd_m32i,  bRm);
            case 1: return FNIEMOP_CALL_1(iemOp_fimul_m32i,  bRm);
            case 2: return FNIEMOP_CALL_1(iemOp_ficom_m32i,  bRm);
            case 3: return FNIEMOP_CALL_1(iemOp_ficomp_m32i, bRm);
            case 4: return FNIEMOP_CALL_1(iemOp_fisub_m32i,  bRm);
            case 5: return FNIEMOP_CALL_1(iemOp_fisubr_m32i, bRm);
            case 6: return FNIEMOP_CALL_1(iemOp_fidiv_m32i,  bRm);
            case 7: return FNIEMOP_CALL_1(iemOp_fidivr_m32i, bRm);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
12766
12767
/** Opcode 0xdb !11/0 - FILD m32i: convert an int32 memory operand to
 *  80-bit real and push it onto the FPU stack. */
FNIEMOP_DEF_1(iemOp_fild_m32i, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fild m32i");

    IEM_MC_BEGIN(2, 3);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
    IEM_MC_LOCAL(int32_t, i32Val);
    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
    IEM_MC_ARG_LOCAL_REF(int32_t const *, pi32Val, i32Val, 1);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_I32(i32Val, pIemCpu->iEffSeg, GCPtrEffSrc);

    /* ST(7) is the register that becomes ST(0) after the push; it must be free. */
    IEM_MC_IF_FPUREG_IS_EMPTY(7)
        IEM_MC_CALL_FPU_AIMPL_2(iemAImpl_fild_i32_to_r80, pFpuRes, pi32Val);
        IEM_MC_PUSH_FPU_RESULT_MEM_OP(FpuRes, pIemCpu->iEffSeg, GCPtrEffSrc);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_PUSH_OVERFLOW_MEM_OP(pIemCpu->iEffSeg, GCPtrEffSrc);
    IEM_MC_ENDIF();
    IEM_MC_USED_FPU();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
12799
12800
/** Opcode 0xdb !11/1 - FISTTP m32i: store ST(0) to memory as int32 with
 *  truncation (chop rounding, regardless of FCW.RC), then pop. */
FNIEMOP_DEF_1(iemOp_fisttp_m32i, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fisttp m32i");
    IEM_MC_BEGIN(3, 2);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(int32_t *, pi32Dst, 1);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    /* Map the destination for writing up front so #PF is raised before
       any FPU state is modified. */
    IEM_MC_MEM_MAP(pi32Dst, IEM_ACCESS_DATA_W, pIemCpu->iEffSeg, GCPtrEffDst, 1 /*arg*/);
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fistt_r80_to_i32, pu16Fsw, pi32Dst, pr80Value);
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pi32Dst, IEM_ACCESS_DATA_W, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pIemCpu->iEffSeg, GCPtrEffDst);
    IEM_MC_ELSE()
        /* Stack underflow: store the integer indefinite if IM is masked. */
        IEM_MC_IF_FCW_IM()
            IEM_MC_STORE_MEM_I32_CONST_BY_REF(pi32Dst, INT32_MIN /* (integer indefinite) */);
            IEM_MC_MEM_COMMIT_AND_UNMAP(pi32Dst, IEM_ACCESS_DATA_W);
        IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pIemCpu->iEffSeg, GCPtrEffDst);
    IEM_MC_ENDIF();
    IEM_MC_USED_FPU();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
12835
12836
/** Opcode 0xdb !11/2 - FIST m32i: store ST(0) to memory as int32, rounding
 *  per FCW.RC; the stack is not popped. */
FNIEMOP_DEF_1(iemOp_fist_m32i, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fist m32i");
    IEM_MC_BEGIN(3, 2);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(int32_t *, pi32Dst, 1);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    /* Map the destination for writing up front so #PF is raised before
       any FPU state is modified. */
    IEM_MC_MEM_MAP(pi32Dst, IEM_ACCESS_DATA_W, pIemCpu->iEffSeg, GCPtrEffDst, 1 /*arg*/);
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fist_r80_to_i32, pu16Fsw, pi32Dst, pr80Value);
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pi32Dst, IEM_ACCESS_DATA_W, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP(u16Fsw, pIemCpu->iEffSeg, GCPtrEffDst);
    IEM_MC_ELSE()
        /* Stack underflow: store the integer indefinite if IM is masked. */
        IEM_MC_IF_FCW_IM()
            IEM_MC_STORE_MEM_I32_CONST_BY_REF(pi32Dst, INT32_MIN /* (integer indefinite) */);
            IEM_MC_MEM_COMMIT_AND_UNMAP(pi32Dst, IEM_ACCESS_DATA_W);
        IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP(UINT8_MAX, pIemCpu->iEffSeg, GCPtrEffDst);
    IEM_MC_ENDIF();
    IEM_MC_USED_FPU();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
12871
12872
12873/** Opcode 0xdb !11/3. */
12874FNIEMOP_DEF_1(iemOp_fistp_m32i, uint8_t, bRm)
12875{
12876 IEMOP_MNEMONIC("fisttp m32i");
12877 IEM_MC_BEGIN(3, 2);
12878 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
12879 IEM_MC_LOCAL(uint16_t, u16Fsw);
12880 IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
12881 IEM_MC_ARG(int32_t *, pi32Dst, 1);
12882 IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);
12883
12884 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm);
12885 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
12886 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
12887 IEM_MC_MAYBE_RAISE_FPU_XCPT();
12888
12889 IEM_MC_MEM_MAP(pi32Dst, IEM_ACCESS_DATA_W, pIemCpu->iEffSeg, GCPtrEffDst, 1 /*arg*/);
12890 IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
12891 IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fist_r80_to_i32, pu16Fsw, pi32Dst, pr80Value);
12892 IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pi32Dst, IEM_ACCESS_DATA_W, u16Fsw);
12893 IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pIemCpu->iEffSeg, GCPtrEffDst);
12894 IEM_MC_ELSE()
12895 IEM_MC_IF_FCW_IM()
12896 IEM_MC_STORE_MEM_I32_CONST_BY_REF(pi32Dst, INT32_MIN /* (integer indefinite) */);
12897 IEM_MC_MEM_COMMIT_AND_UNMAP(pi32Dst, IEM_ACCESS_DATA_W);
12898 IEM_MC_ENDIF();
12899 IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pIemCpu->iEffSeg, GCPtrEffDst);
12900 IEM_MC_ENDIF();
12901 IEM_MC_USED_FPU();
12902 IEM_MC_ADVANCE_RIP();
12903
12904 IEM_MC_END();
12905 return VINF_SUCCESS;
12906}
12907
12908
/** Opcode 0xdb !11/5 - FLD m80r: push an 80-bit real memory operand onto
 *  the FPU stack. */
FNIEMOP_DEF_1(iemOp_fld_m80r, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fld m80r");

    IEM_MC_BEGIN(2, 3);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
    IEM_MC_LOCAL(RTFLOAT80U, r80Val);
    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
    IEM_MC_ARG_LOCAL_REF(PCRTFLOAT80U, pr80Val, r80Val, 1);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_R80(r80Val, pIemCpu->iEffSeg, GCPtrEffSrc);

    /* ST(7) is the register that becomes ST(0) after the push; it must be free. */
    IEM_MC_IF_FPUREG_IS_EMPTY(7)
        IEM_MC_CALL_FPU_AIMPL_2(iemAImpl_fld_r80_from_r80, pFpuRes, pr80Val);
        IEM_MC_PUSH_FPU_RESULT_MEM_OP(FpuRes, pIemCpu->iEffSeg, GCPtrEffSrc);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_PUSH_OVERFLOW_MEM_OP(pIemCpu->iEffSeg, GCPtrEffSrc);
    IEM_MC_ENDIF();
    IEM_MC_USED_FPU();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
12940
12941
/** Opcode 0xdb !11/7 - FSTP m80r: store ST(0) to memory as 80-bit real,
 *  then pop the stack. */
FNIEMOP_DEF_1(iemOp_fstp_m80r, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fstp m80r");
    IEM_MC_BEGIN(3, 2);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(PRTFLOAT80U, pr80Dst, 1);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    /* Map the destination for writing up front so #PF is raised before
       any FPU state is modified. */
    IEM_MC_MEM_MAP(pr80Dst, IEM_ACCESS_DATA_W, pIemCpu->iEffSeg, GCPtrEffDst, 1 /*arg*/);
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fst_r80_to_r80, pu16Fsw, pr80Dst, pr80Value);
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pr80Dst, IEM_ACCESS_DATA_W, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pIemCpu->iEffSeg, GCPtrEffDst);
    IEM_MC_ELSE()
        /* Stack underflow: store the real indefinite (negative QNaN) if IM is masked. */
        IEM_MC_IF_FCW_IM()
            IEM_MC_STORE_MEM_NEG_QNAN_R80_BY_REF(pr80Dst);
            IEM_MC_MEM_COMMIT_AND_UNMAP(pr80Dst, IEM_ACCESS_DATA_W);
        IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pIemCpu->iEffSeg, GCPtrEffDst);
    IEM_MC_ENDIF();
    IEM_MC_USED_FPU();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
12976
12977
/** Opcode 0xdb 11/0 - FCMOVNB: copy ST(i) to ST(0) if CF is clear (not below). */
FNIEMOP_DEF_1(iemOp_fcmovnb_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fcmovnb st0,stN");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(0, 1);
    IEM_MC_LOCAL(PCRTFLOAT80U, pr80ValueN);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80_FIRST(pr80ValueN, bRm & X86_MODRM_RM_MASK, 0)
        IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_CF)
            IEM_MC_STORE_FPUREG_R80_SRC_REF(0, pr80ValueN);
        IEM_MC_ENDIF();
        IEM_MC_UPDATE_FPU_OPCODE_IP();
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW(0);
    IEM_MC_ENDIF();
    IEM_MC_USED_FPU();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
13004
13005
/** Opcode 0xdb 11/1 - FCMOVNE: copy ST(i) to ST(0) if ZF is clear (not equal). */
FNIEMOP_DEF_1(iemOp_fcmovne_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fcmovne st0,stN");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(0, 1);
    IEM_MC_LOCAL(PCRTFLOAT80U, pr80ValueN);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80_FIRST(pr80ValueN, bRm & X86_MODRM_RM_MASK, 0)
        IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_ZF)
            IEM_MC_STORE_FPUREG_R80_SRC_REF(0, pr80ValueN);
        IEM_MC_ENDIF();
        IEM_MC_UPDATE_FPU_OPCODE_IP();
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW(0);
    IEM_MC_ENDIF();
    IEM_MC_USED_FPU();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
13032
13033
/** Opcode 0xdb 11/2.
 * FCMOVNBE - conditionally copies ST(i) into ST(0) when both CF and ZF are clear. */
FNIEMOP_DEF_1(iemOp_fcmovnbe_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fcmovnbe st0,stN");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(0, 1);
    IEM_MC_LOCAL(PCRTFLOAT80U, pr80ValueN);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    /* Both ST(0) and ST(i) must be non-empty; the copy happens if neither CF nor ZF is set. */
    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80_FIRST(pr80ValueN, bRm & X86_MODRM_RM_MASK, 0)
        IEM_MC_IF_EFL_NO_BITS_SET(X86_EFL_CF | X86_EFL_ZF)
            IEM_MC_STORE_FPUREG_R80_SRC_REF(0, pr80ValueN);
        IEM_MC_ENDIF();
        IEM_MC_UPDATE_FPU_OPCODE_IP();
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW(0);
    IEM_MC_ENDIF();
    IEM_MC_USED_FPU();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
13060
13061
/** Opcode 0xdb 11/3.
 * FCMOVNU - conditionally copies ST(i) into ST(0) when EFLAGS.PF is clear. */
FNIEMOP_DEF_1(iemOp_fcmovnnu_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fcmovnnu st0,stN");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(0, 1);
    IEM_MC_LOCAL(PCRTFLOAT80U, pr80ValueN);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    /* Both ST(0) and ST(i) must be non-empty; the copy itself only happens if !PF. */
    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80_FIRST(pr80ValueN, bRm & X86_MODRM_RM_MASK, 0)
        IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_PF)
            IEM_MC_STORE_FPUREG_R80_SRC_REF(0, pr80ValueN);
        IEM_MC_ENDIF();
        IEM_MC_UPDATE_FPU_OPCODE_IP();
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW(0);
    IEM_MC_ENDIF();
    IEM_MC_USED_FPU();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
13088
13089
/** Opcode 0xdb 0xe0.
 * FNENI - 8087 interrupt enable; ignored (no-op) on later FPUs, as here. */
FNIEMOP_DEF(iemOp_fneni)
{
    IEMOP_MNEMONIC("fneni (8087/ign)");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_BEGIN(0,0);
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
13101
13102
/** Opcode 0xdb 0xe1.
 * FNDISI - 8087 interrupt disable; ignored (no-op) on later FPUs, as here. */
FNIEMOP_DEF(iemOp_fndisi)
{
    IEMOP_MNEMONIC("fndisi (8087/ign)");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_BEGIN(0,0);
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
13114
13115
/** Opcode 0xdb 0xe2.
 * FNCLEX - clears the FPU exception flags (FSW) without checking for pending
 * exceptions first. */
FNIEMOP_DEF(iemOp_fnclex)
{
    IEMOP_MNEMONIC("fnclex");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(0,0);
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_CLEAR_FSW_EX();
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
13129
13130
/** Opcode 0xdb 0xe3.
 * FNINIT - reinitializes the FPU; deferred to the C implementation without
 * checking for pending exceptions (fCheckXcpts=false). */
FNIEMOP_DEF(iemOp_fninit)
{
    IEMOP_MNEMONIC("fninit");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_finit, false /*fCheckXcpts*/);
}
13138
13139
/** Opcode 0xdb 0xe4.
 * FNSETPM - 80287 "set protected mode"; ignored (no-op) on later FPUs, as here. */
FNIEMOP_DEF(iemOp_fnsetpm)
{
    IEMOP_MNEMONIC("fnsetpm (80287/ign)"); /* set protected mode on fpu. */
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_BEGIN(0,0);
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
13151
13152
/** Opcode 0xdb 0xe5.
 * FRSTPM - 80287XL "reset protected mode"; we emulate newer CPU behavior and
 * raise \#UD (the no-op variant is compiled out below). */
FNIEMOP_DEF(iemOp_frstpm)
{
    IEMOP_MNEMONIC("frstpm (80287XL/ign)"); /* reset pm, back to real mode. */
#if 0 /* #UDs on newer CPUs */
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_BEGIN(0,0);
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
#else
    return IEMOP_RAISE_INVALID_OPCODE();
#endif
}
13168
13169
/** Opcode 0xdb 11/5.
 * FUCOMI - unordered compare of ST(0) with ST(i), setting EFLAGS; no pop. */
FNIEMOP_DEF_1(iemOp_fucomi_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fucomi st0,stN");
    return IEM_MC_DEFER_TO_CIMPL_3(iemCImpl_fcomi_fucomi, bRm & X86_MODRM_RM_MASK, iemAImpl_fucomi_r80_by_r80, false /*fPop*/);
}
13176
13177
/** Opcode 0xdb 11/6.
 * FCOMI - ordered compare of ST(0) with ST(i), setting EFLAGS; no pop. */
FNIEMOP_DEF_1(iemOp_fcomi_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fcomi st0,stN");
    return IEM_MC_DEFER_TO_CIMPL_3(iemCImpl_fcomi_fucomi, bRm & X86_MODRM_RM_MASK, iemAImpl_fcomi_r80_by_r80, false /*fPop*/);
}
13184
13185
13186/** Opcode 0xdb. */
13187FNIEMOP_DEF(iemOp_EscF3)
13188{
13189 pIemCpu->offFpuOpcode = pIemCpu->offOpcode - 1;
13190 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
13191 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
13192 {
13193 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
13194 {
13195 case 0: FNIEMOP_CALL_1(iemOp_fcmovnb_stN, bRm);
13196 case 1: FNIEMOP_CALL_1(iemOp_fcmovne_stN, bRm);
13197 case 2: FNIEMOP_CALL_1(iemOp_fcmovnbe_stN, bRm);
13198 case 3: FNIEMOP_CALL_1(iemOp_fcmovnnu_stN, bRm);
13199 case 4:
13200 switch (bRm)
13201 {
13202 case 0xe0: return FNIEMOP_CALL(iemOp_fneni);
13203 case 0xe1: return FNIEMOP_CALL(iemOp_fndisi);
13204 case 0xe2: return FNIEMOP_CALL(iemOp_fnclex);
13205 case 0xe3: return FNIEMOP_CALL(iemOp_fninit);
13206 case 0xe4: return FNIEMOP_CALL(iemOp_fnsetpm);
13207 case 0xe5: return FNIEMOP_CALL(iemOp_frstpm);
13208 case 0xe6: return IEMOP_RAISE_INVALID_OPCODE();
13209 case 0xe7: return IEMOP_RAISE_INVALID_OPCODE();
13210 IEM_NOT_REACHED_DEFAULT_CASE_RET();
13211 }
13212 break;
13213 case 5: return FNIEMOP_CALL_1(iemOp_fucomi_stN, bRm);
13214 case 6: return FNIEMOP_CALL_1(iemOp_fcomi_stN, bRm);
13215 case 7: return IEMOP_RAISE_INVALID_OPCODE();
13216 IEM_NOT_REACHED_DEFAULT_CASE_RET();
13217 }
13218 }
13219 else
13220 {
13221 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
13222 {
13223 case 0: return FNIEMOP_CALL_1(iemOp_fild_m32i, bRm);
13224 case 1: return FNIEMOP_CALL_1(iemOp_fisttp_m32i,bRm);
13225 case 2: return FNIEMOP_CALL_1(iemOp_fist_m32i, bRm);
13226 case 3: return FNIEMOP_CALL_1(iemOp_fistp_m32i, bRm);
13227 case 4: return IEMOP_RAISE_INVALID_OPCODE();
13228 case 5: return FNIEMOP_CALL_1(iemOp_fld_m80r, bRm);
13229 case 6: return IEMOP_RAISE_INVALID_OPCODE();
13230 case 7: return FNIEMOP_CALL_1(iemOp_fstp_m80r, bRm);
13231 IEM_NOT_REACHED_DEFAULT_CASE_RET();
13232 }
13233 }
13234}
13235
13236
/**
 * Common worker for FPU instructions working on STn and ST0, and storing the
 * result in STn unless IE, DE or ZE was raised.
 *
 * @param   bRm         The ModRM byte (register form); low bits select ST(i).
 * @param   pfnAImpl    Pointer to the instruction implementation (assembly).
 */
FNIEMOP_DEF_2(iemOpHlpFpu_stN_st0, uint8_t, bRm, PFNIEMAIMPLFPUR80, pfnAImpl)
{
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(3, 1);
    IEM_MC_LOCAL(IEMFPURESULT,          FpuRes);
    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes,        FpuRes,     0);
    IEM_MC_ARG(PCRTFLOAT80U,            pr80Value1,                 1);
    IEM_MC_ARG(PCRTFLOAT80U,            pr80Value2,                 2);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    /* Both ST(i) and ST(0) must be valid; otherwise signal stack underflow on ST(i). */
    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80(pr80Value1, bRm & X86_MODRM_RM_MASK, pr80Value2, 0)
        IEM_MC_CALL_FPU_AIMPL_3(pfnAImpl, pFpuRes, pr80Value1, pr80Value2);
        IEM_MC_STORE_FPU_RESULT(FpuRes, bRm & X86_MODRM_RM_MASK);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW(bRm & X86_MODRM_RM_MASK);
    IEM_MC_ENDIF();
    IEM_MC_USED_FPU();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
13268
13269
/** Opcode 0xdc 11/0.
 * FADD ST(i),ST(0) - result is stored in ST(i). */
FNIEMOP_DEF_1(iemOp_fadd_stN_st0, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fadd stN,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0, bRm, iemAImpl_fadd_r80_by_r80);
}
13276
13277
/** Opcode 0xdc 11/1.
 * FMUL ST(i),ST(0) - result is stored in ST(i). */
FNIEMOP_DEF_1(iemOp_fmul_stN_st0, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fmul stN,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0, bRm, iemAImpl_fmul_r80_by_r80);
}
13284
13285
/** Opcode 0xdc 11/4.
 * FSUBR ST(i),ST(0) - reversed subtraction, result is stored in ST(i). */
FNIEMOP_DEF_1(iemOp_fsubr_stN_st0, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fsubr stN,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0, bRm, iemAImpl_fsubr_r80_by_r80);
}
13292
13293
/** Opcode 0xdc 11/5.
 * FSUB ST(i),ST(0) - result is stored in ST(i). */
FNIEMOP_DEF_1(iemOp_fsub_stN_st0, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fsub stN,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0, bRm, iemAImpl_fsub_r80_by_r80);
}
13300
13301
/** Opcode 0xdc 11/6.
 * FDIVR ST(i),ST(0) - reversed division, result is stored in ST(i). */
FNIEMOP_DEF_1(iemOp_fdivr_stN_st0, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fdivr stN,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0, bRm, iemAImpl_fdivr_r80_by_r80);
}
13308
13309
/** Opcode 0xdc 11/7.
 * FDIV ST(i),ST(0) - result is stored in ST(i). */
FNIEMOP_DEF_1(iemOp_fdiv_stN_st0, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fdiv stN,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0, bRm, iemAImpl_fdiv_r80_by_r80);
}
13316
13317
/**
 * Common worker for FPU instructions working on ST0 and a 64-bit floating point
 * memory operand, and storing the result in ST0.
 *
 * @param   bRm         The ModRM byte (memory form).
 * @param   pfnImpl     Pointer to the instruction implementation (assembly).
 */
FNIEMOP_DEF_2(iemOpHlpFpu_ST0_m64r, uint8_t, bRm, PFNIEMAIMPLFPUR64, pfnImpl)
{
    IEM_MC_BEGIN(3, 3);
    IEM_MC_LOCAL(RTGCPTR,               GCPtrEffSrc);
    IEM_MC_LOCAL(IEMFPURESULT,          FpuRes);
    IEM_MC_LOCAL(RTFLOAT64U,            r64Factor2);
    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes,        FpuRes,     0);
    IEM_MC_ARG(PCRTFLOAT80U,            pr80Factor1,                1);
    IEM_MC_ARG_LOCAL_REF(PRTFLOAT64U,   pr64Factor2,    r64Factor2, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    /* Fetch the memory operand first, then check ST(0); underflow is reported
       with the memory operand info (FDP). */
    IEM_MC_FETCH_MEM_R64(r64Factor2, pIemCpu->iEffSeg, GCPtrEffSrc);
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Factor1, 0)
        IEM_MC_CALL_FPU_AIMPL_3(pfnImpl, pFpuRes, pr80Factor1, pr64Factor2);
        IEM_MC_STORE_FPU_RESULT_MEM_OP(FpuRes, 0, pIemCpu->iEffSeg, GCPtrEffSrc);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP(0, pIemCpu->iEffSeg, GCPtrEffSrc);
    IEM_MC_ENDIF();
    IEM_MC_USED_FPU();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
13352
13353
/** Opcode 0xdc !11/0.
 * FADD ST(0),m64real. */
FNIEMOP_DEF_1(iemOp_fadd_m64r, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fadd m64r");
    return FNIEMOP_CALL_2(iemOpHlpFpu_ST0_m64r, bRm, iemAImpl_fadd_r80_by_r64);
}
13360
13361
/** Opcode 0xdc !11/1.
 * FMUL ST(0),m64real. */
FNIEMOP_DEF_1(iemOp_fmul_m64r, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fmul m64r");
    return FNIEMOP_CALL_2(iemOpHlpFpu_ST0_m64r, bRm, iemAImpl_fmul_r80_by_r64);
}
13368
13369
/** Opcode 0xdc !11/2.
 * FCOM ST(0),m64real - compares ST(0) with a 64-bit float from memory, only
 * updating FSW (no result register is written). */
FNIEMOP_DEF_1(iemOp_fcom_m64r, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fcom st0,m64r");

    IEM_MC_BEGIN(3, 3);
    IEM_MC_LOCAL(RTGCPTR,               GCPtrEffSrc);
    IEM_MC_LOCAL(uint16_t,              u16Fsw);
    IEM_MC_LOCAL(RTFLOAT64U,            r64Val2);
    IEM_MC_ARG_LOCAL_REF(uint16_t *,    pu16Fsw,    u16Fsw,     0);
    IEM_MC_ARG(PCRTFLOAT80U,            pr80Value1,             1);
    IEM_MC_ARG_LOCAL_REF(PCRTFLOAT64U,  pr64Val2,   r64Val2,    2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_R64(r64Val2, pIemCpu->iEffSeg, GCPtrEffSrc);

    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0)
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fcom_r80_by_r64, pu16Fsw, pr80Value1, pr64Val2);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP(u16Fsw, pIemCpu->iEffSeg, GCPtrEffSrc);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP(UINT8_MAX, pIemCpu->iEffSeg, GCPtrEffSrc);
    IEM_MC_ENDIF();
    IEM_MC_USED_FPU();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
13402
13403
/** Opcode 0xdc !11/3.
 * FCOMP ST(0),m64real - same as FCOM m64r but pops ST(0) afterwards. */
FNIEMOP_DEF_1(iemOp_fcomp_m64r, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fcomp st0,m64r");

    IEM_MC_BEGIN(3, 3);
    IEM_MC_LOCAL(RTGCPTR,               GCPtrEffSrc);
    IEM_MC_LOCAL(uint16_t,              u16Fsw);
    IEM_MC_LOCAL(RTFLOAT64U,            r64Val2);
    IEM_MC_ARG_LOCAL_REF(uint16_t *,    pu16Fsw,    u16Fsw,     0);
    IEM_MC_ARG(PCRTFLOAT80U,            pr80Value1,             1);
    IEM_MC_ARG_LOCAL_REF(PCRTFLOAT64U,  pr64Val2,   r64Val2,    2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_R64(r64Val2, pIemCpu->iEffSeg, GCPtrEffSrc);

    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0)
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fcom_r80_by_r64, pu16Fsw, pr80Value1, pr64Val2);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pIemCpu->iEffSeg, GCPtrEffSrc);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pIemCpu->iEffSeg, GCPtrEffSrc);
    IEM_MC_ENDIF();
    IEM_MC_USED_FPU();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
13436
13437
/** Opcode 0xdc !11/4.
 * FSUB ST(0),m64real. */
FNIEMOP_DEF_1(iemOp_fsub_m64r, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fsub m64r");
    return FNIEMOP_CALL_2(iemOpHlpFpu_ST0_m64r, bRm, iemAImpl_fsub_r80_by_r64);
}
13444
13445
/** Opcode 0xdc !11/5.
 * FSUBR ST(0),m64real - reversed subtraction. */
FNIEMOP_DEF_1(iemOp_fsubr_m64r, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fsubr m64r");
    return FNIEMOP_CALL_2(iemOpHlpFpu_ST0_m64r, bRm, iemAImpl_fsubr_r80_by_r64);
}
13452
13453
/** Opcode 0xdc !11/6.
 * FDIV ST(0),m64real. */
FNIEMOP_DEF_1(iemOp_fdiv_m64r, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fdiv m64r");
    return FNIEMOP_CALL_2(iemOpHlpFpu_ST0_m64r, bRm, iemAImpl_fdiv_r80_by_r64);
}
13460
13461
/** Opcode 0xdc !11/7.
 * FDIVR ST(0),m64real - reversed division. */
FNIEMOP_DEF_1(iemOp_fdivr_m64r, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fdivr m64r");
    return FNIEMOP_CALL_2(iemOpHlpFpu_ST0_m64r, bRm, iemAImpl_fdivr_r80_by_r64);
}
13468
13469
/** Opcode 0xdc.
 * Escape byte 0xdc dispatcher: ST(i),ST(0) arithmetic for register forms,
 * ST(0) vs m64real arithmetic/compare for memory forms. */
FNIEMOP_DEF(iemOp_EscF4)
{
    pIemCpu->offFpuOpcode = pIemCpu->offOpcode - 1;
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
        {
            case 0: return FNIEMOP_CALL_1(iemOp_fadd_stN_st0,  bRm);
            case 1: return FNIEMOP_CALL_1(iemOp_fmul_stN_st0,  bRm);
            case 2: return FNIEMOP_CALL_1(iemOp_fcom_stN,      bRm); /* Marked reserved, intel behavior is that of FCOM ST(i). */
            case 3: return FNIEMOP_CALL_1(iemOp_fcomp_stN,     bRm); /* Marked reserved, intel behavior is that of FCOMP ST(i). */
            case 4: return FNIEMOP_CALL_1(iemOp_fsubr_stN_st0, bRm);
            case 5: return FNIEMOP_CALL_1(iemOp_fsub_stN_st0,  bRm);
            case 6: return FNIEMOP_CALL_1(iemOp_fdivr_stN_st0, bRm);
            case 7: return FNIEMOP_CALL_1(iemOp_fdiv_stN_st0,  bRm);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
        {
            case 0: return FNIEMOP_CALL_1(iemOp_fadd_m64r,  bRm);
            case 1: return FNIEMOP_CALL_1(iemOp_fmul_m64r,  bRm);
            case 2: return FNIEMOP_CALL_1(iemOp_fcom_m64r,  bRm);
            case 3: return FNIEMOP_CALL_1(iemOp_fcomp_m64r, bRm);
            case 4: return FNIEMOP_CALL_1(iemOp_fsub_m64r,  bRm);
            case 5: return FNIEMOP_CALL_1(iemOp_fsubr_m64r, bRm);
            case 6: return FNIEMOP_CALL_1(iemOp_fdiv_m64r,  bRm);
            case 7: return FNIEMOP_CALL_1(iemOp_fdivr_m64r, bRm);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
13506
13507
/** Opcode 0xdd !11/0.
 * FLD m64real - pushes a 64-bit float from memory onto the FPU stack,
 * converting it to 80-bit extended precision.
 * @sa iemOp_fld_m32r */
FNIEMOP_DEF_1(iemOp_fld_m64r, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fld m64r");

    IEM_MC_BEGIN(2, 3);
    IEM_MC_LOCAL(RTGCPTR,               GCPtrEffSrc);
    IEM_MC_LOCAL(IEMFPURESULT,          FpuRes);
    IEM_MC_LOCAL(RTFLOAT64U,            r64Val);
    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes,    FpuRes, 0);
    IEM_MC_ARG_LOCAL_REF(PCRTFLOAT64U,  pr64Val,    r64Val, 1);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_FETCH_MEM_R64(r64Val, pIemCpu->iEffSeg, GCPtrEffSrc);
    /* The push target is ST(7) before TOP is decremented; it must be empty. */
    IEM_MC_IF_FPUREG_IS_EMPTY(7)
        IEM_MC_CALL_FPU_AIMPL_2(iemAImpl_fld_r64_to_r80, pFpuRes, pr64Val);
        IEM_MC_PUSH_FPU_RESULT_MEM_OP(FpuRes, pIemCpu->iEffSeg, GCPtrEffSrc);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_PUSH_OVERFLOW_MEM_OP(pIemCpu->iEffSeg, GCPtrEffSrc);
    IEM_MC_ENDIF();
    IEM_MC_USED_FPU();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
13539
13540
/** Opcode 0xdd !11/1.
 * FISTTP m64int - stores ST(0) as a 64-bit integer with truncation and pops;
 * on an empty stack, stores the integer indefinite (INT64_MIN) if IM is masked. */
FNIEMOP_DEF_1(iemOp_fisttp_m64i, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fisttp m64i");
    IEM_MC_BEGIN(3, 2);
    IEM_MC_LOCAL(RTGCPTR,               GCPtrEffDst);
    IEM_MC_LOCAL(uint16_t,              u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *,    pu16Fsw,    u16Fsw, 0);
    IEM_MC_ARG(int64_t *,               pi64Dst,            1);
    IEM_MC_ARG(PCRTFLOAT80U,            pr80Value,          2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_MEM_MAP(pi64Dst, IEM_ACCESS_DATA_W, pIemCpu->iEffSeg, GCPtrEffDst, 1 /*arg*/);
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fistt_r80_to_i64, pu16Fsw, pi64Dst, pr80Value);
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pi64Dst, IEM_ACCESS_DATA_W, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pIemCpu->iEffSeg, GCPtrEffDst);
    IEM_MC_ELSE()
        /* Stack underflow: with IM masked, write the integer indefinite value. */
        IEM_MC_IF_FCW_IM()
            IEM_MC_STORE_MEM_I64_CONST_BY_REF(pi64Dst, INT64_MIN /* (integer indefinite) */);
            IEM_MC_MEM_COMMIT_AND_UNMAP(pi64Dst, IEM_ACCESS_DATA_W);
        IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pIemCpu->iEffSeg, GCPtrEffDst);
    IEM_MC_ENDIF();
    IEM_MC_USED_FPU();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
13575
13576
/** Opcode 0xdd !11/2.
 * FST m64real - stores ST(0) to memory as a 64-bit float without popping;
 * on an empty stack, stores negative QNaN if IM is masked. */
FNIEMOP_DEF_1(iemOp_fst_m64r, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fst m64r");
    IEM_MC_BEGIN(3, 2);
    IEM_MC_LOCAL(RTGCPTR,               GCPtrEffDst);
    IEM_MC_LOCAL(uint16_t,              u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *,    pu16Fsw,    u16Fsw, 0);
    IEM_MC_ARG(PRTFLOAT64U,             pr64Dst,            1);
    IEM_MC_ARG(PCRTFLOAT80U,            pr80Value,          2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_MEM_MAP(pr64Dst, IEM_ACCESS_DATA_W, pIemCpu->iEffSeg, GCPtrEffDst, 1 /*arg*/);
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fst_r80_to_r64, pu16Fsw, pr64Dst, pr80Value);
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pr64Dst, IEM_ACCESS_DATA_W, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP(u16Fsw, pIemCpu->iEffSeg, GCPtrEffDst);
    IEM_MC_ELSE()
        /* Stack underflow: with IM masked, write the QNaN indefinite value. */
        IEM_MC_IF_FCW_IM()
            IEM_MC_STORE_MEM_NEG_QNAN_R64_BY_REF(pr64Dst);
            IEM_MC_MEM_COMMIT_AND_UNMAP(pr64Dst, IEM_ACCESS_DATA_W);
        IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP(UINT8_MAX, pIemCpu->iEffSeg, GCPtrEffDst);
    IEM_MC_ENDIF();
    IEM_MC_USED_FPU();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
13611
13612
13613
13614
/** Opcode 0xdd !11/3.
 * FSTP m64real - like FST m64real, but pops ST(0) afterwards. */
FNIEMOP_DEF_1(iemOp_fstp_m64r, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fstp m64r");
    IEM_MC_BEGIN(3, 2);
    IEM_MC_LOCAL(RTGCPTR,               GCPtrEffDst);
    IEM_MC_LOCAL(uint16_t,              u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *,    pu16Fsw,    u16Fsw, 0);
    IEM_MC_ARG(PRTFLOAT64U,             pr64Dst,            1);
    IEM_MC_ARG(PCRTFLOAT80U,            pr80Value,          2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_MEM_MAP(pr64Dst, IEM_ACCESS_DATA_W, pIemCpu->iEffSeg, GCPtrEffDst, 1 /*arg*/);
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fst_r80_to_r64, pu16Fsw, pr64Dst, pr80Value);
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pr64Dst, IEM_ACCESS_DATA_W, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pIemCpu->iEffSeg, GCPtrEffDst);
    IEM_MC_ELSE()
        /* Stack underflow: with IM masked, write the QNaN indefinite value. */
        IEM_MC_IF_FCW_IM()
            IEM_MC_STORE_MEM_NEG_QNAN_R64_BY_REF(pr64Dst);
            IEM_MC_MEM_COMMIT_AND_UNMAP(pr64Dst, IEM_ACCESS_DATA_W);
        IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pIemCpu->iEffSeg, GCPtrEffDst);
    IEM_MC_ENDIF();
    IEM_MC_USED_FPU();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
13649
13650
13651/** Opcode 0xdd !11/0. */
13652FNIEMOP_DEF_1(iemOp_frstor, uint8_t, bRm)
13653{
13654 IEMOP_MNEMONIC("fxrstor m94/108byte");
13655 IEM_MC_BEGIN(3, 0);
13656 IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize, /*=*/ pIemCpu->enmEffOpSize, 0);
13657 IEM_MC_ARG_CONST(uint8_t, iEffSeg, /*=*/ pIemCpu->iEffSeg, 1);
13658 IEM_MC_ARG(RTGCPTR, GCPtrEffSrc, 2);
13659 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm);
13660 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
13661 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
13662 IEM_MC_CALL_CIMPL_3(iemCImpl_frstor, enmEffOpSize, iEffSeg, GCPtrEffSrc);
13663 IEM_MC_END();
13664 return VINF_SUCCESS;
13665}
13666
13667
/** Opcode 0xdd !11/6.
 * FNSAVE m94/108byte - saves the full FPU state to memory and reinitializes
 * the FPU; deferred to the C implementation. */
FNIEMOP_DEF_1(iemOp_fnsave, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fnsave m94/108byte");
    IEM_MC_BEGIN(3, 0);
    IEM_MC_ARG_CONST(IEMMODE,           enmEffOpSize, /*=*/ pIemCpu->enmEffOpSize,  0);
    IEM_MC_ARG_CONST(uint8_t,           iEffSeg,      /*=*/ pIemCpu->iEffSeg,       1);
    IEM_MC_ARG(RTGCPTR,                 GCPtrEffDst,                                2);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_CALL_CIMPL_3(iemCImpl_fnsave, enmEffOpSize, iEffSeg, GCPtrEffDst);
    IEM_MC_END();
    return VINF_SUCCESS;

}
13684
/** Opcode 0xdd !11/7.
 * FNSTSW m16 - stores the FPU status word to a 16-bit memory location. */
FNIEMOP_DEF_1(iemOp_fnstsw, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fnstsw m16");

    IEM_MC_BEGIN(0, 2);
    IEM_MC_LOCAL(uint16_t, u16Tmp);
    IEM_MC_LOCAL(RTGCPTR,  GCPtrEffDst);

    /* NOTE(review): unlike the sibling handlers, the device-not-available
       check precedes the done-decoding call here - confirm this ordering is
       intentional. */
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm);
    IEM_MC_FETCH_FSW(u16Tmp);
    IEM_MC_STORE_MEM_U16(pIemCpu->iEffSeg, GCPtrEffDst, u16Tmp);
    IEM_MC_ADVANCE_RIP();

/** @todo Debug / drop a hint to the verifier that things may differ
 * from REM. Seen 0x4020 (iem) vs 0x4000 (rem) at 0008:801c6b88 booting
 * NT4SP1. (X86_FSW_PE) */
    IEM_MC_END();
    return VINF_SUCCESS;
}
13708
13709
/** Opcode 0xdd 11/0.
 * FFREE ST(i) - marks the given stack register as empty in the tag word. */
FNIEMOP_DEF_1(iemOp_ffree_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC("ffree stN");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    /* Note! C0, C1, C2 and C3 are documented as undefined, we leave them
             unmodified. */

    IEM_MC_BEGIN(0, 0);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_FPU_STACK_FREE(bRm & X86_MODRM_RM_MASK);
    IEM_MC_UPDATE_FPU_OPCODE_IP();

    IEM_MC_USED_FPU();
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
13731
13732
/** Opcode 0xdd 11/2.
 * FST ST(i) - copies ST(0) into ST(i) without popping. */
FNIEMOP_DEF_1(iemOp_fst_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fst st0,stN");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(0, 2);
    IEM_MC_LOCAL(PCRTFLOAT80U,          pr80Value);
    IEM_MC_LOCAL(IEMFPURESULT,          FpuRes);
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
        /* Wrap ST(0) in a result (FSW=0) so the common store path handles it. */
        IEM_MC_SET_FPU_RESULT(FpuRes, 0 /*FSW*/, pr80Value);
        IEM_MC_STORE_FPU_RESULT(FpuRes, bRm & X86_MODRM_RM_MASK);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW(bRm & X86_MODRM_RM_MASK);
    IEM_MC_ENDIF();
    IEM_MC_USED_FPU();
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
13755
13756
13757/** Opcode 0xdd 11/3. */
13758FNIEMOP_DEF_1(iemOp_fucom_stN_st0, uint8_t, bRm)
13759{
13760 IEMOP_MNEMONIC("fcom st0,stN");
13761 return FNIEMOP_CALL_2(iemOpHlpFpuNoStore_st0_stN, bRm, iemAImpl_fucom_r80_by_r80);
13762}
13763
13764
13765/** Opcode 0xdd 11/4. */
13766FNIEMOP_DEF_1(iemOp_fucomp_stN, uint8_t, bRm)
13767{
13768 IEMOP_MNEMONIC("fcomp st0,stN");
13769 return FNIEMOP_CALL_2(iemOpHlpFpuNoStore_st0_stN_pop, bRm, iemAImpl_fucom_r80_by_r80);
13770}
13771
13772
/** Opcode 0xdd.
 * Escape byte 0xdd dispatcher: FFREE/FST/FSTP/FUCOM(P) for register forms,
 * FLD/FISTTP/FST(P) m64, FRSTOR, FNSAVE and FNSTSW for memory forms. */
FNIEMOP_DEF(iemOp_EscF5)
{
    pIemCpu->offFpuOpcode = pIemCpu->offOpcode - 1;
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
        {
            case 0: return FNIEMOP_CALL_1(iemOp_ffree_stN,   bRm);
            case 1: return FNIEMOP_CALL_1(iemOp_fxch_stN,    bRm); /* Reserved, intel behavior is that of XCHG ST(i). */
            case 2: return FNIEMOP_CALL_1(iemOp_fst_stN,     bRm);
            case 3: return FNIEMOP_CALL_1(iemOp_fstp_stN,    bRm);
            case 4: return FNIEMOP_CALL_1(iemOp_fucom_stN_st0,bRm);
            case 5: return FNIEMOP_CALL_1(iemOp_fucomp_stN,  bRm);
            case 6: return IEMOP_RAISE_INVALID_OPCODE();
            case 7: return IEMOP_RAISE_INVALID_OPCODE();
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
        {
            case 0: return FNIEMOP_CALL_1(iemOp_fld_m64r,    bRm);
            case 1: return FNIEMOP_CALL_1(iemOp_fisttp_m64i, bRm);
            case 2: return FNIEMOP_CALL_1(iemOp_fst_m64r,    bRm);
            case 3: return FNIEMOP_CALL_1(iemOp_fstp_m64r,   bRm);
            case 4: return FNIEMOP_CALL_1(iemOp_frstor,      bRm);
            case 5: return IEMOP_RAISE_INVALID_OPCODE();
            case 6: return FNIEMOP_CALL_1(iemOp_fnsave,      bRm);
            case 7: return FNIEMOP_CALL_1(iemOp_fnstsw,      bRm);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
13809
13810
/** Opcode 0xde 11/0.
 * FADDP ST(i),ST(0) - adds, stores in ST(i), then pops ST(0). */
FNIEMOP_DEF_1(iemOp_faddp_stN_st0, uint8_t, bRm)
{
    IEMOP_MNEMONIC("faddp stN,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0_pop, bRm, iemAImpl_fadd_r80_by_r80);
}
13817
13818
/** Opcode 0xde 11/1.
 * FMULP ST(i),ST(0) - multiplies, stores in ST(i), then pops ST(0). */
FNIEMOP_DEF_1(iemOp_fmulp_stN_st0, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fmulp stN,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0_pop, bRm, iemAImpl_fmul_r80_by_r80);
}
13825
13826
13827/** Opcode 0xde 0xd9. */
13828FNIEMOP_DEF(iemOp_fcompp)
13829{
13830 IEMOP_MNEMONIC("fucompp st0,stN");
13831 return FNIEMOP_CALL_1(iemOpHlpFpuNoStore_st0_stN_pop_pop, iemAImpl_fcom_r80_by_r80);
13832}
13833
13834
/** Opcode 0xde 11/4.
 * FSUBRP ST(i),ST(0) - reversed subtraction, stores in ST(i), then pops ST(0). */
FNIEMOP_DEF_1(iemOp_fsubrp_stN_st0, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fsubrp stN,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0_pop, bRm, iemAImpl_fsubr_r80_by_r80);
}
13841
13842
/** Opcode 0xde 11/5.
 * FSUBP ST(i),ST(0) - subtracts, stores in ST(i), then pops ST(0). */
FNIEMOP_DEF_1(iemOp_fsubp_stN_st0, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fsubp stN,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0_pop, bRm, iemAImpl_fsub_r80_by_r80);
}
13849
13850
/** Opcode 0xde 11/6.
 * FDIVRP ST(i),ST(0) - reversed division, stores in ST(i), then pops ST(0). */
FNIEMOP_DEF_1(iemOp_fdivrp_stN_st0, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fdivrp stN,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0_pop, bRm, iemAImpl_fdivr_r80_by_r80);
}
13857
13858
/** Opcode 0xde 11/7.
 * FDIVP ST(i),ST(0) - divides, stores in ST(i), then pops ST(0). */
FNIEMOP_DEF_1(iemOp_fdivp_stN_st0, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fdivp stN,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0_pop, bRm, iemAImpl_fdiv_r80_by_r80);
}
13865
13866
/**
 * Common worker for FPU instructions working on ST0 and an m16i, and storing
 * the result in ST0.
 *
 * @param   bRm         The ModRM byte (memory form).
 * @param   pfnAImpl    Pointer to the instruction implementation (assembly).
 */
FNIEMOP_DEF_2(iemOpHlpFpu_st0_m16i, uint8_t, bRm, PFNIEMAIMPLFPUI16, pfnAImpl)
{
    IEM_MC_BEGIN(3, 3);
    IEM_MC_LOCAL(RTGCPTR,               GCPtrEffSrc);
    IEM_MC_LOCAL(IEMFPURESULT,          FpuRes);
    IEM_MC_LOCAL(int16_t,               i16Val2);
    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes,    FpuRes,     0);
    IEM_MC_ARG(PCRTFLOAT80U,            pr80Value1,             1);
    IEM_MC_ARG_LOCAL_REF(int16_t const *, pi16Val2, i16Val2,    2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_I16(i16Val2, pIemCpu->iEffSeg, GCPtrEffSrc);

    /* ST(0) must be valid; otherwise signal stack underflow on ST(0). */
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0)
        IEM_MC_CALL_FPU_AIMPL_3(pfnAImpl, pFpuRes, pr80Value1, pi16Val2);
        IEM_MC_STORE_FPU_RESULT(FpuRes, 0);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW(0);
    IEM_MC_ENDIF();
    IEM_MC_USED_FPU();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
13902
13903
/** Opcode 0xde !11/0.
 * FIADD ST(0),m16int. */
FNIEMOP_DEF_1(iemOp_fiadd_m16i, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fiadd m16i");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m16i, bRm, iemAImpl_fiadd_r80_by_i16);
}
13910
13911
/** Opcode 0xde !11/1.
 * FIMUL ST(0),m16int. */
FNIEMOP_DEF_1(iemOp_fimul_m16i, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fimul m16i");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m16i, bRm, iemAImpl_fimul_r80_by_i16);
}
13918
13919
/** Opcode 0xde !11/2.
 * FICOM ST(0),m16int - compares ST(0) with a 16-bit integer from memory,
 * only updating FSW (no result register is written). */
FNIEMOP_DEF_1(iemOp_ficom_m16i, uint8_t, bRm)
{
    IEMOP_MNEMONIC("ficom st0,m16i");

    IEM_MC_BEGIN(3, 3);
    IEM_MC_LOCAL(RTGCPTR,               GCPtrEffSrc);
    IEM_MC_LOCAL(uint16_t,              u16Fsw);
    IEM_MC_LOCAL(int16_t,               i16Val2);
    IEM_MC_ARG_LOCAL_REF(uint16_t *,    pu16Fsw,    u16Fsw,     0);
    IEM_MC_ARG(PCRTFLOAT80U,            pr80Value1,             1);
    IEM_MC_ARG_LOCAL_REF(int16_t const *, pi16Val2, i16Val2,    2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_I16(i16Val2, pIemCpu->iEffSeg, GCPtrEffSrc);

    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0)
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_ficom_r80_by_i16, pu16Fsw, pr80Value1, pi16Val2);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP(u16Fsw, pIemCpu->iEffSeg, GCPtrEffSrc);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP(UINT8_MAX, pIemCpu->iEffSeg, GCPtrEffSrc);
    IEM_MC_ENDIF();
    IEM_MC_USED_FPU();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
13952
13953
/** Opcode 0xde !11/3.
 * FICOMP ST(0),m16int - same as FICOM m16i but pops ST(0) afterwards. */
FNIEMOP_DEF_1(iemOp_ficomp_m16i, uint8_t, bRm)
{
    IEMOP_MNEMONIC("ficomp st0,m16i");

    IEM_MC_BEGIN(3, 3);
    IEM_MC_LOCAL(RTGCPTR,               GCPtrEffSrc);
    IEM_MC_LOCAL(uint16_t,              u16Fsw);
    IEM_MC_LOCAL(int16_t,               i16Val2);
    IEM_MC_ARG_LOCAL_REF(uint16_t *,    pu16Fsw,    u16Fsw,     0);
    IEM_MC_ARG(PCRTFLOAT80U,            pr80Value1,             1);
    IEM_MC_ARG_LOCAL_REF(int16_t const *, pi16Val2, i16Val2,    2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_I16(i16Val2, pIemCpu->iEffSeg, GCPtrEffSrc);

    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0)
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_ficom_r80_by_i16, pu16Fsw, pr80Value1, pi16Val2);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pIemCpu->iEffSeg, GCPtrEffSrc);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pIemCpu->iEffSeg, GCPtrEffSrc);
    IEM_MC_ENDIF();
    IEM_MC_USED_FPU();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
13986
13987
/** Opcode 0xde !11/4.
 * FISUB m16i: ST(0) = ST(0) - m16i, via the common st0/m16i helper. */
FNIEMOP_DEF_1(iemOp_fisub_m16i, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fisub m16i");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m16i, bRm, iemAImpl_fisub_r80_by_i16);
}
13994
13995
/** Opcode 0xde !11/5.
 * FISUBR m16i: ST(0) = m16i - ST(0) (reversed subtract), via the common helper. */
FNIEMOP_DEF_1(iemOp_fisubr_m16i, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fisubr m16i");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m16i, bRm, iemAImpl_fisubr_r80_by_i16);
}
14002
14003
14004/** Opcode 0xde !11/6. */
14005FNIEMOP_DEF_1(iemOp_fidiv_m16i, uint8_t, bRm)
14006{
14007 IEMOP_MNEMONIC("fiadd m16i");
14008 return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m16i, bRm, iemAImpl_fidiv_r80_by_i16);
14009}
14010
14011
14012/** Opcode 0xde !11/7. */
14013FNIEMOP_DEF_1(iemOp_fidivr_m16i, uint8_t, bRm)
14014{
14015 IEMOP_MNEMONIC("fiadd m16i");
14016 return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m16i, bRm, iemAImpl_fidivr_r80_by_i16);
14017}
14018
14019
/** Opcode 0xde.
 * FPU escape 0xde: register forms (mod=3) are the popping "...p" arithmetic
 * ops on ST(i),ST(0); memory forms operate on ST(0) with a 16-bit integer. */
FNIEMOP_DEF(iemOp_EscF6)
{
    /* Record the opcode byte offset so FOP can be reported by FSTENV/FSAVE. */
    pIemCpu->offFpuOpcode = pIemCpu->offOpcode - 1;
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
        {
            case 0: return FNIEMOP_CALL_1(iemOp_faddp_stN_st0, bRm);
            case 1: return FNIEMOP_CALL_1(iemOp_fmulp_stN_st0, bRm);
            case 2: return FNIEMOP_CALL_1(iemOp_fcomp_stN, bRm);
            case 3: if (bRm == 0xd9) /* FCOMPP only encodes as DE D9. */
                        return FNIEMOP_CALL(iemOp_fcompp);
                    return IEMOP_RAISE_INVALID_OPCODE();
            case 4: return FNIEMOP_CALL_1(iemOp_fsubrp_stN_st0, bRm);
            case 5: return FNIEMOP_CALL_1(iemOp_fsubp_stN_st0, bRm);
            case 6: return FNIEMOP_CALL_1(iemOp_fdivrp_stN_st0, bRm);
            case 7: return FNIEMOP_CALL_1(iemOp_fdivp_stN_st0, bRm);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
        {
            case 0: return FNIEMOP_CALL_1(iemOp_fiadd_m16i, bRm);
            case 1: return FNIEMOP_CALL_1(iemOp_fimul_m16i, bRm);
            case 2: return FNIEMOP_CALL_1(iemOp_ficom_m16i, bRm);
            case 3: return FNIEMOP_CALL_1(iemOp_ficomp_m16i, bRm);
            case 4: return FNIEMOP_CALL_1(iemOp_fisub_m16i, bRm);
            case 5: return FNIEMOP_CALL_1(iemOp_fisubr_m16i, bRm);
            case 6: return FNIEMOP_CALL_1(iemOp_fidiv_m16i, bRm);
            case 7: return FNIEMOP_CALL_1(iemOp_fidivr_m16i, bRm);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
14058
14059
/** Opcode 0xdf 11/0.
 * Undocumented instruction, assumed to work like ffree + fincstp: marks
 * ST(i) empty and then increments the FPU stack top pointer. */
FNIEMOP_DEF_1(iemOp_ffreep_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC("ffreep stN");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(0, 0);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    /* Free the addressed register, then pop (advance TOP). */
    IEM_MC_FPU_STACK_FREE(bRm & X86_MODRM_RM_MASK);
    IEM_MC_FPU_STACK_INC_TOP();
    IEM_MC_UPDATE_FPU_OPCODE_IP();

    IEM_MC_USED_FPU();
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
14081
14082
/** Opcode 0xdf 0xe0.
 * FNSTSW AX: store the FPU status word in AX without checking for pending
 * FPU exceptions (the no-wait form). */
FNIEMOP_DEF(iemOp_fnstsw_ax)
{
    IEMOP_MNEMONIC("fnstsw ax");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(0, 1);
    IEM_MC_LOCAL(uint16_t, u16Tmp);
    /* Note: no IEM_MC_MAYBE_RAISE_FPU_XCPT - FNSTSW does not wait. */
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_FETCH_FSW(u16Tmp);
    IEM_MC_STORE_GREG_U16(X86_GREG_xAX, u16Tmp);
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
14098
14099
14100/** Opcode 0xdf 11/5. */
14101FNIEMOP_DEF_1(iemOp_fucomip_st0_stN, uint8_t, bRm)
14102{
14103 IEMOP_MNEMONIC("fcomip st0,stN");
14104 return IEM_MC_DEFER_TO_CIMPL_3(iemCImpl_fcomi_fucomi, bRm & X86_MODRM_RM_MASK, iemAImpl_fcomi_r80_by_r80, true /*fPop*/);
14105}
14106
14107
/** Opcode 0xdf 11/6.
 * FCOMIP ST0,STi: ordered compare of ST(0) with ST(i), storing the result
 * in EFLAGS (ZF/PF/CF), then popping the register stack. */
FNIEMOP_DEF_1(iemOp_fcomip_st0_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fcomip st0,stN");
    return IEM_MC_DEFER_TO_CIMPL_3(iemCImpl_fcomi_fucomi, bRm & X86_MODRM_RM_MASK, iemAImpl_fcomi_r80_by_r80, true /*fPop*/);
}
14114
14115
/** Opcode 0xdf !11/0.
 * FILD m16i - not yet implemented (stub raises a todo/assertion). */
FNIEMOP_STUB_1(iemOp_fild_m16i, uint8_t, bRm);
14118
14119
/** Opcode 0xdf !11/1.
 * FISTTP m16i (SSE3): store ST(0) to memory as int16 using truncation
 * (chop) rounding regardless of FCW.RC, then pop the register stack. */
FNIEMOP_DEF_1(iemOp_fisttp_m16i, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fisttp m16i");
    IEM_MC_BEGIN(3, 2);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(int16_t *, pi16Dst, 1);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    /* Map the destination for writing before doing the conversion. */
    IEM_MC_MEM_MAP(pi16Dst, IEM_ACCESS_DATA_W, pIemCpu->iEffSeg, GCPtrEffDst, 1 /*arg*/);
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fistt_r80_to_i16, pu16Fsw, pi16Dst, pr80Value);
        /* Commit is conditional on the FSW so masked #IA can suppress the store. */
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pi16Dst, IEM_ACCESS_DATA_W, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pIemCpu->iEffSeg, GCPtrEffDst);
    IEM_MC_ELSE()
        /* Stack underflow: with IM masked, store the integer indefinite value. */
        IEM_MC_IF_FCW_IM()
            IEM_MC_STORE_MEM_I16_CONST_BY_REF(pi16Dst, INT16_MIN /* (integer indefinite) */);
            IEM_MC_MEM_COMMIT_AND_UNMAP(pi16Dst, IEM_ACCESS_DATA_W);
        IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pIemCpu->iEffSeg, GCPtrEffDst);
    IEM_MC_ENDIF();
    IEM_MC_USED_FPU();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
14154
14155
14156/** Opcode 0xdf !11/2. */
14157FNIEMOP_DEF_1(iemOp_fist_m16i, uint8_t, bRm)
14158{
14159 IEMOP_MNEMONIC("fistp m16i");
14160 IEM_MC_BEGIN(3, 2);
14161 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
14162 IEM_MC_LOCAL(uint16_t, u16Fsw);
14163 IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
14164 IEM_MC_ARG(int16_t *, pi16Dst, 1);
14165 IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);
14166
14167 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm);
14168 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
14169 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
14170 IEM_MC_MAYBE_RAISE_FPU_XCPT();
14171
14172 IEM_MC_MEM_MAP(pi16Dst, IEM_ACCESS_DATA_W, pIemCpu->iEffSeg, GCPtrEffDst, 1 /*arg*/);
14173 IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
14174 IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fist_r80_to_i16, pu16Fsw, pi16Dst, pr80Value);
14175 IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pi16Dst, IEM_ACCESS_DATA_W, u16Fsw);
14176 IEM_MC_UPDATE_FSW_WITH_MEM_OP(u16Fsw, pIemCpu->iEffSeg, GCPtrEffDst);
14177 IEM_MC_ELSE()
14178 IEM_MC_IF_FCW_IM()
14179 IEM_MC_STORE_MEM_I16_CONST_BY_REF(pi16Dst, INT16_MIN /* (integer indefinite) */);
14180 IEM_MC_MEM_COMMIT_AND_UNMAP(pi16Dst, IEM_ACCESS_DATA_W);
14181 IEM_MC_ENDIF();
14182 IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP(UINT8_MAX, pIemCpu->iEffSeg, GCPtrEffDst);
14183 IEM_MC_ENDIF();
14184 IEM_MC_USED_FPU();
14185 IEM_MC_ADVANCE_RIP();
14186
14187 IEM_MC_END();
14188 return VINF_SUCCESS;
14189}
14190
14191
/** Opcode 0xdf !11/3.
 * FISTP m16i: store ST(0) to memory as int16 using FCW.RC rounding, then
 * pop the FPU register stack (the _THEN_POP variants). */
FNIEMOP_DEF_1(iemOp_fistp_m16i, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fistp m16i");
    IEM_MC_BEGIN(3, 2);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(int16_t *, pi16Dst, 1);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    /* Map the destination for writing before doing the conversion. */
    IEM_MC_MEM_MAP(pi16Dst, IEM_ACCESS_DATA_W, pIemCpu->iEffSeg, GCPtrEffDst, 1 /*arg*/);
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fist_r80_to_i16, pu16Fsw, pi16Dst, pr80Value);
        /* Commit is conditional on the FSW so masked #IA can suppress the store. */
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pi16Dst, IEM_ACCESS_DATA_W, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pIemCpu->iEffSeg, GCPtrEffDst);
    IEM_MC_ELSE()
        /* Stack underflow: with IM masked, store the integer indefinite value. */
        IEM_MC_IF_FCW_IM()
            IEM_MC_STORE_MEM_I16_CONST_BY_REF(pi16Dst, INT16_MIN /* (integer indefinite) */);
            IEM_MC_MEM_COMMIT_AND_UNMAP(pi16Dst, IEM_ACCESS_DATA_W);
        IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pIemCpu->iEffSeg, GCPtrEffDst);
    IEM_MC_ENDIF();
    IEM_MC_USED_FPU();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
14226
14227
/** Opcode 0xdf !11/4.
 * FBLD m80bcd - load packed BCD; not yet implemented (stub). */
FNIEMOP_STUB_1(iemOp_fbld_m80d, uint8_t, bRm);

/** Opcode 0xdf !11/5.
 * FILD m64i - load 64-bit integer; not yet implemented (stub). */
FNIEMOP_STUB_1(iemOp_fild_m64i, uint8_t, bRm);

/** Opcode 0xdf !11/6.
 * FBSTP m80bcd - store packed BCD and pop; not yet implemented (stub). */
FNIEMOP_STUB_1(iemOp_fbstp_m80d, uint8_t, bRm);
14236
14237
/** Opcode 0xdf !11/7.
 * FISTP m64i: store ST(0) to memory as int64 using FCW.RC rounding, then
 * pop the FPU register stack. */
FNIEMOP_DEF_1(iemOp_fistp_m64i, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fistp m64i");
    IEM_MC_BEGIN(3, 2);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(int64_t *, pi64Dst, 1);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    /* Map the destination for writing before doing the conversion. */
    IEM_MC_MEM_MAP(pi64Dst, IEM_ACCESS_DATA_W, pIemCpu->iEffSeg, GCPtrEffDst, 1 /*arg*/);
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fist_r80_to_i64, pu16Fsw, pi64Dst, pr80Value);
        /* Commit is conditional on the FSW so masked #IA can suppress the store. */
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pi64Dst, IEM_ACCESS_DATA_W, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pIemCpu->iEffSeg, GCPtrEffDst);
    IEM_MC_ELSE()
        /* Stack underflow: with IM masked, store the integer indefinite value. */
        IEM_MC_IF_FCW_IM()
            IEM_MC_STORE_MEM_I64_CONST_BY_REF(pi64Dst, INT64_MIN /* (integer indefinite) */);
            IEM_MC_MEM_COMMIT_AND_UNMAP(pi64Dst, IEM_ACCESS_DATA_W);
        IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pIemCpu->iEffSeg, GCPtrEffDst);
    IEM_MC_ENDIF();
    IEM_MC_USED_FPU();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
14272
14273
14274/** Opcode 0xdf. */
14275FNIEMOP_DEF(iemOp_EscF7)
14276{
14277 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
14278 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
14279 {
14280 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
14281 {
14282 case 0: return FNIEMOP_CALL_1(iemOp_ffreep_stN, bRm); /* ffree + pop afterwards, since forever according to AMD. */
14283 case 1: return FNIEMOP_CALL_1(iemOp_fxch_stN, bRm); /* Reserved, behaves like FXCH ST(i) on intel. */
14284 case 2: return FNIEMOP_CALL_1(iemOp_fstp_stN, bRm); /* Reserved, behaves like FSTP ST(i) on intel. */
14285 case 3: return FNIEMOP_CALL_1(iemOp_fstp_stN, bRm); /* Reserved, behaves like FSTP ST(i) on intel. */
14286 case 4: if (bRm == 0xe0)
14287 return FNIEMOP_CALL(iemOp_fnstsw_ax);
14288 return IEMOP_RAISE_INVALID_OPCODE();
14289 case 5: return FNIEMOP_CALL_1(iemOp_fucomip_st0_stN, bRm);
14290 case 6: return FNIEMOP_CALL_1(iemOp_fcomip_st0_stN, bRm);
14291 case 7: return IEMOP_RAISE_INVALID_OPCODE();
14292 IEM_NOT_REACHED_DEFAULT_CASE_RET();
14293 }
14294 }
14295 else
14296 {
14297 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
14298 {
14299 case 0: return FNIEMOP_CALL_1(iemOp_fild_m16i, bRm);
14300 case 1: return FNIEMOP_CALL_1(iemOp_fisttp_m16i, bRm);
14301 case 2: return FNIEMOP_CALL_1(iemOp_fist_m16i, bRm);
14302 case 3: return FNIEMOP_CALL_1(iemOp_fistp_m16i, bRm);
14303 case 4: return FNIEMOP_CALL_1(iemOp_fbld_m80d, bRm);
14304 case 5: return FNIEMOP_CALL_1(iemOp_fild_m64i, bRm);
14305 case 6: return FNIEMOP_CALL_1(iemOp_fbstp_m80d, bRm);
14306 case 7: return FNIEMOP_CALL_1(iemOp_fistp_m64i, bRm);
14307 IEM_NOT_REACHED_DEFAULT_CASE_RET();
14308 }
14309 }
14310}
14311
14312
/** Opcode 0xe0.
 * LOOPNE/LOOPNZ rel8: decrement (e/r)CX (width picked by the effective
 * address size) and branch if the counter is non-zero AND ZF is clear.
 * The decrement does not affect EFLAGS. */
FNIEMOP_DEF(iemOp_loopne_Jb)
{
    IEMOP_MNEMONIC("loopne Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();

    /* The address-size prefix selects whether CX, ECX or RCX is the counter. */
    switch (pIemCpu->enmEffAddrMode)
    {
        case IEMMODE_16BIT:
            IEM_MC_BEGIN(0,0);
            IEM_MC_SUB_GREG_U16(X86_GREG_xCX, 1);
            IEM_MC_IF_CX_IS_NZ_AND_EFL_BIT_NOT_SET(X86_EFL_ZF) {
                IEM_MC_REL_JMP_S8(i8Imm);
            } IEM_MC_ELSE() {
                IEM_MC_ADVANCE_RIP();
            } IEM_MC_ENDIF();
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_32BIT:
            IEM_MC_BEGIN(0,0);
            IEM_MC_SUB_GREG_U32(X86_GREG_xCX, 1);
            IEM_MC_IF_ECX_IS_NZ_AND_EFL_BIT_NOT_SET(X86_EFL_ZF) {
                IEM_MC_REL_JMP_S8(i8Imm);
            } IEM_MC_ELSE() {
                IEM_MC_ADVANCE_RIP();
            } IEM_MC_ENDIF();
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_64BIT:
            IEM_MC_BEGIN(0,0);
            IEM_MC_SUB_GREG_U64(X86_GREG_xCX, 1);
            IEM_MC_IF_RCX_IS_NZ_AND_EFL_BIT_NOT_SET(X86_EFL_ZF) {
                IEM_MC_REL_JMP_S8(i8Imm);
            } IEM_MC_ELSE() {
                IEM_MC_ADVANCE_RIP();
            } IEM_MC_ENDIF();
            IEM_MC_END();
            return VINF_SUCCESS;

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
14359
14360
/** Opcode 0xe1.
 * LOOPE/LOOPZ rel8: decrement (e/r)CX and branch if the counter is
 * non-zero AND ZF is set.  The decrement does not affect EFLAGS. */
FNIEMOP_DEF(iemOp_loope_Jb)
{
    IEMOP_MNEMONIC("loope Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();

    /* The address-size prefix selects whether CX, ECX or RCX is the counter. */
    switch (pIemCpu->enmEffAddrMode)
    {
        case IEMMODE_16BIT:
            IEM_MC_BEGIN(0,0);
            IEM_MC_SUB_GREG_U16(X86_GREG_xCX, 1);
            IEM_MC_IF_CX_IS_NZ_AND_EFL_BIT_SET(X86_EFL_ZF) {
                IEM_MC_REL_JMP_S8(i8Imm);
            } IEM_MC_ELSE() {
                IEM_MC_ADVANCE_RIP();
            } IEM_MC_ENDIF();
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_32BIT:
            IEM_MC_BEGIN(0,0);
            IEM_MC_SUB_GREG_U32(X86_GREG_xCX, 1);
            IEM_MC_IF_ECX_IS_NZ_AND_EFL_BIT_SET(X86_EFL_ZF) {
                IEM_MC_REL_JMP_S8(i8Imm);
            } IEM_MC_ELSE() {
                IEM_MC_ADVANCE_RIP();
            } IEM_MC_ENDIF();
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_64BIT:
            IEM_MC_BEGIN(0,0);
            IEM_MC_SUB_GREG_U64(X86_GREG_xCX, 1);
            IEM_MC_IF_RCX_IS_NZ_AND_EFL_BIT_SET(X86_EFL_ZF) {
                IEM_MC_REL_JMP_S8(i8Imm);
            } IEM_MC_ELSE() {
                IEM_MC_ADVANCE_RIP();
            } IEM_MC_ENDIF();
            IEM_MC_END();
            return VINF_SUCCESS;

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
14407
14408
/** Opcode 0xe2.
 * LOOP rel8: decrement (e/r)CX and branch while the counter is non-zero.
 * The decrement does not affect EFLAGS. */
FNIEMOP_DEF(iemOp_loop_Jb)
{
    IEMOP_MNEMONIC("loop Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();

    /** @todo Check out the #GP case if EIP < CS.Base or EIP > CS.Limit when
     * using the 32-bit operand size override.  How can that be restarted?  See
     * weird pseudo code in intel manual. */
    switch (pIemCpu->enmEffAddrMode)
    {
        case IEMMODE_16BIT:
            IEM_MC_BEGIN(0,0);
            IEM_MC_SUB_GREG_U16(X86_GREG_xCX, 1);
            IEM_MC_IF_CX_IS_NZ() {
                IEM_MC_REL_JMP_S8(i8Imm);
            } IEM_MC_ELSE() {
                IEM_MC_ADVANCE_RIP();
            } IEM_MC_ENDIF();
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_32BIT:
            IEM_MC_BEGIN(0,0);
            IEM_MC_SUB_GREG_U32(X86_GREG_xCX, 1);
            IEM_MC_IF_ECX_IS_NZ() {
                IEM_MC_REL_JMP_S8(i8Imm);
            } IEM_MC_ELSE() {
                IEM_MC_ADVANCE_RIP();
            } IEM_MC_ENDIF();
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_64BIT:
            IEM_MC_BEGIN(0,0);
            IEM_MC_SUB_GREG_U64(X86_GREG_xCX, 1);
            IEM_MC_IF_RCX_IS_NZ() {
                IEM_MC_REL_JMP_S8(i8Imm);
            } IEM_MC_ELSE() {
                IEM_MC_ADVANCE_RIP();
            } IEM_MC_ENDIF();
            IEM_MC_END();
            return VINF_SUCCESS;

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
14458
14459
/** Opcode 0xe3.
 * JCXZ/JECXZ/JRCXZ rel8: branch if the counter register selected by the
 * effective address size is zero.  No flags are read or modified; note the
 * inverted branch sense compared to the LOOP family above. */
FNIEMOP_DEF(iemOp_jecxz_Jb)
{
    IEMOP_MNEMONIC("jecxz Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();

    switch (pIemCpu->enmEffAddrMode)
    {
        case IEMMODE_16BIT:
            IEM_MC_BEGIN(0,0);
            IEM_MC_IF_CX_IS_NZ() {
                IEM_MC_ADVANCE_RIP();
            } IEM_MC_ELSE() {
                IEM_MC_REL_JMP_S8(i8Imm);
            } IEM_MC_ENDIF();
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_32BIT:
            IEM_MC_BEGIN(0,0);
            IEM_MC_IF_ECX_IS_NZ() {
                IEM_MC_ADVANCE_RIP();
            } IEM_MC_ELSE() {
                IEM_MC_REL_JMP_S8(i8Imm);
            } IEM_MC_ENDIF();
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_64BIT:
            IEM_MC_BEGIN(0,0);
            IEM_MC_IF_RCX_IS_NZ() {
                IEM_MC_ADVANCE_RIP();
            } IEM_MC_ELSE() {
                IEM_MC_REL_JMP_S8(i8Imm);
            } IEM_MC_ENDIF();
            IEM_MC_END();
            return VINF_SUCCESS;

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
14503
14504
14505/** Opcode 0xe4 */
14506FNIEMOP_DEF(iemOp_in_AL_Ib)
14507{
14508 IEMOP_MNEMONIC("in eAX,Ib");
14509 uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
14510 IEMOP_HLP_NO_LOCK_PREFIX();
14511 return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_in, u8Imm, 1);
14512}
14513
14514
/** Opcode 0xe5.
 * IN eAX,imm8: read a word or dword (per effective operand size) from the
 * immediate I/O port into AX/EAX. */
FNIEMOP_DEF(iemOp_in_eAX_Ib)
{
    IEMOP_MNEMONIC("in eAX,Ib");
    uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
    IEMOP_HLP_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_in, u8Imm, pIemCpu->enmEffOpSize == IEMMODE_16BIT ? 2 : 4);
}
14523
14524
/** Opcode 0xe6.
 * OUT imm8,AL: write AL to the immediate I/O port (access width 1). */
FNIEMOP_DEF(iemOp_out_Ib_AL)
{
    IEMOP_MNEMONIC("out Ib,AL");
    uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
    IEMOP_HLP_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_out, u8Imm, 1);
}
14533
14534
/** Opcode 0xe7.
 * OUT imm8,eAX: write AX/EAX (per effective operand size) to the immediate
 * I/O port. */
FNIEMOP_DEF(iemOp_out_Ib_eAX)
{
    IEMOP_MNEMONIC("out Ib,eAX");
    uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
    IEMOP_HLP_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_out, u8Imm, pIemCpu->enmEffOpSize == IEMMODE_16BIT ? 2 : 4);
}
14543
14544
/** Opcode 0xe8.
 * CALL rel16/rel32: near relative call.  The immediate is fetched at the
 * effective operand size (in 64-bit mode rel32 is sign-extended to 64 bits)
 * and the push/branch is deferred to the C implementation. */
FNIEMOP_DEF(iemOp_call_Jv)
{
    IEMOP_MNEMONIC("call Jv");
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
    switch (pIemCpu->enmEffOpSize)
    {
        case IEMMODE_16BIT:
        {
            uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
            return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_call_rel_16, (int16_t)u16Imm);
        }

        case IEMMODE_32BIT:
        {
            uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
            return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_call_rel_32, (int32_t)u32Imm);
        }

        case IEMMODE_64BIT:
        {
            /* rel32 sign-extended to 64-bit; there is no rel64 encoding. */
            uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm);
            return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_call_rel_64, u64Imm);
        }

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
14573
14574
/** Opcode 0xe9.
 * JMP rel16/rel32: near relative jump.  In 64-bit mode the operand is
 * always rel32 (shared with the 32-bit case below). */
FNIEMOP_DEF(iemOp_jmp_Jv)
{
    IEMOP_MNEMONIC("jmp Jv");
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
    switch (pIemCpu->enmEffOpSize)
    {
        case IEMMODE_16BIT:
        {
            int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
            IEM_MC_BEGIN(0, 0);
            IEM_MC_REL_JMP_S16(i16Imm);
            IEM_MC_END();
            return VINF_SUCCESS;
        }

        /* 64-bit mode uses the same rel32 immediate as 32-bit. */
        case IEMMODE_64BIT:
        case IEMMODE_32BIT:
        {
            int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
            IEM_MC_BEGIN(0, 0);
            IEM_MC_REL_JMP_S32(i32Imm);
            IEM_MC_END();
            return VINF_SUCCESS;
        }

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
14604
14605
/** Opcode 0xea.
 * JMP ptr16:16/ptr16:32: direct far jump.  Invalid in 64-bit mode.  The
 * offset and selector are decoded here; the segment checks and branch are
 * done by the common far-jump C implementation. */
FNIEMOP_DEF(iemOp_jmp_Ap)
{
    IEMOP_MNEMONIC("jmp Ap");
    IEMOP_HLP_NO_64BIT();

    /* Decode the far pointer address and pass it on to the far call C implementation. */
    uint32_t offSeg;
    if (pIemCpu->enmEffOpSize != IEMMODE_16BIT)
        IEM_OPCODE_GET_NEXT_U32(&offSeg);
    else
        IEM_OPCODE_GET_NEXT_U16_ZX_U32(&offSeg);
    uint16_t uSel;  IEM_OPCODE_GET_NEXT_U16(&uSel);
    IEMOP_HLP_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_3(iemCImpl_FarJmp, uSel, offSeg, pIemCpu->enmEffOpSize);
}
14622
14623
/** Opcode 0xeb.
 * JMP rel8: short relative jump, valid at all operand sizes. */
FNIEMOP_DEF(iemOp_jmp_Jb)
{
    IEMOP_MNEMONIC("jmp Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();

    IEM_MC_BEGIN(0, 0);
    IEM_MC_REL_JMP_S8(i8Imm);
    IEM_MC_END();
    return VINF_SUCCESS;
}
14637
14638
/** Opcode 0xec.
 * IN AL,DX: read one byte from the I/O port in DX into AL. */
FNIEMOP_DEF(iemOp_in_AL_DX)
{
    IEMOP_MNEMONIC("in AL,DX");
    IEMOP_HLP_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_in_eAX_DX, 1);
}
14646
14647
/** Opcode 0xed.
 * IN eAX,DX: read a word or dword (per effective operand size) from the
 * I/O port in DX into AX/EAX.
 * NOTE(review): the function name looks like it should be iemOp_in_eAX_DX
 * for consistency with its siblings; renaming would touch the opcode table. */
FNIEMOP_DEF(iemOp_eAX_DX)
{
    IEMOP_MNEMONIC("in eAX,DX");
    IEMOP_HLP_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_in_eAX_DX, pIemCpu->enmEffOpSize == IEMMODE_16BIT ? 2 : 4);
}
14655
14656
/** Opcode 0xee.
 * OUT DX,AL: write AL to the I/O port in DX. */
FNIEMOP_DEF(iemOp_out_DX_AL)
{
    IEMOP_MNEMONIC("out DX,AL");
    IEMOP_HLP_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_out_DX_eAX, 1);
}
14664
14665
/** Opcode 0xef.
 * OUT DX,eAX: write AX/EAX (per effective operand size) to the I/O port
 * in DX. */
FNIEMOP_DEF(iemOp_out_DX_eAX)
{
    IEMOP_MNEMONIC("out DX,eAX");
    IEMOP_HLP_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_out_DX_eAX, pIemCpu->enmEffOpSize == IEMMODE_16BIT ? 2 : 4);
}
14673
14674
/** Opcode 0xf0.
 * LOCK prefix: set the lock-prefix flag and decode the next opcode byte.
 * Whether LOCK is actually legal is checked by the prefixed instruction. */
FNIEMOP_DEF(iemOp_lock)
{
    pIemCpu->fPrefixes |= IEM_OP_PRF_LOCK;

    uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
    return FNIEMOP_CALL(g_apfnOneByteMap[b]);
}
14683
14684
/** Opcode 0xf2.
 * REPNE/REPNZ prefix: record it and decode the next opcode byte.  Only one
 * of REPZ/REPNZ can be in effect, so any earlier REPE prefix is cleared. */
FNIEMOP_DEF(iemOp_repne)
{
    /* This overrides any previous REPE prefix. */
    pIemCpu->fPrefixes &= ~IEM_OP_PRF_REPZ;
    pIemCpu->fPrefixes |= IEM_OP_PRF_REPNZ;

    uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
    return FNIEMOP_CALL(g_apfnOneByteMap[b]);
}
14695
14696
/** Opcode 0xf3.
 * REP/REPE/REPZ prefix: record it and decode the next opcode byte.  Only
 * one of REPZ/REPNZ can be in effect, so any earlier REPNE prefix is
 * cleared. */
FNIEMOP_DEF(iemOp_repe)
{
    /* This overrides any previous REPNE prefix. */
    pIemCpu->fPrefixes &= ~IEM_OP_PRF_REPNZ;
    pIemCpu->fPrefixes |= IEM_OP_PRF_REPZ;

    uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
    return FNIEMOP_CALL(g_apfnOneByteMap[b]);
}
14707
14708
/** Opcode 0xf4.
 * HLT: halt the CPU until an interrupt; privilege checking is done by the
 * deferred C implementation. */
FNIEMOP_DEF(iemOp_hlt)
{
    IEMOP_HLP_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_hlt);
}
14715
14716
/** Opcode 0xf5.
 * CMC: complement the carry flag; no other flags are affected. */
FNIEMOP_DEF(iemOp_cmc)
{
    IEMOP_MNEMONIC("cmc");
    IEMOP_HLP_NO_LOCK_PREFIX();
    IEM_MC_BEGIN(0, 0);
    IEM_MC_FLIP_EFL_BIT(X86_EFL_CF);
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
14728
14729
14730/**
14731 * Common implementation of 'inc/dec/not/neg Eb'.
14732 *
14733 * @param bRm The RM byte.
14734 * @param pImpl The instruction implementation.
14735 */
14736FNIEMOP_DEF_2(iemOpCommonUnaryEb, uint8_t, bRm, PCIEMOPUNARYSIZES, pImpl)
14737{
14738 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
14739 {
14740 /* register access */
14741 IEM_MC_BEGIN(2, 0);
14742 IEM_MC_ARG(uint8_t *, pu8Dst, 0);
14743 IEM_MC_ARG(uint32_t *, pEFlags, 1);
14744 IEM_MC_REF_GREG_U8(pu8Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
14745 IEM_MC_REF_EFLAGS(pEFlags);
14746 IEM_MC_CALL_VOID_AIMPL_2(pImpl->pfnNormalU8, pu8Dst, pEFlags);
14747 IEM_MC_ADVANCE_RIP();
14748 IEM_MC_END();
14749 }
14750 else
14751 {
14752 /* memory access. */
14753 IEM_MC_BEGIN(2, 2);
14754 IEM_MC_ARG(uint8_t *, pu8Dst, 0);
14755 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 1);
14756 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
14757
14758 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm);
14759 IEM_MC_MEM_MAP(pu8Dst, IEM_ACCESS_DATA_RW, pIemCpu->iEffSeg, GCPtrEffDst, 0 /*arg*/);
14760 IEM_MC_FETCH_EFLAGS(EFlags);
14761 if (!(pIemCpu->fPrefixes & IEM_OP_PRF_LOCK))
14762 IEM_MC_CALL_VOID_AIMPL_2(pImpl->pfnNormalU8, pu8Dst, pEFlags);
14763 else
14764 IEM_MC_CALL_VOID_AIMPL_2(pImpl->pfnLockedU8, pu8Dst, pEFlags);
14765
14766 IEM_MC_MEM_COMMIT_AND_UNMAP(pu8Dst, IEM_ACCESS_DATA_RW);
14767 IEM_MC_COMMIT_EFLAGS(EFlags);
14768 IEM_MC_ADVANCE_RIP();
14769 IEM_MC_END();
14770 }
14771 return VINF_SUCCESS;
14772}
14773
14774
14775/**
14776 * Common implementation of 'inc/dec/not/neg Ev'.
14777 *
14778 * @param bRm The RM byte.
14779 * @param pImpl The instruction implementation.
14780 */
14781FNIEMOP_DEF_2(iemOpCommonUnaryEv, uint8_t, bRm, PCIEMOPUNARYSIZES, pImpl)
14782{
14783 /* Registers are handled by a common worker. */
14784 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
14785 return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, pImpl, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
14786
14787 /* Memory we do here. */
14788 switch (pIemCpu->enmEffOpSize)
14789 {
14790 case IEMMODE_16BIT:
14791 IEM_MC_BEGIN(2, 2);
14792 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
14793 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 1);
14794 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
14795
14796 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm);
14797 IEM_MC_MEM_MAP(pu16Dst, IEM_ACCESS_DATA_RW, pIemCpu->iEffSeg, GCPtrEffDst, 0 /*arg*/);
14798 IEM_MC_FETCH_EFLAGS(EFlags);
14799 if (!(pIemCpu->fPrefixes & IEM_OP_PRF_LOCK))
14800 IEM_MC_CALL_VOID_AIMPL_2(pImpl->pfnNormalU16, pu16Dst, pEFlags);
14801 else
14802 IEM_MC_CALL_VOID_AIMPL_2(pImpl->pfnLockedU16, pu16Dst, pEFlags);
14803
14804 IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, IEM_ACCESS_DATA_RW);
14805 IEM_MC_COMMIT_EFLAGS(EFlags);
14806 IEM_MC_ADVANCE_RIP();
14807 IEM_MC_END();
14808 return VINF_SUCCESS;
14809
14810 case IEMMODE_32BIT:
14811 IEM_MC_BEGIN(2, 2);
14812 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
14813 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 1);
14814 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
14815
14816 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm);
14817 IEM_MC_MEM_MAP(pu32Dst, IEM_ACCESS_DATA_RW, pIemCpu->iEffSeg, GCPtrEffDst, 0 /*arg*/);
14818 IEM_MC_FETCH_EFLAGS(EFlags);
14819 if (!(pIemCpu->fPrefixes & IEM_OP_PRF_LOCK))
14820 IEM_MC_CALL_VOID_AIMPL_2(pImpl->pfnNormalU32, pu32Dst, pEFlags);
14821 else
14822 IEM_MC_CALL_VOID_AIMPL_2(pImpl->pfnLockedU32, pu32Dst, pEFlags);
14823
14824 IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, IEM_ACCESS_DATA_RW);
14825 IEM_MC_COMMIT_EFLAGS(EFlags);
14826 IEM_MC_ADVANCE_RIP();
14827 IEM_MC_END();
14828 return VINF_SUCCESS;
14829
14830 case IEMMODE_64BIT:
14831 IEM_MC_BEGIN(2, 2);
14832 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
14833 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 1);
14834 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
14835
14836 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm);
14837 IEM_MC_MEM_MAP(pu64Dst, IEM_ACCESS_DATA_RW, pIemCpu->iEffSeg, GCPtrEffDst, 0 /*arg*/);
14838 IEM_MC_FETCH_EFLAGS(EFlags);
14839 if (!(pIemCpu->fPrefixes & IEM_OP_PRF_LOCK))
14840 IEM_MC_CALL_VOID_AIMPL_2(pImpl->pfnNormalU64, pu64Dst, pEFlags);
14841 else
14842 IEM_MC_CALL_VOID_AIMPL_2(pImpl->pfnLockedU64, pu64Dst, pEFlags);
14843
14844 IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, IEM_ACCESS_DATA_RW);
14845 IEM_MC_COMMIT_EFLAGS(EFlags);
14846 IEM_MC_ADVANCE_RIP();
14847 IEM_MC_END();
14848 return VINF_SUCCESS;
14849
14850 IEM_NOT_REACHED_DEFAULT_CASE_RET();
14851 }
14852}
14853
14854
/** Opcode 0xf6 /0.
 * TEST Eb,Ib: AND the byte operand with an immediate, setting flags only
 * (the destination is never written, hence the read-only mapping below).
 * AF is architecturally undefined after TEST. */
FNIEMOP_DEF_1(iemOp_grp3_test_Eb, uint8_t, bRm)
{
    IEMOP_MNEMONIC("test Eb,Ib");
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register access */
        uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
        IEMOP_HLP_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(3, 0);
        IEM_MC_ARG(uint8_t *, pu8Dst, 0);
        IEM_MC_ARG_CONST(uint8_t, u8Src,/*=*/u8Imm, 1);
        IEM_MC_ARG(uint32_t *, pEFlags, 2);
        IEM_MC_REF_GREG_U8(pu8Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
        IEM_MC_REF_EFLAGS(pEFlags);
        IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_test_u8, pu8Dst, u8Src, pEFlags);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /* memory access. */
        IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo should probably not be raised until we've fetched all the opcode bytes? */

        IEM_MC_BEGIN(3, 2);
        IEM_MC_ARG(uint8_t *, pu8Dst, 0);
        IEM_MC_ARG(uint8_t, u8Src, 1);
        IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

        /* The immediate follows the ModR/M operand bytes, so decode the
           effective address before fetching it. */
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm);
        uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
        IEM_MC_ASSIGN(u8Src, u8Imm);
        IEM_MC_MEM_MAP(pu8Dst, IEM_ACCESS_DATA_R, pIemCpu->iEffSeg, GCPtrEffDst, 0 /*arg*/);
        IEM_MC_FETCH_EFLAGS(EFlags);
        IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_test_u8, pu8Dst, u8Src, pEFlags);

        IEM_MC_MEM_COMMIT_AND_UNMAP(pu8Dst, IEM_ACCESS_DATA_R);
        IEM_MC_COMMIT_EFLAGS(EFlags);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
14902
14903
/** Opcode 0xf7 /0.
 * TEST Ev,Iz: AND the word/dword/qword operand with an immediate, setting
 * flags only (destination never written; memory is mapped read-only).  In
 * 64-bit mode the immediate is imm32 sign-extended to 64 bits.  AF is
 * architecturally undefined after TEST. */
FNIEMOP_DEF_1(iemOp_grp3_test_Ev, uint8_t, bRm)
{
    IEMOP_MNEMONIC("test Ev,Iv");
    IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo should probably not be raised until we've fetched all the opcode bytes? */
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register access */
        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
            {
                uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint16_t *, pu16Dst, 0);
                IEM_MC_ARG_CONST(uint16_t, u16Src,/*=*/u16Imm, 1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);
                IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_test_u16, pu16Dst, u16Src, pEFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;
            }

            case IEMMODE_32BIT:
            {
                uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint32_t *, pu32Dst, 0);
                IEM_MC_ARG_CONST(uint32_t, u32Src,/*=*/u32Imm, 1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);
                IEM_MC_REF_GREG_U32(pu32Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_test_u32, pu32Dst, u32Src, pEFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;
            }

            case IEMMODE_64BIT:
            {
                /* imm32 sign-extended to 64 bits; there is no imm64 form. */
                uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm);
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint64_t *, pu64Dst, 0);
                IEM_MC_ARG_CONST(uint64_t, u64Src,/*=*/u64Imm, 1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);
                IEM_MC_REF_GREG_U64(pu64Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_test_u64, pu64Dst, u64Src, pEFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;
            }

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /* memory access. */
        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
            {
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint16_t *, pu16Dst, 0);
                IEM_MC_ARG(uint16_t, u16Src, 1);
                IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                /* The immediate follows the ModR/M operand bytes. */
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm);
                uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
                IEM_MC_ASSIGN(u16Src, u16Imm);
                IEM_MC_MEM_MAP(pu16Dst, IEM_ACCESS_DATA_R, pIemCpu->iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_test_u16, pu16Dst, u16Src, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, IEM_ACCESS_DATA_R);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;
            }

            case IEMMODE_32BIT:
            {
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint32_t *, pu32Dst, 0);
                IEM_MC_ARG(uint32_t, u32Src, 1);
                IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm);
                uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
                IEM_MC_ASSIGN(u32Src, u32Imm);
                IEM_MC_MEM_MAP(pu32Dst, IEM_ACCESS_DATA_R, pIemCpu->iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_test_u32, pu32Dst, u32Src, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, IEM_ACCESS_DATA_R);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;
            }

            case IEMMODE_64BIT:
            {
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint64_t *, pu64Dst, 0);
                IEM_MC_ARG(uint64_t, u64Src, 1);
                IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm);
                uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm);
                IEM_MC_ASSIGN(u64Src, u64Imm);
                IEM_MC_MEM_MAP(pu64Dst, IEM_ACCESS_DATA_R, pIemCpu->iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_test_u64, pu64Dst, u64Src, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, IEM_ACCESS_DATA_R);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;
            }

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
15039
15040
/** Opcode 0xf6 /4, /5, /6 and /7.
 *
 * Common worker for the byte-sized MUL, IMUL, DIV and IDIV forms of group 3.
 * The implicit operand is AL and the 16-bit result (or quotient:remainder
 * pair) is written to AX by the assembly worker.
 *
 * @param   bRm     The ModR/M byte (already fetched by the caller).
 * @param   pfnU8   The 8-bit multiply/divide assembly worker to invoke.
 *
 * NOTE(review): the worker is invoked via IEM_MC_CALL_VOID_AIMPL_3, i.e. no
 * status is checked here — presumably the u8 div/idiv workers deal with
 * divide errors themselves at this revision; confirm against IEMAllAImpl.
 */
FNIEMOP_DEF_2(iemOpCommonGrp3MulDivEb, uint8_t, bRm, PFNIEMAIMPLMULDIVU8, pfnU8)
{
    IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo should probably not be raised until we've fetched all the opcode bytes? */

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register access */
        IEMOP_HLP_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(3, 0);
        IEM_MC_ARG(uint16_t *, pu16AX, 0);      /* AX receives the 16-bit result. */
        IEM_MC_ARG(uint8_t, u8Value, 1);
        IEM_MC_ARG(uint32_t *, pEFlags, 2);
        IEM_MC_FETCH_GREG_U8(u8Value, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
        IEM_MC_REF_GREG_U16(pu16AX, X86_GREG_xAX);
        IEM_MC_REF_EFLAGS(pEFlags);
        IEM_MC_CALL_VOID_AIMPL_3(pfnU8, pu16AX, u8Value, pEFlags);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /* memory access. */
        IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo should probably not be raised until we've fetched all the opcode bytes? */

        IEM_MC_BEGIN(3, 1);
        IEM_MC_ARG(uint16_t *, pu16AX, 0);
        IEM_MC_ARG(uint8_t, u8Value, 1);
        IEM_MC_ARG(uint32_t *, pEFlags, 2);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm);
        IEM_MC_FETCH_MEM_U8(u8Value, pIemCpu->iEffSeg, GCPtrEffDst); /* source operand read from memory */
        IEM_MC_REF_GREG_U16(pu16AX, X86_GREG_xAX);
        IEM_MC_REF_EFLAGS(pEFlags);
        IEM_MC_CALL_VOID_AIMPL_3(pfnU8, pu16AX, u8Value, pEFlags);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
15083
15084
/** Opcode 0xf7 /4, /5, /6 and /7.
 *
 * Common worker for the word/dword/qword MUL, IMUL, DIV and IDIV forms of
 * group 3.  The implicit operands are rAX (and rDX for the double-width
 * result / dividend); the assembly worker returns non-zero on divide error,
 * in which case \#DE is raised instead of advancing RIP.
 *
 * @param   bRm     The ModR/M byte (already fetched by the caller).
 * @param   pImpl   Table with the 16/32/64-bit assembly workers.
 */
FNIEMOP_DEF_2(iemOpCommonGrp3MulDivEv, uint8_t, bRm, PCIEMOPMULDIVSIZES, pImpl)
{
    IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo should probably not be raised until we've fetched all the opcode bytes? */
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register access */
        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
            {
                IEMOP_HLP_NO_LOCK_PREFIX();
                IEM_MC_BEGIN(4, 1);
                IEM_MC_ARG(uint16_t *, pu16AX, 0);
                IEM_MC_ARG(uint16_t *, pu16DX, 1);
                IEM_MC_ARG(uint16_t, u16Value, 2);
                IEM_MC_ARG(uint32_t *, pEFlags, 3);
                IEM_MC_LOCAL(int32_t, rc);          /* worker status: 0 = ok, else divide error */

                IEM_MC_FETCH_GREG_U16(u16Value, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_REF_GREG_U16(pu16AX, X86_GREG_xAX);
                IEM_MC_REF_GREG_U16(pu16DX, X86_GREG_xDX);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_AIMPL_4(rc, pImpl->pfnU16, pu16AX, pu16DX, u16Value, pEFlags);
                IEM_MC_IF_LOCAL_IS_Z(rc) {
                    IEM_MC_ADVANCE_RIP();
                } IEM_MC_ELSE() {
                    IEM_MC_RAISE_DIVIDE_ERROR();
                } IEM_MC_ENDIF();

                IEM_MC_END();
                return VINF_SUCCESS;
            }

            case IEMMODE_32BIT:
            {
                IEMOP_HLP_NO_LOCK_PREFIX();
                IEM_MC_BEGIN(4, 1);
                IEM_MC_ARG(uint32_t *, pu32AX, 0);
                IEM_MC_ARG(uint32_t *, pu32DX, 1);
                IEM_MC_ARG(uint32_t, u32Value, 2);
                IEM_MC_ARG(uint32_t *, pEFlags, 3);
                IEM_MC_LOCAL(int32_t, rc);

                IEM_MC_FETCH_GREG_U32(u32Value, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_REF_GREG_U32(pu32AX, X86_GREG_xAX);
                IEM_MC_REF_GREG_U32(pu32DX, X86_GREG_xDX);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_AIMPL_4(rc, pImpl->pfnU32, pu32AX, pu32DX, u32Value, pEFlags);
                IEM_MC_IF_LOCAL_IS_Z(rc) {
                    IEM_MC_ADVANCE_RIP();
                } IEM_MC_ELSE() {
                    IEM_MC_RAISE_DIVIDE_ERROR();
                } IEM_MC_ENDIF();

                IEM_MC_END();
                return VINF_SUCCESS;
            }

            case IEMMODE_64BIT:
            {
                IEMOP_HLP_NO_LOCK_PREFIX();
                IEM_MC_BEGIN(4, 1);
                IEM_MC_ARG(uint64_t *, pu64AX, 0);
                IEM_MC_ARG(uint64_t *, pu64DX, 1);
                IEM_MC_ARG(uint64_t, u64Value, 2);
                IEM_MC_ARG(uint32_t *, pEFlags, 3);
                IEM_MC_LOCAL(int32_t, rc);

                IEM_MC_FETCH_GREG_U64(u64Value, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_REF_GREG_U64(pu64AX, X86_GREG_xAX);
                IEM_MC_REF_GREG_U64(pu64DX, X86_GREG_xDX);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_AIMPL_4(rc, pImpl->pfnU64, pu64AX, pu64DX, u64Value, pEFlags);
                IEM_MC_IF_LOCAL_IS_Z(rc) {
                    IEM_MC_ADVANCE_RIP();
                } IEM_MC_ELSE() {
                    IEM_MC_RAISE_DIVIDE_ERROR();
                } IEM_MC_ENDIF();

                IEM_MC_END();
                return VINF_SUCCESS;
            }

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /* memory access - same as above, but the value operand is read from memory. */
        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
            {
                IEMOP_HLP_NO_LOCK_PREFIX();
                IEM_MC_BEGIN(4, 2);
                IEM_MC_ARG(uint16_t *, pu16AX, 0);
                IEM_MC_ARG(uint16_t *, pu16DX, 1);
                IEM_MC_ARG(uint16_t, u16Value, 2);
                IEM_MC_ARG(uint32_t *, pEFlags, 3);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_LOCAL(int32_t, rc);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm);
                IEM_MC_FETCH_MEM_U16(u16Value, pIemCpu->iEffSeg, GCPtrEffDst);
                IEM_MC_REF_GREG_U16(pu16AX, X86_GREG_xAX);
                IEM_MC_REF_GREG_U16(pu16DX, X86_GREG_xDX);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_AIMPL_4(rc, pImpl->pfnU16, pu16AX, pu16DX, u16Value, pEFlags);
                IEM_MC_IF_LOCAL_IS_Z(rc) {
                    IEM_MC_ADVANCE_RIP();
                } IEM_MC_ELSE() {
                    IEM_MC_RAISE_DIVIDE_ERROR();
                } IEM_MC_ENDIF();

                IEM_MC_END();
                return VINF_SUCCESS;
            }

            case IEMMODE_32BIT:
            {
                IEMOP_HLP_NO_LOCK_PREFIX();
                IEM_MC_BEGIN(4, 2);
                IEM_MC_ARG(uint32_t *, pu32AX, 0);
                IEM_MC_ARG(uint32_t *, pu32DX, 1);
                IEM_MC_ARG(uint32_t, u32Value, 2);
                IEM_MC_ARG(uint32_t *, pEFlags, 3);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_LOCAL(int32_t, rc);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm);
                IEM_MC_FETCH_MEM_U32(u32Value, pIemCpu->iEffSeg, GCPtrEffDst);
                IEM_MC_REF_GREG_U32(pu32AX, X86_GREG_xAX);
                IEM_MC_REF_GREG_U32(pu32DX, X86_GREG_xDX);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_AIMPL_4(rc, pImpl->pfnU32, pu32AX, pu32DX, u32Value, pEFlags);
                IEM_MC_IF_LOCAL_IS_Z(rc) {
                    IEM_MC_ADVANCE_RIP();
                } IEM_MC_ELSE() {
                    IEM_MC_RAISE_DIVIDE_ERROR();
                } IEM_MC_ENDIF();

                IEM_MC_END();
                return VINF_SUCCESS;
            }

            case IEMMODE_64BIT:
            {
                IEMOP_HLP_NO_LOCK_PREFIX();
                IEM_MC_BEGIN(4, 2);
                IEM_MC_ARG(uint64_t *, pu64AX, 0);
                IEM_MC_ARG(uint64_t *, pu64DX, 1);
                IEM_MC_ARG(uint64_t, u64Value, 2);
                IEM_MC_ARG(uint32_t *, pEFlags, 3);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_LOCAL(int32_t, rc);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm);
                IEM_MC_FETCH_MEM_U64(u64Value, pIemCpu->iEffSeg, GCPtrEffDst);
                IEM_MC_REF_GREG_U64(pu64AX, X86_GREG_xAX);
                IEM_MC_REF_GREG_U64(pu64DX, X86_GREG_xDX);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_AIMPL_4(rc, pImpl->pfnU64, pu64AX, pu64DX, u64Value, pEFlags);
                IEM_MC_IF_LOCAL_IS_Z(rc) {
                    IEM_MC_ADVANCE_RIP();
                } IEM_MC_ELSE() {
                    IEM_MC_RAISE_DIVIDE_ERROR();
                } IEM_MC_ENDIF();

                IEM_MC_END();
                return VINF_SUCCESS;
            }

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
15264
/** Opcode 0xf6.
 * Group 3, byte operand: dispatches on ModR/M.reg to TEST/NOT/NEG/MUL/IMUL/
 * DIV/IDIV Eb.  /1 is undefined and raises \#UD. */
FNIEMOP_DEF(iemOp_Grp3_Eb)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
    {
        case 0:
            return FNIEMOP_CALL_1(iemOp_grp3_test_Eb, bRm);
        case 1:
            /* /1 is not assigned an instruction. */
            return IEMOP_RAISE_INVALID_OPCODE();
        case 2:
            IEMOP_MNEMONIC("not Eb");
            return FNIEMOP_CALL_2(iemOpCommonUnaryEb, bRm, &g_iemAImpl_not);
        case 3:
            IEMOP_MNEMONIC("neg Eb");
            return FNIEMOP_CALL_2(iemOpCommonUnaryEb, bRm, &g_iemAImpl_neg);
        case 4:
            IEMOP_MNEMONIC("mul Eb");
            IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);
            return FNIEMOP_CALL_2(iemOpCommonGrp3MulDivEb, bRm, iemAImpl_mul_u8);
        case 5:
            IEMOP_MNEMONIC("imul Eb");
            IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);
            return FNIEMOP_CALL_2(iemOpCommonGrp3MulDivEb, bRm, iemAImpl_imul_u8);
        case 6:
            IEMOP_MNEMONIC("div Eb");
            IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF | X86_EFL_OF | X86_EFL_CF);
            return FNIEMOP_CALL_2(iemOpCommonGrp3MulDivEb, bRm, iemAImpl_div_u8);
        case 7:
            IEMOP_MNEMONIC("idiv Eb");
            IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF | X86_EFL_OF | X86_EFL_CF);
            return FNIEMOP_CALL_2(iemOpCommonGrp3MulDivEb, bRm, iemAImpl_idiv_u8);
        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
15300
15301
/** Opcode 0xf7.
 * Group 3, word/dword/qword operand: dispatches on ModR/M.reg to TEST/NOT/
 * NEG/MUL/IMUL/DIV/IDIV Ev.  /1 is undefined and raises \#UD. */
FNIEMOP_DEF(iemOp_Grp3_Ev)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
    {
        case 0:
            return FNIEMOP_CALL_1(iemOp_grp3_test_Ev, bRm);
        case 1:
            /* /1 is not assigned an instruction. */
            return IEMOP_RAISE_INVALID_OPCODE();
        case 2:
            IEMOP_MNEMONIC("not Ev");
            return FNIEMOP_CALL_2(iemOpCommonUnaryEv, bRm, &g_iemAImpl_not);
        case 3:
            IEMOP_MNEMONIC("neg Ev");
            return FNIEMOP_CALL_2(iemOpCommonUnaryEv, bRm, &g_iemAImpl_neg);
        case 4:
            IEMOP_MNEMONIC("mul Ev");
            IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);
            return FNIEMOP_CALL_2(iemOpCommonGrp3MulDivEv, bRm, &g_iemAImpl_mul);
        case 5:
            IEMOP_MNEMONIC("imul Ev");
            IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);
            return FNIEMOP_CALL_2(iemOpCommonGrp3MulDivEv, bRm, &g_iemAImpl_imul);
        case 6:
            IEMOP_MNEMONIC("div Ev");
            IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF | X86_EFL_OF | X86_EFL_CF);
            return FNIEMOP_CALL_2(iemOpCommonGrp3MulDivEv, bRm, &g_iemAImpl_div);
        case 7:
            IEMOP_MNEMONIC("idiv Ev");
            IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF | X86_EFL_OF | X86_EFL_CF);
            return FNIEMOP_CALL_2(iemOpCommonGrp3MulDivEv, bRm, &g_iemAImpl_idiv);
        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
15337
15338
/** Opcode 0xf8.  CLC - clear the carry flag. */
FNIEMOP_DEF(iemOp_clc)
{
    IEMOP_MNEMONIC("clc");
    IEMOP_HLP_NO_LOCK_PREFIX();
    IEM_MC_BEGIN(0, 0);
    IEM_MC_CLEAR_EFL_BIT(X86_EFL_CF);
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
15350
15351
/** Opcode 0xf9.  STC - set the carry flag. */
FNIEMOP_DEF(iemOp_stc)
{
    IEMOP_MNEMONIC("stc");
    IEMOP_HLP_NO_LOCK_PREFIX();
    IEM_MC_BEGIN(0, 0);
    IEM_MC_SET_EFL_BIT(X86_EFL_CF);
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
15363
15364
/** Opcode 0xfa.  CLI - clear the interrupt flag; privilege checks are done
 *  by the C implementation. */
FNIEMOP_DEF(iemOp_cli)
{
    IEMOP_MNEMONIC("cli");
    IEMOP_HLP_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_cli);
}
15372
15373
/** Opcode 0xfb.  STI - set the interrupt flag; privilege checks and the
 *  interrupt shadow are handled by the C implementation. */
FNIEMOP_DEF(iemOp_sti)
{
    IEMOP_MNEMONIC("sti");
    IEMOP_HLP_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_sti);
}
15380
15381
/** Opcode 0xfc.  CLD - clear the direction flag. */
FNIEMOP_DEF(iemOp_cld)
{
    IEMOP_MNEMONIC("cld");
    IEMOP_HLP_NO_LOCK_PREFIX();
    IEM_MC_BEGIN(0, 0);
    IEM_MC_CLEAR_EFL_BIT(X86_EFL_DF);
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
15393
15394
/** Opcode 0xfd.  STD - set the direction flag. */
FNIEMOP_DEF(iemOp_std)
{
    IEMOP_MNEMONIC("std");
    IEMOP_HLP_NO_LOCK_PREFIX();
    IEM_MC_BEGIN(0, 0);
    IEM_MC_SET_EFL_BIT(X86_EFL_DF);
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
15406
15407
15408/** Opcode 0xfe. */
15409FNIEMOP_DEF(iemOp_Grp4)
15410{
15411 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
15412 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
15413 {
15414 case 0:
15415 IEMOP_MNEMONIC("inc Ev");
15416 return FNIEMOP_CALL_2(iemOpCommonUnaryEb, bRm, &g_iemAImpl_inc);
15417 case 1:
15418 IEMOP_MNEMONIC("dec Ev");
15419 return FNIEMOP_CALL_2(iemOpCommonUnaryEb, bRm, &g_iemAImpl_dec);
15420 default:
15421 IEMOP_MNEMONIC("grp4-ud");
15422 return IEMOP_RAISE_INVALID_OPCODE();
15423 }
15424}
15425
15426
/**
 * Opcode 0xff /2.  CALL Ev - near indirect call; target RIP read from a
 * register or memory operand.  Default operand size is 64-bit in long mode.
 *
 * @param bRm The RM byte.
 */
FNIEMOP_DEF_1(iemOp_Grp5_calln_Ev, uint8_t, bRm)
{
    IEMOP_MNEMONIC("calln Ev");
    IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo Too early? */
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* The new RIP is taken from a register. */
        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(1, 0);
                IEM_MC_ARG(uint16_t, u16Target, 0);
                IEM_MC_FETCH_GREG_U16(u16Target, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_CALL_CIMPL_1(iemCImpl_call_16, u16Target);
                IEM_MC_END()
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(1, 0);
                IEM_MC_ARG(uint32_t, u32Target, 0);
                IEM_MC_FETCH_GREG_U32(u32Target, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_CALL_CIMPL_1(iemCImpl_call_32, u32Target);
                IEM_MC_END()
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(1, 0);
                IEM_MC_ARG(uint64_t, u64Target, 0);
                IEM_MC_FETCH_GREG_U64(u64Target, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_CALL_CIMPL_1(iemCImpl_call_64, u64Target);
                IEM_MC_END()
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /* The new RIP is loaded from memory. (Comment previously said
           "register" — copy/paste leftover from the branch above.) */
        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(1, 1);
                IEM_MC_ARG(uint16_t, u16Target, 0);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm);
                IEM_MC_FETCH_MEM_U16(u16Target, pIemCpu->iEffSeg, GCPtrEffSrc);
                IEM_MC_CALL_CIMPL_1(iemCImpl_call_16, u16Target);
                IEM_MC_END()
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(1, 1);
                IEM_MC_ARG(uint32_t, u32Target, 0);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm);
                IEM_MC_FETCH_MEM_U32(u32Target, pIemCpu->iEffSeg, GCPtrEffSrc);
                IEM_MC_CALL_CIMPL_1(iemCImpl_call_32, u32Target);
                IEM_MC_END()
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(1, 1);
                IEM_MC_ARG(uint64_t, u64Target, 0);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm);
                IEM_MC_FETCH_MEM_U64(u64Target, pIemCpu->iEffSeg, GCPtrEffSrc);
                IEM_MC_CALL_CIMPL_1(iemCImpl_call_64, u64Target);
                IEM_MC_END()
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
15508
15509typedef IEM_CIMPL_DECL_TYPE_3(FNIEMCIMPLFARBRANCH, uint16_t, uSel, uint64_t, offSeg, IEMMODE, enmOpSize);
15510
15511FNIEMOP_DEF_2(iemOpHlp_Grp5_far_Ep, uint8_t, bRm, FNIEMCIMPLFARBRANCH *, pfnCImpl)
15512{
15513 /* Registers? How?? */
15514 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
15515 return IEMOP_RAISE_INVALID_OPCODE(); /* callf eax is not legal */
15516
15517 /* Far pointer loaded from memory. */
15518 switch (pIemCpu->enmEffOpSize)
15519 {
15520 case IEMMODE_16BIT:
15521 IEM_MC_BEGIN(3, 1);
15522 IEM_MC_ARG(uint16_t, u16Sel, 0);
15523 IEM_MC_ARG(uint16_t, offSeg, 1);
15524 IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize, IEMMODE_16BIT, 2);
15525 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
15526 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm);
15527 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
15528 IEM_MC_FETCH_MEM_U16(offSeg, pIemCpu->iEffSeg, GCPtrEffSrc);
15529 IEM_MC_FETCH_MEM_U16_DISP(u16Sel, pIemCpu->iEffSeg, GCPtrEffSrc, 2);
15530 IEM_MC_CALL_CIMPL_3(pfnCImpl, u16Sel, offSeg, enmEffOpSize);
15531 IEM_MC_END();
15532 return VINF_SUCCESS;
15533
15534 case IEMMODE_32BIT:
15535 IEM_MC_BEGIN(3, 1);
15536 IEM_MC_ARG(uint16_t, u16Sel, 0);
15537 IEM_MC_ARG(uint32_t, offSeg, 1);
15538 IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize, IEMMODE_32BIT, 2);
15539 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
15540 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm);
15541 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
15542 IEM_MC_FETCH_MEM_U32(offSeg, pIemCpu->iEffSeg, GCPtrEffSrc);
15543 IEM_MC_FETCH_MEM_U16_DISP(u16Sel, pIemCpu->iEffSeg, GCPtrEffSrc, 4);
15544 IEM_MC_CALL_CIMPL_3(pfnCImpl, u16Sel, offSeg, enmEffOpSize);
15545 IEM_MC_END();
15546 return VINF_SUCCESS;
15547
15548 case IEMMODE_64BIT:
15549 IEM_MC_BEGIN(3, 1);
15550 IEM_MC_ARG(uint16_t, u16Sel, 0);
15551 IEM_MC_ARG(uint64_t, offSeg, 1);
15552 IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize, IEMMODE_16BIT, 2);
15553 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
15554 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm);
15555 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
15556 IEM_MC_FETCH_MEM_U64(offSeg, pIemCpu->iEffSeg, GCPtrEffSrc);
15557 IEM_MC_FETCH_MEM_U16_DISP(u16Sel, pIemCpu->iEffSeg, GCPtrEffSrc, 8);
15558 IEM_MC_CALL_CIMPL_3(pfnCImpl, u16Sel, offSeg, enmEffOpSize);
15559 IEM_MC_END();
15560 return VINF_SUCCESS;
15561
15562 IEM_NOT_REACHED_DEFAULT_CASE_RET();
15563 }
15564}
15565
15566
/**
 * Opcode 0xff /3.  CALLF Ep - far indirect call through a memory far pointer.
 * The operand decoding and pointer load are shared with /5 (jmpf) via
 * iemOpHlp_Grp5_far_Ep.
 *
 * @param bRm The RM byte.
 */
FNIEMOP_DEF_1(iemOp_Grp5_callf_Ep, uint8_t, bRm)
{
    IEMOP_MNEMONIC("callf Ep");
    return FNIEMOP_CALL_2(iemOpHlp_Grp5_far_Ep, bRm, iemCImpl_callf);
}
15576
15577
15578/**
15579 * Opcode 0xff /4.
15580 * @param bRm The RM byte.
15581 */
15582FNIEMOP_DEF_1(iemOp_Grp5_jmpn_Ev, uint8_t, bRm)
15583{
15584 IEMOP_MNEMONIC("jmpn Ev");
15585 IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo Too early? */
15586 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
15587
15588 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
15589 {
15590 /* The new RIP is taken from a register. */
15591 switch (pIemCpu->enmEffOpSize)
15592 {
15593 case IEMMODE_16BIT:
15594 IEM_MC_BEGIN(0, 1);
15595 IEM_MC_LOCAL(uint16_t, u16Target);
15596 IEM_MC_FETCH_GREG_U16(u16Target, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
15597 IEM_MC_SET_RIP_U16(u16Target);
15598 IEM_MC_END()
15599 return VINF_SUCCESS;
15600
15601 case IEMMODE_32BIT:
15602 IEM_MC_BEGIN(0, 1);
15603 IEM_MC_LOCAL(uint32_t, u32Target);
15604 IEM_MC_FETCH_GREG_U32(u32Target, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
15605 IEM_MC_SET_RIP_U32(u32Target);
15606 IEM_MC_END()
15607 return VINF_SUCCESS;
15608
15609 case IEMMODE_64BIT:
15610 IEM_MC_BEGIN(0, 1);
15611 IEM_MC_LOCAL(uint64_t, u64Target);
15612 IEM_MC_FETCH_GREG_U64(u64Target, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
15613 IEM_MC_SET_RIP_U64(u64Target);
15614 IEM_MC_END()
15615 return VINF_SUCCESS;
15616
15617 IEM_NOT_REACHED_DEFAULT_CASE_RET();
15618 }
15619 }
15620 else
15621 {
15622 /* The new RIP is taken from a register. */
15623 switch (pIemCpu->enmEffOpSize)
15624 {
15625 case IEMMODE_16BIT:
15626 IEM_MC_BEGIN(0, 2);
15627 IEM_MC_LOCAL(uint16_t, u16Target);
15628 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
15629 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm);
15630 IEM_MC_FETCH_MEM_U16(u16Target, pIemCpu->iEffSeg, GCPtrEffSrc);
15631 IEM_MC_SET_RIP_U16(u16Target);
15632 IEM_MC_END()
15633 return VINF_SUCCESS;
15634
15635 case IEMMODE_32BIT:
15636 IEM_MC_BEGIN(0, 2);
15637 IEM_MC_LOCAL(uint32_t, u32Target);
15638 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
15639 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm);
15640 IEM_MC_FETCH_MEM_U32(u32Target, pIemCpu->iEffSeg, GCPtrEffSrc);
15641 IEM_MC_SET_RIP_U32(u32Target);
15642 IEM_MC_END()
15643 return VINF_SUCCESS;
15644
15645 case IEMMODE_64BIT:
15646 IEM_MC_BEGIN(0, 2);
15647 IEM_MC_LOCAL(uint32_t, u32Target);
15648 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
15649 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm);
15650 IEM_MC_FETCH_MEM_U32(u32Target, pIemCpu->iEffSeg, GCPtrEffSrc);
15651 IEM_MC_SET_RIP_U32(u32Target);
15652 IEM_MC_END()
15653 return VINF_SUCCESS;
15654
15655 IEM_NOT_REACHED_DEFAULT_CASE_RET();
15656 }
15657 }
15658}
15659
15660
15661/**
15662 * Opcode 0xff /5.
15663 * @param bRm The RM byte.
15664 */
15665FNIEMOP_DEF_1(iemOp_Grp5_jmpf_Ep, uint8_t, bRm)
15666{
15667 IEMOP_MNEMONIC("jmp Ep");
15668 IEMOP_HLP_NO_64BIT();
15669 return FNIEMOP_CALL_2(iemOpHlp_Grp5_far_Ep, bRm, iemCImpl_FarJmp);
15670}
15671
15672
/**
 * Opcode 0xff /6.  PUSH Ev - push a word/dword/qword register or memory
 * operand.  Register operands are handled by the common push-greg worker;
 * memory operands are loaded and pushed here.  Default operand size is
 * 64-bit in long mode.
 *
 * @param bRm The RM byte.
 */
FNIEMOP_DEF_1(iemOp_Grp5_push_Ev, uint8_t, bRm)
{
    IEMOP_MNEMONIC("push Ev");
    IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo Too early? */

    /* Registers are handled by a common worker. */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
        return FNIEMOP_CALL_1(iemOpCommonPushGReg, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);

    /* Memory we do here. */
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
    switch (pIemCpu->enmEffOpSize)
    {
        case IEMMODE_16BIT:
            IEM_MC_BEGIN(0, 2);
            IEM_MC_LOCAL(uint16_t, u16Src);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm);
            IEM_MC_FETCH_MEM_U16(u16Src, pIemCpu->iEffSeg, GCPtrEffSrc);
            IEM_MC_PUSH_U16(u16Src);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_32BIT:
            IEM_MC_BEGIN(0, 2);
            IEM_MC_LOCAL(uint32_t, u32Src);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm);
            IEM_MC_FETCH_MEM_U32(u32Src, pIemCpu->iEffSeg, GCPtrEffSrc);
            IEM_MC_PUSH_U32(u32Src);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_64BIT:
            IEM_MC_BEGIN(0, 2);
            IEM_MC_LOCAL(uint64_t, u64Src);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm);
            IEM_MC_FETCH_MEM_U64(u64Src, pIemCpu->iEffSeg, GCPtrEffSrc);
            IEM_MC_PUSH_U64(u64Src);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
15726
15727
/** Opcode 0xff.
 * Group 5: dispatches on ModR/M.reg to INC/DEC Ev, near/far CALL and JMP,
 * and PUSH Ev; /7 is undefined and raises \#UD. */
FNIEMOP_DEF(iemOp_Grp5)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
    {
        case 0:
            IEMOP_MNEMONIC("inc Ev");
            return FNIEMOP_CALL_2(iemOpCommonUnaryEv, bRm, &g_iemAImpl_inc);
        case 1:
            IEMOP_MNEMONIC("dec Ev");
            return FNIEMOP_CALL_2(iemOpCommonUnaryEv, bRm, &g_iemAImpl_dec);
        case 2:
            return FNIEMOP_CALL_1(iemOp_Grp5_calln_Ev, bRm);
        case 3:
            return FNIEMOP_CALL_1(iemOp_Grp5_callf_Ep, bRm);
        case 4:
            return FNIEMOP_CALL_1(iemOp_Grp5_jmpn_Ev, bRm);
        case 5:
            return FNIEMOP_CALL_1(iemOp_Grp5_jmpf_Ep, bRm);
        case 6:
            return FNIEMOP_CALL_1(iemOp_Grp5_push_Ev, bRm);
        case 7:
            IEMOP_MNEMONIC("grp5-ud");
            return IEMOP_RAISE_INVALID_OPCODE();
    }
    /* All 3-bit reg values are handled above; this silences the compiler. */
    AssertFailedReturn(VERR_INTERNAL_ERROR_2);
}
15756
15757
15758
/**
 * The one-byte opcode decoder map, indexed by the opcode byte (0x00..0xff).
 * Prefix bytes (segment overrides, operand/address size, LOCK, REP/REPNE,
 * REX in 64-bit mode) have their own entries which recurse into the decoder.
 * Declared extern at the top of this file for forward referencing.
 */
const PFNIEMOP g_apfnOneByteMap[256] =
{
    /* 0x00 */ iemOp_add_Eb_Gb, iemOp_add_Ev_Gv, iemOp_add_Gb_Eb, iemOp_add_Gv_Ev,
    /* 0x04 */ iemOp_add_Al_Ib, iemOp_add_eAX_Iz, iemOp_push_ES, iemOp_pop_ES,
    /* 0x08 */ iemOp_or_Eb_Gb, iemOp_or_Ev_Gv, iemOp_or_Gb_Eb, iemOp_or_Gv_Ev,
    /* 0x0c */ iemOp_or_Al_Ib, iemOp_or_eAX_Iz, iemOp_push_CS, iemOp_2byteEscape,
    /* 0x10 */ iemOp_adc_Eb_Gb, iemOp_adc_Ev_Gv, iemOp_adc_Gb_Eb, iemOp_adc_Gv_Ev,
    /* 0x14 */ iemOp_adc_Al_Ib, iemOp_adc_eAX_Iz, iemOp_push_SS, iemOp_pop_SS,
    /* 0x18 */ iemOp_sbb_Eb_Gb, iemOp_sbb_Ev_Gv, iemOp_sbb_Gb_Eb, iemOp_sbb_Gv_Ev,
    /* 0x1c */ iemOp_sbb_Al_Ib, iemOp_sbb_eAX_Iz, iemOp_push_DS, iemOp_pop_DS,
    /* 0x20 */ iemOp_and_Eb_Gb, iemOp_and_Ev_Gv, iemOp_and_Gb_Eb, iemOp_and_Gv_Ev,
    /* 0x24 */ iemOp_and_Al_Ib, iemOp_and_eAX_Iz, iemOp_seg_ES, iemOp_daa,
    /* 0x28 */ iemOp_sub_Eb_Gb, iemOp_sub_Ev_Gv, iemOp_sub_Gb_Eb, iemOp_sub_Gv_Ev,
    /* 0x2c */ iemOp_sub_Al_Ib, iemOp_sub_eAX_Iz, iemOp_seg_CS, iemOp_das,
    /* 0x30 */ iemOp_xor_Eb_Gb, iemOp_xor_Ev_Gv, iemOp_xor_Gb_Eb, iemOp_xor_Gv_Ev,
    /* 0x34 */ iemOp_xor_Al_Ib, iemOp_xor_eAX_Iz, iemOp_seg_SS, iemOp_aaa,
    /* 0x38 */ iemOp_cmp_Eb_Gb, iemOp_cmp_Ev_Gv, iemOp_cmp_Gb_Eb, iemOp_cmp_Gv_Ev,
    /* 0x3c */ iemOp_cmp_Al_Ib, iemOp_cmp_eAX_Iz, iemOp_seg_DS, iemOp_aas,
    /* 0x40 */ iemOp_inc_eAX, iemOp_inc_eCX, iemOp_inc_eDX, iemOp_inc_eBX,
    /* 0x44 */ iemOp_inc_eSP, iemOp_inc_eBP, iemOp_inc_eSI, iemOp_inc_eDI,
    /* 0x48 */ iemOp_dec_eAX, iemOp_dec_eCX, iemOp_dec_eDX, iemOp_dec_eBX,
    /* 0x4c */ iemOp_dec_eSP, iemOp_dec_eBP, iemOp_dec_eSI, iemOp_dec_eDI,
    /* 0x50 */ iemOp_push_eAX, iemOp_push_eCX, iemOp_push_eDX, iemOp_push_eBX,
    /* 0x54 */ iemOp_push_eSP, iemOp_push_eBP, iemOp_push_eSI, iemOp_push_eDI,
    /* 0x58 */ iemOp_pop_eAX, iemOp_pop_eCX, iemOp_pop_eDX, iemOp_pop_eBX,
    /* 0x5c */ iemOp_pop_eSP, iemOp_pop_eBP, iemOp_pop_eSI, iemOp_pop_eDI,
    /* 0x60 */ iemOp_pusha, iemOp_popa, iemOp_bound_Gv_Ma, iemOp_arpl_Ew_Gw_movsx_Gv_Ev,
    /* 0x64 */ iemOp_seg_FS, iemOp_seg_GS, iemOp_op_size, iemOp_addr_size,
    /* 0x68 */ iemOp_push_Iz, iemOp_imul_Gv_Ev_Iz, iemOp_push_Ib, iemOp_imul_Gv_Ev_Ib,
    /* 0x6c */ iemOp_insb_Yb_DX, iemOp_inswd_Yv_DX, iemOp_outsb_Yb_DX, iemOp_outswd_Yv_DX,
    /* 0x70 */ iemOp_jo_Jb, iemOp_jno_Jb, iemOp_jc_Jb, iemOp_jnc_Jb,
    /* 0x74 */ iemOp_je_Jb, iemOp_jne_Jb, iemOp_jbe_Jb, iemOp_jnbe_Jb,
    /* 0x78 */ iemOp_js_Jb, iemOp_jns_Jb, iemOp_jp_Jb, iemOp_jnp_Jb,
    /* 0x7c */ iemOp_jl_Jb, iemOp_jnl_Jb, iemOp_jle_Jb, iemOp_jnle_Jb,
    /* 0x80 */ iemOp_Grp1_Eb_Ib_80, iemOp_Grp1_Ev_Iz, iemOp_Grp1_Eb_Ib_82, iemOp_Grp1_Ev_Ib,
    /* 0x84 */ iemOp_test_Eb_Gb, iemOp_test_Ev_Gv, iemOp_xchg_Eb_Gb, iemOp_xchg_Ev_Gv,
    /* 0x88 */ iemOp_mov_Eb_Gb, iemOp_mov_Ev_Gv, iemOp_mov_Gb_Eb, iemOp_mov_Gv_Ev,
    /* 0x8c */ iemOp_mov_Ev_Sw, iemOp_lea_Gv_M, iemOp_mov_Sw_Ev, iemOp_Grp1A,
    /* 0x90 */ iemOp_nop, iemOp_xchg_eCX_eAX, iemOp_xchg_eDX_eAX, iemOp_xchg_eBX_eAX,
    /* 0x94 */ iemOp_xchg_eSP_eAX, iemOp_xchg_eBP_eAX, iemOp_xchg_eSI_eAX, iemOp_xchg_eDI_eAX,
    /* 0x98 */ iemOp_cbw, iemOp_cwd, iemOp_call_Ap, iemOp_wait,
    /* 0x9c */ iemOp_pushf_Fv, iemOp_popf_Fv, iemOp_sahf, iemOp_lahf,
    /* 0xa0 */ iemOp_mov_Al_Ob, iemOp_mov_rAX_Ov, iemOp_mov_Ob_AL, iemOp_mov_Ov_rAX,
    /* 0xa4 */ iemOp_movsb_Xb_Yb, iemOp_movswd_Xv_Yv, iemOp_cmpsb_Xb_Yb, iemOp_cmpswd_Xv_Yv,
    /* 0xa8 */ iemOp_test_AL_Ib, iemOp_test_eAX_Iz, iemOp_stosb_Yb_AL, iemOp_stoswd_Yv_eAX,
    /* 0xac */ iemOp_lodsb_AL_Xb, iemOp_lodswd_eAX_Xv, iemOp_scasb_AL_Xb, iemOp_scaswd_eAX_Xv,
    /* 0xb0: NB: several mov-imm handlers lack the iemOp_mov_ name prefix. */
    /* 0xb0 */ iemOp_mov_AL_Ib, iemOp_CL_Ib, iemOp_DL_Ib, iemOp_BL_Ib,
    /* 0xb4 */ iemOp_mov_AH_Ib, iemOp_CH_Ib, iemOp_DH_Ib, iemOp_BH_Ib,
    /* 0xb8 */ iemOp_eAX_Iv, iemOp_eCX_Iv, iemOp_eDX_Iv, iemOp_eBX_Iv,
    /* 0xbc */ iemOp_eSP_Iv, iemOp_eBP_Iv, iemOp_eSI_Iv, iemOp_eDI_Iv,
    /* 0xc0 */ iemOp_Grp2_Eb_Ib, iemOp_Grp2_Ev_Ib, iemOp_retn_Iw, iemOp_retn,
    /* 0xc4 */ iemOp_les_Gv_Mp, iemOp_lds_Gv_Mp, iemOp_Grp11_Eb_Ib, iemOp_Grp11_Ev_Iz,
    /* 0xc8 */ iemOp_enter_Iw_Ib, iemOp_leave, iemOp_retf_Iw, iemOp_retf,
    /* 0xcc */ iemOp_int_3, iemOp_int_Ib, iemOp_into, iemOp_iret,
    /* 0xd0 */ iemOp_Grp2_Eb_1, iemOp_Grp2_Ev_1, iemOp_Grp2_Eb_CL, iemOp_Grp2_Ev_CL,
    /* 0xd4 */ iemOp_aam_Ib, iemOp_aad_Ib, iemOp_Invalid, iemOp_xlat,
    /* 0xd8 */ iemOp_EscF0, iemOp_EscF1, iemOp_EscF2, iemOp_EscF3,
    /* 0xdc */ iemOp_EscF4, iemOp_EscF5, iemOp_EscF6, iemOp_EscF7,
    /* 0xe0 */ iemOp_loopne_Jb, iemOp_loope_Jb, iemOp_loop_Jb, iemOp_jecxz_Jb,
    /* 0xe4 */ iemOp_in_AL_Ib, iemOp_in_eAX_Ib, iemOp_out_Ib_AL, iemOp_out_Ib_eAX,
    /* 0xe8 */ iemOp_call_Jv, iemOp_jmp_Jv, iemOp_jmp_Ap, iemOp_jmp_Jb,
    /* 0xec */ iemOp_in_AL_DX, iemOp_eAX_DX, iemOp_out_DX_AL, iemOp_out_DX_eAX,
    /* 0xf0 */ iemOp_lock, iemOp_Invalid, iemOp_repne, iemOp_repe, /** @todo 0xf1 is INT1 / ICEBP. */
    /* 0xf4 */ iemOp_hlt, iemOp_cmc, iemOp_Grp3_Eb, iemOp_Grp3_Ev,
    /* 0xf8 */ iemOp_clc, iemOp_stc, iemOp_cli, iemOp_sti,
    /* 0xfc */ iemOp_cld, iemOp_std, iemOp_Grp4, iemOp_Grp5,
};
15826
15827
15828/** @} */
15829
Note: See TracBrowser for help on using the repository browser.

© 2024 Oracle Support Privacy / Do Not Sell My Info Terms of Use Trademark Policy Automated Access Etiquette